Snippets Collections
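// Power Query (M): pulls pending task detail rows from the SCE.vw_TASKDETAIL_Pending view in the CLARINS_AUDIT database on USORASPSQLINT.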
= Sql.Database("USORASPSQLINT", "CLARINS_AUDIT",
               [Query="SELECT #(lf)    TASKDETAILKEY,#(lf)    TASKTYPE,#(lf)    DESCRIPTION,#(lf)    STORERKEY,#(lf)    SKU,#(lf)    UOM,#(lf)    UOMQTY,#(lf)    FROMLOC,#(lf)    FROMID,#(lf)    TOLOC,#(lf)    TOID,#(lf)    ORDERKEY,#(lf)    ORDERLINENUMBER,#(lf)    WAVEKEY,#(lf)    PRIORITY,#(lf)    STATUS,#(lf)    ROUTE,#(lf)    CONVERT(DATE, ADDDATE) AS ADDDATE,#(lf)    ADDWHO,#(lf)    CONVERT(DATE, EDITDATE) AS EDITDATE,#(lf)    EDITWHO,#(lf)    STARTTIME,#(lf)    CONVERT(DATE, ENDTIME) AS ENDTIME#(lf)
               FROM CLARINS_AUDIT.SCE.vw_TASKDETAIL_Pending;#(lf)"])
                
package com.modeln.channelcollab.junit;

import com.modeln.channelnetwork.junit.graphql.AbstractChannelNetworkTest;
import com.modeln.channelnetwork.junit.graphql.GraphQLClient;
import com.modeln.channelnetwork.junit.graphql.SubmissionScheduleClient;
import io.restassured.path.json.JsonPath;
import io.restassured.response.Response;
import org.junit.Assert;
import org.junit.jupiter.api.*;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.*;

@ExtendWith(SpringExtension.class)
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
@Tag("graphql")
public class SubmissionResultsTest extends AbstractChannelNetworkTest
{
    public static final String SUBMISSION_SCHEDULE_FILE_NAME_PREFIX = "Submission_Schedule_";
    public static String newSubmissionSchedulefilename, invoiceDate;

    public static final Integer DEFAULT_OFFSET = 0, DEFAULT_LIMIT = 100;

    public static SubmissionScheduleClient adminClient;

    public static BigDecimal sid;

    @Autowired
    protected JdbcTemplate jdbcTemplate;

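    // Oracle JSON_OBJECT query: returns one JSON document per submission schedule row, joined to its reporting partner and data type.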
    public static final String SUBMISSION_SCHEDULE_QUERY_OUTPUT_JSON = "SELECT distinct JSON_OBJECT('sid' value ss.sid,'reporting_partner_name' value rp_ovw.entity_name,'name' value name,'periodRule' value period_rule,'dataType' value dt.type ,'expectedDay' value expected_day,'isInPeriodReporter' value case when is_in_period_reporter = '1' then 'true' Else 'false' end,'weekOfMonth' value week_of_month,'monthOfQuarter' value month_of_quarter,'startDate' value start_date,'endDate' value end_date) AS \" \" FROM submission_schedule ss join reporting_partner rp on rp.sid = ss.reporting_partner_sid left join data_type dt on dt.sid = ss.data_type_sid left join rp_csr_overlay_v rp_ovw on rp_ovw.ip_sid = rp.inow_profile_sid";

    public static String NO_DATA_REASON = "SYSTEM ISSUE", noDataReason;

    public static String GET_SID_TO_UPDATE = "select sid from submission_period";

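    // GraphQL mutation templates; the $sid and $noDataReason tokens are replaced with real values before each call is sent.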
    public static final String ACQUIRE_LOCK_GRAPHQL_MUTATION = "mutation acquireLock{" +
            "  acquireLock(objectType: SUBMISSION_PERIOD, sid: $sid) {" +
            "    userId" +
            "    serviceName" +
            "    sid" +
            "    expiration" +
            "  }" +
            "}";

    public static String MARK_NO_DATA_MUTATION = "mutation markNoData{" +
            "  markNoData(data: [{" +
            "        sid: $sid, " +
            "    noDataReason: \"$noDataReason\"" +
            "  }]) {" +
            "    sid "+
            "    status" +
            "    code" +
            "    message" +
            "  }" +
            "}";

    public static final String CHECK_NO_DATA_FOR_SID = "select no_data from submission_period where sid = :sid";

    public static final String CHECK_NO_DATA_REASON_FOR_SID = "select no_data_reason from submission_period where sid = :sid";

    @BeforeAll
    public void createClient() throws Exception
    {
        String endPoint = getEnvironment().getProperty(
                GraphQLClient.CHANNEL_NETWORK_GRAPHQL_URI);
        adminClient = new SubmissionScheduleClient(endPoint, generateUser1Token());
        List<Map<String, Object>> fromDb = jdbcTemplate.queryForList(SUBMISSION_SCHEDULE_QUERY_OUTPUT_JSON);
        if (fromDb.isEmpty())
        {
            uploadFileToCreateNewSubmissionSchedule();
        }
    }

    public void uploadFileToCreateNewSubmissionSchedule() throws Exception
    {
        newSubmissionSchedulefilename = getFilename(SUBMISSION_SCHEDULE_FILE_NAME_PREFIX, XLS_EXTENSION);
        LocalDate localDate = LocalDate.now();
        invoiceDate = DateTimeFormatter.ofPattern("MM/dd/yy").format(localDate.plusMonths(1).withDayOfMonth(1));
        String date2 = DateTimeFormatter.ofPattern("MM/dd/yy").format(localDate.withDayOfMonth(1));

        String[] rowData1 =
                { "CTHULHU", "Y", "\"ARROW PARTNER Transaction WEEKLY 20000000\"", "", "WEEKLY",
                        "Monday", "ARROW", "transaction", "No",
                        date2, "",
                        "ci_testuserint_email@cdmutlmail.aws.modeln.com", "Y", "", "Y",
                        "Y", "", "" };

        String[] rowData2 =
                { "CTHULHU", "Y", "\"CC SS BASE PARTNER Inventory MONTHLY 20000000\"", "", "MONTHLY",
                        "1", "BASE", "inventory", "Yes",
                        date2, "",
                        "ci_testuserint_email@cdmutlmail.aws.modeln.com", "Y", "", "Y",
                        "Y", "", "" };

        String[] rowData3 =
                { "CTHULHU", "Y", "\"AMSDIRSAP PARTNER Inventory DAILY 20000000\"", "", "QUARTERLY",
                        "1", "AMSDIRSAP", "inventory", "No",
                        date2, "",
                        "ci_testuserint_email@cdmutlmail.aws.modeln.com", "Y", "", "Y",
                        "Y", "", "1" };

        String[] rowData4 =
                { "CTHULHU", "Y", "\"BASE PARTNER 2 Transaction MONTHLY 20000000\"", "", "MONTHLY",
                        "1", "BASE2", "transaction", "No",
                        date2, "",
                        "ci_testuserint_email@cdmutlmail.aws.modeln.com", "Y", "", "Y",
                        "Y", "", "1" };

        String[][] fileData = new String[][]
                { CTH_SUBMISSION_SCHEDULE_FIELDS, rowData1, rowData2, rowData3, rowData4 };

        createXlsFile(getEnvironment(), fileData,
                newSubmissionSchedulefilename);
        invokeFileScanner(CLIENT_ID);
        waitForFileUploadSuccess(newSubmissionSchedulefilename);
        Thread.sleep(10000);
    }

    @Test
    public void submissionResultsTest() throws Exception
    {
        List<Map<String, Object>> sidList = jdbcTemplate.queryForList(GET_SID_TO_UPDATE);
        Map<String, Object> params = new HashMap<String, Object>();
        for(int i=0;i<sidList.size();i++)
        {
            sid = (BigDecimal) sidList.get(i).get("SID");
            params.put("sid", sid);
            String noDataForSid = getNamedParamJdbcTemplate()
                    .queryForObject(CHECK_NO_DATA_FOR_SID, params, String.class);
            Map<String, Object> mutationVariables = new HashMap<>();
            mutationVariables.put("sid", sid);
            mutationVariables.put("noDataReason", NO_DATA_REASON);
            String acquireLockMutation = ACQUIRE_LOCK_GRAPHQL_MUTATION.replace("$sid", sid.toString());

            Response response = adminClient.submissionScheduleQueryrunner(DEFAULT_LIMIT,
                    DEFAULT_OFFSET, null, null,
                    acquireLockMutation);
            noDataReason = getNamedParamJdbcTemplate()
                    .queryForObject(CHECK_NO_DATA_REASON_FOR_SID, params, String.class);

            if (Integer.parseInt(noDataForSid) == 0)
            {
                String markNoDataMutation = MARK_NO_DATA_MUTATION.replace("$sid", sid.toString())
                        .replace("$noDataReason", NO_DATA_REASON);

                adminClient.submissionScheduleQueryrunner(DEFAULT_LIMIT,
                        DEFAULT_OFFSET, null, null,
                        markNoDataMutation);

                Thread.sleep(1000);
            }
            else
            {
                String markNoDataMutation = MARK_NO_DATA_MUTATION.replace("$sid", sid.toString())
                        .replace("$noDataReason", NO_DATA_REASON);

                Response response1 = adminClient.submissionScheduleQueryrunner(DEFAULT_LIMIT,
                        DEFAULT_OFFSET, null, null,
                        markNoDataMutation);

                String message = JsonPath.with(response1.getBody().asString())
                        .get("data.markNoData[0].message");

                Assert.assertEquals(
                        "Submission Period already has reported data, so No-Data-To-Report is not applicable.",
                        message);
            }

            noDataForSid = getNamedParamJdbcTemplate()
                    .queryForObject(CHECK_NO_DATA_FOR_SID, params, String.class);
            Assert.assertEquals(1, Integer.parseInt(noDataForSid));

            noDataReason = getNamedParamJdbcTemplate()
                    .queryForObject(CHECK_NO_DATA_REASON_FOR_SID, params, String.class);
            Assert.assertEquals(NO_DATA_REASON, noDataReason);
        }
    }
}
WITH cte AS (
    SELECT *, ROW_NUMBER() OVER (ORDER BY (SELECT NULL)) rn
    FROM (SELECT attribute AS attr1, value AS val1 FROM temp WHERE attribute = 'height') t1
    CROSS JOIN (SELECT attribute AS attr2, value AS val2 FROM temp WHERE attribute = 'weight') t2
    CROSS JOIN (SELECT attribute AS attr3, value AS val3 FROM temp WHERE attribute = 'gender') t3
)

SELECT rn AS Combination, attr1 AS Attribute, val1 AS Value FROM cte WHERE attr1 = 'height' UNION ALL
SELECT rn, attr2, val2 FROM cte WHERE attr2 = 'weight' UNION ALL
SELECT rn, attr3, val3 FROM cte WHERE attr3 = 'gender'
ORDER BY Combination, Attribute, Value;
SELECT
   CCY1, CCY2, CER.Exchange_Rate
FROM
    ( 
    SELECT
        c1.rec_id AS rec_id1, c1.currency AS CCY1,
        c2.rec_id AS rec_id2, c2.currency AS CCY2
    FROM
        currency c1
        CROSS JOIN --all combinations...
        currency c2
    WHERE
        c1.rec_id <> c2.rec_id -- ...removes same pairs
    ) foo
    LEFT JOIN -- ...get matching FX pairs
    CurrencyExchangeRate CER ON foo.rec_id1 = cer.[from] AND foo.rec_id2 = cer.[to]
SELECT 
  c1.currency AS [From], c2.currency AS [To] , cer.Exchange_Rate
FROM
  currency c1 JOIN currency c2 ON c1.rec_id <> c2.rec_id
  LEFT OUTER JOIN CurrencyExchangeRate cer ON c1.rec_id = cer.[from] 
         AND c2.rec_id = cer.[to]
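-- MySQL bulk load from a CSV file; if the file lives on the client machine rather than the server, LOAD DATA LOCAL INFILE is required (and local_infile must be enabled).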
LOAD DATA INFILE '{  [file_path]  /  [file_name].csv}'
INTO TABLE table_name
FIELDS TERMINATED BY ','
ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 ROWS;
dateadd(DAY, 0, datediff(DAY, 0, created)) returns the date the row was created, with the time portion stripped to midnight.

For example, if the sale was created on '2009-11-02 06:12:55.000', dateadd(DAY, 0, datediff(DAY, 0, created)) returns '2009-11-02 00:00:00.000'.

select sum(amount) as total, dateadd(DAY,0, datediff(day,0, created)) as created
from sales
group by dateadd(DAY,0, datediff(day,0, created))
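On SQL Server 2008 and later the same day-level truncation can also be written with a plain cast, which is usually easier to read (a minimal sketch of the same grouping, assuming created is a datetime column):

select sum(amount) as total, cast(created as date) as created
from sales
group by cast(created as date)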
query SubmissionPeriod {
  submissionPeriods(
    limit: 10
    offset: 0
    sort: {
      numberOfFiles:{
        order:1,
        direction:ASC
      }
    }
    filters: {
    status:{
      operator:EQUAL,
      value:"On-time"
    },
      submissionSchedule:{
        reportingPartner:{
          partnerOverlayView:{
            name:{
              operator:CONTAINS,
              value:"%WAV RISE%"
            }
          }
        }
      }
      
      
    }
  ) {
    sid
    createDate
    updateDate
    customerSid
    expectedDay
    expectedDate
    isInPeriodReporter
    noData
    noDataCreateDate
    noDataReason
    onTimeOverride
    periodEndDate
    periodStartDate
    reportedFlag
    status
    trackingLevel
    workingDays
    numberOfFiles
    dataFileSummaryInfo {
      numberOfPOSLines
      numberOfInventoryLines
      receivedDate
      dataFile {
        id
        createDate
        fileName
        dataType
        __typename
      }
      __typename
    }
    noDataServiceUser {
      sid
      firstName
      lastName
      email
      __typename
    }
    submissionPeriodLineItemView {
      salesLineItemCount
      invLineItemCount
      earliestFileSubmissionDate
      __typename
    }
    submissionSchedule {
      sid
      name
      periodRule
      dataType {
        type
        __typename
      }
      reportingPartner {
        id
        partnerOverlayView {
          name
          street1
          street2
          city
          stateprovince
          postalcode
          country
          __typename
        }
        __typename
      }
      __typename
    }
    __typename
  }
}
(select
      count(df.id) as number_of_files
      from SUBMISSION_SCHEDULE ss1
      left join DATA_FILE_SUMMARY_INFO dfsi on 
                                       dfsi.SUBMISSION_PERIOD_SID =  :sid
                                       AND dfsi.CUSTOMER_SID = :cs
      left join DATA_TYPE dt1 on ss1.DATA_TYPE_SID = dt1.SID
      left join DATA_FILE df on dfsi.CUSTOMER_SID = df.CUSTOMER_SID
                          AND dfsi.DATA_FILE_SID = df.SID
                          AND df.DELETED = 0
                          --AND df.DATA_TYPE = dt1.TYPE
      where ss1.SID = :spssd
      AND ss1.CUSTOMER_SID= :cs);
SELECT 
  content.title,
  content.body,
  content.published,
  author.url
FROM 
  `3000_forum_boards___updated_daily___english.socialgist_boards_english_public`
  -- Make sure these parameters match the names you set for the dataset and tables
WHERE 
  content.published BETWEEN '2023-01-01' AND '2024-02-29'
  AND (content.body LIKE '%Super Bowl%' OR content.title LIKE '%Super Bowl%')
LIMIT 10;
SELECT
  content.title,
  content.body,
  content.published,
  author.url
FROM
  `200_boards___updated_daily___chinese.socialgist_boards_chinese_public`
-- Make sure the dataset and table selectors use the same name you set when you linked the dataset.
WHERE
  LOWER(content.body) LIKE '%经济%' 
-- The keyword is the word 'economy' in Chinese characters. You can change this to any keyword you wish to scan for in the content body. 
ORDER BY
  content.published DESC
LIMIT 10; -- Limits the results to 10 rows for demonstration purposes.
CREATE OR REPLACE TEMPORARY TABLE TBL_NUMBER
(
	NUM FLOAT
)
AS
SELECT
	*
FROM
	VALUES(2),(3),(4);
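-- The SELECT below uses EXP(SUM(LN(NUM))) to compute the product of all NUM values:
-- LN turns the product into a sum that SUM can aggregate, and EXP converts it back (only valid for positive numbers).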

SELECT
    EXP
    (
      SUM
      (
        LN(NUM)
      )
    ) AS RESULT
FROM
	TBL_NUMBER
SELECT V1.SKU AS SKU, V2.DESCR AS ItemName, SUM(V1.SHIPPEDQTY) AS TotalShippedQty
FROM SCE.vw_ORDERDETAIL_1 V1
INNER JOIN SCE.vw_SKU V2 ON V1.SKU = V2.SKU 
WHERE V1.ACTUALSHIPDATE BETWEEN '2024-01-01 00:00:00' AND GETDATE()
GROUP BY v1.SKU, V2.DESCR
HAVING SUM(V1.SHIPPEDQTY) > 0
DECLARE @pattern varchar(63) = '0123456789 abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
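-- Keeps only the characters listed in @pattern: the inner TRANSLATE/REPLACE derives the set of characters
-- NOT in @pattern, the outer TRANSLATE maps each of those to '~', and the final REPLACE strips the '~' markers.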
SELECT 
   v.[Text], 
   REPLACE(
      TRANSLATE(
         v.[Text],
         REPLACE(TRANSLATE(v.[Text], @pattern, REPLICATE('a', LEN(@pattern))), 'a', ''),
		 REPLICATE('~', LEN(REPLACE(REPLACE(TRANSLATE(v.[Text], @pattern, REPLICATE('a', LEN(@pattern))), 'a', ''), ' ', '.')))
      ),
      '~',
      ''
   ) AS AlphaNumericCharacters
FROM (VALUES
   ('abc01234def5678ghi90jkl#@$&"'),
   ('1234567890'),
   ('JAHDBESBN%*#*@*($E*sd55bn')
) v ([Text]);
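-- Strip the data-URI prefix from [image] and decode the remaining base64 string to varbinary via an XML xs:base64Binary cast: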
[image] = cast(Replace([image], 'data:image/jpeg;base64,', '') as xml).value('xs:base64Binary(.)', 'varbinary(max)')
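// Grid configuration hook: hide the footer in the default grid view options.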
function(config) {
   config.defaultGridViewOptions = {
      footer: false
   };
   return config;
}
        
  WITH NUMBEROFTOTALORDER AS (
  
	SELECT DISTINCT Count(*) As NumberofTotalOrder
	From [SCE].[vw_ORDERS_1] 
	WHERE vw_ORDERS_1.TYPE IN ('ECOM','ECOMAPP')
	AND vw_ORDERS_1.STATUS NOT IN (95,98,99) 
	/* 95 = Shipped Complete, 98 = Cancelled Externally, 99 = Canceled Internally */ 
	
  ), PICKED AS (
  
    SELECT DISTINCT Count(*) As Picked
	From [SCE].[vw_ORDERS_1] 
	WHERE vw_ORDERS_1.TYPE IN ('ECOM','ECOMAPP')
	AND vw_ORDERS_1.STATUS IN (55) 
	
	/* 15 =  Part Allocated / Part Picked
	 * 25 = Part Released/Part Picked
	 * 51 = In Picking
	 * 52 = Part Picked
	 * 53 = Part Picked / Part Shipped
	 * 55 = Picked Complete
	 * 57 = Picked / Part Shipped
	 *  */ 
  
  ), PACKED AS (
  
	SELECT DISTINCT Count(*) As Packed
	From [SCE].[vw_ORDERS_1] 
	WHERE vw_ORDERS_1.TYPE IN ('ECOM','ECOMAPP')
	AND vw_ORDERS_1.STATUS IN (68) 
	
	/* 61 =  In Packing
	 * 68 = Pack Complete
	 *  */
	
	), PRIORITY AS (
	
	SELECT DISTINCT Count(*) As Priority
	From [SCE].[vw_ORDERS_1] 
	WHERE vw_ORDERS_1.TYPE IN ('ECOM','ECOMAPP')
	AND vw_ORDERS_1.STATUS NOT IN (95,98,99) 
	AND vw_ORDERS_1.PRIORITY IN (1,2,3)
	
	/* 95 = Shipped Complete, 98 = Cancelled Externally, 99 = Canceled Internally */ 
	/* 1 = Highest Priority, 3 = Normal Priority */
 
    ), ACTUALORDER AS (
    
    SELECT DISTINCT Count(*) As ActualOrder
	From [SCE].[vw_ORDERS_1] 
	WHERE vw_ORDERS_1.TYPE in ('ECOM','ECOMAPP')
	AND vw_ORDERS_1.ORDERDATE = DATEADD(dd, -1, CAST( GETDATE() AS Date)) 

	), ORDERSHIPPED24HOUROLD AS (

	/* IF ... ELSE blocks are not allowed inside a CTE, so the Monday / Tuesday / other-day
	 * cutoffs are expressed as CASE expressions instead. */
	SELECT Count(*) As OrderShipped24HourOld
	From [SCE].[vw_ORDERS_1] 
	WHERE vw_ORDERS_1.TYPE IN ('ECOM','ECOMAPP')
	AND vw_ORDERS_1.ACTUALSHIPDATE = CASE WHEN DATENAME(weekday, GETDATE()) = 'Monday'
		THEN DATEADD(dd, -3, CAST(GETDATE() AS Date))
		ELSE DATEADD(dd, -1, CAST(GETDATE() AS Date)) END
	AND vw_ORDERS_1.ORDERDATE = CASE WHEN DATENAME(weekday, GETDATE()) IN ('Monday','Tuesday')
		THEN DATEADD(dd, -4, CAST(GETDATE() AS Date))
		ELSE DATEADD(dd, -2, CAST(GETDATE() AS Date)) END

	), ORDERSHIPPEDGREATERTHAN24HOUROLD AS (

	SELECT Count(*) As OrderShippedGreaterThan24HourOld
	FROM [SCE].[vw_ORDERS_1] 
	WHERE vw_ORDERS_1.TYPE IN ('ECOM','ECOMAPP')
	AND vw_ORDERS_1.ACTUALSHIPDATE = CASE WHEN DATENAME(weekday, GETDATE()) = 'Monday'
		THEN DATEADD(dd, -3, CAST(GETDATE() AS Date))
		ELSE DATEADD(dd, -1, CAST(GETDATE() AS Date)) END
	AND vw_ORDERS_1.ORDERDATE < CASE WHEN DATENAME(weekday, GETDATE()) IN ('Monday','Tuesday')
		THEN DATEADD(dd, -4, CAST(GETDATE() AS Date))
		ELSE DATEADD(dd, -2, CAST(GETDATE() AS Date)) END
	)
	SELECT NUMBEROFTOTALORDER.NumberofTotalOrder, PICKED.Picked, PACKED.Packed, PRIORITY.Priority, ACTUALORDER.ActualOrder,
		ORDERSHIPPED24HOUROLD.OrderShipped24HourOld, ORDERSHIPPEDGREATERTHAN24HOUROLD.OrderShippedGreaterThan24HourOld
	FROM NUMBEROFTOTALORDER, PICKED, PACKED, PRIORITY, ACTUALORDER, ORDERSHIPPED24HOUROLD, ORDERSHIPPEDGREATERTHAN24HOUROLD


IF (DATENAME(weekday, GETDATE()) = 'Monday')
		BEGIN
			SELECT DISTINCT COUNT(*) As OrderShipped24HourOld
			From [SCE].[vw_ORDERS_1] 
			WHERE vw_ORDERS_1.TYPE IN ('ECOM','ECOMAPP')
			AND vw_ORDERS_1.ACTUALSHIPDATE=  DATEADD(dd,-3,CAST( GETDATE() AS Date))
			AND vw_ORDERS_1.ORDERDATE = DATEADD(dd,-4,CAST( GETDATE() AS Date ))	
		END
	ELSE IF (DATENAME(weekday, GETDATE()) = 'Tuesday')
		BEGIN
			SELECT DISTINCT Count(*) As OrderShipped24HourOld
			From [SCE].[vw_ORDERS_1] 
			WHERE vw_ORDERS_1.TYPE IN ('ECOM','ECOMAPP')
			AND vw_ORDERS_1.ACTUALSHIPDATE=  DATEADD(dd,-1,CAST( GETDATE() AS Date ))
			AND vw_ORDERS_1.ORDERDATE = DATEADD(dd,-4,CAST( GETDATE() AS Date ))
		END
	ELSE
		BEGIN
			SELECT DISTINCT Count(*) As OrderShipped24HourOld
			From [SCE].[vw_ORDERS_1] 
			WHERE vw_ORDERS_1.TYPE IN ('ECOM','ECOMAPP')
			AND vw_ORDERS_1.ACTUALSHIPDATE = DATEADD(dd,-1,CAST( GETDATE() AS Date ))
			AND vw_ORDERS_1.ORDERDATE = DATEADD(dd,-2,CAST( GETDATE() AS Date ))	
		END	 
	
	
	
	

// Go through the Stack Overflow code to determine the best solution.
Check these links:

https://learn.microsoft.com/en-us/sql/t-sql/language-elements/while-transact-sql?view=sql-server-ver16

https://learn.microsoft.com/en-us/sql/t-sql/language-elements/if-else-transact-sql?view=sql-server-ver16


Instead of reading the table twice, we only need to read it once and can return a single result set. This query also shows the same 30046 logical reads.

PIVOT

Learn how to use PIVOT to do this; a single-scan sketch using conditional aggregation follows the examples below.
WITH cte1 AS(
 SELECT COUNT(*) cnt 
 FROM table1
 WHERE....
), cte2 AS(
 SELECT COUNT(*) cnt
 FROM table1
 WHERE....
), cte3 AS(
 SELECT COUNT(*) cnt
 FROM table1
 WHERE....
)
SELECT cte1.cnt AS cte1, cte2.cnt AS cte2, cte3.cnt AS cte3
FROM cte1, cte2, cte3

SELECT
  (SELECT COUNT(*) FROM table1 WHERE....) as cte1,
  (SELECT COUNT(*) FROM table1 WHERE....) as cte2,
  (SELECT COUNT(*) FROM table1 WHERE....) as cte3
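
The PIVOT idea above is still a to-do; a common single-scan alternative is conditional aggregation, sketched here with the same table1 and WHERE.... placeholders (this is not the PIVOT operator itself). Each CASE counts only the rows matching its predicate, so one pass over the table yields all three counts:

SELECT
  COUNT(CASE WHEN .... THEN 1 END) AS cte1,
  COUNT(CASE WHEN .... THEN 1 END) AS cte2,
  COUNT(CASE WHEN .... THEN 1 END) AS cte3
FROM table1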
version: "1.0"

name: ChannelNetwork

# All actions

actions:
  - name: VIEW
  - name: UPDATE

# All resources

resources:
  # UI Resource for access to kpis tab
  - name: ProductKpiTab
    actions: [VIEW]

    ### Begin productKpi fields

  - name: ProductKpi
    group: productKpi
    actions: [VIEW]

  - name: ProductKpi/*
    group: productKpi
    actions: [VIEW]

    ### End productKpi fields

    ### Begin salesKpi fields

  - name: SalesKpi
    group: salesKpi
    actions: [VIEW]

  - name: SalesKpi/*
    group: salesKpi
    actions: [VIEW]

    ### End salesKpi fields

    ### Begin inventoryKpi fields

  - name: InventoryKpi
    group: inventoryKpi
    actions: [VIEW]

  - name: InventoryKpi/*
    group: inventoryKpi
    actions: [VIEW]

    ### End inventoryKpi fields

    ### Begin filesKpi fields

  - name: FilesKpi
    group: filesKpi
    actions: [VIEW]

  - name: FilesKpi/*
    group: filesKpi
    actions: [VIEW]

    ### End filesKpi fields

    ### Begin ssKpi fields

  - name: SSKpi
    group: ssKpi
    actions: [VIEW]

  - name: SSKpi/*
    group: ssKpi
    actions: [VIEW]

    ### End ssKpi fields


    # UI Resource for access to Products tab
  - name: ProductTab
    actions: [VIEW, UPDATE]

    ### Begin Product Fields

  - name: Product
    group: product
    actions: [VIEW, UPDATE]

  - name: Product/sid
    group: product
    actions: [VIEW]

  - name: Product/createDate
    group: product
    actions: [VIEW]

  - name: Product/updateDate
    group: product
    actions: [VIEW]

  - name: Product/customerSid
    group: product-internal
    actions: [VIEW]

  - name: Product/sku
    group: product
    actions: [VIEW]

  - name: Product/name
    group: product
    actions: [VIEW]

  - name: Product/description
    group: product
    actions: [VIEW]

  - name: Product/productFamily
    group: product
    actions: [VIEW]

  - name: Product/productLine
    group: product
    actions: [VIEW]

  - name: Product/startDate
    group: product
    actions: [VIEW]

  - name: Product/endDate
    group: product
    actions: [VIEW]

  - name: Product/serialized
    group: product-internal
    actions: [VIEW]

  - name: Product/aggregation
    group: product-pos-aggr
    actions: [VIEW]

  - name: Product/aggregation/totalSalesLineCount
    group: product-pos-aggr
    actions: [VIEW]

  - name: Product/aggregation/totalSalesQuantity
    group: product-pos-aggr
    actions: [VIEW]

  - name: Product/aggregation/oldestInvoiceDate
    group: product-pos-aggr
    actions: [VIEW]

    ## Begin Product Dynamic Attrs


  - name: Product/dynamicAttrs
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/sid
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/updateDate
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/createDate
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/attributeType
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/STRING_COL_1
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_2
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_3
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_4
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_5
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_6
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_7
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_8
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_9
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_10
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_11
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_12
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_13
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_14
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_15
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_16
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_17
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_18
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_19
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_20
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_21
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_22
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_23
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_24
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_25
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_26
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_27
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_28
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_29
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_30
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]

  - name: Product/dynamicAttrs/NUM_COL_1
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_2
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_3
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_4
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_5
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_6
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_7
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_8
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_9
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_10
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]

  - name: Product/dynamicAttrs/DATE_COL_1
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
    ## End Product Dynamic Attrs

    ### End Product Fields

    # UI Resource for access to POS tab
  - name: SalesTab
    actions: [VIEW, UPDATE]

    ### Begin POS Fields

  - name: Sales
    group: sales
    actions: [VIEW, UPDATE]

  - name: Sales/sid
    group: sales
    actions: [VIEW]

  - name: Sales/createDate
    group: sales
    actions: [VIEW]

  - name: Sales/updateDate
    group: sales
    actions: [VIEW]

  - name: Sales/customerSid
    group: sales-internal
    actions: [VIEW]

  - name: Sales/deleted
    group: sales-internal
    actions: [VIEW]

  - name: Sales/branchId
    group: sales
    actions: [VIEW]

  - name: Sales/invoiceNumber
    group: sales
    actions: [VIEW]

  - name: Sales/invoiceDate
    group: sales
    actions: [VIEW]

  - name: Sales/quantity
    group: sales
    actions: [VIEW]

  - name: Sales/reportedSku
    group: sales
    actions: [VIEW, UPDATE]

  - name: Sales/productDescription
    group: sales
    actions: [VIEW]

  - name: Sales/transactionId
    group: sales-internal
    actions: [VIEW]

  - name: Sales/vendorPartNumber
    group: sales
    actions: [VIEW]

  - name: Sales/accountRepresentative
    group: sales
    actions: [VIEW]

  - name: Sales/acquisitionExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/acquisitionUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/boolExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/bookUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/customerOrderNumber
    group: sales
    actions: [VIEW]

  - name: Sales/debitExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/debitUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/designRegistrationNumber
    group: sales
    actions: [VIEW]

  - name: Sales/distributorId
    group: sales
    actions: [VIEW]

  - name: Sales/distributorName
    group: sales
    actions: [VIEW]

  - name: Sales/distributorShipmentNumber
    group: sales
    actions: [VIEW]

  - name: Sales/distributorWarehouseId
    group: sales
    actions: [VIEW]

  - name: Sales/exchangeDate
    group: sales
    actions: [VIEW]

  - name: Sales/exchangeRate
    group: sales
    actions: [VIEW]

  - name: Sales/globalProductClassCode
    group: sales
    actions: [VIEW]

  - name: Sales/legacySalesRecordId
    group: sales
    actions: [VIEW]

  - name: Sales/lengthOfProduction
    group: sales
    actions: [VIEW]

  - name: Sales/manufactureId
    group: sales
    actions: [VIEW]

  - name: Sales/manufactureName
    group: sales
    actions: [VIEW]

  - name: Sales/manufacturerShipmentNumber
    group: sales
    actions: [VIEW]

  - name: Sales/orderNumber
    group: sales
    actions: [VIEW]

  - name: Sales/originalId
    group: sales
    actions: [VIEW]

  - name: Sales/price
    group: sales
    actions: [VIEW]

  - name: Sales/purchaseOrderNumber
    group: sales
    actions: [VIEW]

  - name: Sales/r2rDuplicateType
    group: sales
    actions: [VIEW]

  - name: Sales/regionTerritory
    group: sales
    actions: [VIEW]

  - name: Sales/reportEndingDate
    group: sales
    actions: [VIEW]

  - name: Sales/reportType
    group: sales
    actions: [VIEW]

  - name: Sales/resaleExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/resaeExtension
    group: sales
    actions: [VIEW]

  - name: Sales/resaleUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/resubmitted
    group: sales
    actions: [VIEW]

  - name: Sales/reportedProductFamily
    group: sales
    actions: [VIEW]

  - name: Sales/reportedProductLine
    group: sales
    actions: [VIEW]

  - name: Sales/reportedProductName
    group: sales
    actions: [VIEW]

  - name: Sales/shipDate
    group: sales
    actions: [VIEW]

  - name: Sales/shipDebitNumber
    group: sales
    actions: [VIEW]

  - name: Sales/shippingMethod
    group: sales
    actions: [VIEW]

  - name: Sales/spaNumber
    group: sales
    actions: [VIEW]

  - name: Sales/tier
    group: sales
    actions: [VIEW]

  - name: Sales/transactionType
    group: sales
    actions: [VIEW]

  - name: Sales/unitOfMeasure
    group: sales
    actions: [VIEW]

  - name: Sales/vendorPartDescription
    group: sales
    actions: [VIEW]

  - name: Sales/validationCodes
    group: sales
    actions: [VIEW]

  - name: Sales/serialNumbers
    group: sales
    actions: [VIEW]

    # Bill to address


  - name: Sales/billToAddress
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/billToAddress/entityName
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/street1
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/street2
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/city
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/stateProvince
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/postalCode
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/reportedCountry
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country/name
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country/twoCharCode
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country/threeCharCode
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddressExternalId
    group: sales-bill-to
    actions: [VIEW]

    # sold to address

  - name: Sales/soldToAddress
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/soldToAddress/entityName
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/street1
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/street2
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/city
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/stateProvince
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/postalCode
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/reportedCountry
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country/name
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country/twoCharCode
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country/threeCharCode
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddressExternalId
    group: sales-sold-to
    actions: [VIEW]

    # ship to address

  - name: Sales/shipToAddress
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/shipToAddress/entityName
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/street1
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/street2
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/city
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/stateProvince
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/postalCode
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/reportedCountry
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country/name
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country/twoCharCode
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country/threeCharCode
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddressExternalId
    group: sales-ship-to
    actions: [VIEW]

    # sell from address

  - name: Sales/sellFromAddress
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/sellFromAddress/entityName
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/street1
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/street2
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/city
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/stateProvince
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/postalCode
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/reportedCountry
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country/name
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country/twoCharCode
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country/threeCharCode
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddressExternalId
    group: sales-sell-from
    actions: [VIEW]

    # ship from address

  - name: Sales/shipFromAddress
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/shipFromAddress/entityName
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/street1
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/street2
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/city
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/stateProvince
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/postalCode
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/reportedCountry
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country/name
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country/twoCharCode
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country/threeCharCode
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddressExternalId
    group: sales-ship-from
    actions: [VIEW]

    # sales in address

  - name: Sales/salesInAddress
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/salesInAddress/entityName
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/street1
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/street2
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/city
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/stateProvince
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/postalCode
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/reportedCountry
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country/name
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country/twoCharCode
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country/threeCharCode
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddressExternalId
    group: sales-sales-in
    actions: [VIEW]

    # purchasing customer address

  - name: Sales/purchasingCustomerAddress
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/entityName
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/street1
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/street2
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/city
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/stateProvince
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/postalCode
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/reportedCountry
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country/name
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country/twoCharCode
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country/threeCharCode
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerExternalId
    group: sales-purchasing-customer
    actions: [VIEW]

    # derived end customer address

  - name: Sales/derivedEndCustomerAddress
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/entityName
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/street1
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/street2
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/city
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/stateProvince
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/postalCode
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/reportedCountry
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country/name
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country/twoCharCode
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country/threeCharCode
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddressExternalId
    group: sales-derived-end-customer
    actions: [VIEW]

    # data file

  - name: Sales/dataFile
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/dataFile/loadDate
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/reportDate
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/id
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/fileName
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/recordCount
    group: sales-data-file
    actions: [VIEW]

    # match info

  - name: Sales/productMatchInfo
    group: sales-product-match-info
    actions: [VIEW]

  - name: Sales/productMatchInfo/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/productMatchInfo/matchedProduct
    group: sales-product-match-info
    actions: [VIEW]

  - name: Sales/productMatchInfo/matchedProduct/sku
    group: sales-product-match-info
    actions: [VIEW]

    # reporting partner

  - name: Sales/reportingPartner
    group: sales-reporting-partner
    actions: [VIEW]

  - name: Sales/reportingPartner/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/reportingPartner/gsNumbers
    group: sales-reporting-partner
    actions: [VIEW]

  - name: Sales/reportingPartner/gsNumbers/value
    group: sales-reporting-partner
    actions: [VIEW]

    # currency

  - name: Sales/currency
    group: sales-currency
    actions: [VIEW]

  - name: Sales/currency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/currency/name
    group: sales-currency
    actions: [VIEW]

    # resale currency

  - name: Sales/resaleCurrency
    group: sales-resale-currency
    actions: [VIEW]

  - name: Sales/resaleCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/resaleCurrency/name
    group: sales-resale-currency
    actions: [VIEW]

    # debit currency

  - name: Sales/debtCurrency
    group: sales-debit-currency
    actions: [VIEW]

  - name: Sales/debtCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/debtCurrency/name
    group: sales-debit-currency
    actions: [VIEW]

    # book currency

  - name: Sales/bookCurrency
    group: sales-book-currency
    actions: [VIEW]

  - name: Sales/bookCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/bookCurrency/name
    group: sales-book-currency
    actions: [VIEW]

    # acquisition currency

  - name: Sales/acquisitionCurrency
    group: sales-acquisition-currency
    actions: [VIEW]

  - name: Sales/acquisitionCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/acquisitionCurrency/name
    group: sales-acquisition-currency
    actions: [VIEW]

    ## Begin POS Dynamic Attrs

  - name: Sales/dynamicAttrs
    group: sales-dynamic-attrs
    actions: [VIEW, UPDATE]

  - name: Sales/dynamicAttrs/*
    group: sales-dynamic-attrs
    actions: [VIEW, UPDATE]

  - name: Sales/dynamicAttrs/STRING_COL_1
    group: sales-dynamic-attrs
    actions: [VIEW, UPDATE]

    ## End POS Dynamic Attrs

    ### End POS Fields

    # UI Resource for access to INV tab
  - name: InventoryTab
    actions: [VIEW, UPDATE]

    ### Begin INV Fields

  - name: Inventory
    group: inventory
    actions: [VIEW, UPDATE]

  - name: Inventory/sid
    group: inventory
    actions: [VIEW]

  - name: Inventory/createDate
    group: inventory
    actions: [VIEW]

  - name: Inventory/updateDate
    group: inventory
    actions: [VIEW]

  - name: Inventory/customerSid
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/deleted
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/productName
    group: inventory
    actions: [VIEW]

  - name: Inventory/clientDescription
    group: inventory
    actions: [VIEW]

  - name: Inventory/clientSku
    group: inventory
    actions: [VIEW, UPDATE]

  - name: Inventory/reportedSku
    group: inventory
    actions: [VIEW]

  - name: Inventory/inventoryDate
    group: inventory
    actions: [VIEW]

  - name: Inventory/unitOfMeasure
    group: inventory
    actions: [VIEW]

  - name: Inventory/id
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/originalId
    group: inventory
    actions: [VIEW]

  - name: Inventory/lineNumber
    group: inventory-internal
    actions: [VIEW]

    # data file

  - name: Inventory/dataFile
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/dataFile/loadDate
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/reportDate
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/id
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/fileName
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/recordCount
    group: inventory-data-file
    actions: [VIEW]

    # reporting partner

  - name: Inventory/reportingPartner
    group: inventory-reporting-partner
    actions: [VIEW]

  - name: Inventory/reportingPartner/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/reportingPartner/gsNumbers
    group: inventory-reporting-partner
    actions: [VIEW]

  - name: Inventory/reportingPartner/gsNumbers/value
    group: inventory-reporting-partner
    actions: [VIEW]

    # submission period

  - name: Inventory/submissionPeriod
    group: inventory-submission-period
    actions: [VIEW]

  - name: Inventory/submissionPeriod/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/submissionPeriod/expectedDate
    group: inventory-submission-period
    actions: [VIEW]

  - name: Inventory/submissionPeriod/periodStartDate
    group: inventory-submission-period
    actions: [VIEW]

  - name: Inventory/submissionPeriod/periodEndDate
    group: inventory-submission-period
    actions: [VIEW]

    # quantities

  - name: Inventory/inventoryQuantities
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/inventoryQuantities/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/onHandQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/onHandQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/onHandQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/onOrderQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/onOrderQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/onOrderQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/committedQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/committedQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/committedQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/floatQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/floatQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/floatQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/backorderedQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/backorderedQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/backorderedQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/returnedQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/returnedQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/returnedQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/inTransitQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/inTransitQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/inTransitQuantity/value
    group: inventory-quantity
    actions: [VIEW]

    # prices

  - name: Inventory/inventoryPrices
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/inventoryPrices/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/unitPrice
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/unitPrice/price
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/fromCurrency
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/fromCurrency/name
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/toCurrency
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/toCurrency/name
    group: inventory-price
    actions: [VIEW]

    ## Begin INV Dynamic Attrs

  - name: Inventory/dynamicAttrs
    group: inventory-dynamic-attrs
    actions: [VIEW]

  - name: Inventory/dynamicAttrs/*
    group: inventory-dynamic-attrs
    actions: [VIEW]

    ## End INV Dynamic Attrs

    ### End INV Fields

  # UI Resource for access to File Tab
  - name: FilesTab
    actions: [VIEW, UPDATE]

  ## Begin Partner

  - name: Partner
    group: reporting-partner
    actions: [VIEW]

  - name: Partner/*
    group: reporting-partner
    actions: [VIEW]

  ## End Partner

  # Begin File fields
  - name: DataFile
    group: file-management
    actions: [VIEW, UPDATE]

  - name: DataFile/sid
    group: file-management
    actions: [VIEW]

  - name: DataFile/loadDate
    group: file-management
    actions: [VIEW]

  - name: DataFile/reportDate
    group: file-management
    actions: [VIEW]

  - name: DataFile/fileName
    group: file-management
    actions: [VIEW]

  - name: DataFile/fileType
    group: file-management
    actions: [VIEW]

  - name: DataFile/dataType
    group: file-management
    actions: [VIEW]

  - name: DataFile/id
    group: file-management
    actions: [VIEW]

  - name: DataFile/fileSize
    group: file-management
    actions: [VIEW]

  - name: DataFile/source
    group: file-management
    actions: [VIEW]

  - name: DataFile/recordCount
    group: file-management
    actions: [VIEW]

  - name: DataFile/deletedLines
    group: file-management
    actions: [VIEW]

  - name: DataFile/download
    group: file-download
    actions: [VIEW]

  - name: DataFile/validationDownload
    group: file-validation-download
    actions: [VIEW]

  - name: DataFile/upload
    group: file-upload
    actions: [VIEW]

  - name: DataFile/uploadDataTypes
    group: file-upload
    actions: [VIEW]

  - name: DataFile/uploadFileTypes
    group: file-upload
    actions: [VIEW]

  - name: DataFile/reportingPartner
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/reportingPartner/*
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/reportingPartner/partnerOverlayView
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/reportingPartner/partnerOverlayView/*
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/dataFileState
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/*
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/sid
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/createDate
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/updateDate
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/parserAttempt
    group: file-parser-attempt
    actions: [VIEW]

  - name: DataFile/parserAttempt/*
    group: file-parser-attempt
    actions: [VIEW]

  # End of File fields
  
  # UI Resource for access to Submission Schedule
  - name: SubmissionTrackingTab
    actions: [VIEW, UPDATE]

  - name: SubmissionResultsTab
    actions: [VIEW, UPDATE]

  # Begin Submission Schedule 
  - name: SubmissionSchedule
    group: submission-schedule
    actions: [VIEW, UPDATE]

  - name: SubmissionSchedule/sid
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/createDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/updateDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/name
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner/*
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner/partnerOverlayView
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner/partnerOverlayView/*
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/dataType
    group: ss-data-type
    actions: [VIEW]

  - name: SubmissionSchedule/dataType/*
    group: ss-data-type
    actions: [VIEW]

  - name: SubmissionSchedule/periodRule
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/expectedDay
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/startDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/endDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/isInPeriodReporter
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/weekOfMonth
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/monthOfQuarter
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/workingDays
    group: submission-schedule
    actions: [VIEW]

  # End Submission Schedule 

  # Begin Submission Schedule Notification
  - name: SubmissionScheduleNotification
    group: submission-schedule-notification
    actions: [VIEW]
  
  - name: SubmissionScheduleNotification/*
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/notificationType
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/notificationType/*
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/serviceUser
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/serviceUser/*
    group: submission-schedule-notification
    actions: [VIEW]

  # END Submission Schedule Notification

  # Begin Submission Period
  - name: SubmissionPeriod
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/sid
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/expectedDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/periodStartDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/periodEndDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/createDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/updateDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/noData
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/noDataReason
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/noDataCreateDate
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/onTimeOverride
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/expectedDay
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/workingDays
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/isInPeriodReporter
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/trackingLevel
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/status
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/reportedFlag
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/fileIds
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/firstFileName
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/firstFileCreateDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/firstFileId
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/deleted
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/noDataServiceUser/sid
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser/firstName
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser/lastName
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser/email
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/submissionPeriodLineItemView
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/submissionPeriodLineItemView/*
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/dataFileSummaryInfo
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/dataFileSummaryInfo/*
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule/sid
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule/name
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule/periodRule
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner/*
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner/partnerOverlayView
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner/partnerOverlayView/*
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/dataType
    group: sp-data-type
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/dataType/*
    group: sp-data-type
    actions: [VIEW]

  # End Submission Period

  # Begin Export Request

  - name: ExportRequest
    group: export
    actions: [VIEW, UPDATE]

  - name: ExportRequest/*
    group: export
    actions: [VIEW, UPDATE]

  # End Export Request

  ### Begin Base resources

  - name: About
    group: about
    actions: [VIEW]

  - name: About/*
    group: about
    actions: [VIEW]

  - name: UserEvent
    group: user-event
    actions: [VIEW, UPDATE]

  - name: UserEvent/*
    group: user-event
    actions: [VIEW, UPDATE]

  - name: ObjectLock
    group: object-lock
    actions: [VIEW, UPDATE]

  - name: ObjectLock/*
    group: object-lock
    actions: [VIEW, UPDATE]

  - name: ObjectLockResponse
    group: object-lock-response
    actions: [VIEW]

  - name: ObjectLockResponse/*
    group: object-lock-response
    actions: [VIEW]

  - name: MutationResponse
    group: mutation-response
    actions: [VIEW]

  - name: MutationResponse/*
    group: mutation-response
    actions: [VIEW]

  - name: DynamicAttrMetadata
    group: attr-metadata
    actions: [VIEW]

  - name: DynamicAttrMetadata/*
    group: attr-metadata
    actions: [VIEW]
    

  ### End Base resources

  ### Begin mutation resources

  - name: Product/mutation/*
    group: product-update
    actions: [UPDATE]

  - name: Sales/mutation/*
    group: sales-update
    actions: [UPDATE]

  - name: Inventory/mutation/*
    group: inventory-update
    actions: [UPDATE]

  - name: ExportRequest/mutation/*
    group: export
    actions: [UPDATE]

  - name: SubmissionPeriod/mutation/*
    group: submission-period-mutation
    actions: [UPDATE]

    ### End mutation resources

    ### Begin Customer Resources

  - name: INT
    actions: [VIEW]

  - name: ACS
    actions: [VIEW]

  - name: CAMB
    actions: [VIEW]

  - name: CYBERDYNE
    actions: [VIEW]

  - name: COR
    actions: [VIEW]

  - name: INT_CCD
    actions: [VIEW]

  - name: ACS_CCD
    actions: [VIEW]

  - name: CAMB_CCD
    actions: [VIEW]

  - name: QCOM_CCD
    actions: [VIEW]

  - name: COR_CCD
    actions: [VIEW]

    ### End Customer Resources

# Common permissions for all tenants
permissions:

  - name: BasicViewUpdate
    displayName: Background Permissions
    description: Every User needs this permission
    resource-actions:
      - about:[VIEW]
      - user-event:[VIEW, UPDATE]
      - object-lock:[VIEW, UPDATE]
      - object-lock-response:[VIEW]
      - mutation-response:[VIEW]
      - attr-metadata:[VIEW]

  - name: KpiView
    displayName: KPI Tab
    description: KPI Permission Set
    resource-actions:
      - ProductKpiTab:[VIEW]

  - name: SSKpiFields
    displayName: Submission KPI Fields
    description: Submission KPI Fields Permission Set
    resource-actions:
      - ssKpi:[VIEW]

  - name: FilesKpiFields
    displayName: Files KPI Fields
    description: Files KPI Fields Permission Set
    resource-actions:
      - filesKpi:[VIEW]

  - name: FileTab
    displayName: File Tab
    description: File Permission Set
    resource-actions:
      - FilesTab:[VIEW]

  - name: FileUploadPartner
    displayName: File Upload for Partner
    description: File Upload Permission Set
    resource-actions:
      - DataFile:[VIEW, UPDATE] 
      - file-upload:[VIEW]     

  - name: FileUpload
    displayName: File Upload 
    description: File Upload Permission Set
    resource-actions:
      - DataFile:[VIEW]
      - file-upload:[VIEW]
      - reporting-partner:[VIEW]

  - name: FileDownload
    displayName: File Download
    description: File Download Permission Set
    resource-actions:
      - DataFile:[VIEW]
      - file-download:[VIEW]
      - file-validation-download:[VIEW]

  - name: SubmissionTab
    displayName: Submission Tab
    description: Submission Tab Permission Set
    resource-actions:
      - SubmissionTrackingTab:[VIEW]
      - SubmissionResultsTab:[VIEW]

  - name: FileManufactureView
    displayName: File Admin View
    description: File Admin Permission Set
    resource-actions:
      - file-management:[VIEW]
      - file-reporting-partner:[VIEW]
      - file-data-file-state:[VIEW]
      - file-parser-attempt:[VIEW]

  - name: FilePartnerView
    displayName: File Partner View
    description: File Partner Permission Set
    resource-actions:
      - file-management:[VIEW]
      - file-data-file-state:[VIEW]
      - file-parser-attempt:[VIEW]

  - name: SubmissionManufactureView
    displayName: Submission Admin View
    description: Submission Full Permission Set
    resource-actions:
      - submission-schedule:[VIEW]
      - submission-schedule-notification:[VIEW]
      - ss-reporting-partner:[VIEW]
      - ss-data-type:[VIEW]
      - submission-period:[VIEW]
      - sp-reporting-partner:[VIEW]
      - sp-data-type:[VIEW]
      - sp-no-data-user:[VIEW]
      - submission-period-update:[VIEW]

  - name: SubmissionPartnerView
    displayName: Submission Partner View
    description: Submission Partner Permission Set
    resource-actions:
      - submission-schedule:[VIEW]
      - submission-schedule-notification:[VIEW]
      - ss-data-type:[VIEW]
      - submission-period:[VIEW]
      - sp-data-type:[VIEW]
      - sp-no-data-user:[VIEW]
      - submission-period-update:[VIEW]

  - name: SubmissionPartnerUpdate
    displayName: Submission Partner Update
    description: Submission Update Permission Set
    resource-actions:
      - submission-period-update:[UPDATE]
      - submission-period-mutation:[UPDATE]

  - name: SubmissionUpdate
    displayName: Submission Update 
    description: Submission Update Permission Set
    resource-actions:
      - submission-period-update:[UPDATE]
      - submission-period-mutation:[UPDATE]
export const SubmissionDef = `
  type Query {
    submissionPeriods(
      offset: Float, 
      limit: Float,
      filters: SubmissionPeriodFilters,
      sort: SubmissionPeriodSort
    ): [SubmissionPeriod]
    submissionSchedules(
      offset: Float, 
      limit: Float,
      filters: SubmissionScheduleFilters, 
      sort: SubmissionScheduleSort
    ): [SubmissionSchedule]
    submissionScheduleNotifications(
      submissionScheduleSid: ID,
      offset: Float,
      limit: Float,
      filters: SubmissionScheduleNotificationFilters,
      sort: SubmissionScheduleNotificationSort
    ): [SubmissionScheduleNotification]
  }

  type Mutation {
    markNoData(data: [NoDataInput]): [MutationResponse] @auth(object: SubmissionPeriod)
  }

  type SubmissionPeriod {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    expectedDate: Date
    periodStartDate: Date
    periodEndDate: Date
    noData: Boolean
    noDataReason: String
    noDataCreateDate: Date
    onTimeOverride: Boolean
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Float
    trackingLevel: String
    submissionSchedule: SubmissionSchedule
    status: String
    reportedFlag: Boolean
    numberOfFiles: Float
    dataFileSummaryInfo: [DataFileSummaryInfo]
    submissionPeriodLineItemView: SubmissionPeriodLineItemView
    noDataServiceUser: ServiceUser
  }

  type SubmissionPeriodLineItemView {
    salesLineItemCount: Float
    invLineItemCount: Float
    earliestFileSubmissionDate: Date
  }

  type DataFileSummaryInfo {
    numberOfPOSLines: Float
    numberOfInventoryLines: Float
    receivedDate: Date
    dataFile: DataFile
  }

  type DataFile {
    fileName: String
    id: String
  }
  
  type SubmissionSchedule {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    dataType: DataType    
    reportingPartner: Partner
    periodRule: String
    name: String
    startDate: Date
    endDate: Date
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Boolean
    weekOfMonth: Float
    monthOfQuarter: Float
  }

  type DataType {
    sid: ID
    createDate: Date
    updateDate: Date
    type: String
  }

  type SubmissionScheduleNotification {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    submissionScheduleSid: ID
    notificationType: NotificationType
    serviceUser: ServiceUser
  }

  enum NotificationType {
    PARSE_SUCCESS
    LATE
    PARSE_FAIL
    EXPECTED
  }

  input SubmissionPeriodFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    expectedDate: DateFilter
    periodStartDate: DateFilter
    periodEndDate: DateFilter
    noDataCreateDate: DateFilter
    submissionSchedule: SubmissionScheduleFilters
    status: StringFilter
    reportedFlag: BooleanFilter
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewFilter
    noData: BooleanFilter
    numberOfFiles: NumberFilter
  }
  
  input SubmissionPeriodLineItemViewFilter {
    salesLineItemCount: NumberFilter
    invLineItemCount: NumberFilter
    earliestFileSubmissionDate: DateFilter
  }

  input SubmissionScheduleFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    reportingPartner: PartnerFilter
    name: StringFilter
    dataType: DataTypeFilter
    periodRule: StringFilter
    expectedDay: StringFilter
    workingDays: StringFilter
    startDate: DateFilter
    endDate: DateFilter
    isInPeriodReporter: BooleanFilter
    weekOfMonth: NumberFilter
    monthOfQuarter: NumberFilter
  }

  input DataTypeFilter {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    type: StringFilter
  }

  input SubmissionScheduleNotificationFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    submissionScheduleSid: IDFilter
    notificationType: StringFilter
    serviceUser: ServiceUserFilters
  }

  input SubmissionPeriodSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    expectedDate: SortOption
    periodStartDate: SortOption
    periodEndDate: SortOption
    noDataCreateDate: SortOption
    submissionSchedule: SubmissionScheduleSort
    status: SortOption
    reportedFlag: SortOption
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewSort
    noData: SortOption
    numberOfFiles: SortOption
  }
  
  input SubmissionPeriodLineItemViewSort {
    salesLineItemCount: SortOption
    invLineItemCount: SortOption
    earliestFileSubmissionDate: SortOption
  }

  input SubmissionScheduleSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    customerSid: SortOption
    dataType: DataTypeSort
    reportingPartner: PartnerSort
    periodRule: SortOption
    name: SortOption
    startDate: SortOption
    endDate: SortOption
    expectedDay: SortOption
    workingDays: SortOption
    isInPeriodReporter: SortOption
    weekOfMonth: SortOption
    monthOfQuarter: SortOption
  }

  input DataTypeSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    type: SortOption
  }

  input SubmissionScheduleNotificationSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    notificationType: SortOption
    serviceUser: ServiceUserSort
  }

  input NoDataInput {
    sid: ID!
    noDataReason: String
  }
`;
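// Usage sketch (not part of the original snippet): a minimal query shaped
// against the SubmissionDef schema above. Every selected field and argument
// exists in the type definitions; offset/limit values are arbitrary placeholders.
export const submissionPeriodsQuery = `
  query submissionPeriods {
    submissionPeriods(offset: 0, limit: 25) {
      sid
      expectedDate
      periodStartDate
      periodEndDate
      status
      reportedFlag
      numberOfFiles
      submissionSchedule {
        name
        periodRule
      }
    }
  }
`;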
export const statusSql = (alias: string) => {
     return `(select
             CASE WHEN ( ${alias}."ON_TIME_OVERRIDE" = 1) 
                  THEN 'On-time'
                  WHEN ( "spli1"."EARLIEST_FILE_SUBMISSION_DATE" < ${alias}."EXPECTED_DATE" )
                  THEN 'On-time'
                  WHEN ( ( ${alias}."NO_DATA" = 1 ) 
                         AND ( ${alias}."NO_DATA_CREATE_DATE" < ${alias}."EXPECTED_DATE"))
                  THEN 'On-time'
                  WHEN ( ${alias}."EXPECTED_DATE" > SYSTIMESTAMP)
                  THEN 'Pending'
                  ELSE 'Late'
             END
             from SUBMISSION_PERIOD "sp1"
             left join SUBMISSION_PERIOD_LINE_ITEM_V "spli1" on 
                                                     "spli1"."SUBMISSION_PERIOD_SID" = "sp1"."SID"
                                                     AND "spli1"."CUSTOMER_SID" = "sp1"."CUSTOMER_SID"
             where "sp1"."SID" = ${alias}."SID"
             AND "sp1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID")`;
};

export const reportedFlagSql = (alias: string) => {
     return `(select
             CASE WHEN MAX(${alias}."ON_TIME_OVERRIDE") = 1
                  THEN 1
                  WHEN MAX(${alias}."NO_DATA") = 1
                  THEN 1
                  WHEN COUNT("df"."ID") > 0
                  THEN 1
                  ELSE 0
             END
             from SUBMISSION_SCHEDULE "ss1"
             left join DATA_FILE_SUMMARY_INFO "dfsi" on 
                                              "dfsi"."SUBMISSION_PERIOD_SID" = ${alias}."SID"
                                              AND "dfsi"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID"
             left join DATA_TYPE "dt1" on "ss1"."DATA_TYPE_SID" = "dt1"."SID"
             left join DATA_FILE "df" on "dfsi"."CUSTOMER_SID" = "df"."CUSTOMER_SID"
                                 AND "dfsi"."DATA_FILE_SID" = "df"."SID"
                                 AND "df"."DATA_TYPE" = "dt1"."TYPE"  
             where "ss1"."SID" = ${alias}."SUBMISSION_SCHEDULE_SID"
             AND "ss1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID")`
};

export const fileIdsSql = (alias: string) => { 
    return `(select
             listagg("df"."ID",',') within group (ORDER BY "df"."CREATE_DATE")
             from SUBMISSION_SCHEDULE "ss1"
             left join DATA_FILE_SUMMARY_INFO "dfsi" on 
                                              "dfsi"."SUBMISSION_PERIOD_SID" = ${alias}."SID"
                                              AND "dfsi"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID"
             left join DATA_TYPE "dt1" on "ss1"."DATA_TYPE_SID" = "dt1"."SID"
             left join DATA_FILE "df" on "dfsi"."CUSTOMER_SID" = "df"."CUSTOMER_SID"
                                 AND "dfsi"."DATA_FILE_SID" = "df"."SID"
                                 AND "df"."DELETED" = 0
                                 AND "df"."DATA_TYPE" = "dt1"."TYPE"  
             where "ss1"."SID" = ${alias}."SUBMISSION_SCHEDULE_SID"
             AND "ss1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID")`
};

export const filesCountSql = (alias: string) => {
     return `(select
      count(df.id) as number_of_files
      from SUBMISSION_SCHEDULE ss1
      left join DATA_FILE_SUMMARY_INFO dfsi on 
                                       dfsi.SUBMISSION_PERIOD_SID =  ${alias}.SID
                                       AND dfsi.CUSTOMER_SID = ${alias}.CUSTOMER_SID
      left join DATA_TYPE dt1 on ss1.DATA_TYPE_SID = dt1.SID
      left join DATA_FILE df on dfsi.CUSTOMER_SID = df.CUSTOMER_SID
                          AND dfsi.DATA_FILE_SID = df.SID
                          AND df.DELETED = 0
                          AND df.DATA_TYPE = dt1.TYPE
      where ss1.SID = ${alias}.SUBMISSION_SCHEDULE_SID
      AND ss1.CUSTOMER_SID= ${alias}.CUSTOMER_SID)`
};
import { Entity, Column, JoinColumn, OneToOne, VirtualColumn, OneToMany } from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import {
  returnsSubmissionPeriodLineItemView,
  SubmissionPeriodLineItemView
} from './SubmissionPeriodLineItemView';
import {
  reportedFlagSql,
  statusSql,
  fileIdsSql,
  filesCountSql
} from './SubmissionPeriodSql';
import {
  returnsSubmissionSchedule,
  SubmissionSchedule
} from './SubmissionSchedule';
import { ServiceUser, returnsServiceUser } from '../user/ServiceUser';
import { DataFileSummaryInfo } from '../datafile/DataFileSummaryInfo';

export const returnsDataFileSummaryInfo = () => DataFileSummaryInfo;

export const dataFileSummaryInfoInverseSide = (dataFileSummaryInfo) =>
  dataFileSummaryInfo.submissionPeriod;

export const returnsSubmissionPeriod = () => SubmissionPeriod;

@Entity({ name: 'SUBMISSION_PERIOD' })
export class SubmissionPeriod extends CustomerDomainEntity {
  @Column({ name: 'EXPECTED_DATE' })
  expectedDate: Date;

  @Column({ name: 'PERIOD_START_DATE' })
  periodStartDate: Date;

  @Column({ name: 'PERIOD_END_DATE' })
  periodEndDate: Date;

  @Column({ name: 'SUBMISSION_SCHEDULE_SID', select: false })
  submissionScheduleSid: number;

  @Column({ name: 'ON_TIME_OVERRIDE' })
  onTimeOverride: boolean;

  @Column({ name: 'NO_DATA' })
  noData: boolean;

  @Column({ name: 'NO_DATA_REASON' })
  noDataReason: string;

  @Column({ name: 'NO_DATA_CREATE_DATE' })
  noDataCreateDate: Date;

  @Column({ name: 'NO_DATA_SERVICE_USER_SID', select: false })
  noDataServiceUserSid: number;

  @OneToOne(returnsServiceUser)
  @JoinColumn({ name: 'NO_DATA_SERVICE_USER_SID' })
  noDataServiceUser: Promise<ServiceUser>;

  @Column({ name: 'TRACKING_LEVEL' })
  trackingLevel: string;

  @Column({ name: 'EXPECTED_DAY' })
  expectedDay: number;

  @Column({ name: 'WORKING_DAYS' })
  workingDays: string;

  @Column({ name: 'IS_IN_PERIOD_REPORTER' })
  isInPeriodReporter: number;

  @Column({ name: 'DELETED' })
  deleted: boolean;

  @OneToOne(returnsSubmissionSchedule)
  @JoinColumn({ name: 'SUBMISSION_SCHEDULE_SID' })
  submissionSchedule?: Promise<SubmissionSchedule>;

  @VirtualColumn({ query: statusSql })
  status?: string;

  @VirtualColumn({ query: reportedFlagSql })
  reportedFlag: boolean;

  @VirtualColumn({ query: fileIdsSql })
  fileIds?: string;

  firstFileId?: string;

  firstFileName?: string;

  firstFileCreateDate?: Date;

  @VirtualColumn({ query: filesCountSql })
  numberOfFiles?: number;

  @OneToMany(returnsDataFileSummaryInfo, dataFileSummaryInfoInverseSide)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  dataFileSummaryInfo?: Promise<DataFileSummaryInfo[]>;

  @OneToOne(returnsSubmissionPeriodLineItemView)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodLineItemView?: Promise<SubmissionPeriodLineItemView>;

}
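// Usage sketch (not from the source): assumes a configured TypeORM DataSource;
// filter values and the function name are placeholders. The virtual columns
// (status, reportedFlag, fileIds, numberOfFiles) are computed per row when the
// entity is loaded through its repository, and lazy relations resolve on await.
import { DataSource } from 'typeorm';
import { SubmissionPeriod } from './SubmissionPeriod';

export const findOpenSubmissionPeriods = async (dataSource: DataSource) => {
  const repo = dataSource.getRepository(SubmissionPeriod);
  // noData/deleted map to the NO_DATA/DELETED columns declared above
  const periods = await repo.find({ where: { noData: false, deleted: false }, take: 50 });
  // submissionSchedule is declared as a lazy Promise relation, so await it
  const firstSchedule = periods.length ? await periods[0].submissionSchedule : undefined;
  return { periods, firstSchedule };
};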
import { Customer } from './customer/Customer';
import { Partner } from './partner/Partner';
import { Product } from './product/Product';
import { DynamicAttrs } from './attribute/DynamicAttrs';
import { DynamicAttrMetadata } from './attribute/DynamicAttrMetadata';
import { Sales } from './sales/Sales';
import { Address } from './address/Address';
import { Currency } from './base/Currency';
import { DataFile } from './datafile/DataFile';
import { Country } from './address/Country';
import { SalesMatchInfo } from './sales/SalesMatchInfo';
import { ProductAggr } from './product/ProductAggr';
import { GsNumber } from './partner/GsNumber';
import { Inventory } from './inventory/Inventory';
import { SubmissionPeriod } from './submission/SubmissionPeriod';
import { InventoryQuantity } from './inventory/InventoryQuantity';
import { QuantityType } from './inventory/QuantityType';
import { InventoryPrice } from './inventory/InventoryPrice';
import { UserEvent } from './event/UserEvent';
import { UserSession } from './event/UserSession';
import { DataFileState } from './datafile/DataFileState';
import { ParserAttempt } from './datafile/ParserAttempt';
import { PartnerOverlayView } from './partner/PartnerOverlayView';
import { DataState } from './base/DataState';
import { SubmissionSchedule } from './submission/SubmissionSchedule';
import { DataType } from './base/DataType';
import { SubmissionPeriodLineItemView } from './submission/SubmissionPeriodLineItemView';
import { SubmissionScheduleNotification } from './submission/SubmissionScheduleNotification';
import { ServiceUser } from './user/ServiceUser';
import { AuditEvent } from './event/AuditEvent';
import { SubmissionScheduleAudit } from './submission/SubmissionScheduleAudit';
import { AuditType } from './event/AuditType';
import { DataFileSummaryInfo } from './datafile/DataFileSummaryInfo';

export const entities = [
  Customer,
  GsNumber,
  Partner,
  Product,
  ProductAggr,
  Sales,
  Inventory,
  SubmissionPeriod,
  SubmissionSchedule,
  SubmissionScheduleNotification,
  DataType,
  SubmissionPeriodLineItemView,
  InventoryQuantity,
  QuantityType,
  InventoryPrice,
  SalesMatchInfo,
  Address,
  Country,
  Currency,
  DataFile,
  DataFileState,
  DataState,
  ParserAttempt,
  PartnerOverlayView,
  DynamicAttrs,
  DynamicAttrMetadata,
  UserEvent,
  UserSession,
  ServiceUser,
  AuditEvent,
  SubmissionScheduleAudit,
  AuditType,
  DataFileSummaryInfo
];
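// Sketch only: one way this entities array is typically wired into a TypeORM
// DataSource. The Oracle driver is an assumption inferred from the
// SYSTIMESTAMP/LISTAGG SQL used above; the import path and every connection
// value below are placeholders, not values from the source.
import { DataSource } from 'typeorm';
import { entities } from './entities';

export const appDataSource = new DataSource({
  type: 'oracle',
  host: 'db-host',          // placeholder
  port: 1521,               // placeholder
  serviceName: 'SERVICE',   // placeholder
  username: 'app_user',     // placeholder
  password: 'app_password', // placeholder
  entities,
  synchronize: false,       // schema is managed outside the ORM
  logging: false
});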
import { Entity, Column, ManyToOne, JoinColumn, OneToOne} from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import { SubmissionPeriod } from '../submission/SubmissionPeriod';
import { DataFile, returnsDataFile } from './DataFile';

export const returnsSubmissionPeriods = () => SubmissionPeriod;
export const returnsSummaryInfo = (submissionPeriod) => submissionPeriod.dataFileSummaryInfo;

@Entity({ name: 'DATA_FILE_SUMMARY_INFO' })
export class DataFileSummaryInfo extends CustomerDomainEntity {

    @ManyToOne(returnsSubmissionPeriods, returnsSummaryInfo)
    @JoinColumn({ name: 'SUBMISSION_PERIOD_SID', referencedColumnName: 'sid' })
    submissionPeriod: SubmissionPeriod;

    @Column({ name: 'DATA_FILE_SID' })
    dataFileSid: number;

    @Column({ name: 'CUSTOMER_SID' })
    customerSid: number;

    @Column({ name: 'NUM_SLIS' })
    numberOfPOSLines: number;

    @Column({ name: 'NUM_ILIS' })
    numberOfInventoryLines: number;

    @Column({ name: 'CREATE_DATE' })
    receivedDate: Date;

    @Column({ name: 'SUBMISSION_PERIOD_SID', nullable: true })
    submissionPeriodSid: number;

    @OneToOne(returnsDataFile)
    @JoinColumn({ name: 'DATA_FILE_SID' })
    dataFile?: Promise<DataFile>;
}

import {
  fileIdsSql,
  reportedFlagSql,
  statusSql,
  filesCountSql
} from '../../../../src/domain/submission/SubmissionPeriodSql';

describe('SubmissionPeriodSql Tests', () => {
  test('statusSql', () => {
    const actual = statusSql(`"sp"`);

    expect(actual).toEqual(
      `(select
             CASE WHEN ( "sp"."ON_TIME_OVERRIDE" = 1) 
                  THEN 'On-time'
                  WHEN ( "spli1"."EARLIEST_FILE_SUBMISSION_DATE" < "sp"."EXPECTED_DATE" )
                  THEN 'On-time'
                  WHEN ( ( "sp"."NO_DATA" = 1 ) 
                         AND ( "sp"."NO_DATA_CREATE_DATE" < "sp"."EXPECTED_DATE"))
                  THEN 'On-time'
                  WHEN ( "sp"."EXPECTED_DATE" > SYSTIMESTAMP)
                  THEN 'Pending'
                  ELSE 'Late'
             END
             from SUBMISSION_PERIOD "sp1"
             left join SUBMISSION_PERIOD_LINE_ITEM_V "spli1" on 
                                                     "spli1"."SUBMISSION_PERIOD_SID" = "sp1"."SID"
                                                     AND "spli1"."CUSTOMER_SID" = "sp1"."CUSTOMER_SID"
             where "sp1"."SID" = "sp"."SID"
             AND "sp1"."CUSTOMER_SID" = "sp"."CUSTOMER_SID")`
    );
  });

  
  test('reportedFlagSql', () => {
    const actual = reportedFlagSql(`"sp"`);

    expect(actual).toEqual(
      `(select
             CASE WHEN MAX("sp"."ON_TIME_OVERRIDE") = 1
                  THEN 1
                  WHEN MAX("sp"."NO_DATA") = 1
                  THEN 1
                  WHEN COUNT("df"."ID") > 0
                  THEN 1
                  ELSE 0
             END
             from SUBMISSION_SCHEDULE "ss1"
             left join DATA_FILE_SUMMARY_INFO "dfsi" on 
                                              "dfsi"."SUBMISSION_PERIOD_SID" = "sp"."SID"
                                              AND "dfsi"."CUSTOMER_SID" = "sp"."CUSTOMER_SID"
             left join DATA_TYPE "dt1" on "ss1"."DATA_TYPE_SID" = "dt1"."SID"
             left join DATA_FILE "df" on "dfsi"."CUSTOMER_SID" = "df"."CUSTOMER_SID"
                                 AND "dfsi"."DATA_FILE_SID" = "df"."SID"
                                 AND "df"."DATA_TYPE" = "dt1"."TYPE"  
             where "ss1"."SID" = "sp"."SUBMISSION_SCHEDULE_SID"
             AND "ss1"."CUSTOMER_SID" = "sp"."CUSTOMER_SID")`
    );
  });

  
  test('fileIdsSql', () => {
    const actual = fileIdsSql(`"sp"`);

    expect(actual).toEqual(
      `(select
             listagg("df"."ID",',') within group (ORDER BY "df"."CREATE_DATE")
             from SUBMISSION_SCHEDULE "ss1"
             left join DATA_FILE_SUMMARY_INFO "dfsi" on 
                                              "dfsi"."SUBMISSION_PERIOD_SID" = "sp"."SID"
                                              AND "dfsi"."CUSTOMER_SID" = "sp"."CUSTOMER_SID"
             left join DATA_TYPE "dt1" on "ss1"."DATA_TYPE_SID" = "dt1"."SID"
             left join DATA_FILE "df" on "dfsi"."CUSTOMER_SID" = "df"."CUSTOMER_SID"
                                 AND "dfsi"."DATA_FILE_SID" = "df"."SID"
                                 AND "df"."DELETED" = 0
                                 AND "df"."DATA_TYPE" = "dt1"."TYPE"  
             where "ss1"."SID" = "sp"."SUBMISSION_SCHEDULE_SID"
             AND "ss1"."CUSTOMER_SID" = "sp"."CUSTOMER_SID")`
    );
  });

  test('filesCountSql', () => {
    const actual = filesCountSql(`"sp"`);

    expect(actual).toEqual(
      `(select
      count(df.id) as number_of_files
      from SUBMISSION_SCHEDULE ss1
      left join DATA_FILE_SUMMARY_INFO dfsi on 
                                       dfsi.SUBMISSION_PERIOD_SID =  "sp".SID
                                       AND dfsi.CUSTOMER_SID = "sp".CUSTOMER_SID
      left join DATA_TYPE dt1 on ss1.DATA_TYPE_SID = dt1.SID
      left join DATA_FILE df on dfsi.CUSTOMER_SID = df.CUSTOMER_SID
                          AND dfsi.DATA_FILE_SID = df.SID
                          AND df.DELETED = 0
                          AND df.DATA_TYPE = dt1.TYPE
      where ss1.SID = "sp".SUBMISSION_SCHEDULE_SID
      AND ss1.CUSTOMER_SID= "sp".CUSTOMER_SID)`
    );
  });

});
import 'reflect-metadata';
import {
  dataFileSummaryInfoInverseSide,
  returnsDataFileSummaryInfo,
  returnsSubmissionPeriod,
  SubmissionPeriod
} from '../../../../src/domain/submission/SubmissionPeriod';
import { DataFileSummaryInfo } from '../../../../src/domain/datafile/DataFileSummaryInfo';

describe('Submission Period Unit Tests', () => {

  test('returnsSubmissionPeriod', () => {
    expect(returnsSubmissionPeriod()).toEqual(SubmissionPeriod);
  });

  test('constructor', () => {
    expect(new SubmissionPeriod()).toBeInstanceOf(SubmissionPeriod);
  });

  test('dataFileSummaryInfoInverseSide', () => {
    const dataFileSummaryInfo: DataFileSummaryInfo = new DataFileSummaryInfo();
    expect(dataFileSummaryInfoInverseSide(dataFileSummaryInfo)).toEqual(
      dataFileSummaryInfo.submissionPeriod
    );
  });

  test('returnsDataFileSummaryInfo', () => {
    expect(returnsDataFileSummaryInfo()).toEqual(DataFileSummaryInfo);
  });

});

import { returnsSubmissionPeriods, returnsSummaryInfo } from '../../../../src/domain/datafile/DataFileSummaryInfo';
import { SubmissionPeriod } from '../../../../src/domain/submission/SubmissionPeriod';

describe('Data File Summary Info Tests', () => {
    test('returnsSubmissionPeriods', () => {
      expect(returnsSubmissionPeriods()).toEqual(SubmissionPeriod);
    });

    test('returnsSummaryInfo', () => {
        const submissionPeriod = new SubmissionPeriod();
        expect(returnsSummaryInfo(submissionPeriod)).toEqual(
          submissionPeriod.dataFileSummaryInfo
        );
      });
});
-- Date with time
GETDATE()
-- Just the year
YEAR(GETDATE())
-- Date without the time
CAST(GETDATE() AS DATE)
select * from information_schema.columns
where TABLE_SCHEMA = 'SCE' and TABLE_NAME like '%Order%' and COLUMN_NAME like '%Order%'

select * from information_schema.columns
where TABLE_SCHEMA = 'SCE' and TABLE_NAME like 'vw_ORDERS_1' and COLUMN_NAME like '%Date%'
USE [AdventureWorks];
GO
SELECT name AS [Name], 
       SCHEMA_NAME(schema_id) AS schema_name, 
       type_desc, 
       create_date, 
       modify_date
FROM sys.objects
WHERE type = 'U';
SELECT catalog_name AS DBName, 
    Schema_name, 
    schema_owner
FROM information_schema.SCHEMATA;
SELECT * FROM INFORMATION_SCHEMA.VIEWS;
version: "1.0"

name: ChannelNetwork

# All actions

actions:
  - name: VIEW
  - name: UPDATE

# All resources

resources:
  # UI Resource for access to kpis tab
  - name: ProductKpiTab
    actions: [VIEW]

    ### Begin productKpi fields

  - name: ProductKpi
    group: productKpi
    actions: [VIEW]

  - name: ProductKpi/*
    group: productKpi
    actions: [VIEW]

    ### End productKpi fields

    ### Begin salesKpi fields

  - name: SalesKpi
    group: salesKpi
    actions: [VIEW]

  - name: SalesKpi/*
    group: salesKpi
    actions: [VIEW]

    ### End salesKpi fields

    ### Begin inventoryKpi fields

  - name: InventoryKpi
    group: inventoryKpi
    actions: [VIEW]

  - name: InventoryKpi/*
    group: inventoryKpi
    actions: [VIEW]

    ### End inventoryKpi fields

    ### Begin filesKpi fields

  - name: FilesKpi
    group: filesKpi
    actions: [VIEW]

  - name: FilesKpi/*
    group: filesKpi
    actions: [VIEW]

    ### End filesKpi fields

    ### Begin ssKpi fields

  - name: SSKpi
    group: ssKpi
    actions: [VIEW]

  - name: SSKpi/*
    group: ssKpi
    actions: [VIEW]

    ### End ssKpi fields


    # UI Resource for access to Products tab
  - name: ProductTab
    actions: [VIEW, UPDATE]

    ### Begin Product Fields

  - name: Product
    group: product
    actions: [VIEW, UPDATE]

  - name: Product/sid
    group: product
    actions: [VIEW]

  - name: Product/createDate
    group: product
    actions: [VIEW]

  - name: Product/updateDate
    group: product
    actions: [VIEW]

  - name: Product/customerSid
    group: product-internal
    actions: [VIEW]

  - name: Product/sku
    group: product
    actions: [VIEW]

  - name: Product/name
    group: product
    actions: [VIEW]

  - name: Product/description
    group: product
    actions: [VIEW]

  - name: Product/productFamily
    group: product
    actions: [VIEW]

  - name: Product/productLine
    group: product
    actions: [VIEW]

  - name: Product/startDate
    group: product
    actions: [VIEW]

  - name: Product/endDate
    group: product
    actions: [VIEW]

  - name: Product/serialized
    group: product-internal
    actions: [VIEW]

  - name: Product/aggregation
    group: product-pos-aggr
    actions: [VIEW]

  - name: Product/aggregation/totalSalesLineCount
    group: product-pos-aggr
    actions: [VIEW]

  - name: Product/aggregation/totalSalesQuantity
    group: product-pos-aggr
    actions: [VIEW]

  - name: Product/aggregation/oldestInvoiceDate
    group: product-pos-aggr
    actions: [VIEW]

    ## Begin Product Dynamic Attrs


  - name: Product/dynamicAttrs
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/sid
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/updateDate
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/createDate
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/attributeType
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/STRING_COL_1
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_2
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_3
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_4
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_5
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_6
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_7
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_8
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_9
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_10
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_11
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_12
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_13
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_14
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_15
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_16
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_17
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_18
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_19
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_20
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_21
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_22
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_23
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_24
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_25
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_26
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_27
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_28
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_29
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_30
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]

  - name: Product/dynamicAttrs/NUM_COL_1
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_2
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_3
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_4
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_5
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_6
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_7
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_8
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_9
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_10
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]

  - name: Product/dynamicAttrs/DATE_COL_1
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
    ## End Product Dynamic Attrs

    ### End Product Fields

    # UI Resource for access to POS tab
  - name: SalesTab
    actions: [VIEW, UPDATE]

    ### Begin POS Fields

  - name: Sales
    group: sales
    actions: [VIEW, UPDATE]

  - name: Sales/sid
    group: sales
    actions: [VIEW]

  - name: Sales/createDate
    group: sales
    actions: [VIEW]

  - name: Sales/updateDate
    group: sales
    actions: [VIEW]

  - name: Sales/customerSid
    group: sales-internal
    actions: [VIEW]

  - name: Sales/deleted
    group: sales-internal
    actions: [VIEW]

  - name: Sales/branchId
    group: sales
    actions: [VIEW]

  - name: Sales/invoiceNumber
    group: sales
    actions: [VIEW]

  - name: Sales/invoiceDate
    group: sales
    actions: [VIEW]

  - name: Sales/quantity
    group: sales
    actions: [VIEW]

  - name: Sales/reportedSku
    group: sales
    actions: [VIEW, UPDATE]

  - name: Sales/productDescription
    group: sales
    actions: [VIEW]

  - name: Sales/transactionId
    group: sales-internal
    actions: [VIEW]

  - name: Sales/vendorPartNumber
    group: sales
    actions: [VIEW]

  - name: Sales/accountRepresentative
    group: sales
    actions: [VIEW]

  - name: Sales/acquisitionExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/acquisitionUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/boolExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/bookUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/customerOrderNumber
    group: sales
    actions: [VIEW]

  - name: Sales/debitExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/debitUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/designRegistrationNumber
    group: sales
    actions: [VIEW]

  - name: Sales/distributorId
    group: sales
    actions: [VIEW]

  - name: Sales/distributorName
    group: sales
    actions: [VIEW]

  - name: Sales/distributorShipmentNumber
    group: sales
    actions: [VIEW]

  - name: Sales/distributorWarehouseId
    group: sales
    actions: [VIEW]

  - name: Sales/exchangeDate
    group: sales
    actions: [VIEW]

  - name: Sales/exchangeRate
    group: sales
    actions: [VIEW]

  - name: Sales/globalProductClassCode
    group: sales
    actions: [VIEW]

  - name: Sales/legacySalesRecordId
    group: sales
    actions: [VIEW]

  - name: Sales/lengthOfProduction
    group: sales
    actions: [VIEW]

  - name: Sales/manufactureId
    group: sales
    actions: [VIEW]

  - name: Sales/manufactureName
    group: sales
    actions: [VIEW]

  - name: Sales/manufacturerShipmentNumber
    group: sales
    actions: [VIEW]

  - name: Sales/orderNumber
    group: sales
    actions: [VIEW]

  - name: Sales/originalId
    group: sales
    actions: [VIEW]

  - name: Sales/price
    group: sales
    actions: [VIEW]

  - name: Sales/purchaseOrderNumber
    group: sales
    actions: [VIEW]

  - name: Sales/r2rDuplicateType
    group: sales
    actions: [VIEW]

  - name: Sales/regionTerritory
    group: sales
    actions: [VIEW]

  - name: Sales/reportEndingDate
    group: sales
    actions: [VIEW]

  - name: Sales/reportType
    group: sales
    actions: [VIEW]

  - name: Sales/resaleExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/resaeExtension
    group: sales
    actions: [VIEW]

  - name: Sales/resaleUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/resubmitted
    group: sales
    actions: [VIEW]

  - name: Sales/reportedProductFamily
    group: sales
    actions: [VIEW]

  - name: Sales/reportedProductLine
    group: sales
    actions: [VIEW]

  - name: Sales/reportedProductName
    group: sales
    actions: [VIEW]

  - name: Sales/shipDate
    group: sales
    actions: [VIEW]

  - name: Sales/shipDebitNumber
    group: sales
    actions: [VIEW]

  - name: Sales/shippingMethod
    group: sales
    actions: [VIEW]

  - name: Sales/spaNumber
    group: sales
    actions: [VIEW]

  - name: Sales/tier
    group: sales
    actions: [VIEW]

  - name: Sales/transactionType
    group: sales
    actions: [VIEW]

  - name: Sales/unitOfMeasure
    group: sales
    actions: [VIEW]

  - name: Sales/vendorPartDescription
    group: sales
    actions: [VIEW]

  - name: Sales/validationCodes
    group: sales
    actions: [VIEW]

  - name: Sales/serialNumbers
    group: sales
    actions: [VIEW]

    # Bill to address


  - name: Sales/billToAddress
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/billToAddress/entityName
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/street1
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/street2
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/city
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/stateProvince
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/postalCode
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/reportedCountry
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country/name
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country/twoCharCode
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country/threeCharCode
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddressExternalId
    group: sales-bill-to
    actions: [VIEW]

    # sold to address

  - name: Sales/soldToAddress
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/soldToAddress/entityName
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/street1
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/street2
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/city
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/stateProvince
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/postalCode
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/reportedCountry
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country/name
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country/twoCharCode
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country/threeCharCode
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddressExternalId
    group: sales-sold-to
    actions: [VIEW]

    # ship to address

  - name: Sales/shipToAddress
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/shipToAddress/entityName
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/street1
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/street2
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/city
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/stateProvince
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/postalCode
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/reportedCountry
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country/name
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country/twoCharCode
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country/threeCharCode
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddressExternalId
    group: sales-ship-to
    actions: [VIEW]

    # sell from address

  - name: Sales/sellFromAddress
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/sellFromAddress/entityName
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/street1
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/street2
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/city
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/stateProvince
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/postalCode
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/reportedCountry
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country/name
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country/twoCharCode
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country/threeCharCode
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddressExternalId
    group: sales-sell-from
    actions: [VIEW]

    # ship from address

  - name: Sales/shipFromAddress
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/shipFromAddress/entityName
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/street1
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/street2
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/city
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/stateProvince
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/postalCode
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/reportedCountry
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country/name
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country/twoCharCode
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country/threeCharCode
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddressExternalId
    group: sales-ship-from
    actions: [VIEW]

    # sales in address

  - name: Sales/salesInAddress
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/salesInAddress/entityName
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/street1
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/street2
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/city
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/stateProvince
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/postalCode
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/reportedCountry
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country/name
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country/twoCharCode
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country/threeCharCode
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddressExternalId
    group: sales-sales-in
    actions: [VIEW]

    # purchasing customer address

  - name: Sales/purchasingCustomerAddress
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/entityName
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/street1
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/street2
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/city
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/stateProvince
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/postalCode
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/reportedCountry
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country/name
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country/twoCharCode
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country/threeCharCode
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerExternalId
    group: sales-purchasing-customer
    actions: [VIEW]

    # derived end customer address

  - name: Sales/derivedEndCustomerAddress
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/entityName
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/street1
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/street2
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/city
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/stateProvince
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/postalCode
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/reportedCountry
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country/name
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country/twoCharCode
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country/threeCharCode
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddressExternalId
    group: sales-derived-end-customer
    actions: [VIEW]

    # data file

  - name: Sales/dataFile
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/dataFile/loadDate
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/reportDate
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/id
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/fileName
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/recordCount
    group: sales-data-file
    actions: [VIEW]

    # match info

  - name: Sales/productMatchInfo
    group: sales-product-match-info
    actions: [VIEW]

  - name: Sales/productMatchInfo/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/productMatchInfo/matchedProduct
    group: sales-product-match-info
    actions: [VIEW]

  - name: Sales/productMatchInfo/matchedProduct/sku
    group: sales-product-match-info
    actions: [VIEW]

    # reporting partner

  - name: Sales/reportingPartner
    group: sales-reporting-partner
    actions: [VIEW]

  - name: Sales/reportingPartner/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/reportingPartner/gsNumbers
    group: sales-reporting-partner
    actions: [VIEW]

  - name: Sales/reportingPartner/gsNumbers/value
    group: sales-reporting-partner
    actions: [VIEW]

    # currency

  - name: Sales/currency
    group: sales-currency
    actions: [VIEW]

  - name: Sales/currency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/currency/name
    group: sales-currency
    actions: [VIEW]

    # resale currency

  - name: Sales/resaleCurrency
    group: sales-resale-currency
    actions: [VIEW]

  - name: Sales/resaleCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/resaleCurrency/name
    group: sales-resale-currency
    actions: [VIEW]

    # debit currency

  - name: Sales/debtCurrency
    group: sales-debit-currency
    actions: [VIEW]

  - name: Sales/debtCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/debtCurrency/name
    group: sales-debit-currency
    actions: [VIEW]

    # book currency

  - name: Sales/bookCurrency
    group: sales-book-currency
    actions: [VIEW]

  - name: Sales/bookCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/bookCurrency/name
    group: sales-book-currency
    actions: [VIEW]

    # acquisition currency

  - name: Sales/acquisitionCurrency
    group: sales-acquisition-currency
    actions: [VIEW]

  - name: Sales/acquisitionCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/acquisitionCurrency/name
    group: sales-acquisition-currency
    actions: [VIEW]

    ## Begin POS Dynamic Attrs

  - name: Sales/dynamicAttrs
    group: sales-dynamic-attrs
    actions: [VIEW, UPDATE]

  - name: Sales/dynamicAttrs/*
    group: sales-dynamic-attrs
    actions: [VIEW, UPDATE]

  - name: Sales/dynamicAttrs/STRING_COL_1
    group: sales-dynamic-attrs
    actions: [VIEW, UPDATE]

    ## End POS Dynamic Attrs

    ### End POS Fields

    # UI Resource for access to INV tab
  - name: InventoryTab
    actions: [VIEW, UPDATE]

    ### Begin INV Fields

  - name: Inventory
    group: inventory
    actions: [VIEW, UPDATE]

  - name: Inventory/sid
    group: inventory
    actions: [VIEW]

  - name: Inventory/createDate
    group: inventory
    actions: [VIEW]

  - name: Inventory/updateDate
    group: inventory
    actions: [VIEW]

  - name: Inventory/customerSid
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/deleted
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/productName
    group: inventory
    actions: [VIEW]

  - name: Inventory/clientDescription
    group: inventory
    actions: [VIEW]

  - name: Inventory/clientSku
    group: inventory
    actions: [VIEW, UPDATE]

  - name: Inventory/reportedSku
    group: inventory
    actions: [VIEW]

  - name: Inventory/inventoryDate
    group: inventory
    actions: [VIEW]

  - name: Inventory/unitOfMeasure
    group: inventory
    actions: [VIEW]

  - name: Inventory/id
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/originalId
    group: inventory
    actions: [VIEW]

  - name: Inventory/lineNumber
    group: inventory-internal
    actions: [VIEW]

    # data file

  - name: Inventory/dataFile
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/dataFile/loadDate
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/reportDate
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/id
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/fileName
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/recordCount
    group: inventory-data-file
    actions: [VIEW]

    # reporting partner

  - name: Inventory/reportingPartner
    group: inventory-reporting-partner
    actions: [VIEW]

  - name: Inventory/reportingPartner/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/reportingPartner/gsNumbers
    group: inventory-reporting-partner
    actions: [VIEW]

  - name: Inventory/reportingPartner/gsNumbers/value
    group: inventory-reporting-partner
    actions: [VIEW]

    # submission period

  - name: Inventory/submissionPeriod
    group: inventory-submission-period
    actions: [VIEW]

  - name: Inventory/submissionPeriod/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/submissionPeriod/expectedDate
    group: inventory-submission-period
    actions: [VIEW]

  - name: Inventory/submissionPeriod/periodStartDate
    group: inventory-submission-period
    actions: [VIEW]

  - name: Inventory/submissionPeriod/periodEndDate
    group: inventory-submission-period
    actions: [VIEW]

    # quantities

  - name: Inventory/inventoryQuantities
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/inventoryQuantities/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/onHandQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/onHandQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/onHandQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/onOrderQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/onOrderQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/onOrderQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/committedQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/committedQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/committedQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/floatQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/floatQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/floatQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/backorderedQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/backorderedQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/backorderedQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/returnedQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/returnedQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/returnedQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/inTransitQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/inTransitQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/inTransitQuantity/value
    group: inventory-quantity
    actions: [VIEW]

    # prices

  - name: Inventory/inventoryPrices
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/inventoryPrices/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/unitPrice
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/unitPrice/price
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/fromCurrency
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/fromCurrency/name
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/toCurrency
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/toCurrency/name
    group: inventory-price
    actions: [VIEW]

    ## Begin INV Dynamic Attrs

  - name: Inventory/dynamicAttrs
    group: inventory-dynamic-attrs
    actions: [VIEW]

  - name: Inventory/dynamicAttrs/*
    group: inventory-dynamic-attrs
    actions: [VIEW]

    ## End INV Dynamic Attrs

    ### End INV Fields

  # UI Resource for access to File Tab
  - name: FilesTab
    actions: [VIEW, UPDATE]

  ## Begin Partner

  - name: Partner
    group: reporting-partner
    actions: [VIEW]

  - name: Partner/*
    group: reporting-partner
    actions: [VIEW]

  ## End Partner

  # Begin File fields
  - name: DataFile
    group: file-management
    actions: [VIEW, UPDATE]

  - name: DataFile/sid
    group: file-management
    actions: [VIEW]

  - name: DataFile/loadDate
    group: file-management
    actions: [VIEW]

  - name: DataFile/reportDate
    group: file-management
    actions: [VIEW]

  - name: DataFile/fileName
    group: file-management
    actions: [VIEW]

  - name: DataFile/fileType
    group: file-management
    actions: [VIEW]

  - name: DataFile/dataType
    group: file-management
    actions: [VIEW]

  - name: DataFile/id
    group: file-management
    actions: [VIEW]

  - name: DataFile/fileSize
    group: file-management
    actions: [VIEW]

  - name: DataFile/source
    group: file-management
    actions: [VIEW]

  - name: DataFile/recordCount
    group: file-management
    actions: [VIEW]

  - name: DataFile/deletedLines
    group: file-management
    actions: [VIEW]

  - name: DataFile/download
    group: file-download
    actions: [VIEW]

  - name: DataFile/validationDownload
    group: file-validation-download
    actions: [VIEW]

  - name: DataFile/upload
    group: file-upload
    actions: [VIEW]

  - name: DataFile/uploadDataTypes
    group: file-upload
    actions: [VIEW]

  - name: DataFile/uploadFileTypes
    group: file-upload
    actions: [VIEW]

  - name: DataFile/reportingPartner
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/reportingPartner/*
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/reportingPartner/partnerOverlayView
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/reportingPartner/partnerOverlayView/*
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/dataFileState
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/*
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/sid
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/createDate
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/updateDate
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/parserAttempt
    group: file-parser-attempt
    actions: [VIEW]

  - name: DataFile/parserAttempt/*
    group: file-parser-attempt
    actions: [VIEW]

  # End of File fields
  
  # UI Resources for access to Submission tabs
  - name: SubmissionTrackingTab
    actions: [VIEW, UPDATE]

  - name: SubmissionResultsTab
    actions: [VIEW, UPDATE]

  # Begin Submission Schedule 
  - name: SubmissionSchedule
    group: submission-schedule
    actions: [VIEW, UPDATE]

  - name: SubmissionSchedule/sid
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/createDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/updateDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/name
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner/*
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner/partnerOverlayView
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner/partnerOverlayView/*
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/dataType
    group: ss-data-type
    actions: [VIEW]

  - name: SubmissionSchedule/dataType/*
    group: ss-data-type
    actions: [VIEW]

  - name: SubmissionSchedule/periodRule
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/expectedDay
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/startDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/endDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/isInPeriodReporter
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/weekOfMonth
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/monthOfQuarter
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/workingDays
    group: submission-schedule
    actions: [VIEW]

  # End Submission Schedule 

  # Begin Submission Schedule Notification
  - name: SubmissionScheduleNotification
    group: submission-schedule-notification
    actions: [VIEW]
  
  - name: SubmissionScheduleNotification/*
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/notificationType
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/notificationType/*
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/serviceUser
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/serviceUser/*
    group: submission-schedule-notification
    actions: [VIEW]

  # End Submission Schedule Notification

  # Begin Submission Period
  - name: SubmissionPeriod
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/sid
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/expectedDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/periodStartDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/periodEndDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/createDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/updateDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/noData
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/noDataReason
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/noDataCreateDate
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/onTimeOverride
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/expectedDay
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/workingDays
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/isInPeriodReporter
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/trackingLevel
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/status
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/reportedFlag
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/fileIds
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/firstFileName
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/firstFileCreateDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/firstFileId
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/deleted
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/noDataServiceUser/sid
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser/firstName
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser/lastName
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser/email
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/submissionPeriodLineItemView
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/submissionPeriodLineItemView/*
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/dataFileSummaryInfo
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/dataFileSummaryInfo/*
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule/sid
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule/name
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule/periodRule
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner/*
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner/partnerOverlayView
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner/partnerOverlayView/*
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/dataType
    group: sp-data-type
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/dataType/*
    group: sp-data-type
    actions: [VIEW]

  # End Submission Period

  # Begin Export Request

  - name: ExportRequest
    group: export
    actions: [VIEW, UPDATE]

  - name: ExportRequest/*
    group: export
    actions: [VIEW, UPDATE]

  # End Export Request

  ### Begin Base resources

  - name: About
    group: about
    actions: [VIEW]

  - name: About/*
    group: about
    actions: [VIEW]

  - name: UserEvent
    group: user-event
    actions: [VIEW, UPDATE]

  - name: UserEvent/*
    group: user-event
    actions: [VIEW, UPDATE]

  - name: ObjectLock
    group: object-lock
    actions: [VIEW, UPDATE]

  - name: ObjectLock/*
    group: object-lock
    actions: [VIEW, UPDATE]

  - name: ObjectLockResponse
    group: object-lock-response
    actions: [VIEW]

  - name: ObjectLockResponse/*
    group: object-lock-response
    actions: [VIEW]

  - name: MutationResponse
    group: mutation-response
    actions: [VIEW]

  - name: MutationResponse/*
    group: mutation-response
    actions: [VIEW]

  - name: DynamicAttrMetadata
    group: attr-metadata
    actions: [VIEW]

  - name: DynamicAttrMetadata/*
    group: attr-metadata
    actions: [VIEW]
    

  ### End Base resources

  ### Begin mutation resources

  - name: Product/mutation/*
    group: product-update
    actions: [UPDATE]

  - name: Sales/mutation/*
    group: sales-update
    actions: [UPDATE]

  - name: Inventory/mutation/*
    group: inventory-update
    actions: [UPDATE]

  - name: ExportRequest/mutation/*
    group: export
    actions: [UPDATE]

  - name: SubmissionPeriod/mutation/*
    group: submission-period-mutation
    actions: [UPDATE]

    ### End mutation resources

    ### Begin Customer Resources

  - name: INT
    actions: [VIEW]

  - name: ACS
    actions: [VIEW]

  - name: CAMB
    actions: [VIEW]

  - name: CYBERDYNE
    actions: [VIEW]

  - name: COR
    actions: [VIEW]

  - name: INT_CCD
    actions: [VIEW]

  - name: ACS_CCD
    actions: [VIEW]

  - name: CAMB_CCD
    actions: [VIEW]

  - name: QCOM_CCD
    actions: [VIEW]

  - name: COR_CCD
    actions: [VIEW]

    ### End Customer Resources

# Common permissions for all tenants
permissions:

  - name: BasicViewUpdate
    displayName: Background Permissions
    description: Every User needs this permission
    resource-actions:
      - about:[VIEW]
      - user-event:[VIEW, UPDATE]
      - object-lock:[VIEW, UPDATE]
      - object-lock-response:[VIEW]
      - mutation-response:[VIEW]
      - attr-metadata:[VIEW]

  - name: KpiView
    displayName: KPI Tab
    description: KPI Permission Set
    resource-actions:
      - ProductKpiTab:[VIEW]

  - name: SSKpiFields
    displayName: Submission KPI Fields
    description: Submission KPI Fields Permission Set
    resource-actions:
      - ssKpi:[VIEW]

  - name: FilesKpiFields
    displayName: Files KPI Fields
    description: Files KPI Fields Permission Set
    resource-actions:
      - filesKpi:[VIEW]

  - name: FileTab
    displayName: File Tab
    description: File Permission Set
    resource-actions:
      - FilesTab:[VIEW]

  - name: FileUploadPartner
    displayName: File Upload for Partner
    description: File Upload Permission Set
    resource-actions:
      - DataFile:[VIEW, UPDATE] 
      - file-upload:[VIEW]     

  - name: FileUpload
    displayName: File Upload 
    description: File Upload Permission Set
    resource-actions:
      - DataFile:[VIEW]
      - file-upload:[VIEW]
      - reporting-partner:[VIEW]

  - name: FileDownload
    displayName: File Download
    description: File Download Permission Set
    resource-actions:
      - DataFile:[VIEW]
      - file-download:[VIEW]
      - file-validation-download:[VIEW]

  - name: SubmissionTab
    displayName: Submission Tab
    description: Submission Permission Set
    resource-actions:
      - SubmissionTrackingTab:[VIEW]
      - SubmissionResultsTab:[VIEW]

  - name: FileManufactureView
    displayName: File Admin View
    description: File Admin Permission Set
    resource-actions:
      - file-management:[VIEW]
      - file-reporting-partner:[VIEW]
      - file-data-file-state:[VIEW]
      - file-parser-attempt:[VIEW]

  - name: FilePartnerView
    displayName: File Partner View
    description: File Partner Permission Set
    resource-actions:
      - file-management:[VIEW]
      - file-data-file-state:[VIEW]
      - file-parser-attempt:[VIEW]

  - name: SubmissionManufactureView
    displayName: Submission Admin View
    description: Submission Full Permission Set
    resource-actions:
      - submission-schedule:[VIEW]
      - submission-schedule-notification:[VIEW]
      - ss-reporting-partner:[VIEW]
      - ss-data-type:[VIEW]
      - submission-period:[VIEW]
      - sp-reporting-partner:[VIEW]
      - sp-data-type:[VIEW]
      - sp-no-data-user:[VIEW]
      - submission-period-update:[VIEW]

  - name: SubmissionPartnerView
    displayName: Submission Partner View
    description: Submission Partner Permission Set
    resource-actions:
      - submission-schedule:[VIEW]
      - submission-schedule-notification:[VIEW]
      - ss-data-type:[VIEW]
      - submission-period:[VIEW]
      - sp-data-type:[VIEW]
      - sp-no-data-user:[VIEW]
      - submission-period-update:[VIEW]

  - name: SubmissionPartnerUpdate
    displayName: Submission Partner Update
    description: Submission Update Permission Set
    resource-actions:
      - submission-period-update:[UPDATE]
      - submission-period-mutation:[UPDATE]

  - name: SubmissionUpdate
    displayName: Submission Update 
    description: Submission Update Permission Set
    resource-actions:
      - submission-period-update:[UPDATE]
      - submission-period-mutation:[UPDATE]
export const SubmissionDef = `
  type Query {
    submissionPeriods(
      offset: Float, 
      limit: Float,
      filters: SubmissionPeriodFilters,
      sort: SubmissionPeriodSort
    ): [SubmissionPeriod]
    submissionSchedules(
      offset: Float, 
      limit: Float,
      filters: SubmissionScheduleFilters, 
      sort: SubmissionScheduleSort
    ): [SubmissionSchedule]
    submissionScheduleNotifications(
      submissionScheduleSid: ID,
      offset: Float,
      limit: Float,
      filters: SubmissionScheduleNotificationFilters,
      sort: SubmissionScheduleNotificationSort
    ): [SubmissionScheduleNotification]
  }

  type Mutation {
    markNoData(data: [NoDataInput]): [MutationResponse] @auth(object: SubmissionPeriod)
  }

  type SubmissionPeriod {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    expectedDate: Date
    periodStartDate: Date
    periodEndDate: Date
    noData: Boolean
    noDataReason: String
    noDataCreateDate: Date
    onTimeOverride: Boolean
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Float
    trackingLevel: String
    submissionSchedule: SubmissionSchedule
    status: String
    reportedFlag: Boolean
    numberOfFiles: Float
    dataFileSummaryInfo: [DataFileSummaryInfo]
    submissionPeriodLineItemView: SubmissionPeriodLineItemView
    noDataServiceUser: ServiceUser
  }

  type SubmissionPeriodLineItemView {
    salesLineItemCount: Float
    invLineItemCount: Float
    earliestFileSubmissionDate: Date
  }

  type DataFileSummaryInfo {
    numberOfPOSLines: Float
    numberOfInventoryLines: Float
    receivedDate: Date
    dataFileId: String
    dataFileName: String
  }
  
  type SubmissionSchedule {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    dataType: DataType    
    reportingPartner: Partner
    periodRule: String
    name: String
    startDate: Date
    endDate: Date
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Boolean
    weekOfMonth: Float
    monthOfQuarter: Float
  }

  type DataType {
    sid: ID
    createDate: Date
    updateDate: Date
    type: String
  }

  type SubmissionScheduleNotification {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    submissionScheduleSid: ID
    notificationType: NotificationType
    serviceUser: ServiceUser
  }

  enum NotificationType {
    PARSE_SUCCESS
    LATE
    PARSE_FAIL
    EXPECTED
  }

  input SubmissionPeriodFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    expectedDate: DateFilter
    periodStartDate: DateFilter
    periodEndDate: DateFilter
    noDataCreateDate: DateFilter
    submissionSchedule: SubmissionScheduleFilters
    status: StringFilter
    reportedFlag: BooleanFilter
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewFilter
    noData: BooleanFilter
    numberOfFiles: NumberFilter
  }
  
  input SubmissionPeriodLineItemViewFilter {
    salesLineItemCount: NumberFilter
    invLineItemCount: NumberFilter
    earliestFileSubmissionDate: DateFilter
  }

  input SubmissionScheduleFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    reportingPartner: PartnerFilter
    name: StringFilter
    dataType: DataTypeFilter
    periodRule: StringFilter
    expectedDay: StringFilter
    workingDays: StringFilter
    startDate: DateFilter
    endDate: DateFilter
    isInPeriodReporter: BooleanFilter
    weekOfMonth: NumberFilter
    monthOfQuarter: NumberFilter
  }

  input DataTypeFilter {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    type: StringFilter
  }

  input SubmissionScheduleNotificationFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    submissionScheduleSid: IDFilter
    notificationType: StringFilter
    serviceUser: ServiceUserFilters
  }

  input SubmissionPeriodSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    expectedDate: SortOption
    periodStartDate: SortOption
    periodEndDate: SortOption
    noDataCreateDate: SortOption
    submissionSchedule: SubmissionScheduleSort
    status: SortOption
    reportedFlag: SortOption
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewSort
    noData: SortOption
    numberOfFiles: SortOption
  }
  
  input SubmissionPeriodLineItemViewSort {
    salesLineItemCount: SortOption
    invLineItemCount: SortOption
    earliestFileSubmissionDate: SortOption
  }

  input SubmissionScheduleSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    customerSid: SortOption
    dataType: DataTypeSort
    reportingPartner: PartnerSort
    periodRule: SortOption
    name: SortOption
    startDate: SortOption
    endDate: SortOption
    expectedDay: SortOption
    workingDays: SortOption
    isInPeriodReporter: SortOption
    weekOfMonth: SortOption
    monthOfQuarter: SortOption
  }

  input DataTypeSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    type: SortOption
  }

  input SubmissionScheduleNotificationSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    notificationType: SortOption
    serviceUser: ServiceUserSort
  }

  input NoDataInput {
    sid: ID!
    noDataReason: String
  }
`;
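
A minimal client-side usage sketch against the SubmissionDef types above: a query for late submission periods sorted by expected date. The constant name, the StringFilter shape ({ eq: ... }), and the SortOption value (DESC) are illustrative assumptions; only the field and argument names come from the schema snippet itself.

export const lateSubmissionPeriodsQuery = `
  query lateSubmissionPeriods {
    submissionPeriods(
      offset: 0,
      limit: 25,
      filters: { status: { eq: "Late" } },
      sort: { expectedDate: DESC }
    ) {
      sid
      expectedDate
      periodStartDate
      periodEndDate
      status
      numberOfFiles
      submissionSchedule {
        name
        periodRule
      }
    }
  }
`;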
export const statusSql = (alias: string) => {
     return `(select
             CASE WHEN ( ${alias}."ON_TIME_OVERRIDE" = 1) 
                  THEN 'On-time'
                  WHEN ( "spli1"."EARLIEST_FILE_SUBMISSION_DATE" < ${alias}."EXPECTED_DATE" )
                  THEN 'On-time'
                  WHEN ( ( ${alias}."NO_DATA" = 1 ) 
                         AND ( ${alias}."NO_DATA_CREATE_DATE" < ${alias}."EXPECTED_DATE"))
                  THEN 'On-time'
                  WHEN ( ${alias}."EXPECTED_DATE" > SYSTIMESTAMP)
                  THEN 'Pending'
                  ELSE 'Late'
             END
             from SUBMISSION_PERIOD "sp1"
             left join SUBMISSION_PERIOD_LINE_ITEM_V "spli1" on 
                                                     "spli1"."SUBMISSION_PERIOD_SID" = "sp1"."SID"
                                                     AND "spli1"."CUSTOMER_SID" = "sp1"."CUSTOMER_SID"
             where "sp1"."SID" = ${alias}."SID"
             AND "sp1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID")`;
};

export const reportedFlagSql = (alias: string) => {
     return `(select
             CASE WHEN MAX(${alias}."ON_TIME_OVERRIDE") = 1
                  THEN 1
                  WHEN MAX(${alias}."NO_DATA") = 1
                  THEN 1
                  WHEN COUNT("df"."ID") > 0
                  THEN 1
                  ELSE 0
             END
             from SUBMISSION_SCHEDULE "ss1"
             left join DATA_FILE_SUMMARY_INFO "dfsi" on 
                                              "dfsi"."SUBMISSION_PERIOD_SID" = ${alias}."SID"
                                              AND "dfsi"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID"
             left join DATA_TYPE "dt1" on "ss1"."DATA_TYPE_SID" = "dt1"."SID"
             left join DATA_FILE "df" on "dfsi"."CUSTOMER_SID" = "df"."CUSTOMER_SID"
                                 AND "dfsi"."DATA_FILE_SID" = "df"."SID"
                                 AND "df"."DATA_TYPE" = "dt1"."TYPE"  
             where "ss1"."SID" = ${alias}."SUBMISSION_SCHEDULE_SID"
             AND "ss1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID")`
};

export const fileIdsSql = (alias: string) => { 
    return `(select
             listagg("df"."ID",',') within group (ORDER BY "df"."CREATE_DATE")
             from SUBMISSION_SCHEDULE "ss1"
             left join DATA_FILE_SUMMARY_INFO "dfsi" on 
                                              "dfsi"."SUBMISSION_PERIOD_SID" = ${alias}."SID"
                                              AND "dfsi"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID"
             left join DATA_TYPE "dt1" on "ss1"."DATA_TYPE_SID" = "dt1"."SID"
             left join DATA_FILE "df" on "dfsi"."CUSTOMER_SID" = "df"."CUSTOMER_SID"
                                 AND "dfsi"."DATA_FILE_SID" = "df"."SID"
                                 AND "df"."DELETED" = 0
                                 AND "df"."DATA_TYPE" = "dt1"."TYPE"  
             where "ss1"."SID" = ${alias}."SUBMISSION_SCHEDULE_SID"
             AND "ss1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID")`
};

export const filesCountSql = (alias: string) => {
     return `(select
      count(df.id) as number_of_files
      from SUBMISSION_SCHEDULE ss1
      left join DATA_FILE_SUMMARY_INFO dfsi on 
                                       dfsi.SUBMISSION_PERIOD_SID =  ${alias}.SID
                                       AND dfsi.CUSTOMER_SID = ${alias}.CUSTOMER_SID
      left join DATA_TYPE dt1 on ss1.DATA_TYPE_SID = dt1.SID
      left join DATA_FILE df on dfsi.CUSTOMER_SID = df.CUSTOMER_SID
                          AND dfsi.DATA_FILE_SID = df.SID
                          AND df.DELETED = 0
                          AND df.DATA_TYPE = dt1.TYPE
      where ss1.SID = ${alias}.SUBMISSION_SCHEDULE_SID
      AND ss1.CUSTOMER_SID= ${alias}.CUSTOMER_SID)`
}
import { Entity, Column, JoinColumn, OneToOne, VirtualColumn, OneToMany } from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import {
  returnsSubmissionPeriodLineItemView,
  SubmissionPeriodLineItemView
} from './SubmissionPeriodLineItemView';
import {
  reportedFlagSql,
  statusSql,
  fileIdsSql,
  filesCountSql
} from './SubmissionPeriodSql';
import {
  returnsSubmissionSchedule,
  SubmissionSchedule
} from './SubmissionSchedule';
import { ServiceUser, returnsServiceUser } from '../user/ServiceUser';
import { DataFileSummaryInfo } from '../datafile/DataFileSummaryInfo';

export const returnsDataFileSummaryInfo = () => DataFileSummaryInfo;

export const dataFileSummaryInfoInverseSide = (dataFileSummaryInfo) =>
  dataFileSummaryInfo.submissionPeriod;

export const returnsSubmissionPeriod = () => SubmissionPeriod;

@Entity({ name: 'SUBMISSION_PERIOD' })
export class SubmissionPeriod extends CustomerDomainEntity {
  @Column({ name: 'EXPECTED_DATE' })
  expectedDate: Date;

  @Column({ name: 'PERIOD_START_DATE' })
  periodStartDate: Date;

  @Column({ name: 'PERIOD_END_DATE' })
  periodEndDate: Date;

  @Column({ name: 'SUBMISSION_SCHEDULE_SID', select: false })
  submissionScheduleSid: number;

  @Column({ name: 'ON_TIME_OVERRIDE' })
  onTimeOverride: boolean;

  @Column({ name: 'NO_DATA' })
  noData: boolean;

  @Column({ name: 'NO_DATA_REASON' })
  noDataReason: string;

  @Column({ name: 'NO_DATA_CREATE_DATE' })
  noDataCreateDate: Date;

  @Column({ name: 'NO_DATA_SERVICE_USER_SID', select: false })
  noDataServiceUserSid: number;

  @OneToOne(returnsServiceUser)
  @JoinColumn({ name: 'NO_DATA_SERVICE_USER_SID' })
  noDataServiceUser: Promise<ServiceUser>;

  @Column({ name: 'TRACKING_LEVEL' })
  trackingLevel: string;

  @Column({ name: 'EXPECTED_DAY' })
  expectedDay: number;

  @Column({ name: 'WORKING_DAYS' })
  workingDays: string;

  @Column({ name: 'IS_IN_PERIOD_REPORTER' })
  isInPeriodReporter: number;

  @Column({ name: 'DELETED' })
  deleted: boolean;

  @OneToOne(returnsSubmissionSchedule)
  @JoinColumn({ name: 'SUBMISSION_SCHEDULE_SID' })
  submissionSchedule?: Promise<SubmissionSchedule>;

  @VirtualColumn({ query: statusSql })
  status?: string;

  @VirtualColumn({ query: reportedFlagSql })
  reportedFlag: boolean;

  @VirtualColumn( {query : fileIdsSql} )
  fileIds?: string;

  firstFileId?: string;

  firstFileName?: string;

  firstFileCreateDate?: Date;

  @VirtualColumn( {query : filesCountSql} )
  numberOfFiles?: number;

  @OneToMany(returnsDataFileSummaryInfo,dataFileSummaryInfoInverseSide)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  dataFileSummaryInfo?: Promise<DataFileSummaryInfo[]>;

  @OneToOne(returnsSubmissionPeriodLineItemView)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodLineItemView?: Promise<SubmissionPeriodLineItemView>;

}
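
A minimal read sketch for the SubmissionPeriod entity above, assuming an initialized TypeORM DataSource and that CustomerDomainEntity maps a numeric sid column (both are assumptions). Relations are Promise-typed (lazy) and resolve on await; the @VirtualColumn fields (status, reportedFlag, fileIds, numberOfFiles) are filled in from the SQL builders when the row is loaded.

import { DataSource } from 'typeorm';
import { SubmissionPeriod } from './SubmissionPeriod'; // path assumed

export async function loadSubmissionPeriod(dataSource: DataSource, sid: number) {
  const repo = dataSource.getRepository(SubmissionPeriod);

  // findOneBy({ sid }) assumes sid is a column inherited from CustomerDomainEntity
  const period = await repo.findOneBy({ sid });
  if (!period) {
    return undefined;
  }

  // lazy (Promise-typed) relations resolve when awaited
  const schedule = await period.submissionSchedule;
  const summaryInfo = await period.dataFileSummaryInfo;

  return { period, schedule, summaryInfo };
}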
import { Customer } from './customer/Customer';
import { Partner } from './partner/Partner';
import { Product } from './product/Product';
import { DynamicAttrs } from './attribute/DynamicAttrs';
import { DynamicAttrMetadata } from './attribute/DynamicAttrMetadata';
import { Sales } from './sales/Sales';
import { Address } from './address/Address';
import { Currency } from './base/Currency';
import { DataFile } from './datafile/DataFile';
import { Country } from './address/Country';
import { SalesMatchInfo } from './sales/SalesMatchInfo';
import { ProductAggr } from './product/ProductAggr';
import { GsNumber } from './partner/GsNumber';
import { Inventory } from './inventory/Inventory';
import { SubmissionPeriod } from './submission/SubmissionPeriod';
import { InventoryQuantity } from './inventory/InventoryQuantity';
import { QuantityType } from './inventory/QuantityType';
import { InventoryPrice } from './inventory/InventoryPrice';
import { UserEvent } from './event/UserEvent';
import { UserSession } from './event/UserSession';
import { DataFileState } from './datafile/DataFileState';
import { ParserAttempt } from './datafile/ParserAttempt';
import { PartnerOverlayView } from './partner/PartnerOverlayView';
import { DataState } from './base/DataState';
import { SubmissionSchedule } from './submission/SubmissionSchedule';
import { DataType } from './base/DataType';
import { SubmissionPeriodLineItemView } from './submission/SubmissionPeriodLineItemView';
import { SubmissionScheduleNotification } from './submission/SubmissionScheduleNotification';
import { ServiceUser } from './user/ServiceUser';
import { AuditEvent } from './event/AuditEvent';
import { SubmissionScheduleAudit } from './submission/SubmissionScheduleAudit';
import { AuditType } from './event/AuditType';
import { DataFileSummaryInfo } from './datafile/DataFileSummaryInfo';

export const entities = [
  Customer,
  GsNumber,
  Partner,
  Product,
  ProductAggr,
  Sales,
  Inventory,
  SubmissionPeriod,
  SubmissionSchedule,
  SubmissionScheduleNotification,
  DataType,
  SubmissionPeriodLineItemView,
  InventoryQuantity,
  QuantityType,
  InventoryPrice,
  SalesMatchInfo,
  Address,
  Country,
  Currency,
  DataFile,
  DataFileState,
  DataState,
  ParserAttempt,
  PartnerOverlayView,
  DynamicAttrs,
  DynamicAttrMetadata,
  UserEvent,
  UserSession,
  ServiceUser,
  AuditEvent,
  SubmissionScheduleAudit,
  AuditType,
  DataFileSummaryInfo
];
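
A minimal wiring sketch for the entities array above. The Oracle driver is an assumption inferred from the Oracle-specific SQL (SYSTIMESTAMP, LISTAGG) in the virtual-column builders; all connection values are placeholders.

import { DataSource } from 'typeorm';
import { entities } from './entities'; // path assumed

export const appDataSource = new DataSource({
  type: 'oracle',                        // assumption based on the Oracle SQL used above
  username: 'app_user',                  // placeholder
  password: 'app_password',              // placeholder
  connectString: 'db-host:1521/SERVICE', // placeholder connect string
  entities,
  synchronize: false,                    // tables already exist; entities only map onto them
});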
export const getDataFileIdSql = (alias: string) => {
    return `SELECT df.id FROM DATA_FILE df WHERE df.SID = ${alias}.DATA_FILE_SID`;
};

export const getDataFileNameSql = (alias: string) => {
    return `SELECT df.file_name FROM DATA_FILE df WHERE df.SID = ${alias}.DATA_FILE_SID`;
};
import { Entity, Column, ManyToOne, JoinColumn, VirtualColumn} from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import { SubmissionPeriod } from '../submission/SubmissionPeriod';
import { getDataFileIdSql, getDataFileNameSql } from './DataFileSummaryInfoSql';

export const returnsSubmissionPeriods = () => SubmissionPeriod;
export const returnsSummaryInfo = (submissionPeriod) => submissionPeriod.dataFileSummaryInfo;

@Entity({ name: 'DATA_FILE_SUMMARY_INFO' })
export class DataFileSummaryInfo extends CustomerDomainEntity {

    @ManyToOne(returnsSubmissionPeriods,returnsSummaryInfo)
    @JoinColumn({ name: 'SUBMISSION_PERIOD_SID', referencedColumnName:'sid'})
    submissionPeriod: SubmissionPeriod;

    @Column({name:'DATA_FILE_SID'})
    dataFileSid: number;

    @Column({name: 'CUSTOMER_SID'})
    customerSid: number;

    @Column({name:'NUM_SLIS'})
    numberOfPOSLines: number;

    @Column({name:'NUM_ILIS'})
    numberOfInventoryLines: number;

    @Column({name: 'CREATE_DATE'})
    receivedDate: Date;
    
    @Column({ name: 'SUBMISSION_PERIOD_SID', nullable: true })
    submissionPeriodSid: number;

    @VirtualColumn({ query: getDataFileIdSql})
    dataFileId?: string;
    
    @VirtualColumn({ query: getDataFileNameSql})
    dataFileName?: string;
}

import {
  fileIdsSql,
  reportedFlagSql,
  statusSql,
  filesCountSql
} from '../../../../src/domain/submission/SubmissionPeriodSql';

describe('SubmissionPeriodSql Tests', () => {
  test('statusSql', () => {
    const actual = statusSql(`"sp"`);

    expect(actual).toEqual(
      `(select
             CASE WHEN ( "sp"."ON_TIME_OVERRIDE" = 1) 
                  THEN 'On-time'
                  WHEN ( "spli1"."EARLIEST_FILE_SUBMISSION_DATE" < "sp"."EXPECTED_DATE" )
                  THEN 'On-time'
                  WHEN ( ( "sp"."NO_DATA" = 1 ) 
                         AND ( "sp"."NO_DATA_CREATE_DATE" < "sp"."EXPECTED_DATE"))
                  THEN 'On-time'
                  WHEN ( "sp"."EXPECTED_DATE" > SYSTIMESTAMP)
                  THEN 'Pending'
                  ELSE 'Late'
             END
             from SUBMISSION_PERIOD "sp1"
             left join SUBMISSION_PERIOD_LINE_ITEM_V "spli1" on 
                                                     "spli1"."SUBMISSION_PERIOD_SID" = "sp1"."SID"
                                                     AND "spli1"."CUSTOMER_SID" = "sp1"."CUSTOMER_SID"
             where "sp1"."SID" = "sp"."SID"
             AND "sp1"."CUSTOMER_SID" = "sp"."CUSTOMER_SID")`
    );
  });

  
  test('reportedFlagSql', () => {
    const actual = reportedFlagSql(`"sp"`);

    expect(actual).toEqual(
      `(select
             CASE WHEN MAX("sp"."ON_TIME_OVERRIDE") = 1
                  THEN 1
                  WHEN MAX("sp"."NO_DATA") = 1
                  THEN 1
                  WHEN COUNT("df"."ID") > 0
                  THEN 1
                  ELSE 0
             END
             from SUBMISSION_SCHEDULE "ss1"
             left join DATA_FILE_SUMMARY_INFO "dfsi" on 
                                              "dfsi"."SUBMISSION_PERIOD_SID" = "sp"."SID"
                                              AND "dfsi"."CUSTOMER_SID" = "sp"."CUSTOMER_SID"
             left join DATA_TYPE "dt1" on "ss1"."DATA_TYPE_SID" = "dt1"."SID"
             left join DATA_FILE "df" on "dfsi"."CUSTOMER_SID" = "df"."CUSTOMER_SID"
                                 AND "dfsi"."DATA_FILE_SID" = "df"."SID"
                                 AND "df"."DATA_TYPE" = "dt1"."TYPE"  
             where "ss1"."SID" = "sp"."SUBMISSION_SCHEDULE_SID"
             AND "ss1"."CUSTOMER_SID" = "sp"."CUSTOMER_SID")`
    );
  });

  
  test('fileIdsSql', () => {
    const actual = fileIdsSql(`"sp"`);

    expect(actual).toEqual(
      `(select
             listagg("df"."ID",',') within group (ORDER BY "df"."CREATE_DATE")
             from SUBMISSION_SCHEDULE "ss1"
             left join DATA_FILE_SUMMARY_INFO "dfsi" on 
                                              "dfsi"."SUBMISSION_PERIOD_SID" = "sp"."SID"
                                              AND "dfsi"."CUSTOMER_SID" = "sp"."CUSTOMER_SID"
             left join DATA_TYPE "dt1" on "ss1"."DATA_TYPE_SID" = "dt1"."SID"
             left join DATA_FILE "df" on "dfsi"."CUSTOMER_SID" = "df"."CUSTOMER_SID"
                                 AND "dfsi"."DATA_FILE_SID" = "df"."SID"
                                 AND "df"."DELETED" = 0
                                 AND "df"."DATA_TYPE" = "dt1"."TYPE"  
             where "ss1"."SID" = "sp"."SUBMISSION_SCHEDULE_SID"
             AND "ss1"."CUSTOMER_SID" = "sp"."CUSTOMER_SID")`
    );
  });

  test('filesCountSql', () => {
    const actual = filesCountSql(`"sp"`);

    expect(actual).toEqual(
      `(select
      count(df.id) as number_of_files
      from SUBMISSION_SCHEDULE ss1
      left join DATA_FILE_SUMMARY_INFO dfsi on 
                                       dfsi.SUBMISSION_PERIOD_SID =  "sp".SID
                                       AND dfsi.CUSTOMER_SID = "sp".CUSTOMER_SID
      left join DATA_TYPE dt1 on ss1.DATA_TYPE_SID = dt1.SID
      left join DATA_FILE df on dfsi.CUSTOMER_SID = df.CUSTOMER_SID
                          AND dfsi.DATA_FILE_SID = df.SID
                          AND df.DELETED = 0
                          AND df.DATA_TYPE = dt1.TYPE
      where ss1.SID = "sp".SUBMISSION_SCHEDULE_SID
      AND ss1.CUSTOMER_SID= "sp".CUSTOMER_SID)`
    );
  });

});
import 'reflect-metadata';
import {
  dataFileSummaryInfoInverseSide,
  returnsDataFileSummaryInfo,
  returnsSubmissionPeriod,
  SubmissionPeriod
} from '../../../../src/domain/submission/SubmissionPeriod';
import { DataFileSummaryInfo } from '../../../../src/domain/datafile/DataFileSummaryInfo';

describe('Submission Period Unit Tests', () => {

  test('returnsSubmissionPeriod', () => {
    expect(returnsSubmissionPeriod()).toEqual(SubmissionPeriod);
  });

  test('constructor', () => {
    expect(new SubmissionPeriod()).toBeInstanceOf(SubmissionPeriod);
  });

  test('dataFileSummaryInfoInverseSide', () => {
    const dataFileSummaryInfo: DataFileSummaryInfo = new DataFileSummaryInfo();
    expect(dataFileSummaryInfoInverseSide(dataFileSummaryInfo)).toEqual(
      dataFileSummaryInfo.submissionPeriod
    );
  });

  test('returnsDataFileSummaryInfo', () => {
    expect(returnsDataFileSummaryInfo()).toEqual(DataFileSummaryInfo);
  });

});

import { getDataFileIdSql, getDataFileNameSql } from "../../../../src/domain/datafile/DataFileSummaryInfoSql";

describe('DataFileSummaryInfoSql Tests', () => {
    test('getDataFileIdSql', () => {
      const actual = getDataFileIdSql(`"dfsi"`);
  
      expect(actual).toEqual(
        `SELECT df.id FROM DATA_FILE df WHERE df.SID = "dfsi".DATA_FILE_SID`
      );
    });

    test('getDataFileNameSql', () => {
        const actual = getDataFileNameSql(`"dfsi"`);
    
        expect(actual).toEqual(
          `SELECT df.file_name FROM DATA_FILE df WHERE df.SID = "dfsi".DATA_FILE_SID`
        );
      });
});
import { returnsSubmissionPeriods, returnsSummaryInfo } from "../../../../src/domain/datafile/DataFileSummaryInfo";
import { SubmissionPeriod } from "../../../../src/domain/submission/SubmissionPeriod";

describe('Data File Summary Info Tests', () => {
    test('returnsSubmissionPeriods', () => {
      expect(returnsSubmissionPeriods()).toEqual(SubmissionPeriod);
    });

    test('returnsSummaryInfo', () => {
        const submissionPeriod = new SubmissionPeriod();
        expect(returnsSummaryInfo(submissionPeriod)).toEqual(
          submissionPeriod.dataFileSummaryInfo
        );
      });
});
export const statusSql = (alias: string) => {
     return `(select
             CASE WHEN ( ${alias}."ON_TIME_OVERRIDE" = 1) 
                  THEN 'On-time'
                  WHEN ( "spli1"."EARLIEST_FILE_SUBMISSION_DATE" < ${alias}."EXPECTED_DATE" )
                  THEN 'On-time'
                  WHEN ( ( ${alias}."NO_DATA" = 1 ) 
                         AND ( ${alias}."NO_DATA_CREATE_DATE" < ${alias}."EXPECTED_DATE"))
                  THEN 'On-time'
                  WHEN ( ${alias}."EXPECTED_DATE" > SYSTIMESTAMP)
                  THEN 'Pending'
                  ELSE 'Late'
             END
             from SUBMISSION_PERIOD "sp1"
             left join SUBMISSION_PERIOD_LINE_ITEM_V "spli1" on 
                                                     "spli1"."SUBMISSION_PERIOD_SID" = "sp1"."SID"
                                                     AND "spli1"."CUSTOMER_SID" = "sp1"."CUSTOMER_SID"
             where "sp1"."SID" = ${alias}."SID"
             AND "sp1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID")`;
};

export const reportedFlagSql = (alias: string) => {
     return `(select
             CASE WHEN MAX(${alias}."ON_TIME_OVERRIDE") = 1
                  THEN 1
                  WHEN MAX(${alias}."NO_DATA") = 1
                  THEN 1
                  WHEN COUNT("df"."ID") > 0
                  THEN 1
                  ELSE 0
             END
             from SUBMISSION_SCHEDULE "ss1"
             left join DATA_FILE_SUMMARY_INFO "dfsi" on 
                                              "dfsi"."SUBMISSION_PERIOD_SID" = ${alias}."SID"
                                              AND "dfsi"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID"
             left join DATA_TYPE "dt1" on "ss1"."DATA_TYPE_SID" = "dt1"."SID"
             left join DATA_FILE "df" on "dfsi"."CUSTOMER_SID" = "df"."CUSTOMER_SID"
                                 AND "dfsi"."DATA_FILE_SID" = "df"."SID"
                                 AND "df"."DATA_TYPE" = "dt1"."TYPE"  
             where "ss1"."SID" = ${alias}."SUBMISSION_SCHEDULE_SID"
             AND "ss1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID")`
};

export const fileIdsSql = (alias: string) => { 
    return `(select
             listagg("df"."ID",',') within group (ORDER BY "df"."CREATE_DATE")
             from SUBMISSION_SCHEDULE "ss1"
             left join DATA_FILE_SUMMARY_INFO "dfsi" on 
                                              "dfsi"."SUBMISSION_PERIOD_SID" = ${alias}."SID"
                                              AND "dfsi"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID"
             left join DATA_TYPE "dt1" on "ss1"."DATA_TYPE_SID" = "dt1"."SID"
             left join DATA_FILE "df" on "dfsi"."CUSTOMER_SID" = "df"."CUSTOMER_SID"
                                 AND "dfsi"."DATA_FILE_SID" = "df"."SID"
                                 AND "df"."DELETED" = 0
                                 AND "df"."DATA_TYPE" = "dt1"."TYPE"  
             where "ss1"."SID" = ${alias}."SUBMISSION_SCHEDULE_SID"
             AND "ss1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID")`
};

export const filesCountSql = (alias: string) => {
     return `(select
          count(df.id) as number_of_files
          from SUBMISSION_SCHEDULE ss1
          left join DATA_FILE_SUMMARY_INFO dfsi on 
                                           dfsi.SUBMISSION_PERIOD_SID =  ${alias}.SID
                                           AND dfsi.CUSTOMER_SID = ${alias}.CUSTOMER_SID
          left join DATA_TYPE dt1 on ss1.DATA_TYPE_SID = dt1.SID
          left join DATA_FILE df on dfsi.CUSTOMER_SID = df.CUSTOMER_SID
                              AND dfsi.DATA_FILE_SID = df.SID
                              AND df.DELETED = 0
                              AND df.DATA_TYPE = dt1.TYPE
          where ss1.SID = ${alias}.SUBMISSION_SCHEDULE_SID
          AND ss1.CUSTOMER_SID= ${alias}.CUSTOMER_SID)`
}

export const filesDataSql = (alias:string) => {
     return `(select
          df.id
          from SUBMISSION_SCHEDULE ss1
          left join DATA_FILE_SUMMARY_INFO dfsi on 
                                           dfsi.SUBMISSION_PERIOD_SID = ${alias}.SID
                                           AND dfsi.CUSTOMER_SID = ${alias}.CUSTOMER_SID
          left join DATA_TYPE dt1 on ss1.DATA_TYPE_SID = dt1.SID
          left join DATA_FILE df on dfsi.CUSTOMER_SID = df.CUSTOMER_SID
                              AND dfsi.DATA_FILE_SID = df.SID
                              AND df.DELETED = 0
                              AND df.DATA_TYPE = dt1.TYPE
          where ss1.SID = ${alias}.SUBMISSION_SCHEDULE_SID
          AND ss1.CUSTOMER_SID= ${alias}.CUSTOMER_SID
          AND df.id IS NOT NULL)`
}
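
The filesDataSql helper added in this version of SubmissionPeriodSql has no companion spec in the suites above. A minimal test sketch follows; it uses substring assertions instead of the exact-string style of the other specs so it stays independent of indentation, and the import path is assumed to point at the version of the module that exports filesDataSql.

import { filesDataSql } from '../../../../src/domain/submission/SubmissionPeriodSql'; // path assumed

describe('filesDataSql Tests', () => {
  test('filesDataSql interpolates the alias and excludes deleted files', () => {
    const actual = filesDataSql(`"sp"`);

    expect(actual).toContain(`dfsi.SUBMISSION_PERIOD_SID = "sp".SID`);
    expect(actual).toContain(`where ss1.SID = "sp".SUBMISSION_SCHEDULE_SID`);
    expect(actual).toContain(`AND df.DELETED = 0`);
    expect(actual).toContain(`AND df.id IS NOT NULL`);
  });
});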
version: "1.0"

name: ChannelNetwork

# All actions

actions:
  - name: VIEW
  - name: UPDATE

# All resources

resources:
  # UI Resource for access to the Product KPI tab
  - name: ProductKpiTab
    actions: [VIEW]

    ### Begin productKpi fields

  - name: ProductKpi
    group: productKpi
    actions: [VIEW]

  - name: ProductKpi/*
    group: productKpi
    actions: [VIEW]

    ### End productKpi fields

    ### Begin salesKpi fields

  - name: SalesKpi
    group: salesKpi
    actions: [VIEW]

  - name: SalesKpi/*
    group: salesKpi
    actions: [VIEW]

    ### End salesKpi fields

    ### Begin inventoryKpi fields

  - name: InventoryKpi
    group: inventoryKpi
    actions: [VIEW]

  - name: InventoryKpi/*
    group: inventoryKpi
    actions: [VIEW]

    ### End inventoryKpi fields

    ### Begin filesKpi fields

  - name: FilesKpi
    group: filesKpi
    actions: [VIEW]

  - name: FilesKpi/*
    group: filesKpi
    actions: [VIEW]

    ### End filesKpi fields

    ### Begin ssKpi fields

  - name: SSKpi
    group: ssKpi
    actions: [VIEW]

  - name: SSKpi/*
    group: ssKpi
    actions: [VIEW]

    ### End ssKpi fields


    # UI Resource for access to Products tab
  - name: ProductTab
    actions: [VIEW, UPDATE]

    ### Begin Product Fields

  - name: Product
    group: product
    actions: [VIEW, UPDATE]

  - name: Product/sid
    group: product
    actions: [VIEW]

  - name: Product/createDate
    group: product
    actions: [VIEW]

  - name: Product/updateDate
    group: product
    actions: [VIEW]

  - name: Product/customerSid
    group: product-internal
    actions: [VIEW]

  - name: Product/sku
    group: product
    actions: [VIEW]

  - name: Product/name
    group: product
    actions: [VIEW]

  - name: Product/description
    group: product
    actions: [VIEW]

  - name: Product/productFamily
    group: product
    actions: [VIEW]

  - name: Product/productLine
    group: product
    actions: [VIEW]

  - name: Product/startDate
    group: product
    actions: [VIEW]

  - name: Product/endDate
    group: product
    actions: [VIEW]

  - name: Product/serialized
    group: product-internal
    actions: [VIEW]

  - name: Product/aggregation
    group: product-pos-aggr
    actions: [VIEW]

  - name: Product/aggregation/totalSalesLineCount
    group: product-pos-aggr
    actions: [VIEW]

  - name: Product/aggregation/totalSalesQuantity
    group: product-pos-aggr
    actions: [VIEW]

  - name: Product/aggregation/oldestInvoiceDate
    group: product-pos-aggr
    actions: [VIEW]

    ## Begin Product Dynamic Attrs


  - name: Product/dynamicAttrs
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/sid
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/updateDate
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/createDate
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/attributeType
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/STRING_COL_1
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_2
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_3
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_4
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_5
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_6
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_7
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_8
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_9
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_10
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_11
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_12
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_13
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_14
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_15
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_16
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_17
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_18
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_19
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_20
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_21
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_22
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_23
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_24
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_25
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_26
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_27
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_28
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_29
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_30
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]

  - name: Product/dynamicAttrs/NUM_COL_1
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_2
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_3
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_4
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_5
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_6
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_7
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_8
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_9
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_10
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]

  - name: Product/dynamicAttrs/DATE_COL_1
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
    ## End Product Dynamic Attrs

    ### End Product Fields

    # UI Resource for access to POS tab
  - name: SalesTab
    actions: [VIEW, UPDATE]

    ### Begin POS Fields

  - name: Sales
    group: sales
    actions: [VIEW, UPDATE]

  - name: Sales/sid
    group: sales
    actions: [VIEW]

  - name: Sales/createDate
    group: sales
    actions: [VIEW]

  - name: Sales/updateDate
    group: sales
    actions: [VIEW]

  - name: Sales/customerSid
    group: sales-internal
    actions: [VIEW]

  - name: Sales/deleted
    group: sales-internal
    actions: [VIEW]

  - name: Sales/branchId
    group: sales
    actions: [VIEW]

  - name: Sales/invoiceNumber
    group: sales
    actions: [VIEW]

  - name: Sales/invoiceDate
    group: sales
    actions: [VIEW]

  - name: Sales/quantity
    group: sales
    actions: [VIEW]

  - name: Sales/reportedSku
    group: sales
    actions: [VIEW, UPDATE]

  - name: Sales/productDescription
    group: sales
    actions: [VIEW]

  - name: Sales/transactionId
    group: sales-internal
    actions: [VIEW]

  - name: Sales/vendorPartNumber
    group: sales
    actions: [VIEW]

  - name: Sales/accountRepresentative
    group: sales
    actions: [VIEW]

  - name: Sales/acquisitionExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/acquisitionUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/boolExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/bookUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/customerOrderNumber
    group: sales
    actions: [VIEW]

  - name: Sales/debitExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/debitUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/designRegistrationNumber
    group: sales
    actions: [VIEW]

  - name: Sales/distributorId
    group: sales
    actions: [VIEW]

  - name: Sales/distributorName
    group: sales
    actions: [VIEW]

  - name: Sales/distributorShipmentNumber
    group: sales
    actions: [VIEW]

  - name: Sales/distributorWarehouseId
    group: sales
    actions: [VIEW]

  - name: Sales/exchangeDate
    group: sales
    actions: [VIEW]

  - name: Sales/exchangeRate
    group: sales
    actions: [VIEW]

  - name: Sales/globalProductClassCode
    group: sales
    actions: [VIEW]

  - name: Sales/legacySalesRecordId
    group: sales
    actions: [VIEW]

  - name: Sales/lengthOfProduction
    group: sales
    actions: [VIEW]

  - name: Sales/manufactureId
    group: sales
    actions: [VIEW]

  - name: Sales/manufactureName
    group: sales
    actions: [VIEW]

  - name: Sales/manufacturerShipmentNumber
    group: sales
    actions: [VIEW]

  - name: Sales/orderNumber
    group: sales
    actions: [VIEW]

  - name: Sales/originalId
    group: sales
    actions: [VIEW]

  - name: Sales/price
    group: sales
    actions: [VIEW]

  - name: Sales/purchaseOrderNumber
    group: sales
    actions: [VIEW]

  - name: Sales/r2rDuplicateType
    group: sales
    actions: [VIEW]

  - name: Sales/regionTerritory
    group: sales
    actions: [VIEW]

  - name: Sales/reportEndingDate
    group: sales
    actions: [VIEW]

  - name: Sales/reportType
    group: sales
    actions: [VIEW]

  - name: Sales/resaleExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/resaeExtension
    group: sales
    actions: [VIEW]

  - name: Sales/resaleUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/resubmitted
    group: sales
    actions: [VIEW]

  - name: Sales/reportedProductFamily
    group: sales
    actions: [VIEW]

  - name: Sales/reportedProductLine
    group: sales
    actions: [VIEW]

  - name: Sales/reportedProductName
    group: sales
    actions: [VIEW]

  - name: Sales/shipDate
    group: sales
    actions: [VIEW]

  - name: Sales/shipDebitNumber
    group: sales
    actions: [VIEW]

  - name: Sales/shippingMethod
    group: sales
    actions: [VIEW]

  - name: Sales/spaNumber
    group: sales
    actions: [VIEW]

  - name: Sales/tier
    group: sales
    actions: [VIEW]

  - name: Sales/transactionType
    group: sales
    actions: [VIEW]

  - name: Sales/unitOfMeasure
    group: sales
    actions: [VIEW]

  - name: Sales/vendorPartDescription
    group: sales
    actions: [VIEW]

  - name: Sales/validationCodes
    group: sales
    actions: [VIEW]

  - name: Sales/serialNumbers
    group: sales
    actions: [VIEW]

    # Bill to address


  - name: Sales/billToAddress
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/billToAddress/entityName
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/street1
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/street2
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/city
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/stateProvince
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/postalCode
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/reportedCountry
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country/name
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country/twoCharCode
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country/threeCharCode
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddressExternalId
    group: sales-bill-to
    actions: [VIEW]

    # sold to address

  - name: Sales/soldToAddress
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/soldToAddress/entityName
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/street1
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/street2
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/city
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/stateProvince
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/postalCode
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/reportedCountry
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country/name
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country/twoCharCode
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country/threeCharCode
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddressExternalId
    group: sales-sold-to
    actions: [VIEW]

    # ship to address

  - name: Sales/shipToAddress
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/shipToAddress/entityName
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/street1
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/street2
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/city
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/stateProvince
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/postalCode
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/reportedCountry
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country/name
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country/twoCharCode
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country/threeCharCode
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddressExternalId
    group: sales-ship-to
    actions: [VIEW]

    # sell from address

  - name: Sales/sellFromAddress
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/sellFromAddress/entityName
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/street1
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/street2
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/city
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/stateProvince
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/postalCode
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/reportedCountry
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country/name
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country/twoCharCode
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country/threeCharCode
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddressExternalId
    group: sales-sell-from
    actions: [VIEW]

    # ship from address

  - name: Sales/shipFromAddress
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/shipFromAddress/entityName
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/street1
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/street2
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/city
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/stateProvince
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/postalCode
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/reportedCountry
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country/name
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country/twoCharCode
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country/threeCharCode
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddressExternalId
    group: sales-ship-from
    actions: [VIEW]

    # sales in address

  - name: Sales/salesInAddress
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/salesInAddress/entityName
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/street1
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/street2
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/city
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/stateProvince
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/postalCode
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/reportedCountry
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country/name
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country/twoCharCode
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country/threeCharCode
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddressExternalId
    group: sales-sales-in
    actions: [VIEW]

    # purchasing customer address

  - name: Sales/purchasingCustomerAddress
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/entityName
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/street1
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/street2
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/city
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/stateProvince
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/postalCode
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/reportedCountry
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country/name
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country/twoCharCode
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country/threeCharCode
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerExternalId
    group: sales-purchasing-customer
    actions: [VIEW]

    # derived end customer address

  - name: Sales/derivedEndCustomerAddress
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/entityName
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/street1
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/street2
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/city
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/stateProvince
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/postalCode
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/reportedCountry
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country/name
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country/twoCharCode
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country/threeCharCode
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddressExternalId
    group: sales-derived-end-customer
    actions: [VIEW]

    # data file

  - name: Sales/dataFile
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/dataFile/loadDate
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/reportDate
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/id
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/fileName
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/recordCount
    group: sales-data-file
    actions: [VIEW]

    # match info

  - name: Sales/productMatchInfo
    group: sales-product-match-info
    actions: [VIEW]

  - name: Sales/productMatchInfo/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/productMatchInfo/matchedProduct
    group: sales-product-match-info
    actions: [VIEW]

  - name: Sales/productMatchInfo/matchedProduct/sku
    group: sales-product-match-info
    actions: [VIEW]

    # reporting partner

  - name: Sales/reportingPartner
    group: sales-reporting-partner
    actions: [VIEW]

  - name: Sales/reportingPartner/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/reportingPartner/gsNumbers
    group: sales-reporting-partner
    actions: [VIEW]

  - name: Sales/reportingPartner/gsNumbers/value
    group: sales-reporting-partner
    actions: [VIEW]

    # currency

  - name: Sales/currency
    group: sales-currency
    actions: [VIEW]

  - name: Sales/currency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/currency/name
    group: sales-currency
    actions: [VIEW]

    # resale currency

  - name: Sales/resaleCurrency
    group: sales-resale-currency
    actions: [VIEW]

  - name: Sales/resaleCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/resaleCurrency/name
    group: sales-resale-currency
    actions: [VIEW]

    # debit currency

  - name: Sales/debtCurrency
    group: sales-debit-currency
    actions: [VIEW]

  - name: Sales/debtCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/debtCurrency/name
    group: sales-debit-currency
    actions: [VIEW]

    # book currency

  - name: Sales/bookCurrency
    group: sales-book-currency
    actions: [VIEW]

  - name: Sales/bookCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/bookCurrency/name
    group: sales-book-currency
    actions: [VIEW]

    # acquisition currency

  - name: Sales/acquisitionCurrency
    group: sales-acquisition-currency
    actions: [VIEW]

  - name: Sales/acquisitionCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/acquisitionCurrency/name
    group: sales-acquisition-currency
    actions: [VIEW]

    ## Begin POS Dynamic Attrs

  - name: Sales/dynamicAttrs
    group: sales-dynamic-attrs
    actions: [VIEW, UPDATE]

  - name: Sales/dynamicAttrs/*
    group: sales-dynamic-attrs
    actions: [VIEW, UPDATE]

  - name: Sales/dynamicAttrs/STRING_COL_1
    group: sales-dynamic-attrs
    actions: [VIEW, UPDATE]

    ## End POS Dynamic Attrs

    ### End POS Fields

    # UI Resource for access to INV tab
  - name: InventoryTab
    actions: [VIEW, UPDATE]

    ### Begin INV Fields

  - name: Inventory
    group: inventory
    actions: [VIEW, UPDATE]

  - name: Inventory/sid
    group: inventory
    actions: [VIEW]

  - name: Inventory/createDate
    group: inventory
    actions: [VIEW]

  - name: Inventory/updateDate
    group: inventory
    actions: [VIEW]

  - name: Inventory/customerSid
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/deleted
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/productName
    group: inventory
    actions: [VIEW]

  - name: Inventory/clientDescription
    group: inventory
    actions: [VIEW]

  - name: Inventory/clientSku
    group: inventory
    actions: [VIEW, UPDATE]

  - name: Inventory/reportedSku
    group: inventory
    actions: [VIEW]

  - name: Inventory/inventoryDate
    group: inventory
    actions: [VIEW]

  - name: Inventory/unitOfMeasure
    group: inventory
    actions: [VIEW]

  - name: Inventory/id
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/originalId
    group: inventory
    actions: [VIEW]

  - name: Inventory/lineNumber
    group: inventory-internal
    actions: [VIEW]

    # data file

  - name: Inventory/dataFile
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/dataFile/loadDate
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/reportDate
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/id
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/fileName
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/recordCount
    group: inventory-data-file
    actions: [VIEW]

    # reporting partner

  - name: Inventory/reportingPartner
    group: inventory-reporting-partner
    actions: [VIEW]

  - name: Inventory/reportingPartner/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/reportingPartner/gsNumbers
    group: inventory-reporting-partner
    actions: [VIEW]

  - name: Inventory/reportingPartner/gsNumbers/value
    group: inventory-reporting-partner
    actions: [VIEW]

    # submission period

  - name: Inventory/submissionPeriod
    group: inventory-submission-period
    actions: [VIEW]

  - name: Inventory/submissionPeriod/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/submissionPeriod/expectedDate
    group: inventory-submission-period
    actions: [VIEW]

  - name: Inventory/submissionPeriod/periodStartDate
    group: inventory-submission-period
    actions: [VIEW]

  - name: Inventory/submissionPeriod/periodEndDate
    group: inventory-submission-period
    actions: [VIEW]

    # quantities

  - name: Inventory/inventoryQuantities
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/inventoryQuantities/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/onHandQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/onHandQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/onHandQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/onOrderQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/onOrderQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/onOrderQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/committedQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/committedQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/committedQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/floatQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/floatQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/floatQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/backorderedQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/backorderedQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/backorderedQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/returnedQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/returnedQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/returnedQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/inTransitQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/inTransitQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/inTransitQuantity/value
    group: inventory-quantity
    actions: [VIEW]

    # prices

  - name: Inventory/inventoryPrices
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/inventoryPrices/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/unitPrice
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/unitPrice/price
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/fromCurrency
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/fromCurrency/name
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/toCurrency
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/toCurrency/name
    group: inventory-price
    actions: [VIEW]

    ## Begin INV Dynamic Attrs

  - name: Inventory/dynamicAttrs
    group: inventory-dynamic-attrs
    actions: [VIEW]

  - name: Inventory/dynamicAttrs/*
    group: inventory-dynamic-attrs
    actions: [VIEW]

    ## End INV Dynamic Attrs

    ### End INV Fields

  # UI Resource for access to File Tab
  - name: FilesTab
    actions: [VIEW, UPDATE]

  ## Begin Partner

  - name: Partner
    group: reporting-partner
    actions: [VIEW]

  - name: Partner/*
    group: reporting-partner
    actions: [VIEW]

  ## End Partner

  # Begin File fields
  - name: DataFile
    group: file-management
    actions: [VIEW, UPDATE]

  - name: DataFile/sid
    group: file-management
    actions: [VIEW]

  - name: DataFile/loadDate
    group: file-management
    actions: [VIEW]

  - name: DataFile/reportDate
    group: file-management
    actions: [VIEW]

  - name: DataFile/fileName
    group: file-management
    actions: [VIEW]

  - name: DataFile/fileType
    group: file-management
    actions: [VIEW]

  - name: DataFile/dataType
    group: file-management
    actions: [VIEW]

  - name: DataFile/id
    group: file-management
    actions: [VIEW]

  - name: DataFile/fileSize
    group: file-management
    actions: [VIEW]

  - name: DataFile/source
    group: file-management
    actions: [VIEW]

  - name: DataFile/recordCount
    group: file-management
    actions: [VIEW]

  - name: DataFile/deletedLines
    group: file-management
    actions: [VIEW]

  - name: DataFile/download
    group: file-download
    actions: [VIEW]

  - name: DataFile/validationDownload
    group: file-validation-download
    actions: [VIEW]

  - name: DataFile/upload
    group: file-upload
    actions: [VIEW]

  - name: DataFile/uploadDataTypes
    group: file-upload
    actions: [VIEW]

  - name: DataFile/uploadFileTypes
    group: file-upload
    actions: [VIEW]

  - name: DataFile/reportingPartner
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/reportingPartner/*
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/reportingPartner/partnerOverlayView
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/reportingPartner/partnerOverlayView/*
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/dataFileState
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/*
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/sid
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/createDate
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/updateDate
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/parserAttempt
    group: file-parser-attempt
    actions: [VIEW]

  - name: DataFile/parserAttempt/*
    group: file-parser-attempt
    actions: [VIEW]

  # End of File fields
  
  # UI Resources for access to the Submission Tracking and Results tabs
  - name: SubmissionTrackingTab
    actions: [VIEW, UPDATE]

  - name: SubmissionResultsTab
    actions: [VIEW, UPDATE]

  # Begin Submission Schedule 
  - name: SubmissionSchedule
    group: submission-schedule
    actions: [VIEW, UPDATE]

  - name: SubmissionSchedule/sid
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/createDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/updateDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/name
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner/*
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner/partnerOverlayView
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner/partnerOverlayView/*
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/dataType
    group: ss-data-type
    actions: [VIEW]

  - name: SubmissionSchedule/dataType/*
    group: ss-data-type
    actions: [VIEW]

  - name: SubmissionSchedule/periodRule
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/expectedDay
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/startDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/endDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/isInPeriodReporter
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/weekOfMonth
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/monthOfQuarter
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/workingDays
    group: submission-schedule
    actions: [VIEW]

  # End Submission Schedule 

  # Begin Submission Schedule Notification
  - name: SubmissionScheduleNotification
    group: submission-schedule-notification
    actions: [VIEW]
  
  - name: SubmissionScheduleNotification/*
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/notificationType
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/notificationType/*
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/serviceUser
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/serviceUser/*
    group: submission-schedule-notification
    actions: [VIEW]

  # END Submission Schedule Notification

  # Begin Submission Period
  - name: SubmissionPeriod
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/sid
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/expectedDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/periodStartDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/periodEndDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/createDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/updateDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/noData
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/noDataReason
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/noDataCreateDate
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/onTimeOverride
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/expectedDay
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/workingDays
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/isInPeriodReporter
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/trackingLevel
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/status
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/reportedFlag
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/fileIds
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/firstFileName
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/firstFileCreateDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/firstFileId
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/deleted
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/noDataServiceUser/sid
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser/firstName
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser/lastName
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser/email
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/submissionPeriodLineItemView
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/submissionPeriodLineItemView/*
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/dataFileSummaryInfo
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/dataFileSummaryInfo/*
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule/sid
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule/name
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule/periodRule
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner/*
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner/partnerOverlayView
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner/partnerOverlayView/*
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/dataType
    group: sp-data-type
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/dataType/*
    group: sp-data-type
    actions: [VIEW]

  # End Submission Period

  # Begin Export Request

  - name: ExportRequest
    group: export
    actions: [VIEW, UPDATE]

  - name: ExportRequest/*
    group: export
    actions: [VIEW, UPDATE]

  # End Export Request

  ### Begin Base resources

  - name: About
    group: about
    actions: [VIEW]

  - name: About/*
    group: about
    actions: [VIEW]

  - name: UserEvent
    group: user-event
    actions: [VIEW, UPDATE]

  - name: UserEvent/*
    group: user-event
    actions: [VIEW, UPDATE]

  - name: ObjectLock
    group: object-lock
    actions: [VIEW, UPDATE]

  - name: ObjectLock/*
    group: object-lock
    actions: [VIEW, UPDATE]

  - name: ObjectLockResponse
    group: object-lock-response
    actions: [VIEW]

  - name: ObjectLockResponse/*
    group: object-lock-response
    actions: [VIEW]

  - name: MutationResponse
    group: mutation-response
    actions: [VIEW]

  - name: MutationResponse/*
    group: mutation-response
    actions: [VIEW]

  - name: DynamicAttrMetadata
    group: attr-metadata
    actions: [VIEW]

  - name: DynamicAttrMetadata/*
    group: attr-metadata
    actions: [VIEW]
    

  ### End Base resources

  ### Begin mutation resources

  - name: Product/mutation/*
    group: product-update
    actions: [UPDATE]

  - name: Sales/mutation/*
    group: sales-update
    actions: [UPDATE]

  - name: Inventory/mutation/*
    group: inventory-update
    actions: [UPDATE]

  - name: ExportRequest/mutation/*
    group: export
    actions: [UPDATE]

  - name: SubmissionPeriod/mutation/*
    group: submission-period-mutation
    actions: [UPDATE]

    ### End mutation resources

    ### Begin Customer Resources

  - name: INT
    actions: [VIEW]

  - name: ACS
    actions: [VIEW]

  - name: CAMB
    actions: [VIEW]

  - name: CYBERDYNE
    actions: [VIEW]

  - name: COR
    actions: [VIEW]

  - name: INT_CCD
    actions: [VIEW]

  - name: ACS_CCD
    actions: [VIEW]

  - name: CAMB_CCD
    actions: [VIEW]

  - name: QCOM_CCD
    actions: [VIEW]

  - name: COR_CCD
    actions: [VIEW]

    ### End Customer Resources

# Common permissions for all tenants
permissions:

  - name: BasicViewUpdate
    displayName: Background Permissions
    description: Every User needs this permission
    resource-actions:
      - about:[VIEW]
      - user-event:[VIEW, UPDATE]
      - object-lock:[VIEW, UPDATE]
      - object-lock-response:[VIEW]
      - mutation-response:[VIEW]
      - attr-metadata:[VIEW]

  - name: KpiView
    displayName: KPI Tab
    description: KPI Permission Set
    resource-actions:
      - ProductKpiTab:[VIEW]

  - name: SSKpiFields
    displayName: Submission KPI Fields
    description: Submission KPI Fields Permission Set
    resource-actions:
      - ssKpi:[VIEW]

  - name: FilesKpiFields
    displayName: Files KPI Fields
    description: Files KPI Fields Permission Set
    resource-actions:
      - filesKpi:[VIEW]

  - name: FileTab
    displayName: File Tab
    description: File Permission Set
    resource-actions:
      - FilesTab:[VIEW]

  - name: FileUploadPartner
    displayName: File Upload for Partner
    description: File Upload Permission Set
    resource-actions:
      - DataFile:[VIEW, UPDATE] 
      - file-upload:[VIEW]     

  - name: FileUpload
    displayName: File Upload 
    description: File Upload Permission Set
    resource-actions:
      - DataFile:[VIEW]
      - file-upload:[VIEW]
      - reporting-partner:[VIEW]

  - name: FileDownload
    displayName: File Download
    description: File Download Permission Set
    resource-actions:
      - DataFile:[VIEW]
      - file-download:[VIEW]
      - file-validation-download:[VIEW]

  - name: SubmissionTab
    displayName: Submission Tab
    description: Submission Permission Set
    resource-actions:
      - SubmissionTrackingTab:[VIEW]
      - SubmissionResultsTab:[VIEW]

  - name: FileManufactureView
    displayName: File Admin View
    description: File Admin Permission Set
    resource-actions:
      - file-management:[VIEW]
      - file-reporting-partner:[VIEW]
      - file-data-file-state:[VIEW]
      - file-parser-attempt:[VIEW]

  - name: FilePartnerView
    displayName: File Partner View
    description: File Partner Permission Set
    resource-actions:
      - file-management:[VIEW]
      - file-data-file-state:[VIEW]
      - file-parser-attempt:[VIEW]

  - name: SubmissionManufactureView
    displayName: Submission Admin View
    description: Submission Full Permission Set
    resource-actions:
      - submission-schedule:[VIEW]
      - submission-schedule-notification:[VIEW]
      - ss-reporting-partner:[VIEW]
      - ss-data-type:[VIEW]
      - submission-period:[VIEW]
      - sp-reporting-partner:[VIEW]
      - sp-data-type:[VIEW]
      - sp-no-data-user:[VIEW]
      - submission-period-update:[VIEW]

  - name: SubmissionPartnerView
    displayName: Submission Partner View
    description: Submission Partner Permission Set
    resource-actions:
      - submission-schedule:[VIEW]
      - submission-schedule-notification:[VIEW]
      - ss-data-type:[VIEW]
      - submission-period:[VIEW]
      - sp-data-type:[VIEW]
      - sp-no-data-user:[VIEW]
      - submission-period-update:[VIEW]

  - name: SubmissionPartnerUpdate
    displayName: Submission Partner Update
    description: Submission Update Permission Set
    resource-actions:
      - submission-period-update:[UPDATE]
      - submission-period-mutation:[UPDATE]

  - name: SubmissionUpdate
    displayName: Submission Update 
    description: Submission Update Permission Set
    resource-actions:
      - submission-period-update:[UPDATE]
      - submission-period-mutation:[UPDATE]
export const SubmissionDef = `
  type Query {
    submissionPeriods(
      offset: Float, 
      limit: Float,
      filters: SubmissionPeriodFilters,
      sort: SubmissionPeriodSort
    ): [SubmissionPeriod]
    submissionSchedules(
      offset: Float, 
      limit: Float,
      filters: SubmissionScheduleFilters, 
      sort: SubmissionScheduleSort
    ): [SubmissionSchedule]
    submissionScheduleNotifications(
      submissionScheduleSid: ID,
      offset: Float,
      limit: Float,
      filters: SubmissionScheduleNotificationFilters,
      sort: SubmissionScheduleNotificationSort
    ): [SubmissionScheduleNotification]
  }

  type Mutation {
    markNoData(data: [NoDataInput]): [MutationResponse] @auth(object: SubmissionPeriod)
  }

  type SubmissionPeriod {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    expectedDate: Date
    periodStartDate: Date
    periodEndDate: Date
    noData: Boolean
    noDataReason: String
    noDataCreateDate: Date
    onTimeOverride: Boolean
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Float
    trackingLevel: String
    submissionSchedule: SubmissionSchedule
    status: String
    reportedFlag: Boolean
    numberOfFiles: Float
    dataFileSummaryInfo: [DataFileSummaryInfo]
    submissionPeriodInfoView: SubmissionPeriodInfoView
    submissionPeriodLineItemView: SubmissionPeriodLineItemView
    noDataServiceUser: ServiceUser
  }

  type SubmissionPeriodLineItemView {
    salesLineItemCount: Float
    invLineItemCount: Float
    earliestFileSubmissionDate: Date
  }

  type SubmissionPeriodInfoView {
    numberOfInventoryLines: Float
    numberOfPOSLines: Float
    fileName: String
    fileId: Float
  }

  type DataFileSummaryInfo {
    numberOfPOSLines: Float
    numberOfInventoryLines: Float
    receivedDate: Date
    dataFileId: String
    dataFileName: String
  }
  
  type SubmissionSchedule {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    dataType: DataType    
    reportingPartner: Partner
    periodRule: String
    name: String
    startDate: Date
    endDate: Date
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Boolean
    weekOfMonth: Float
    monthOfQuarter: Float
  }

  type DataType {
    sid: ID
    createDate: Date
    updateDate: Date
    type: String
  }

  type SubmissionScheduleNotification {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    submissionScheduleSid: ID
    notificationType: NotificationType
    serviceUser: ServiceUser
  }

  enum NotificationType {
    PARSE_SUCCESS
    LATE
    PARSE_FAIL
    EXPECTED
  }

  input SubmissionPeriodFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    expectedDate: DateFilter
    periodStartDate: DateFilter
    periodEndDate: DateFilter
    noDataCreateDate: DateFilter
    submissionSchedule: SubmissionScheduleFilters
    status: StringFilter
    reportedFlag: BooleanFilter
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewFilter
    dataFileSummaryInfo: DataFileSummaryInfoFilter
    numberOfFiles: NumberFilter
  }
  
  input SubmissionPeriodLineItemViewFilter {
    salesLineItemCount: NumberFilter
    invLineItemCount: NumberFilter
    earliestFileSubmissionDate: DateFilter
  }

  input DataFileSummaryInfoFilter {
    numberOfPOSLines: NumberFilter
    numberOfInventoryLines: NumberFilter
    submissionPeriodSid: NumberFilter
    dataFileSid: NumberFilter
    receivedDate: DateFilter
    fileCreateDate: DateFilter
  }

  input SubmissionScheduleFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    reportingPartner: PartnerFilter
    name: StringFilter
    dataType: DataTypeFilter
    periodRule: StringFilter
    expectedDay: StringFilter
    workingDays: StringFilter
    startDate: DateFilter
    endDate: DateFilter
    isInPeriodReporter: BooleanFilter
    weekOfMonth: NumberFilter
    monthOfQuarter: NumberFilter
  }

  input DataTypeFilter {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    type: StringFilter
  }

  input SubmissionScheduleNotificationFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    submissionScheduleSid: IDFilter
    notificationType: StringFilter
    serviceUser: ServiceUserFilters
  }

  input SubmissionPeriodSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    expectedDate: SortOption
    periodStartDate: SortOption
    periodEndDate: SortOption
    noDataCreateDate: SortOption
    submissionSchedule: SubmissionScheduleSort
    status: SortOption
    reportedFlag: SortOption
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewSort
    dataFileSummaryInfo: DataFileSummaryInfoSort
    numberOfFiles: SortOption
  }
  
  input SubmissionPeriodLineItemViewSort {
    salesLineItemCount: SortOption
    invLineItemCount: SortOption
    earliestFileSubmissionDate: SortOption
  }

  input DataFileSummaryInfoSort {
    numberOfPOSLines: SortOption
    numberOfInventoryLines: SortOption
    submissionPeriodSid: SortOption
    dataFileSid: SortOption
    receivedDate: SortOption
    fileCreateDate: SortOption
  }

  input SubmissionScheduleSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    customerSid: SortOption
    dataType: DataTypeSort
    reportingPartner: PartnerSort
    periodRule: SortOption
    name: SortOption
    startDate: SortOption
    endDate: SortOption
    expectedDay: SortOption
    workingDays: SortOption
    isInPeriodReporter: SortOption
    weekOfMonth: SortOption
    monthOfQuarter: SortOption
  }

  input DataTypeSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    type: SortOption
  }

  input SubmissionScheduleNotificationSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    notificationType: SortOption
    serviceUser: ServiceUserSort
  }

  input NoDataInput {
    sid: ID!
    noDataReason: String
  }
`;
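// A minimal example of querying the SubmissionDef schema above from a client.
// Only fields declared in the schema are selected; the operation name, paging
// values, and the choice of fields are illustrative, not application defaults.
export const exampleSubmissionPeriodsQuery = `
  query ExampleSubmissionPeriods {
    submissionPeriods(offset: 0, limit: 25) {
      sid
      expectedDate
      periodStartDate
      periodEndDate
      noData
      noDataReason
      status
      reportedFlag
      numberOfFiles
    }
  }
`;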
import { Entity, Column, JoinColumn, OneToOne, VirtualColumn, OneToMany } from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import {
  returnsSubmissionPeriodLineItemView,
  SubmissionPeriodLineItemView
} from './SubmissionPeriodLineItemView';
import {
  reportedFlagSql,
  statusSql,
  filesCountSql
} from './SubmissionPeriodSql';
import {
  returnsSubmissionSchedule,
  SubmissionSchedule
} from './SubmissionSchedule';
import { ServiceUser, returnsServiceUser } from '../user/ServiceUser';
import { DataFileSummaryInfo } from '../datafile/DataFileSummaryInfo';

export const returnsDataFileSummaryInfo = () => DataFileSummaryInfo;

export const dataFileSummaryInfoInverseSide = (dataFileSummaryInfo) =>
  dataFileSummaryInfo.submissionPeriod;

export const returnsSubmissionPeriod = () => SubmissionPeriod;

@Entity({ name: 'SUBMISSION_PERIOD' })
export class SubmissionPeriod extends CustomerDomainEntity {
  @Column({ name: 'EXPECTED_DATE' })
  expectedDate: Date;

  @Column({ name: 'PERIOD_START_DATE' })
  periodStartDate: Date;

  @Column({ name: 'PERIOD_END_DATE' })
  periodEndDate: Date;

  @Column({ name: 'SUBMISSION_SCHEDULE_SID', select: false })
  submissionScheduleSid: number;

  @Column({ name: 'ON_TIME_OVERRIDE' })
  onTimeOverride: boolean;

  @Column({ name: 'NO_DATA' })
  noData: boolean;

  @Column({ name: 'NO_DATA_REASON' })
  noDataReason: string;

  @Column({ name: 'NO_DATA_CREATE_DATE' })
  noDataCreateDate: Date;

  @Column({ name: 'NO_DATA_SERVICE_USER_SID', select: false })
  noDataServiceUserSid: number;

  @OneToOne(returnsServiceUser)
  @JoinColumn({ name: 'NO_DATA_SERVICE_USER_SID' })
  noDataServiceUser: Promise<ServiceUser>;

  @Column({ name: 'TRACKING_LEVEL' })
  trackingLevel: string;

  @Column({ name: 'EXPECTED_DAY' })
  expectedDay: number;

  @Column({ name: 'WORKING_DAYS' })
  workingDays: string;

  @Column({ name: 'IS_IN_PERIOD_REPORTER' })
  isInPeriodReporter: number;

  @Column({ name: 'DELETED' })
  deleted: boolean;

  @OneToOne(returnsSubmissionSchedule)
  @JoinColumn({ name: 'SUBMISSION_SCHEDULE_SID' })
  submissionSchedule?: Promise<SubmissionSchedule>;

  @VirtualColumn({ query: statusSql })
  status?: string;

  @VirtualColumn({ query: reportedFlagSql })
  reportedFlag: boolean;

  @VirtualColumn({ query: filesCountSql })
  numberOfFiles?: number;

  @OneToMany(returnsDataFileSummaryInfo, dataFileSummaryInfoInverseSide)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  dataFileSummaryInfo?: Promise<DataFileSummaryInfo[]>;

  @OneToOne(returnsSubmissionPeriodLineItemView)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodLineItemView?: Promise<SubmissionPeriodLineItemView>;

}
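// Hypothetical sketch of one of the virtual-column helpers imported above from
// './SubmissionPeriodSql' (the real implementations are not part of this
// collection). It follows the same (alias: string) => SQL pattern as the
// DataFileSummaryInfoSql helpers below, counting summary rows per period.
export const filesCountSqlSketch = (alias: string) => {
    return `SELECT COUNT(*) FROM DATA_FILE_SUMMARY_INFO dfsi WHERE dfsi.SUBMISSION_PERIOD_SID = ${alias}.SID`;
};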
export const getDataFileIdSql = (alias: string) => {
    return `SELECT df.id FROM DATA_FILE df WHERE df.SID = ${alias}.DATA_FILE_SID`;
};

export const getDataFileNameSql = (alias: string) => {
    return `SELECT df.file_name FROM DATA_FILE df WHERE df.SID = ${alias}.DATA_FILE_SID`;
};
import { Inject, Service } from 'typedi';
import { DomainEntityService } from '../base/DomainEntityService';
import { DataFileSummaryInfo } from './DataFileSummaryInfo';
import { CustomerService } from '../customer/CustomerService';
import { SubmissionPeriodService } from '../submission/SubmissionPeriodService';

@Service()
export class DataFileSummaryInfoService extends DomainEntityService<DataFileSummaryInfo> {
  @Inject()
  protected customerService: CustomerService;

  @Inject()
  protected submissionPeriodService: SubmissionPeriodService;

  constructor() {
    super(DataFileSummaryInfo);
  }

  getServiceName() {
    return 'DataFileSummaryInfo';
  }

  async createDataFileSummaryInfo(
    custId: string,
    submissionPeriodSid: number,
    dataFileSid: number,
    numberOfPOSLines: number,
    numberOfInventoryLines: number
  ) {
    const cust = await this.customerService.findOneById(custId);

    await this.create({
      customerSid: cust.sid,
      submissionPeriodSid,
      dataFileSid,
      numberOfPOSLines,
      numberOfInventoryLines,
    });
  }
}
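// Illustrative caller for the DataFileSummaryInfoService defined above, resolved
// through typedi's Container. The customer id, sids, and line counts are
// placeholder values; the real ingestion flow that records these summaries is
// not shown in this collection.
import { Container } from 'typedi';

export async function recordDataFileSummaryExample() {
  const summaryService = Container.get(DataFileSummaryInfoService);
  await summaryService.createDataFileSummaryInfo('ACME', 1001, 2002, 150, 75);
}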
import { Entity, Column, ManyToOne, JoinColumn, VirtualColumn} from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import { SubmissionPeriod } from '../submission/SubmissionPeriod';
import { getDataFileIdSql, getDataFileNameSql } from './DataFileSummaryInfoSql';

export const returnsSubmissionPeriods = () => SubmissionPeriod;
export const returnsSummaryInfo = (submissionPeriod) => submissionPeriod.dataFileSummaryInfo;

@Entity({ name: 'DATA_FILE_SUMMARY_INFO' })
export class DataFileSummaryInfo extends CustomerDomainEntity {

    @ManyToOne(returnsSubmissionPeriods, returnsSummaryInfo)
    @JoinColumn({ name: 'SUBMISSION_PERIOD_SID', referencedColumnName: 'sid' })
    submissionPeriod: SubmissionPeriod;

    @Column({ name: 'DATA_FILE_SID' })
    dataFileSid: number;

    @Column({ name: 'CUSTOMER_SID' })
    customerSid: number;

    @Column({ name: 'NUM_SLIS' })
    numberOfPOSLines: number;

    @Column({ name: 'NUM_ILIS' })
    numberOfInventoryLines: number;

    @Column({ name: 'CREATE_DATE' })
    receivedDate: Date;

    @Column({ name: 'SUBMISSION_PERIOD_SID', nullable: true })
    submissionPeriodSid: number;

    @VirtualColumn({ query: getDataFileIdSql })
    dataFileId?: string;

    @VirtualColumn({ query: getDataFileNameSql })
    dataFileName?: string;
}

import { Customer } from './customer/Customer';
import { Partner } from './partner/Partner';
import { Product } from './product/Product';
import { DynamicAttrs } from './attribute/DynamicAttrs';
import { DynamicAttrMetadata } from './attribute/DynamicAttrMetadata';
import { Sales } from './sales/Sales';
import { Address } from './address/Address';
import { Currency } from './base/Currency';
import { DataFile } from './datafile/DataFile';
import { Country } from './address/Country';
import { SalesMatchInfo } from './sales/SalesMatchInfo';
import { ProductAggr } from './product/ProductAggr';
import { GsNumber } from './partner/GsNumber';
import { Inventory } from './inventory/Inventory';
import { SubmissionPeriod } from './submission/SubmissionPeriod';
import { InventoryQuantity } from './inventory/InventoryQuantity';
import { QuantityType } from './inventory/QuantityType';
import { InventoryPrice } from './inventory/InventoryPrice';
import { UserEvent } from './event/UserEvent';
import { UserSession } from './event/UserSession';
import { DataFileState } from './datafile/DataFileState';
import { ParserAttempt } from './datafile/ParserAttempt';
import { PartnerOverlayView } from './partner/PartnerOverlayView';
import { DataState } from './base/DataState';
import { SubmissionSchedule } from './submission/SubmissionSchedule';
import { DataType } from './base/DataType';
import { SubmissionPeriodLineItemView } from './submission/SubmissionPeriodLineItemView';
import { SubmissionScheduleNotification } from './submission/SubmissionScheduleNotification';
import { ServiceUser } from './user/ServiceUser';
import { AuditEvent } from './event/AuditEvent';
import { SubmissionScheduleAudit } from './submission/SubmissionScheduleAudit';
import { AuditType } from './event/AuditType';
import { DataFileSummaryInfo } from './datafile/DataFileSummaryInfo';
import { SubmissionPeriodInfoView } from './submission/SubmissionPeriodInfoView';

export const entities = [
  Customer,
  GsNumber,
  Partner,
  Product,
  ProductAggr,
  Sales,
  Inventory,
  SubmissionPeriod,
  SubmissionSchedule,
  SubmissionScheduleNotification,
  DataType,
  SubmissionPeriodLineItemView,
  InventoryQuantity,
  QuantityType,
  InventoryPrice,
  SalesMatchInfo,
  Address,
  Country,
  Currency,
  DataFile,
  DataFileState,
  DataState,
  ParserAttempt,
  PartnerOverlayView,
  DynamicAttrs,
  DynamicAttrMetadata,
  UserEvent,
  UserSession,
  ServiceUser,
  AuditEvent,
  SubmissionScheduleAudit,
  AuditType,
  DataFileSummaryInfo
];
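// Minimal sketch of wiring the entities array above into a TypeORM DataSource.
// The driver and connection settings are placeholders/assumptions; only the
// `entities` option comes from this collection.
import { DataSource } from 'typeorm';

export const exampleDataSource = new DataSource({
  type: 'postgres',        // placeholder driver; the project may target a different database
  host: 'localhost',       // placeholder connection settings
  port: 5432,
  username: 'app_user',
  password: 'change_me',
  database: 'channel_collab',
  entities,
  synchronize: false,      // schema is assumed to be managed outside the ORM
});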
export const SubmissionDef = `
  type Query {
    submissionPeriods(
      offset: Float, 
      limit: Float,
      filters: SubmissionPeriodFilters,
      sort: SubmissionPeriodSort
    ): [SubmissionPeriod]
    submissionSchedules(
      offset: Float, 
      limit: Float,
      filters: SubmissionScheduleFilters, 
      sort: SubmissionScheduleSort
    ): [SubmissionSchedule]
    submissionScheduleNotifications(
      submissionScheduleSid: ID,
      offset: Float,
      limit: Float,
      filters: SubmissionScheduleNotificationFilters,
      sort: SubmissionScheduleNotificationSort
    ): [SubmissionScheduleNotification]
  }

  type Mutation {
    markNoData(data: [NoDataInput]): [MutationResponse] @auth(object: SubmissionPeriod)
  }

  type SubmissionPeriod {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    expectedDate: Date
    periodStartDate: Date
    periodEndDate: Date
    noData: Boolean
    noDataReason: String
    noDataCreateDate: Date
    onTimeOverride: Boolean
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Float
    trackingLevel: String
    submissionSchedule: SubmissionSchedule
    status: String
    reportedFlag: Boolean
    numberOfFiles: Float
    dataFileSummaryInfo: [DataFileSummaryInfo]
    submissionPeriodInfoView: SubmissionPeriodInfoView
    submissionPeriodLineItemView: SubmissionPeriodLineItemView
    noDataServiceUser: ServiceUser
  }

  type SubmissionPeriodLineItemView {
    salesLineItemCount: Float
    invLineItemCount: Float
    earliestFileSubmissionDate: Date
  }

  type SubmissionPeriodInfoView {
    numberOfInventoryLines: Float
    numberOfPOSLines: Float
    fileName: String
    fileId: Float
  }

  type DataFileSummaryInfo {
    numberOfPOSLines: Float
    numberOfInventoryLines: Float
    submissionPeriodSid: Float
    dataFileSid: Float
    receivedDate: Date
    fileCreateDate: Date
    dataFileId: String
    dataFileName: String
  }
  
  type SubmissionSchedule {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    dataType: DataType    
    reportingPartner: Partner
    periodRule: String
    name: String
    startDate: Date
    endDate: Date
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Boolean
    weekOfMonth: Float
    monthOfQuarter: Float
  }

  type DataType {
    sid: ID
    createDate: Date
    updateDate: Date
    type: String
  }

  type SubmissionScheduleNotification {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    submissionScheduleSid: ID
    notificationType: NotificationType
    serviceUser: ServiceUser
  }

  enum NotificationType {
    PARSE_SUCCESS
    LATE
    PARSE_FAIL
    EXPECTED
  }

  input SubmissionPeriodFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    expectedDate: DateFilter
    periodStartDate: DateFilter
    periodEndDate: DateFilter
    noDataCreateDate: DateFilter
    submissionSchedule: SubmissionScheduleFilters
    status: StringFilter
    reportedFlag: BooleanFilter
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewFilter
    dataFileSummaryInfo: DataFileSummaryInfoFilter
    numberOfFiles: NumberFilter
  }
  
  input SubmissionPeriodLineItemViewFilter {
    salesLineItemCount: NumberFilter
    invLineItemCount: NumberFilter
    earliestFileSubmissionDate: DateFilter
  }

  input DataFileSummaryInfoFilter{
    numberOfPOSLines: NumberFilter
    numberOfInventoryLines: NumberFilter
    submissionPeriodSid: NumberFilter
    dataFileSid: NumberFilter
    receivedDate: DateFilter
    fileCreateDate: DateFilter
  }

  input SubmissionScheduleFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    reportingPartner: PartnerFilter
    name: StringFilter
    dataType: DataTypeFilter
    periodRule: StringFilter
    expectedDay: StringFilter
    workingDays: StringFilter
    startDate: DateFilter
    endDate: DateFilter
    isInPeriodReporter: BooleanFilter
    weekOfMonth: NumberFilter
    monthOfQuarter: NumberFilter
  }

  input DataTypeFilter {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    type: StringFilter
  }

  input SubmissionScheduleNotificationFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    submissionScheduleSid: IDFilter
    notificationType: StringFilter
    serviceUser: ServiceUserFilters
  }

  input SubmissionPeriodSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    expectedDate: SortOption
    periodStartDate: SortOption
    periodEndDate: SortOption
    noDataCreateDate: SortOption
    submissionSchedule: SubmissionScheduleSort
    status: SortOption
    reportedFlag: SortOption
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewSort
    dataFileSummaryInfo: DataFileSummaryInfoSort
    numberOfFiles: SortOption
  }
  
  input SubmissionPeriodLineItemViewSort {
    salesLineItemCount: SortOption
    invLineItemCount: SortOption
    earliestFileSubmissionDate: SortOption
  }

  input DataFileSummaryInfoSort{
    numberOfPOSLines: SortOption
    numberOfInventoryLines: SortOption
    submissionPeriodSid: SortOption
    dataFileSid: SortOption
    receivedDate: SortOption
    fileCreateDate: SortOption
  }

  input SubmissionScheduleSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    customerSid: SortOption
    dataType: DataTypeSort
    reportingPartner: PartnerSort
    periodRule: SortOption
    name: SortOption
    startDate: SortOption
    endDate: SortOption
    expectedDay: SortOption
    workingDays: SortOption
    isInPeriodReporter: SortOption
    weekOfMonth: SortOption
    monthOfQuarter: SortOption
  }

  input DataTypeSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    type: SortOption
  }

  input SubmissionScheduleNotificationSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    notificationType: SortOption
    serviceUser: ServiceUserSort
  }

  input NoDataInput {
    sid: ID!
    noDataReason: String
  }
`;
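A minimal usage sketch for the schema above: it posts the submissionPeriods query to a GraphQL endpoint over HTTP. The endpoint URL, bearer-token header, and fetch-based transport are illustrative assumptions; only the query shape and the selected fields come from SubmissionDef.

// Hedged client-side sketch for SubmissionDef; transport details are assumptions.
const SUBMISSION_PERIODS_QUERY = `
  query {
    submissionPeriods(offset: 0, limit: 25) {
      sid
      expectedDate
      periodStartDate
      periodEndDate
      status
      numberOfFiles
      submissionSchedule { name periodRule }
    }
  }
`;

export async function fetchSubmissionPeriods(endpoint: string, token: string) {
  // POST the query document; GraphQL servers conventionally accept a { query } JSON body.
  const res = await fetch(endpoint, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}` },
    body: JSON.stringify({ query: SUBMISSION_PERIODS_QUERY })
  });
  const { data } = await res.json();
  return data?.submissionPeriods ?? [];
}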
import { Entity, Column, JoinColumn, OneToOne, VirtualColumn, OneToMany, ManyToMany, ManyToOne } from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import {
  returnsSubmissionPeriodLineItemView,
  SubmissionPeriodLineItemView
} from './SubmissionPeriodLineItemView';
import {
  reportedFlagSql,
  statusSql,
  fileIdsSql,
  filesCountSql,
  filesDataSql
} from './SubmissionPeriodSql';
import {
  returnsSubmissionSchedule,
  SubmissionSchedule
} from './SubmissionSchedule';
import { ServiceUser, returnsServiceUser } from '../user/ServiceUser';
import { DataFileSummaryInfo, returnsSummaryInfo } from '../datafile/DataFileSummaryInfo';
import { SubmissionPeriodInfoView } from './SubmissionPeriodInfoView';

export const returnsDataFileSummaryInfo = () => DataFileSummaryInfo;

export const dataFileSummaryInfoInverseSide = (dataFileSummaryInfo) =>
  dataFileSummaryInfo.submissionPeriod;

export const returnsSubmissionPeriod = () => SubmissionPeriod;

@Entity({ name: 'SUBMISSION_PERIOD' })
export class SubmissionPeriod extends CustomerDomainEntity {
  @Column({ name: 'EXPECTED_DATE' })
  expectedDate: Date;

  @Column({ name: 'PERIOD_START_DATE' })
  periodStartDate: Date;

  @Column({ name: 'PERIOD_END_DATE' })
  periodEndDate: Date;

  @Column({ name: 'SUBMISSION_SCHEDULE_SID', select: false })
  submissionScheduleSid: number;

  @Column({ name: 'ON_TIME_OVERRIDE' })
  onTimeOverride: boolean;

  @Column({ name: 'NO_DATA' })
  noData: boolean;

  @Column({ name: 'NO_DATA_REASON' })
  noDataReason: string;

  @Column({ name: 'NO_DATA_CREATE_DATE' })
  noDataCreateDate: Date;

  @Column({ name: 'NO_DATA_SERVICE_USER_SID', select: false })
  noDataServiceUserSid: number;

  @OneToOne(returnsServiceUser)
  @JoinColumn({ name: 'NO_DATA_SERVICE_USER_SID' })
  noDataServiceUser: Promise<ServiceUser>;

  @Column({ name: 'TRACKING_LEVEL' })
  trackingLevel: string;

  @Column({ name: 'EXPECTED_DAY' })
  expectedDay: number;

  @Column({ name: 'WORKING_DAYS' })
  workingDays: string;

  @Column({ name: 'IS_IN_PERIOD_REPORTER' })
  isInPeriodReporter: number;

  @Column({ name: 'DELETED' })
  deleted: boolean;

  @OneToOne(returnsSubmissionSchedule)
  @JoinColumn({ name: 'SUBMISSION_SCHEDULE_SID' })
  submissionSchedule?: Promise<SubmissionSchedule>;

  @VirtualColumn({ query: statusSql })
  status?: string;

  @VirtualColumn({ query: reportedFlagSql })
  reportedFlag: boolean;

  // @VirtualColumn( {query : fileIdsSql} )
  // fileIds?: string;

  // firstFileId?: string;

  // firstFileName?: string;

  // firstFileCreateDate?: Date;

  @VirtualColumn( {query : filesCountSql} )
  numberOfFiles?: number;

  @OneToOne(returnsDataFileSummaryInfo)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodInfoView?: Promise<SubmissionPeriodInfoView>;

  @OneToMany(returnsDataFileSummaryInfo,dataFileSummaryInfoInverseSide)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  dataFileSummaryInfo?: Promise<DataFileSummaryInfo[]>;

  // this is the aggregation that matches the reporting partner
  // dataFileSummaryInfo: DataFileSummaryInfo;

  @OneToOne(returnsSubmissionPeriodLineItemView)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodLineItemView?: Promise<SubmissionPeriodLineItemView>;

}
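For the entity above, a hedged repository sketch: it assumes an already-initialized TypeORM DataSource that has registered SubmissionPeriod; the function name and the 25-row limit are illustrative and not part of the original code.

import { DataSource } from 'typeorm';

// Hedged sketch: load recent periods and resolve the lazy (Promise-typed)
// dataFileSummaryInfo relation per row; dataSource is an assumed, initialized instance.
export async function loadPeriodsWithSummaries(dataSource: DataSource) {
  const repo = dataSource.getRepository(SubmissionPeriod);
  const periods = await repo.find({ where: { noData: false }, take: 25 });
  for (const period of periods) {
    // Awaiting a lazy relation triggers its query on first access.
    const summaries = (await period.dataFileSummaryInfo) ?? [];
    console.log(period.expectedDate, summaries.length);
  }
  return periods;
}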
import { Entity, Column, ManyToOne, JoinColumn, OneToMany, VirtualColumn} from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import { returnsSubmissionPeriod } from '../submission/SubmissionPeriod';
import { SubmissionPeriod } from '../submission/SubmissionPeriod';
import {getDataFileIdSql, getDataFileNameSql} from './DataFileSummaryInfoSql';

export const returnsSubmissionPeriods = () => SubmissionPeriod;
export const returnsSummaryInfo = (submissionPeriod) => submissionPeriod.dataFileSummaryInfo;

@Entity({ name: 'DATA_FILE_SUMMARY_INFO' })
export class DataFileSummaryInfo extends CustomerDomainEntity {

    @ManyToOne(returnsSubmissionPeriods,returnsSummaryInfo)
    @JoinColumn({ name: 'SUBMISSION_PERIOD_SID', referencedColumnName:'sid'})
    submissionPeriod: SubmissionPeriod;

    // @ManyToOne(returnsUserSession)
    // @JoinColumn({ name: 'USER_SESSION_SID' })
    // userSession?: Promise<UserSession>;

    @Column({name:'DATA_FILE_SID'})
    dataFileSid: number;

    @Column({name: 'CUSTOMER_SID'})
    customerSid: number;

    @Column({name:'NUM_SLIS'})
    numberOfPOSLines: number;

    @Column({name:'NUM_ILIS'})
    numberOfInventoryLines: number;

    @Column({name: 'CREATE_DATE'})
    receivedDate: Date;
    
    // specifying that submissionPeriodSid can be null
    @Column({ name: 'SUBMISSION_PERIOD_SID', nullable: true })
    submissionPeriodSid: number;

    // VirtualColumn to get DataFile information
    @VirtualColumn({ query: getDataFileIdSql})
    dataFileId?: string;

    // VirtualColumn to get DataFile information
    @VirtualColumn({ query: getDataFileNameSql})
    dataFileName?: string;

    // @VirtualColumn( {query : fileIdSql} )
    // fileId?: number[];

    // @VirtualColumn( {query : filesCountSql} )
    // numberOfFiles?: number;
}

import { Entity, Column, JoinColumn, OneToOne, VirtualColumn, OneToMany, ManyToMany, ManyToOne } from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import {
  returnsSubmissionPeriodLineItemView,
  SubmissionPeriodLineItemView
} from './SubmissionPeriodLineItemView';
import {
  reportedFlagSql,
  statusSql,
  fileIdsSql,
  filesCountSql,
  filesDataSql
} from './SubmissionPeriodSql';
import {
  returnsSubmissionSchedule,
  SubmissionSchedule
} from './SubmissionSchedule';
import { ServiceUser, returnsServiceUser } from '../user/ServiceUser';
import { DataFileSummaryInfo, returnsSummaryInfo } from '../datafile/DataFileSummaryInfo';
import { SubmissionPeriodInfoView } from './SubmissionPeriodInfoView';

export const returnsDataFileSummaryInfo = () => DataFileSummaryInfo;

export const dataFileSummaryInfoInverseSide = (dataFileSummaryInfo) =>
  dataFileSummaryInfo.submissionPeriod;

export const returnsSubmissionPeriod = () => SubmissionPeriod;

@Entity({ name: 'SUBMISSION_PERIOD' })
export class SubmissionPeriod extends CustomerDomainEntity {
  @Column({ name: 'EXPECTED_DATE' })
  expectedDate: Date;

  @Column({ name: 'PERIOD_START_DATE' })
  periodStartDate: Date;

  @Column({ name: 'PERIOD_END_DATE' })
  periodEndDate: Date;

  @Column({ name: 'SUBMISSION_SCHEDULE_SID', select: false })
  submissionScheduleSid: number;

  @Column({ name: 'ON_TIME_OVERRIDE' })
  onTimeOverride: boolean;

  @Column({ name: 'NO_DATA' })
  noData: boolean;

  @Column({ name: 'NO_DATA_REASON' })
  noDataReason: string;

  @Column({ name: 'NO_DATA_CREATE_DATE' })
  noDataCreateDate: Date;

  @Column({ name: 'NO_DATA_SERVICE_USER_SID', select: false })
  noDataServiceUserSid: number;

  @OneToOne(returnsServiceUser)
  @JoinColumn({ name: 'NO_DATA_SERVICE_USER_SID' })
  noDataServiceUser: Promise<ServiceUser>;

  @Column({ name: 'TRACKING_LEVEL' })
  trackingLevel: string;

  @Column({ name: 'EXPECTED_DAY' })
  expectedDay: number;

  @Column({ name: 'WORKING_DAYS' })
  workingDays: string;

  @Column({ name: 'IS_IN_PERIOD_REPORTER' })
  isInPeriodReporter: number;

  @Column({ name: 'DELETED' })
  deleted: boolean;

  @OneToOne(returnsSubmissionSchedule)
  @JoinColumn({ name: 'SUBMISSION_SCHEDULE_SID' })
  submissionSchedule?: Promise<SubmissionSchedule>;

  @VirtualColumn({ query: statusSql })
  status?: string;

  @VirtualColumn({ query: reportedFlagSql })
  reportedFlag: boolean;

  // @VirtualColumn( {query : fileIdsSql} )
  // fileIds?: string;

  // firstFileId?: string;

  // firstFileName?: string;

  // firstFileCreateDate?: Date;

  @VirtualColumn( {query : filesCountSql} )
  numberOfFiles?: number;

  @OneToOne(returnsDataFileSummaryInfo)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodInfoView?: Promise<SubmissionPeriodInfoView>;

  @OneToMany(returnsDataFileSummaryInfo,dataFileSummaryInfoInverseSide)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  dataFileSummaryInfo: Promise<DataFileSummaryInfo[]>;

  // this is the aggregation that matches the reporting partner
  // dataFileSummaryInfo: DataFileSummaryInfo;

  @OneToOne(returnsSubmissionPeriodLineItemView)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodLineItemView?: Promise<SubmissionPeriodLineItemView>;

}
import { Entity, Column, ManyToOne, JoinColumn, OneToMany, VirtualColumn} from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import { returnsSubmissionPeriod } from '../submission/SubmissionPeriod';
import { SubmissionPeriod } from '../submission/SubmissionPeriod';
import {getDataFileIdSql, getDataFileNameSql} from './DataFileSummaryInfoSql';

export const returnsSubmissionPeriods = () => SubmissionPeriod;
export const returnsSummaryInfo = (submissionPeriod) => submissionPeriod.dataFileSummaryInfo;

@Entity({ name: 'DATA_FILE_SUMMARY_INFO' })
export class DataFileSummaryInfo extends CustomerDomainEntity {

    @ManyToOne(returnsSubmissionPeriods,returnsSummaryInfo)
    @JoinColumn({ name: 'SUBMISSION_PERIOD_SID', referencedColumnName:'sid'})
    submissionPeriod: SubmissionPeriod;

    // @ManyToOne(returnsUserSession)
    // @JoinColumn({ name: 'USER_SESSION_SID' })
    // userSession?: Promise<UserSession>;

    @Column({name:'DATA_FILE_SID'})
    dataFileSid: number;

    @Column({name: 'CUSTOMER_SID'})
    customerSid: number;

    @Column({name:'NUM_SLIS'})
    numberOfPOSLines: number;

    @Column({name:'NUM_ILIS'})
    numberOfInventoryLines: number;

    @Column({name: 'CREATE_DATE'})
    receivedDate: Date;
    
    // specifying that submissionPeriodSid can be null
    @Column({ name: 'SUBMISSION_PERIOD_SID', nullable: true })
    submissionPeriodSid: number;

    // VirtualColumn to get DataFile information
    @VirtualColumn({ query: getDataFileIdSql})
    dataFileId?: string;

    // VirtualColumn to get DataFile information
    @VirtualColumn({ query: getDataFileNameSql})
    dataFileName?: string;

    // @VirtualColumn( {query : fileIdSql} )
    // fileId?: number[];

    // @VirtualColumn( {query : filesCountSql} )
    // numberOfFiles?: number;
}

export const SubmissionDef = `
  type Query {
    submissionPeriods(
      offset: Float, 
      limit: Float,
      filters: SubmissionPeriodFilters,
      sort: SubmissionPeriodSort
    ): [SubmissionPeriod]
    submissionSchedules(
      offset: Float, 
      limit: Float,
      filters: SubmissionScheduleFilters, 
      sort: SubmissionScheduleSort
    ): [SubmissionSchedule]
    submissionScheduleNotifications(
      submissionScheduleSid: ID,
      offset: Float,
      limit: Float,
      filters: SubmissionScheduleNotificationFilters,
      sort: SubmissionScheduleNotificationSort
    ): [SubmissionScheduleNotification]
  }

  type Mutation {
    markNoData(data: [NoDataInput]): [MutationResponse] @auth(object: SubmissionPeriod)
  }

  type SubmissionPeriod {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    expectedDate: Date
    periodStartDate: Date
    periodEndDate: Date
    noData: Boolean
    noDataReason: String
    noDataCreateDate: Date
    onTimeOverride: Boolean
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Float
    trackingLevel: String
    submissionSchedule: SubmissionSchedule
    status: String
    reportedFlag: Boolean
    numberOfFiles: Float
    dataFileSummaryInfo: DataFileSummaryInfo
    submissionPeriodInfoView: SubmissionPeriodInfoView
    submissionPeriodLineItemView: SubmissionPeriodLineItemView
    noDataServiceUser: ServiceUser
  }

  type SubmissionPeriodLineItemView {
    salesLineItemCount: Float
    invLineItemCount: Float
    earliestFileSubmissionDate: Date
  }

  type SubmissionPeriodInfoView {
    numberOfInventoryLines: Float
    numberOfPOSLines:Float
    fileName: String
    fileId: Float
  }

  type DataFileSummaryInfo{
    numberOfPOSLines: Float
    numberOfInventoryLines: Float
    submissionPeriodSid: Float
    dataFileSid: Float
    receivedDate: Date
    fileCreateDate: Date
    dataFileId: String
    dataFileName: String
  }
  
  type SubmissionSchedule {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    dataType: DataType    
    reportingPartner: Partner
    periodRule: String
    name: String
    startDate: Date
    endDate: Date
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Boolean
    weekOfMonth: Float
    monthOfQuarter: Float
  }

  type DataType {
    sid: ID
    createDate: Date
    updateDate: Date
    type: String
  }

  type SubmissionScheduleNotification {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    submissionScheduleSid: ID
    notificationType: NotificationType
    serviceUser: ServiceUser
  }

  enum NotificationType {
    PARSE_SUCCESS
    LATE
    PARSE_FAIL
    EXPECTED
  }

  input SubmissionPeriodFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    expectedDate: DateFilter
    periodStartDate: DateFilter
    periodEndDate: DateFilter
    submissionSchedule: SubmissionScheduleFilters
    status: StringFilter
    reportedFlag: BooleanFilter
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewFilter
    dataFileSummaryInfo: DataFileSummaryInfoFilter
    numberOfFiles: NumberFilter
  }
  
  input SubmissionPeriodLineItemViewFilter {
    salesLineItemCount: NumberFilter
    invLineItemCount: NumberFilter
    earliestFileSubmissionDate: DateFilter
  }

  input DataFileSummaryInfoFilter{
    numberOfPOSLines: NumberFilter
    numberOfInventoryLines: NumberFilter
    submissionPeriodSid: NumberFilter
    dataFileSid: NumberFilter
    receivedDate: DateFilter
    fileCreateDate: DateFilter
  }

  input SubmissionScheduleFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    reportingPartner: PartnerFilter
    name: StringFilter
    dataType: DataTypeFilter
    periodRule: StringFilter
    expectedDay: StringFilter
    workingDays: StringFilter
    startDate: DateFilter
    endDate: DateFilter
    isInPeriodReporter: BooleanFilter
    weekOfMonth: NumberFilter
    monthOfQuarter: NumberFilter
  }

  input DataTypeFilter {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    type: StringFilter
  }

  input SubmissionScheduleNotificationFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    submissionScheduleSid: IDFilter
    notificationType: StringFilter
    serviceUser: ServiceUserFilters
  }

  input SubmissionPeriodSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    expectedDate: SortOption
    periodStartDate: SortOption
    periodEndDate: SortOption
    submissionSchedule: SubmissionScheduleSort
    status: SortOption
    reportedFlag: SortOption
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewSort
    dataFileSummaryInfo: DataFileSummaryInfoSort
    numberOfFiles: SortOption
  }
  
  input SubmissionPeriodLineItemViewSort {
    salesLineItemCount: SortOption
    invLineItemCount: SortOption
    earliestFileSubmissionDate: SortOption
  }

  input DataFileSummaryInfoSort{
    numberOfPOSLines: SortOption
    numberOfInventoryLines: SortOption
    submissionPeriodSid: SortOption
    dataFileSid: SortOption
    receivedDate: SortOption
    fileCreateDate: SortOption
  }

  input SubmissionScheduleSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    customerSid: SortOption
    dataType: DataTypeSort
    reportingPartner: PartnerSort
    periodRule: SortOption
    name: SortOption
    startDate: SortOption
    endDate: SortOption
    expectedDay: SortOption
    workingDays: SortOption
    isInPeriodReporter: SortOption
    weekOfMonth: SortOption
    monthOfQuarter: SortOption
  }

  input DataTypeSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    type: SortOption
  }

  input SubmissionScheduleNotificationSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    notificationType: SortOption
    serviceUser: ServiceUserSort
  }

  input NoDataInput {
    sid: ID!
    noDataReason: String
  }
`;
import { Entity, Column, JoinColumn, OneToOne, VirtualColumn, OneToMany, ManyToMany, ManyToOne } from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import {
  returnsSubmissionPeriodLineItemView,
  SubmissionPeriodLineItemView
} from './SubmissionPeriodLineItemView';
import {
  reportedFlagSql,
  statusSql,
  fileIdsSql,
  filesCountSql,
  filesDataSql
} from './SubmissionPeriodSql';
import {
  returnsSubmissionSchedule,
  SubmissionSchedule
} from './SubmissionSchedule';
import { ServiceUser, returnsServiceUser } from '../user/ServiceUser';
import { DataFileSummaryInfo, returnsSummaryInfo } from '../datafile/DataFileSummaryInfo';
import { SubmissionPeriodInfoView } from './SubmissionPeriodInfoView';

export const returnsDataFileSummaryInfo = () => DataFileSummaryInfo;

export const dataFileSummaryInfoInverseSide = (dataFileSummaryInfo) =>
  dataFileSummaryInfo.submissionPeriod;

export const returnsSubmissionPeriod = () => SubmissionPeriod;

@Entity({ name: 'SUBMISSION_PERIOD' })
export class SubmissionPeriod extends CustomerDomainEntity {
  @Column({ name: 'EXPECTED_DATE' })
  expectedDate: Date;

  @Column({ name: 'PERIOD_START_DATE' })
  periodStartDate: Date;

  @Column({ name: 'PERIOD_END_DATE' })
  periodEndDate: Date;

  @Column({ name: 'SUBMISSION_SCHEDULE_SID', select: false })
  submissionScheduleSid: number;

  @Column({ name: 'ON_TIME_OVERRIDE' })
  onTimeOverride: boolean;

  @Column({ name: 'NO_DATA' })
  noData: boolean;

  @Column({ name: 'NO_DATA_REASON' })
  noDataReason: string;

  @Column({ name: 'NO_DATA_CREATE_DATE' })
  noDataCreateDate: Date;

  @Column({ name: 'NO_DATA_SERVICE_USER_SID', select: false })
  noDataServiceUserSid: number;

  @OneToOne(returnsServiceUser)
  @JoinColumn({ name: 'NO_DATA_SERVICE_USER_SID' })
  noDataServiceUser: Promise<ServiceUser>;

  @Column({ name: 'TRACKING_LEVEL' })
  trackingLevel: string;

  @Column({ name: 'EXPECTED_DAY' })
  expectedDay: number;

  @Column({ name: 'WORKING_DAYS' })
  workingDays: string;

  @Column({ name: 'IS_IN_PERIOD_REPORTER' })
  isInPeriodReporter: number;

  @Column({ name: 'DELETED' })
  deleted: boolean;

  @OneToOne(returnsSubmissionSchedule)
  @JoinColumn({ name: 'SUBMISSION_SCHEDULE_SID' })
  submissionSchedule?: Promise<SubmissionSchedule>;

  @VirtualColumn({ query: statusSql })
  status?: string;

  @VirtualColumn({ query: reportedFlagSql })
  reportedFlag: boolean;

  @VirtualColumn( {query : fileIdsSql} )
  fileIds?: string;

  firstFileId?: string;

  firstFileName?: string;

  firstFileCreateDate?: Date;

  @VirtualColumn( {query : filesCountSql} )
  numberOfFiles?: number;

  @OneToOne(returnsDataFileSummaryInfo)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodInfoView?: Promise<SubmissionPeriodInfoView>;

  @OneToMany(returnsDataFileSummaryInfo,dataFileSummaryInfoInverseSide)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  dataFileSummaryInfo?: Promise<DataFileSummaryInfo[]>;

  // this is the aggregation that matches the reporting partner
  // dataFileSummaryInfo: DataFileSummaryInfo;

  @OneToOne(returnsSubmissionPeriodLineItemView)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodLineItemView?: Promise<SubmissionPeriodLineItemView>;

}
import { Entity, Column, ManyToOne, JoinColumn, OneToMany, VirtualColumn} from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import { returnsSubmissionPeriod } from '../submission/SubmissionPeriod';
import { SubmissionPeriod } from '../submission/SubmissionPeriod';
import {getDataFileIdSql, getDataFileNameSql} from './DataFileSummaryInfoSql';

export const returnsSubmissionPeriods = () => SubmissionPeriod;
export const returnsSummaryInfo = (submissionPeriod) => submissionPeriod.dataFileSummaryInfo;


@Entity({ name: 'DATA_FILE_SUMMARY_INFO' })
export class DataFileSummaryInfo extends CustomerDomainEntity {

    @ManyToOne(returnsSubmissionPeriod)
    @JoinColumn({ name: 'SUBMISSION_PERIOD_SID', referencedColumnName:'sid'})
    submissionPeriod?: Promise<SubmissionPeriod>;

    // @ManyToOne(returnsUserSession)
    // @JoinColumn({ name: 'USER_SESSION_SID' })
    // userSession?: Promise<UserSession>;

    @Column({name:'DATA_FILE_SID'})
    dataFileSid: number;

    @Column({name: 'CUSTOMER_SID'})
    customerSid: number;

    @Column({name:'NUM_SLIS'})
    numberOfPOSLines: number;

    @Column({name:'NUM_ILIS'})
    numberOfInventoryLines: number;

    @Column({name: 'CREATE_DATE'})
    receivedDate: Date;
    
    // specifying that submissionPeriodSid can be null
    @Column({ name: 'SUBMISSION_PERIOD_SID', nullable: true })
    submissionPeriodSid: number;

    // VirtualColumn to get DataFile information
    @VirtualColumn({ query: getDataFileIdSql})
    dataFileId?: string;

    // VirtualColumn to get DataFile information
    @VirtualColumn({ query: getDataFileNameSql})
    dataFileName?: string;

    // @VirtualColumn( {query : fileIdSql} )
    // fileId?: number[];

    // @VirtualColumn( {query : filesCountSql} )
    // numberOfFiles?: number;
}

import { Entity, Column, JoinColumn, OneToOne, VirtualColumn, OneToMany, ManyToMany, ManyToOne } from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import {
  returnsSubmissionPeriodLineItemView,
  SubmissionPeriodLineItemView
} from './SubmissionPeriodLineItemView';
import {
  reportedFlagSql,
  statusSql,
  fileIdsSql,
  filesCountSql,
  filesDataSql
} from './SubmissionPeriodSql';
import {
  returnsSubmissionSchedule,
  SubmissionSchedule
} from './SubmissionSchedule';
import { ServiceUser, returnsServiceUser } from '../user/ServiceUser';
import { DataFileSummaryInfo, returnsDataFileSummaryInfo } from '../datafile/DataFileSummaryInfo';
import { SubmissionPeriodInfoView } from './SubmissionPeriodInfoView';


export const returnsSubmissionPeriod = () => SubmissionPeriod;
export const dataFileSummaryInfoInverseSide = (dataFileSummaryInfo) =>
  dataFileSummaryInfo.submissionPeriod;

@Entity({ name: 'SUBMISSION_PERIOD' })
export class SubmissionPeriod extends CustomerDomainEntity {
  @Column({ name: 'EXPECTED_DATE' })
  expectedDate: Date;

  @Column({ name: 'PERIOD_START_DATE' })
  periodStartDate: Date;

  @Column({ name: 'PERIOD_END_DATE' })
  periodEndDate: Date;

  @Column({ name: 'SUBMISSION_SCHEDULE_SID', select: false })
  submissionScheduleSid: number;

  @Column({ name: 'ON_TIME_OVERRIDE' })
  onTimeOverride: boolean;

  @Column({ name: 'NO_DATA' })
  noData: boolean;

  @Column({ name: 'NO_DATA_REASON' })
  noDataReason: string;

  @Column({ name: 'NO_DATA_CREATE_DATE' })
  noDataCreateDate: Date;

  @Column({ name: 'NO_DATA_SERVICE_USER_SID', select: false })
  noDataServiceUserSid: number;

  @OneToOne(returnsServiceUser)
  @JoinColumn({ name: 'NO_DATA_SERVICE_USER_SID' })
  noDataServiceUser: Promise<ServiceUser>;

  @Column({ name: 'TRACKING_LEVEL' })
  trackingLevel: string;

  @Column({ name: 'EXPECTED_DAY' })
  expectedDay: number;

  @Column({ name: 'WORKING_DAYS' })
  workingDays: string;

  @Column({ name: 'IS_IN_PERIOD_REPORTER' })
  isInPeriodReporter: number;

  @Column({ name: 'DELETED' })
  deleted: boolean;

  @OneToOne(returnsSubmissionSchedule)
  @JoinColumn({ name: 'SUBMISSION_SCHEDULE_SID' })
  submissionSchedule?: Promise<SubmissionSchedule>;

  @VirtualColumn({ query: statusSql })
  status?: string;

  @VirtualColumn({ query: reportedFlagSql })
  reportedFlag: boolean;

  @VirtualColumn( {query : fileIdsSql} )
  fileIds?: string;

  firstFileId?: string;

  firstFileName?: string;

  firstFileCreateDate?: Date;

  @VirtualColumn( {query : filesCountSql} )
  numberOfFiles?: number;

  @OneToOne(returnsDataFileSummaryInfo)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodInfoView?: Promise<SubmissionPeriodInfoView>;

  @OneToMany(returnsDataFileSummaryInfo,dataFileSummaryInfoInverseSide)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  dataFileSummaryInfo?: Promise<DataFileSummaryInfo[]>;

  @OneToOne(returnsSubmissionPeriodLineItemView)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodLineItemView?: Promise<SubmissionPeriodLineItemView>;

}
import { Entity, Column, ManyToOne, JoinColumn, OneToMany, VirtualColumn} from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import { returnsSubmissionPeriod } from '../submission/SubmissionPeriod';
import { SubmissionPeriod } from '../submission/SubmissionPeriod';
import {getDataFileIdSql, getDataFileNameSql} from './DataFileSummaryInfoSql';

export const returnsDataFileSummaryInfo = () => DataFileSummaryInfo;
export const returnsSubmissionPeriods = (submissionPeriod) => submissionPeriod.filesData;

@Entity({ name: 'DATA_FILE_SUMMARY_INFO' })
export class DataFileSummaryInfo extends CustomerDomainEntity {

    @ManyToOne(returnsSubmissionPeriod, returnsSubmissionPeriods)
    @JoinColumn([
        { name: 'SUBMISSION_PERIOD_SID', referencedColumnName: 'sid' },
        { name: 'CUSTOMER_SID', referencedColumnName: 'customerSid' }
    ])
    submissionPeriod: SubmissionPeriod;

    @Column({name:'DATA_FILE_SID'})
    dataFileSid: number;

    @Column({name: 'CUSTOMER_SID'})
    customerSid: number;

    @Column({name:'NUM_SLIS'})
    numberOfPOSLines: number;

    @Column({name:'NUM_ILIS'})
    numberOfInventoryLines: number;

    @Column({name: 'CREATE_DATE'})
    receivedDate: Date;
    
    // specifying that submissionPeriodSid can be null
    @Column({ name: 'SUBMISSION_PERIOD_SID', nullable: true })
    submissionPeriodSid: number;

    // VirtualColumn to get DataFile information
    @VirtualColumn({ query: getDataFileIdSql})
    dataFileId?: string;

    // VirtualColumn to get DataFile information
    @VirtualColumn({ query: getDataFileNameSql})
    dataFileName?: string;

    // @VirtualColumn( {query : fileIdSql} )
    // fileId?: number[];

    // @VirtualColumn( {query : filesCountSql} )
    // numberOfFiles?: number;
}

import { Entity, Column, ManyToOne, JoinColumn, OneToMany, VirtualColumn} from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import { dataFileSummaryInfoInverseSide, returnsSubmissionPeriod } from '../submission/SubmissionPeriod';
import { SubmissionPeriod } from '../submission/SubmissionPeriod';
import { DataFile } from './DataFile';
import {getDataFileIdSql, getDataFileNameSql} from './DataFileSummaryInfoSql';
import {
    filesCountSql, filesDataSql,
  } from '../submission/SubmissionPeriodSql'
import { Alias } from 'typeorm/query-builder/Alias';

export const returnsDataFileSummaryInfo = () => DataFileSummaryInfo;
export const returnsSubmissionPeriods = (submissionPeriod) => submissionPeriod.filesData;

@Entity({ name: 'DATA_FILE_SUMMARY_INFO' })
export class DataFileSummaryInfo extends CustomerDomainEntity {

    @ManyToOne(returnsSubmissionPeriod)
    @JoinColumn([
        { name: 'SUBMISSION_PERIOD_SID', referencedColumnName: 'sid' },
        { name: 'CUSTOMER_SID', referencedColumnName: 'customerSid' }
    ])
    submissionPeriod: SubmissionPeriod;

    @Column({name:'DATA_FILE_SID'})
    dataFileSid: number;

    @Column({name: 'CUSTOMER_SID'})
    customerSid: number;

    @Column({name:'NUM_SLIS'})
    numberOfPOSLines: number;

    @Column({name:'NUM_ILIS'})
    numberOfInventoryLines: number;

    @Column({name: 'CREATE_DATE'})
    receivedDate: Date;
    
    // specifying that submissionPeriodSid can be null
    @Column({ name: 'SUBMISSION_PERIOD_SID', nullable: true })
    submissionPeriodSid: number;

    // VirtualColumn to get DataFile information
    @VirtualColumn({ query: getDataFileIdSql})
    dataFileId?: string;

    // VirtualColumn to get DataFile information
    @VirtualColumn({ query: getDataFileNameSql})
    dataFileName?: string;

    // @VirtualColumn( {query : fileIdSql} )
    // fileId?: number[];

    // @VirtualColumn( {query : filesCountSql} )
    // numberOfFiles?: number;
}

import { Entity, Column, JoinColumn, OneToOne, VirtualColumn, OneToMany, ManyToMany, ManyToOne } from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import {
  returnsSubmissionPeriodLineItemView,
  SubmissionPeriodLineItemView
} from './SubmissionPeriodLineItemView';
import {
  reportedFlagSql,
  statusSql,
  fileIdsSql,
  filesCountSql,
  filesDataSql
} from './SubmissionPeriodSql';
import {
  returnsSubmissionSchedule,
  SubmissionSchedule
} from './SubmissionSchedule';
import { ServiceUser, returnsServiceUser } from '../user/ServiceUser';
import { DataFileSummaryInfo, returnsDataFileSummaryInfo } from '../datafile/DataFileSummaryInfo';
import { SubmissionPeriodInfoView } from './SubmissionPeriodInfoView';


export const returnsSubmissionPeriod = () => SubmissionPeriod;
export const dataFileSummaryInfoInverseSide = (dataFileSummaryInfo) =>
  dataFileSummaryInfo.submissionPeriod;

@Entity({ name: 'SUBMISSION_PERIOD' })
export class SubmissionPeriod extends CustomerDomainEntity {
  @Column({ name: 'EXPECTED_DATE' })
  expectedDate: Date;

  @Column({ name: 'PERIOD_START_DATE' })
  periodStartDate: Date;

  @Column({ name: 'PERIOD_END_DATE' })
  periodEndDate: Date;

  @Column({ name: 'SUBMISSION_SCHEDULE_SID', select: false })
  submissionScheduleSid: number;

  @Column({ name: 'ON_TIME_OVERRIDE' })
  onTimeOverride: boolean;

  @Column({ name: 'NO_DATA' })
  noData: boolean;

  @Column({ name: 'NO_DATA_REASON' })
  noDataReason: string;

  @Column({ name: 'NO_DATA_CREATE_DATE' })
  noDataCreateDate: Date;

  @Column({ name: 'NO_DATA_SERVICE_USER_SID', select: false })
  noDataServiceUserSid: number;

  @OneToOne(returnsServiceUser)
  @JoinColumn({ name: 'NO_DATA_SERVICE_USER_SID' })
  noDataServiceUser: Promise<ServiceUser>;

  @Column({ name: 'TRACKING_LEVEL' })
  trackingLevel: string;

  @Column({ name: 'EXPECTED_DAY' })
  expectedDay: number;

  @Column({ name: 'WORKING_DAYS' })
  workingDays: string;

  @Column({ name: 'IS_IN_PERIOD_REPORTER' })
  isInPeriodReporter: number;

  @Column({ name: 'DELETED' })
  deleted: boolean;

  @OneToOne(returnsSubmissionSchedule)
  @JoinColumn({ name: 'SUBMISSION_SCHEDULE_SID' })
  submissionSchedule?: Promise<SubmissionSchedule>;

  @VirtualColumn({ query: statusSql })
  status?: string;

  @VirtualColumn({ query: reportedFlagSql })
  reportedFlag: boolean;

  @VirtualColumn( {query : fileIdsSql} )
  fileIds?: string;

  firstFileId?: string;

  firstFileName?: string;

  firstFileCreateDate?: Date;

  @VirtualColumn( {query : filesCountSql} )
  numberOfFiles?: number;

  @OneToOne(returnsDataFileSummaryInfo)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodInfoView?: Promise<SubmissionPeriodInfoView>;

  @ManyToOne(() => DataFileSummaryInfo)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  dataFileSummaryInfo?: Promise<DataFileSummaryInfo>;

  @OneToOne(returnsSubmissionPeriodLineItemView)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodLineItemView?: Promise<SubmissionPeriodLineItemView>;

}
import { Entity, Column, JoinColumn, OneToOne, VirtualColumn, OneToMany } from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import {
  returnsSubmissionPeriodLineItemView,
  SubmissionPeriodLineItemView
} from './SubmissionPeriodLineItemView';
import {
  reportedFlagSql,
  statusSql,
  fileIdsSql,
  filesCountSql,
} from './SubmissionPeriodSql';
import {
  returnsSubmissionSchedule,
  SubmissionSchedule
} from './SubmissionSchedule';
import { ServiceUser, returnsServiceUser } from '../user/ServiceUser';
import { DataFileSummaryInfo, returnsDataFileSummaryInfo } from '../datafile/DataFileSummaryInfo';
import { SubmissionPeriodInfoView } from './SubmissionPeriodInfoView';


export const returnsSubmissionPeriod = () => SubmissionPeriod;
export const dataFileSummaryInfoInverseSide = (dataFileSummaryInfo) =>
  dataFileSummaryInfo.submissionPeriod;

@Entity({ name: 'SUBMISSION_PERIOD' })
export class SubmissionPeriod extends CustomerDomainEntity {
  @Column({ name: 'EXPECTED_DATE' })
  expectedDate: Date;

  @Column({ name: 'PERIOD_START_DATE' })
  periodStartDate: Date;

  @Column({ name: 'PERIOD_END_DATE' })
  periodEndDate: Date;

  @Column({ name: 'SUBMISSION_SCHEDULE_SID', select: false })
  submissionScheduleSid: number;

  @Column({ name: 'ON_TIME_OVERRIDE' })
  onTimeOverride: boolean;

  @Column({ name: 'NO_DATA' })
  noData: boolean;

  @Column({ name: 'NO_DATA_REASON' })
  noDataReason: string;

  @Column({ name: 'NO_DATA_CREATE_DATE' })
  noDataCreateDate: Date;

  @Column({ name: 'NO_DATA_SERVICE_USER_SID', select: false })
  noDataServiceUserSid: number;

  @OneToOne(returnsServiceUser)
  @JoinColumn({ name: 'NO_DATA_SERVICE_USER_SID' })
  noDataServiceUser: Promise<ServiceUser>;

  @Column({ name: 'TRACKING_LEVEL' })
  trackingLevel: string;

  @Column({ name: 'EXPECTED_DAY' })
  expectedDay: number;

  @Column({ name: 'WORKING_DAYS' })
  workingDays: string;

  @Column({ name: 'IS_IN_PERIOD_REPORTER' })
  isInPeriodReporter: number;

  @Column({ name: 'DELETED' })
  deleted: boolean;

  @OneToOne(returnsSubmissionSchedule)
  @JoinColumn({ name: 'SUBMISSION_SCHEDULE_SID' })
  submissionSchedule?: Promise<SubmissionSchedule>;

  @VirtualColumn({ query: statusSql })
  status?: string;

  @VirtualColumn({ query: reportedFlagSql })
  reportedFlag: boolean;

  @VirtualColumn( {query : fileIdsSql} )
  fileIds?: string;

  firstFileId?: string;

  firstFileName?: string;

  firstFileCreateDate?: Date;

  @VirtualColumn( {query : filesCountSql} )
  numberOfFiles?: number;

  @OneToOne(returnsDataFileSummaryInfo)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodInfoView?: Promise<SubmissionPeriodInfoView>;

  @OneToMany(() => DataFileSummaryInfo, dataFileSummaryInfo => dataFileSummaryInfo.submissionPeriod)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  dataFileSummaryInfo?: DataFileSummaryInfo[];

  @OneToOne(returnsSubmissionPeriodLineItemView)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodLineItemView?: Promise<SubmissionPeriodLineItemView>;

}
import { Entity, Column, ManyToOne, JoinColumn, VirtualColumn} from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import { SubmissionPeriod, returnsSubmissionPeriod } from '../submission/SubmissionPeriod';
import {getDataFileIdSql, getDataFileNameSql} from './DataFileSummaryInfoSql';

export const returnsDataFileSummaryInfo = () => DataFileSummaryInfo;
export const returnsInverseSubmissionPeriods = (submissionPeriod) => submissionPeriod.dataFileSummaryInfo;

@Entity({ name: 'DATA_FILE_SUMMARY_INFO' })
export class DataFileSummaryInfo extends CustomerDomainEntity {

    @ManyToOne(() => SubmissionPeriod)
    @JoinColumn({ name: 'SUBMISSION_PERIOD_SID', referencedColumnName: 'sid' })
    submissionPeriod: SubmissionPeriod;

    @Column({name:'DATA_FILE_SID'})
    dataFileSid: number;

    @Column({name: 'CUSTOMER_SID'})
    customerSid: number;

    @Column({name:'NUM_SLIS'})
    numberOfPOSLines: number;

    @Column({name:'NUM_ILIS'})
    numberOfInventoryLines: number;

    @Column({name: 'CREATE_DATE'})
    receivedDate: Date;
    
    // specifying that submissionPeriodSid can be null
    @Column({ name: 'SUBMISSION_PERIOD_SID', nullable: true })
    submissionPeriodSid: number;

    // VirtualColumn to get DataFile information
    @VirtualColumn({ query: getDataFileIdSql})
    dataFileId?: string;

    // VirtualColumn to get DataFile information
    @VirtualColumn({ query: getDataFileNameSql})
    dataFileName?: string;

    // @VirtualColumn( {query : fileIdSql} )
    // fileId?: number[];

    // @VirtualColumn( {query : filesCountSql} )
    // numberOfFiles?: number;
}
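A corresponding sketch from the child side of the relation: for the @ManyToOne variant above, the parent SUBMISSION_PERIOD row can be joined in the same find() call. The dataSource instance and the periodSid argument are assumptions for illustration.

import { DataSource } from 'typeorm';

// Hedged sketch: summary rows for one period, joined with their parent submissionPeriod.
export async function summariesForPeriod(dataSource: DataSource, periodSid: number) {
  return dataSource.getRepository(DataFileSummaryInfo).find({
    where: { submissionPeriodSid: periodSid },
    relations: { submissionPeriod: true },
    order: { receivedDate: 'DESC' }
  });
}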

select * from information_schema.columns 
where table_name = 'table1' and column_name like 'a%'
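The same metadata lookup can be issued from application code through TypeORM's raw-query API; the $1/$2 placeholders assume a Postgres driver (other drivers use ?), and the dataSource instance is an assumption.

import { DataSource } from 'typeorm';

// Hedged sketch: parameterized form of the information_schema query above.
export async function columnsLike(dataSource: DataSource, table: string, prefix: string) {
  return dataSource.query(
    `select column_name, data_type
       from information_schema.columns
      where table_name = $1
        and column_name like $2`,
    [table, `${prefix}%`]
  );
}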
version: "1.0"

name: ChannelNetwork

# All actions

actions:
  - name: VIEW
  - name: UPDATE

# All resources

resources:
  # UI Resource for access to kpis tab
  - name: ProductKpiTab
    actions: [VIEW]

    ### Begin productKpi fields

  - name: ProductKpi
    group: productKpi
    actions: [VIEW]

  - name: ProductKpi/*
    group: productKpi
    actions: [VIEW]

    ### End productKpi fields

    ### Begin salesKpi fields

  - name: SalesKpi
    group: salesKpi
    actions: [VIEW]

  - name: SalesKpi/*
    group: salesKpi
    actions: [VIEW]

    ### End salesKpi fields

    ### Begin inventoryKpi fields

  - name: InventoryKpi
    group: inventoryKpi
    actions: [VIEW]

  - name: InventoryKpi/*
    group: inventoryKpi
    actions: [VIEW]

    ### End inventoryKpi fields

    ### Begin filesKpi fields

  - name: FilesKpi
    group: filesKpi
    actions: [VIEW]

  - name: FilesKpi/*
    group: filesKpi
    actions: [VIEW]

    ### End filesKpi fields

    ### Begin ssKpi fields

  - name: SSKpi
    group: ssKpi
    actions: [VIEW]

  - name: SSKpi/*
    group: ssKpi
    actions: [VIEW]

    ### End ssKpi fields


    # UI Resource for access to Products tab
  - name: ProductTab
    actions: [VIEW, UPDATE]

    ### Begin Product Fields

  - name: Product
    group: product
    actions: [VIEW, UPDATE]

  - name: Product/sid
    group: product
    actions: [VIEW]

  - name: Product/createDate
    group: product
    actions: [VIEW]

  - name: Product/updateDate
    group: product
    actions: [VIEW]

  - name: Product/customerSid
    group: product-internal
    actions: [VIEW]

  - name: Product/sku
    group: product
    actions: [VIEW]

  - name: Product/name
    group: product
    actions: [VIEW]

  - name: Product/description
    group: product
    actions: [VIEW]

  - name: Product/productFamily
    group: product
    actions: [VIEW]

  - name: Product/productLine
    group: product
    actions: [VIEW]

  - name: Product/startDate
    group: product
    actions: [VIEW]

  - name: Product/endDate
    group: product
    actions: [VIEW]

  - name: Product/serialized
    group: product-internal
    actions: [VIEW]

  - name: Product/aggregation
    group: product-pos-aggr
    actions: [VIEW]

  - name: Product/aggregation/totalSalesLineCount
    group: product-pos-aggr
    actions: [VIEW]

  - name: Product/aggregation/totalSalesQuantity
    group: product-pos-aggr
    actions: [VIEW]

  - name: Product/aggregation/oldestInvoiceDate
    group: product-pos-aggr
    actions: [VIEW]

    ## Begin Product Dynamic Attrs


  - name: Product/dynamicAttrs
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/sid
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/updateDate
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/createDate
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/attributeType
    group: product-dynamicAttrs-internal
    actions: [VIEW]
  - name: Product/dynamicAttrs/STRING_COL_1
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_2
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_3
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_4
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_5
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_6
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_7
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_8
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_9
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_10
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_11
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_12
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_13
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_14
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_15
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_16
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_17
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_18
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_19
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_20
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_21
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_22
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_23
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_24
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_25
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_26
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_27
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_28
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_29
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/STRING_COL_30
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]

  - name: Product/dynamicAttrs/NUM_COL_1
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_2
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_3
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_4
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_5
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_6
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_7
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_8
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_9
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
  - name: Product/dynamicAttrs/NUM_COL_10
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]

  - name: Product/dynamicAttrs/DATE_COL_1
    group: product-dynamicAttrs
    actions: [VIEW, UPDATE]
    ## End Product Dynamic Attrs

    ### End Product Fields

    # UI Resource for access to POS tab
  - name: SalesTab
    actions: [VIEW, UPDATE]

    ### Begin POS Fields

  - name: Sales
    group: sales
    actions: [VIEW, UPDATE]

  - name: Sales/sid
    group: sales
    actions: [VIEW]

  - name: Sales/createDate
    group: sales
    actions: [VIEW]

  - name: Sales/updateDate
    group: sales
    actions: [VIEW]

  - name: Sales/customerSid
    group: sales-internal
    actions: [VIEW]

  - name: Sales/deleted
    group: sales-internal
    actions: [VIEW]

  - name: Sales/branchId
    group: sales
    actions: [VIEW]

  - name: Sales/invoiceNumber
    group: sales
    actions: [VIEW]

  - name: Sales/invoiceDate
    group: sales
    actions: [VIEW]

  - name: Sales/quantity
    group: sales
    actions: [VIEW]

  - name: Sales/reportedSku
    group: sales
    actions: [VIEW, UPDATE]

  - name: Sales/productDescription
    group: sales
    actions: [VIEW]

  - name: Sales/transactionId
    group: sales-internal
    actions: [VIEW]

  - name: Sales/vendorPartNumber
    group: sales
    actions: [VIEW]

  - name: Sales/accountRepresentative
    group: sales
    actions: [VIEW]

  - name: Sales/acquisitionExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/acquisitionUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/boolExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/bookUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/customerOrderNumber
    group: sales
    actions: [VIEW]

  - name: Sales/debitExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/debitUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/designRegistrationNumber
    group: sales
    actions: [VIEW]

  - name: Sales/distributorId
    group: sales
    actions: [VIEW]

  - name: Sales/distributorName
    group: sales
    actions: [VIEW]

  - name: Sales/distributorShipmentNumber
    group: sales
    actions: [VIEW]

  - name: Sales/distributorWarehouseId
    group: sales
    actions: [VIEW]

  - name: Sales/exchangeDate
    group: sales
    actions: [VIEW]

  - name: Sales/exchangeRate
    group: sales
    actions: [VIEW]

  - name: Sales/globalProductClassCode
    group: sales
    actions: [VIEW]

  - name: Sales/legacySalesRecordId
    group: sales
    actions: [VIEW]

  - name: Sales/lengthOfProduction
    group: sales
    actions: [VIEW]

  - name: Sales/manufactureId
    group: sales
    actions: [VIEW]

  - name: Sales/manufactureName
    group: sales
    actions: [VIEW]

  - name: Sales/manufacturerShipmentNumber
    group: sales
    actions: [VIEW]

  - name: Sales/orderNumber
    group: sales
    actions: [VIEW]

  - name: Sales/originalId
    group: sales
    actions: [VIEW]

  - name: Sales/price
    group: sales
    actions: [VIEW]

  - name: Sales/purchaseOrderNumber
    group: sales
    actions: [VIEW]

  - name: Sales/r2rDuplicateType
    group: sales
    actions: [VIEW]

  - name: Sales/regionTerritory
    group: sales
    actions: [VIEW]

  - name: Sales/reportEndingDate
    group: sales
    actions: [VIEW]

  - name: Sales/reportType
    group: sales
    actions: [VIEW]

  - name: Sales/resaleExtendedPrice
    group: sales
    actions: [VIEW]

  - name: Sales/resaeExtension
    group: sales
    actions: [VIEW]

  - name: Sales/resaleUnitPrice
    group: sales
    actions: [VIEW]

  - name: Sales/resubmitted
    group: sales
    actions: [VIEW]

  - name: Sales/reportedProductFamily
    group: sales
    actions: [VIEW]

  - name: Sales/reportedProductLine
    group: sales
    actions: [VIEW]

  - name: Sales/reportedProductName
    group: sales
    actions: [VIEW]

  - name: Sales/shipDate
    group: sales
    actions: [VIEW]

  - name: Sales/shipDebitNumber
    group: sales
    actions: [VIEW]

  - name: Sales/shippingMethod
    group: sales
    actions: [VIEW]

  - name: Sales/spaNumber
    group: sales
    actions: [VIEW]

  - name: Sales/tier
    group: sales
    actions: [VIEW]

  - name: Sales/transactionType
    group: sales
    actions: [VIEW]

  - name: Sales/unitOfMeasure
    group: sales
    actions: [VIEW]

  - name: Sales/vendorPartDescription
    group: sales
    actions: [VIEW]

  - name: Sales/validationCodes
    group: sales
    actions: [VIEW]

  - name: Sales/serialNumbers
    group: sales
    actions: [VIEW]

    # Bill to address


  - name: Sales/billToAddress
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/billToAddress/entityName
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/street1
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/street2
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/city
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/stateProvince
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/postalCode
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/reportedCountry
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country/name
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country/twoCharCode
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddress/country/threeCharCode
    group: sales-bill-to
    actions: [VIEW]

  - name: Sales/billToAddressExternalId
    group: sales-bill-to
    actions: [VIEW]

    # sold to address

  - name: Sales/soldToAddress
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/soldToAddress/entityName
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/street1
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/street2
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/city
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/stateProvince
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/postalCode
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/reportedCountry
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country/name
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country/twoCharCode
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddress/country/threeCharCode
    group: sales-sold-to
    actions: [VIEW]

  - name: Sales/soldToAddressExternalId
    group: sales-sold-to
    actions: [VIEW]

    # ship to address

  - name: Sales/shipToAddress
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/shipToAddress/entityName
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/street1
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/street2
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/city
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/stateProvince
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/postalCode
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/reportedCountry
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country/name
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country/twoCharCode
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddress/country/threeCharCode
    group: sales-ship-to
    actions: [VIEW]

  - name: Sales/shipToAddressExternalId
    group: sales-ship-to
    actions: [VIEW]

    # sell from address

  - name: Sales/sellFromAddress
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/sellFromAddress/entityName
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/street1
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/street2
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/city
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/stateProvince
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/postalCode
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/reportedCountry
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country/name
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country/twoCharCode
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddress/country/threeCharCode
    group: sales-sell-from
    actions: [VIEW]

  - name: Sales/sellFromAddressExternalId
    group: sales-sell-from
    actions: [VIEW]

    # ship from address

  - name: Sales/shipFromAddress
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/shipFromAddress/entityName
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/street1
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/street2
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/city
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/stateProvince
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/postalCode
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/reportedCountry
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country/name
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country/twoCharCode
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddress/country/threeCharCode
    group: sales-ship-from
    actions: [VIEW]

  - name: Sales/shipFromAddressExternalId
    group: sales-ship-from
    actions: [VIEW]

    # sales in address

  - name: Sales/salesInAddress
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/salesInAddress/entityName
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/street1
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/street2
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/city
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/stateProvince
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/postalCode
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/reportedCountry
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country/name
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country/twoCharCode
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddress/country/threeCharCode
    group: sales-sales-in
    actions: [VIEW]

  - name: Sales/salesInAddressExternalId
    group: sales-sales-in
    actions: [VIEW]

    # purchasing customer address

  - name: Sales/purchasingCustomerAddress
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/entityName
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/street1
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/street2
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/city
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/stateProvince
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/postalCode
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/reportedCountry
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country/name
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country/twoCharCode
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerAddress/country/threeCharCode
    group: sales-purchasing-customer
    actions: [VIEW]

  - name: Sales/purchasingCustomerExternalId
    group: sales-purchasing-customer
    actions: [VIEW]

    # derived end customer address

  - name: Sales/derivedEndCustomerAddress
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/entityName
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/street1
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/street2
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/city
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/stateProvince
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/postalCode
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/reportedCountry
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country/name
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country/twoCharCode
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddress/country/threeCharCode
    group: sales-derived-end-customer
    actions: [VIEW]

  - name: Sales/derivedEndCustomerAddressExternalId
    group: sales-derived-end-customer
    actions: [VIEW]

    # data file

  - name: Sales/dataFile
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/dataFile/loadDate
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/reportDate
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/id
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/fileName
    group: sales-data-file
    actions: [VIEW]

  - name: Sales/dataFile/recordCount
    group: sales-data-file
    actions: [VIEW]

    # match info

  - name: Sales/productMatchInfo
    group: sales-product-match-info
    actions: [VIEW]

  - name: Sales/productMatchInfo/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/productMatchInfo/matchedProduct
    group: sales-product-match-info
    actions: [VIEW]

  - name: Sales/productMatchInfo/matchedProduct/sku
    group: sales-product-match-info
    actions: [VIEW]

    # reporting partner

  - name: Sales/reportingPartner
    group: sales-reporting-partner
    actions: [VIEW]

  - name: Sales/reportingPartner/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/reportingPartner/gsNumbers
    group: sales-reporting-partner
    actions: [VIEW]

  - name: Sales/reportingPartner/gsNumbers/value
    group: sales-reporting-partner
    actions: [VIEW]

    # currency

  - name: Sales/currency
    group: sales-currency
    actions: [VIEW]

  - name: Sales/currency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/currency/name
    group: sales-currency
    actions: [VIEW]

    # resale currency

  - name: Sales/resaleCurrency
    group: sales-resale-currency
    actions: [VIEW]

  - name: Sales/resaleCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/resaleCurrency/name
    group: sales-resale-currency
    actions: [VIEW]

    # debit currency

  - name: Sales/debtCurrency
    group: sales-debit-currency
    actions: [VIEW]

  - name: Sales/debtCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/debtCurrency/name
    group: sales-debit-currency
    actions: [VIEW]

    # book currency

  - name: Sales/bookCurrency
    group: sales-book-currency
    actions: [VIEW]

  - name: Sales/bookCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/bookCurrency/name
    group: sales-book-currency
    actions: [VIEW]

    # acquisition currency

  - name: Sales/acquisitionCurrency
    group: sales-acquisition-currency
    actions: [VIEW]

  - name: Sales/acquisitionCurrency/*
    group: sales-internal
    actions: [VIEW]

  - name: Sales/acquisitionCurrency/name
    group: sales-acquisition-currency
    actions: [VIEW]

    ## Begin POS Dynamic Attrs

  - name: Sales/dynamicAttrs
    group: sales-dynamic-attrs
    actions: [VIEW, UPDATE]

  - name: Sales/dynamicAttrs/*
    group: sales-dynamic-attrs
    actions: [VIEW, UPDATE]

  - name: Sales/dynamicAttrs/STRING_COL_1
    group: sales-dynamic-attrs
    actions: [VIEW, UPDATE]

    ## End POS Dynamic Attrs

    ### End POS Fields

    # UI Resource for access to INV tab
  - name: InventoryTab
    actions: [VIEW, UPDATE]

    ### Begin INV Fields

  - name: Inventory
    group: inventory
    actions: [VIEW, UPDATE]

  - name: Inventory/sid
    group: inventory
    actions: [VIEW]

  - name: Inventory/createDate
    group: inventory
    actions: [VIEW]

  - name: Inventory/updateDate
    group: inventory
    actions: [VIEW]

  - name: Inventory/customerSid
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/deleted
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/productName
    group: inventory
    actions: [VIEW]

  - name: Inventory/clientDescription
    group: inventory
    actions: [VIEW]

  - name: Inventory/clientSku
    group: inventory
    actions: [VIEW, UPDATE]

  - name: Inventory/reportedSku
    group: inventory
    actions: [VIEW]

  - name: Inventory/inventoryDate
    group: inventory
    actions: [VIEW]

  - name: Inventory/unitOfMeasure
    group: inventory
    actions: [VIEW]

  - name: Inventory/id
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/originalId
    group: inventory
    actions: [VIEW]

  - name: Inventory/lineNumber
    group: inventory-internal
    actions: [VIEW]

    # data file

  - name: Inventory/dataFile
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/dataFile/loadDate
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/reportDate
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/id
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/fileName
    group: inventory-data-file
    actions: [VIEW]

  - name: Inventory/dataFile/recordCount
    group: inventory-data-file
    actions: [VIEW]

    # reporting partner

  - name: Inventory/reportingPartner
    group: inventory-reporting-partner
    actions: [VIEW]

  - name: Inventory/reportingPartner/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/reportingPartner/gsNumbers
    group: inventory-reporting-partner
    actions: [VIEW]

  - name: Inventory/reportingPartner/gsNumbers/value
    group: inventory-reporting-partner
    actions: [VIEW]

    # submission period

  - name: Inventory/submissionPeriod
    group: inventory-submission-period
    actions: [VIEW]

  - name: Inventory/submissionPeriod/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/submissionPeriod/expectedDate
    group: inventory-submission-period
    actions: [VIEW]

  - name: Inventory/submissionPeriod/periodStartDate
    group: inventory-submission-period
    actions: [VIEW]

  - name: Inventory/submissionPeriod/periodEndDate
    group: inventory-submission-period
    actions: [VIEW]

    # quantities

  - name: Inventory/inventoryQuantities
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/inventoryQuantities/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/onHandQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/onHandQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/onHandQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/onOrderQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/onOrderQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/onOrderQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/committedQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/committedQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/committedQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/floatQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/floatQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/floatQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/backorderedQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/backorderedQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/backorderedQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/returnedQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/returnedQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/returnedQuantity/value
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/inTransitQuantity
    group: inventory-quantity
    actions: [VIEW]

  - name: Inventory/inTransitQuantity/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/inTransitQuantity/value
    group: inventory-quantity
    actions: [VIEW]

    # prices

  - name: Inventory/inventoryPrices
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/inventoryPrices/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/unitPrice
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/*
    group: inventory-internal
    actions: [VIEW]

  - name: Inventory/unitPrice/price
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/fromCurrency
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/fromCurrency/name
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/toCurrency
    group: inventory-price
    actions: [VIEW]

  - name: Inventory/unitPrice/toCurrency/name
    group: inventory-price
    actions: [VIEW]

    ## Begin INV Dynamic Attrs

  - name: Inventory/dynamicAttrs
    group: inventory-dynamic-attrs
    actions: [VIEW]

  - name: Inventory/dynamicAttrs/*
    group: inventory-dynamic-attrs
    actions: [VIEW]

    ## End INV Dynamic Attrs

    ### End INV Fields

  # UI Resource for access to File Tab
  - name: FilesTab
    actions: [VIEW, UPDATE]

  ## Begin Partner

  - name: Partner
    group: reporting-partner
    actions: [VIEW]

  - name: Partner/*
    group: reporting-partner
    actions: [VIEW]

  ## End Partner

  # Begin File fields
  - name: DataFile
    group: file-management
    actions: [VIEW, UPDATE]

  - name: DataFile/sid
    group: file-management
    actions: [VIEW]

  - name: DataFile/loadDate
    group: file-management
    actions: [VIEW]

  - name: DataFile/reportDate
    group: file-management
    actions: [VIEW]

  - name: DataFile/fileName
    group: file-management
    actions: [VIEW]

  - name: DataFile/fileType
    group: file-management
    actions: [VIEW]

  - name: DataFile/dataType
    group: file-management
    actions: [VIEW]

  - name: DataFile/id
    group: file-management
    actions: [VIEW]

  - name: DataFile/fileSize
    group: file-management
    actions: [VIEW]

  - name: DataFile/source
    group: file-management
    actions: [VIEW]

  - name: DataFile/recordCount
    group: file-management
    actions: [VIEW]

  - name: DataFile/deletedLines
    group: file-management
    actions: [VIEW]

  - name: DataFile/download
    group: file-download
    actions: [VIEW]

  - name: DataFile/validationDownload
    group: file-validation-download
    actions: [VIEW]

  - name: DataFile/upload
    group: file-upload
    actions: [VIEW]

  - name: DataFile/uploadDataTypes
    group: file-upload
    actions: [VIEW]

  - name: DataFile/uploadFileTypes
    group: file-upload
    actions: [VIEW]

  - name: DataFile/reportingPartner
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/reportingPartner/*
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/reportingPartner/partnerOverlayView
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/reportingPartner/partnerOverlayView/*
    group: file-reporting-partner
    actions: [VIEW]

  - name: DataFile/dataFileState
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/*
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/sid
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/createDate
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/dataFileState/updateDate
    group: file-data-file-state
    actions: [VIEW]

  - name: DataFile/parserAttempt
    group: file-parser-attempt
    actions: [VIEW]

  - name: DataFile/parserAttempt/*
    group: file-parser-attempt
    actions: [VIEW]

  # End of File fields
  
  # UI Resource for access to Submission Schedule
  - name: SubmissionTrackingTab
    actions: [VIEW, UPDATE]

  - name: SubmissionResultsTab
    actions: [VIEW, UPDATE]

  # Begin Submission Schedule 
  - name: SubmissionSchedule
    group: submission-schedule
    actions: [VIEW, UPDATE]

  - name: SubmissionSchedule/sid
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/createDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/updateDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/name
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner/*
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner/partnerOverlayView
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/reportingPartner/partnerOverlayView/*
    group: ss-reporting-partner
    actions: [VIEW]

  - name: SubmissionSchedule/dataType
    group: ss-data-type
    actions: [VIEW]

  - name: SubmissionSchedule/dataType/*
    group: ss-data-type
    actions: [VIEW]

  - name: SubmissionSchedule/periodRule
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/expectedDay
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/startDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/endDate
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/isInPeriodReporter
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/weekOfMonth
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/monthOfQuarter
    group: submission-schedule
    actions: [VIEW]

  - name: SubmissionSchedule/workingDays
    group: submission-schedule
    actions: [VIEW]

  # End Submission Schedule 

  # Begin Submission Schedule Notification
  - name: SubmissionScheduleNotification
    group: submission-schedule-notification
    actions: [VIEW]
  
  - name: SubmissionScheduleNotification/*
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/notificationType
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/notificationType/*
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/serviceUser
    group: submission-schedule-notification
    actions: [VIEW]

  - name: SubmissionScheduleNotification/serviceUser/*
    group: submission-schedule-notification
    actions: [VIEW]

  # END Submission Schedule Notification

  # Begin Submission Period
  - name: SubmissionPeriod
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/sid
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/expectedDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/periodStartDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/periodEndDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/createDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/updateDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/noData
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/noDataReason
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/noDataCreateDate
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/onTimeOverride
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/expectedDay
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/workingDays
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/isInPeriodReporter
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/trackingLevel
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/status
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/reportedFlag
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/fileIds
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/firstFileName
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/firstFileCreateDate
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/firstFileId
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/deleted
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser
    group: submission-period-update
    actions: [VIEW, UPDATE]

  - name: SubmissionPeriod/noDataServiceUser/sid
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser/firstName
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser/lastName
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/noDataServiceUser/email
    group: sp-no-data-user
    actions: [VIEW]

  - name: SubmissionPeriod/submissionPeriodLineItemView
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/submissionPeriodLineItemView/*
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/dataFileSummaryInfo
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/dataFileSummaryInfo/*
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule/sid
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule/name
    group: submission-period
    actions: [VIEW]
    
  - name: SubmissionPeriod/submissionSchedule/periodRule
    group: submission-period
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner/*
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner/partnerOverlayView
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/reportingPartner/partnerOverlayView/*
    group: sp-reporting-partner
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/dataType
    group: sp-data-type
    actions: [VIEW]

  - name: SubmissionPeriod/submissionSchedule/dataType/*
    group: sp-data-type
    actions: [VIEW]

  # End Submission Period

  # Begin Export Request

  - name: ExportRequest
    group: export
    actions: [VIEW, UPDATE]

  - name: ExportRequest/*
    group: export
    actions: [VIEW, UPDATE]

  # End Export Request

  ### Begin Base resources

  - name: About
    group: about
    actions: [VIEW]

  - name: About/*
    group: about
    actions: [VIEW]

  - name: UserEvent
    group: user-event
    actions: [VIEW, UPDATE]

  - name: UserEvent/*
    group: user-event
    actions: [VIEW, UPDATE]

  - name: ObjectLock
    group: object-lock
    actions: [VIEW, UPDATE]

  - name: ObjectLock/*
    group: object-lock
    actions: [VIEW, UPDATE]

  - name: ObjectLockResponse
    group: object-lock-response
    actions: [VIEW]

  - name: ObjectLockResponse/*
    group: object-lock-response
    actions: [VIEW]

  - name: MutationResponse
    group: mutation-response
    actions: [VIEW]

  - name: MutationResponse/*
    group: mutation-response
    actions: [VIEW]

  - name: DynamicAttrMetadata
    group: attr-metadata
    actions: [VIEW]

  - name: DynamicAttrMetadata/*
    group: attr-metadata
    actions: [VIEW]
    

  ### End Base resources

  ### Begin mutation resources

  - name: Product/mutation/*
    group: product-update
    actions: [UPDATE]

  - name: Sales/mutation/*
    group: sales-update
    actions: [UPDATE]

  - name: Inventory/mutation/*
    group: inventory-update
    actions: [UPDATE]

  - name: ExportRequest/mutation/*
    group: export
    actions: [UPDATE]

  - name: SubmissionPeriod/mutation/*
    group: submission-period-mutation
    actions: [UPDATE]

    ### End mutation resources

    ### Begin Customer Resources

  - name: INT
    actions: [VIEW]

  - name: ACS
    actions: [VIEW]

  - name: CAMB
    actions: [VIEW]

  - name: CYBERDYNE
    actions: [VIEW]

  - name: COR
    actions: [VIEW]

  - name: INT_CCD
    actions: [VIEW]

  - name: ACS_CCD
    actions: [VIEW]

  - name: CAMB_CCD
    actions: [VIEW]

  - name: QCOM_CCD
    actions: [VIEW]

  - name: COR_CCD
    actions: [VIEW]

    ### End Customer Resources

# Common permissions for all tenants
permissions:

  - name: BasicViewUpdate
    displayName: Background Permissions
    description: Every User needs this permission
    resource-actions:
      - about:[VIEW]
      - user-event:[VIEW, UPDATE]
      - object-lock:[VIEW, UPDATE]
      - object-lock-response:[VIEW]
      - mutation-response:[VIEW]
      - attr-metadata:[VIEW]

  - name: KpiView
    displayName: KPI Tab
    description: KPI Permission Set
    resource-actions:
      - ProductKpiTab:[VIEW]

  - name: SSKpiFields
    displayName: Submission KPI Fields
    description: Submission KPI Fields Permission Set
    resource-actions:
      - ssKpi:[VIEW]

  - name: FilesKpiFields
    displayName: Files KPI Fields
    description: Files KPI Fields Permission Set
    resource-actions:
      - filesKpi:[VIEW]

  - name: FileTab
    displayName: File Tab
    description: File Permission Set
    resource-actions:
      - FilesTab:[VIEW]

  - name: FileUploadPartner
    displayName: File Upload for Partner
    description: File Upload Permission Set
    resource-actions:
      - DataFile:[VIEW, UPDATE] 
      - file-upload:[VIEW]     

  - name: FileUpload
    displayName: File Upload 
    description: File Upload Permission Set
    resource-actions:
      - DataFile:[VIEW]
      - file-upload:[VIEW]
      - reporting-partner:[VIEW]

  - name: FileDownload
    displayName: File Download
    description: File Download Permission Set
    resource-actions:
      - DataFile:[VIEW]
      - file-download:[VIEW]
      - file-validation-download:[VIEW]

  - name: SubmissionTab
    displayName: Submission Tab
    description: Submission Permission Set
    resource-actions:
      - SubmissionTrackingTab:[VIEW]
      - SubmissionResultsTab:[VIEW]

  - name: FileManufactureView
    displayName: File Admin View
    description: File Admin Permission Set
    resource-actions:
      - file-management:[VIEW]
      - file-reporting-partner:[VIEW]
      - file-data-file-state:[VIEW]
      - file-parser-attempt:[VIEW]

  - name: FilePartnerView
    displayName: File Partner View
    description: File Partner Permission Set
    resource-actions:
      - file-management:[VIEW]
      - file-data-file-state:[VIEW]
      - file-parser-attempt:[VIEW]

  - name: SubmissionManufactureView
    displayName: Submission Admin View
    description: Submission Full Permission Set
    resource-actions:
      - submission-schedule:[VIEW]
      - submission-schedule-notification:[VIEW]
      - ss-reporting-partner:[VIEW]
      - ss-data-type:[VIEW]
      - submission-period:[VIEW]
      - sp-reporting-partner:[VIEW]
      - sp-data-type:[VIEW]
      - sp-no-data-user:[VIEW]
      - submission-period-update:[VIEW]

  - name: SubmissionPartnerView
    displayName: Submission Partner View
    description: Submission Partner Permission Set
    resource-actions:
      - submission-schedule:[VIEW]
      - submission-schedule-notification:[VIEW]
      - ss-data-type:[VIEW]
      - submission-period:[VIEW]
      - sp-data-type:[VIEW]
      - sp-no-data-user:[VIEW]
      - submission-period-update:[VIEW]

  - name: SubmissionPartnerUpdate
    displayName: Submission Partner Update
    description: Submission Update Permission Set
    resource-actions:
      - submission-period-update:[UPDATE]
      - submission-period-mutation:[UPDATE]

  - name: SubmissionUpdate
    displayName: Submission Update 
    description: Submission Update Permission Set
    resource-actions:
      - submission-period-update:[UPDATE]
      - submission-period-mutation:[UPDATE]
export const SubmissionDef = `
  type Query {
    submissionPeriods(
      offset: Float, 
      limit: Float,
      filters: SubmissionPeriodFilters,
      sort: SubmissionPeriodSort
    ): [SubmissionPeriod]
    submissionSchedules(
      offset: Float, 
      limit: Float,
      filters: SubmissionScheduleFilters, 
      sort: SubmissionScheduleSort
    ): [SubmissionSchedule]
    submissionScheduleNotifications(
      submissionScheduleSid: ID,
      offset: Float,
      limit: Float,
      filters: SubmissionScheduleNotificationFilters,
      sort: SubmissionScheduleNotificationSort
    ): [SubmissionScheduleNotification]
  }

  type Mutation {
    markNoData(data: [NoDataInput]): [MutationResponse] @auth(object: SubmissionPeriod)
  }

  type SubmissionPeriod {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    expectedDate: Date
    periodStartDate: Date
    periodEndDate: Date
    noData: Boolean
    noDataReason: String
    noDataCreateDate: Date
    onTimeOverride: Boolean
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Float
    trackingLevel: String
    submissionSchedule: SubmissionSchedule
    status: String
    reportedFlag: Boolean
    numberOfFiles: Float
    dataFileSummaryInfo: DataFileSummaryInfo
    submissionPeriodInfoView: SubmissionPeriodInfoView
    submissionPeriodLineItemView: SubmissionPeriodLineItemView
    noDataServiceUser: ServiceUser
  }

  type SubmissionPeriodLineItemView {
    salesLineItemCount: Float
    invLineItemCount: Float
    earliestFileSubmissionDate: Date
  }

  type SubmissionPeriodInfoView {
    numberOfInventoryLines: Float
    numberOfPOSLines: Float
    fileName: String
    fileId: Float
  }

  type DataFileSummaryInfo {
    numberOfPOSLines: Float
    numberOfInventoryLines: Float
    submissionPeriodSid: Float
  }
  
  type SubmissionSchedule {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    dataType: DataType    
    reportingPartner: Partner
    periodRule: String
    name: String
    startDate: Date
    endDate: Date
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Boolean
    weekOfMonth: Float
    monthOfQuarter: Float
  }

  type DataType {
    sid: ID
    createDate: Date
    updateDate: Date
    type: String
  }

  type SubmissionScheduleNotification {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    submissionScheduleSid: ID
    notificationType: NotificationType
    serviceUser: ServiceUser
  }

  enum NotificationType {
    PARSE_SUCCESS
    LATE
    PARSE_FAIL
    EXPECTED
  }

  input SubmissionPeriodFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    expectedDate: DateFilter
    periodStartDate: DateFilter
    periodEndDate: DateFilter
    submissionSchedule: SubmissionScheduleFilters
    status: StringFilter
    reportedFlag: BooleanFilter
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewFilter
    numberOfFiles: NumberFilter
  }
  
  input SubmissionPeriodLineItemViewFilter {
    salesLineItemCount: NumberFilter
    invLineItemCount: NumberFilter
    earliestFileSubmissionDate: DateFilter
  }

  input SubmissionScheduleFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    reportingPartner: PartnerFilter
    name: StringFilter
    dataType: DataTypeFilter
    periodRule: StringFilter
    expectedDay: StringFilter
    workingDays: StringFilter
    startDate: DateFilter
    endDate: DateFilter
    isInPeriodReporter: BooleanFilter
    weekOfMonth: NumberFilter
    monthOfQuarter: NumberFilter
  }

  input DataTypeFilter {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    type: StringFilter
  }

  input SubmissionScheduleNotificationFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    submissionScheduleSid: IDFilter
    notificationType: StringFilter
    serviceUser: ServiceUserFilters
  }

  input SubmissionPeriodSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    expectedDate: SortOption
    periodStartDate: SortOption
    periodEndDate: SortOption
    submissionSchedule: SubmissionScheduleSort
    status: SortOption
    reportedFlag: SortOption
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewSort
    numberOfFiles: SortOption
  }
  
  input SubmissionPeriodLineItemViewSort {
    salesLineItemCount: SortOption
    invLineItemCount: SortOption
    earliestFileSubmissionDate: SortOption
  }

  input SubmissionScheduleSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    customerSid: SortOption
    dataType: DataTypeSort
    reportingPartner: PartnerSort
    periodRule: SortOption
    name: SortOption
    startDate: SortOption
    endDate: SortOption
    expectedDay: SortOption
    workingDays: SortOption
    isInPeriodReporter: SortOption
    weekOfMonth: SortOption
    monthOfQuarter: SortOption
  }

  input DataTypeSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    type: SortOption
  }

  input SubmissionScheduleNotificationSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    notificationType: SortOption
    serviceUser: ServiceUserSort
  }

  input NoDataInput {
    sid: ID!
    noDataReason: String
  }
`;
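// Illustrative only: a minimal client-side query sketch against the SubmissionDef schema above.
// Every selected field is declared in that type definition; the constant name, the operation name
// and the offset/limit values are assumptions, not part of the original snippet.
export const exampleSubmissionPeriodsQuery = `
  query submissionPeriods {
    submissionPeriods(offset: 0, limit: 25) {
      sid
      expectedDate
      periodStartDate
      periodEndDate
      status
      noData
      noDataReason
      submissionSchedule {
        name
        periodRule
        expectedDay
      }
    }
  }
`;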
import { Inject, Service } from 'typedi';
import { FindOptions } from '../base/FindOptions';
import { PartnerService } from '../partner/PartnerService';
import { SubmissionPeriod } from './SubmissionPeriod';
import { SubmissionPeriodInput } from './SubmissionPeriodInput';
import { Operator } from '../base/filters/Operator';
import { ServiceError } from '../base/ServiceError';
import { User } from '../user/User';
import { AuditEventService } from '../event/AuditEventService';
import { CustomerDomainEntityService } from '../base/CustomerDomainEntityService';
import { SubmissionScheduleAuditService } from './SubmissionScheduleAuditService';
import { AuditEvent } from '../event/AuditEvent';
import { ServiceUserService } from '../user/ServiceUserService';
import { AuditTypeService } from '../event/AuditTypeService';
import { SubmissionScheduleAudit } from './SubmissionScheduleAudit';
import { AuditType } from '../event/AuditType';

export const NO_DATA_ACTION_TYPE = 'SubmissionScheduleNoDataReported';

@Service()
export class SubmissionPeriodService extends CustomerDomainEntityService<SubmissionPeriod> {
  @Inject()
  protected partnerService: PartnerService;

  @Inject()
  protected auditEventService: AuditEventService;

  @Inject()
  protected submissionScheduleAuditService: SubmissionScheduleAuditService;

  @Inject()
  protected serviceUserService: ServiceUserService;

  @Inject()
  protected auditTypeService: AuditTypeService;

  constructor() {
    super(SubmissionPeriod);
  }

  getServiceName() {
    return 'SubmissionPeriod';
  }

  async findSubmissionPeriods(
    customerId: string,
    partnerId: string,
    options: FindOptions = new FindOptions()
  ): Promise<SubmissionPeriod[]> {
    const customer = await this.customerService.findOneById(customerId);
    const partner = await this.partnerService.findOneById(partnerId);
    const { offset, limit, filters = {}, sort } = options;

    // return empty array if customer or partner is invalid
    if (!customer || (!partner && !PartnerService.isAll(partnerId))) {
      console.error(
        'Customer ' + customerId + ' or partner ' + partnerId + ' is invalid'
      );
      return [];
    }

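    // Join the schedule, its data type, the reporting partner (with overlay view) and the line-item / file summary views.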
    const query = this.repository
      .createQueryBuilder('sp')
      .innerJoinAndSelect(
        'sp.submissionSchedule',
        'ss',
        '"sp"."CUSTOMER_SID" = "ss"."CUSTOMER_SID"'
      )
      .innerJoinAndSelect('ss.dataType', 'dt')
      .innerJoinAndSelect('ss.reportingPartner', 'rp')
      .innerJoinAndSelect('rp.partnerOverlayView', 'csr')
      .leftJoinAndSelect(
        'sp.submissionPeriodLineItemView',
        'spli',
        '"sp"."CUSTOMER_SID" = "spli"."CUSTOMER_SID"'
      )
      .leftJoinAndSelect(
        'sp.dataFileSummaryInfo',
        'dfsi',
        '"sp"."CUSTOMER_SID" = "dfsi"."CUSTOMER_SID"'
      )
      .offset(offset)
      .limit(limit);

    this.buildWhere(filters, query);

    query.andWhere(`"sp"."CUSTOMER_SID" = ${customer.sid}`)
      .andWhere(`"sp"."DELETED" = 0`)
      .andWhere(`"csr"."CUSTOMER_SID" = ${customer.sid}`);

    if (partner) {
      query.andWhere('"ss"."REPORTING_PARTNER_SID" = ' + partner.sid);
    }
    this.addOrderBys(query, sort ?? {});
    let periods: SubmissionPeriod[] = await query.getMany();
    periods = periods.map((period) => {
      if (period.fileIds) {
        period.firstFileId = period.fileIds.split(',')[0];
      }
      return period;
    });

    periods = await this.loadFirstFileFields(customer.sid, periods);

    return periods;
  }

  async loadFirstFileFields(
    customerSid: number,
    periods: SubmissionPeriod[]
  ): Promise<SubmissionPeriod[]> {

    if (periods.length === 0) return periods;

    // construct SQL
    const sql =
      ` select ID "id", FILE_NAME "file_name", CREATE_DATE "create_date" 
        from data_file 
        where customer_sid = ${customerSid} and id in (:ids) `;

    // execute SQL
    let rows = new Map();

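    // Query in chunks of 1000 ids per statement to keep the IN list bounded.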
    for (let i = 0; i < periods.length; i += 1000) {
      let subSet = periods.slice(i, i + 1000);
      let dfIds = [];
      for (let j = 0; j < subSet.length; j++) {
        if (subSet[j].firstFileId) {
          dfIds.push(`'${subSet[j].firstFileId}'`);
        }
      }
      if (dfIds.length === 0) continue;
      // not bound as a parameter, to avoid bind-variable peeking
      let inListSql = sql.replace(':ids', dfIds.join(','));

      let results = await this.repository.query(inListSql);
      results.forEach((r) => {
        rows.set(r.id, r);
      });
    }
    // map SQL result
    return periods.map((period) => {
      const row = period.firstFileId ? rows.get(period.firstFileId) : undefined;
      if (row) {
        period.firstFileName = row.file_name;
        period.firstFileCreateDate = row.create_date;
      }
      return period;
    });
  }

  async markNoData(
    customerId: string,
    user: User,
    data: Partial<SubmissionPeriodInput>[]
  ): Promise<ServiceError[]> {
    let sids: number[] = data.map((input: SubmissionPeriodInput) => {
      return input.sid;
    });

    const options: FindOptions = {
      offset: 0,
      limit: 1000,
      filters: {
        sid: {
          operator: Operator.IN,
          values: sids
        }
      }
    };

    const submissionPeriods = await this.findSubmissionPeriods(
      customerId,
      user.partnerId,
      options
    );

    if (submissionPeriods.length <= 0) {
      throw new Error(`Customer ${customerId} or partner ${user.partnerId} is invalid`);
    }

    const serviceUser = await this.serviceUserService.findByLogin(user.nucleusUsername);
    if (!serviceUser) {
      throw new Error(`Service user not found for login ${user.nucleusUsername}`);
    }

    const auditType = await this.auditTypeService.findOneByName(
      NO_DATA_ACTION_TYPE
    );

    let errors: Array<ServiceError> = [];
    let submissionPeriodMap: Map<number, SubmissionPeriod> = new Map();
    let validInput: Partial<SubmissionPeriodInput>[] = [];

    submissionPeriods.forEach((submissionPeriod) => {
      submissionPeriodMap.set(Number(submissionPeriod.sid), submissionPeriod);
    });

    data.forEach((submissionPeriodInput) => {
      submissionPeriodInput.noData = true;
      submissionPeriodInput.noDataCreateDate = new Date();
      submissionPeriodInput.noDataServiceUserSid = serviceUser.sid;

      let submissionPeriod = submissionPeriodMap.get(Number(submissionPeriodInput.sid))
      let err = submissionPeriod ? this.validateForMarkNoData(submissionPeriod) : null;
      if (submissionPeriod && !err) validInput.push(submissionPeriodInput);
      errors.push(err);
    });

    if (validInput.length === 0) return errors;

    const updateServiceErrors: Array<ServiceError> = await this.updateSubmissionPeriods(
      customerId,
      user,
      validInput,
      auditType
    );

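    // Merge the per-update results back into the errors array, keeping positions aligned with the original input order.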
    let dataIndex = 0;
    let validDataIndex = 0;
    data.forEach((submissionPeriodInput) => {
      if (validDataIndex < validInput.length && submissionPeriodInput.sid == validInput[validDataIndex].sid) {
        if (!errors[dataIndex]) {
          errors[dataIndex++] = updateServiceErrors[validDataIndex++];
        }
      } else {
        dataIndex++;
      }
    });

    return errors;
  }

  validateForMarkNoData(submissionPeriod: SubmissionPeriod): ServiceError | null {
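    // Periods whose end date is more than 31 days in the past can no longer be marked No-Data-To-Report.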
    let comparisonDate: Date = new Date();
    comparisonDate.setDate(comparisonDate.getDate() - 31);
    if (submissionPeriod.reportedFlag) {
      return new ServiceError('REPORTED_DATA_ERROR', 'Submission Period already has reported data, so No-Data-To-Report is not applicable.');
    } else if (submissionPeriod.noDataReason != null) {
      return new ServiceError('ALREAD_MARKED_NO_DATA_ERROR', 'Submission Period is already marked as No-Data-To-Report.');
    } else if (submissionPeriod.fileIds) {
      return new ServiceError('LINE_COUNT_ERROR', 'Submission Period already has line items count, so No-Data-To-Report is not applicable.');
    } else if (submissionPeriod.periodEndDate.getTime() < comparisonDate.getTime()) {
      return new ServiceError('GRACE_PERIOD_ERROR', 'Submission Period is beyond the grace period to mark as No-Data-To-Report.');
    }
    return null;
  }

  async updateSubmissionPeriods(
    customerId: string,
    user: User,
    data: Partial<SubmissionPeriodInput>[],
    auditType?: AuditType,
  ): Promise<ServiceError[]> {
    const customer = await this.customerService.findOneById(customerId);
    const partner = await this.partnerService.findOneById(user.partnerId);
    const serviceUser = await this.serviceUserService.findByLogin(user.nucleusUsername);

    if (!serviceUser || serviceUser.sid === 0) {
      throw new Error(`Service user not found for login ${user.nucleusUsername}`);
    }

    let sids: number[] = data.map((input: SubmissionPeriodInput) => {
      return input.sid;
    });

    const options: FindOptions = {
      offset: 0,
      limit: 1000,
      filters: {
        sid: {
          operator: Operator.IN,
          values: sids
        }
      }
    };

    const submissionPeriods = await this.findSubmissionPeriods(
      customerId,
      user.partnerId,
      options
    );

    let submissionPeriodMap = new Map<number, SubmissionPeriod>();
    submissionPeriods.forEach((submissionPeriod) => {
      submissionPeriodMap.set(Number(submissionPeriod.sid), submissionPeriod);
    });

    return await Promise.all(
      data.map(async (submissionPeriodInput) => {
        try {
          const submissionPeriod = submissionPeriodMap.get(
            Number(submissionPeriodInput.sid)
          );
          if (submissionPeriod) {
            await this.update(
              submissionPeriod.sid,
              Object.assign({}, submissionPeriodInput)
            );
          }

          if (auditType && submissionPeriod) {
            let auditEvent = new AuditEvent();
            auditEvent = await this.auditEventService.createAuditEvent(
              Object.assign({}, auditEvent, {
                serviceUserSid: serviceUser.sid,
                eventTimeStamp: new Date(),
                customerSid: customer.sid,
                reportingPartnerSid: partner ? partner.sid : null,
                auditTypeSid: auditType.sid,
                parentSid: null,
                createDate: new Date()
              })
            );

            const submissionScheduleAudit = new SubmissionScheduleAudit();
            await this.submissionScheduleAuditService.createSubmissionScheduleAudit(
              Object.assign({}, submissionScheduleAudit, {
                submissionPeriodSid: submissionPeriod.sid,
                submissionScheduleSid: submissionPeriod.submissionScheduleSid,
                customerSid: customer.sid,
                details: null,
                auditEventSid: auditEvent.sid,
                createDate: new Date()
              })
            );
          }

          return null;
        } catch (err) {
          console.log(err);
          return new ServiceError(
            'SUBMISSION_PERIOD_ERR',
            `Submission period could not be updated for : ${submissionPeriodInput.sid}`
          );
        }
      })
    );
  }
}
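// Illustrative only: a minimal sketch of how a caller (for example a GraphQL resolver) might
// fetch submission periods through the service above. Container.get comes from typedi; the
// FindOptions shape and the Operator.IN sid filter mirror the usage inside markNoData. The
// function name and the import paths (assumed to sit next to SubmissionPeriodService) are assumptions.
import { Container } from 'typedi';
import { Operator } from '../base/filters/Operator';
import { SubmissionPeriodService } from './SubmissionPeriodService';

async function fetchSubmissionPeriodsBySid(customerId: string, partnerId: string, sids: number[]) {
  const service = Container.get(SubmissionPeriodService);
  return service.findSubmissionPeriods(customerId, partnerId, {
    offset: 0,
    limit: 1000,
    filters: {
      sid: { operator: Operator.IN, values: sids }
    }
  });
}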
import { ViewEntity, ViewColumn } from 'typeorm';

@ViewEntity({
  name: 'SUBMISSION_PERIOD_INFO_VIEW',
  expression: `
    SELECT
      ss1.SID AS submissionScheduleId,
      df.ID AS fileId,
      df.FILE_NAME AS fileName,
      df.CREATE_DATE AS fileCreateDate,
      dfsi.NUM_SLIS AS numberOfPOSLines,
      dfsi.NUM_ILIS AS numberOfInventoryLines
    FROM
      SUBMISSION_SCHEDULE ss1
      LEFT JOIN DATA_FILE_SUMMARY_INFO dfsi ON dfsi.SUBMISSION_PERIOD_SID = :sid AND dfsi.CUSTOMER_SID = :cs
      LEFT JOIN DATA_TYPE dt1 ON ss1.DATA_TYPE_SID = dt1.SID
      LEFT JOIN DATA_FILE df ON dfsi.CUSTOMER_SID = df.CUSTOMER_SID AND dfsi.DATA_FILE_SID = df.SID
                              AND df.DELETED = 0 AND df.DATA_TYPE = dt1.TYPE
    WHERE
      ss1.SID = :spssd AND ss1.CUSTOMER_SID = :cs AND df.ID IS NOT NULL
  `,
})
export class SubmissionPeriodInfoView {
  @ViewColumn()
  submissionScheduleId: number;

  @ViewColumn()
  fileId: number;

  @ViewColumn()
  fileName: string;

  @ViewColumn()
  fileCreateDate: Date;

  @ViewColumn()
  numberOfPOSLines: number;

  @ViewColumn()
  numberOfInventoryLines: number;
}
import { Entity, Column, JoinColumn, OneToOne, VirtualColumn, OneToMany } from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import {
  returnsSubmissionPeriodLineItemView,
  SubmissionPeriodLineItemView
} from './SubmissionPeriodLineItemView';
import {
  reportedFlagSql,
  statusSql,
  fileIdsSql,
  filesCountSql,
  filesDataSql
} from './SubmissionPeriodSql';
import {
  returnsSubmissionSchedule,
  SubmissionSchedule
} from './SubmissionSchedule';
import { ServiceUser, returnsServiceUser } from '../user/ServiceUser';
import { DataFileSummaryInfo, returnsDataFileSummaryInfo } from '../datafile/DataFileSummaryInfo';
import { SubmissionPeriodInfoView } from './SubmissionPeriodInfoView';


export const returnsSubmissionPeriod = () => SubmissionPeriod;
export const dataFileSummaryInfoInverseSide = (dataFileSummaryInfo) =>
  dataFileSummaryInfo.submissionPeriod;

@Entity({ name: 'SUBMISSION_PERIOD' })
export class SubmissionPeriod extends CustomerDomainEntity {
  @Column({ name: 'EXPECTED_DATE' })
  expectedDate: Date;

  @Column({ name: 'PERIOD_START_DATE' })
  periodStartDate: Date;

  @Column({ name: 'PERIOD_END_DATE' })
  periodEndDate: Date;

  @Column({ name: 'SUBMISSION_SCHEDULE_SID', select: false })
  submissionScheduleSid: number;

  @Column({ name: 'ON_TIME_OVERRIDE' })
  onTimeOverride: boolean;

  @Column({ name: 'NO_DATA' })
  noData: boolean;

  @Column({ name: 'NO_DATA_REASON' })
  noDataReason: string;

  @Column({ name: 'NO_DATA_CREATE_DATE' })
  noDataCreateDate: Date;

  @Column({ name: 'NO_DATA_SERVICE_USER_SID', select: false })
  noDataServiceUserSid: number;

  @OneToOne(returnsServiceUser)
  @JoinColumn({ name: 'NO_DATA_SERVICE_USER_SID' })
  noDataServiceUser: Promise<ServiceUser>;

  @Column({ name: 'TRACKING_LEVEL' })
  trackingLevel: string;

  @Column({ name: 'EXPECTED_DAY' })
  expectedDay: number;

  @Column({ name: 'WORKING_DAYS' })
  workingDays: string;

  @Column({ name: 'IS_IN_PERIOD_REPORTER' })
  isInPeriodReporter: number;

  @Column({ name: 'DELETED' })
  deleted: boolean;

  @OneToOne(returnsSubmissionSchedule)
  @JoinColumn({ name: 'SUBMISSION_SCHEDULE_SID' })
  submissionSchedule?: Promise<SubmissionSchedule>;

  @VirtualColumn({ query: statusSql })
  status?: string;

  @VirtualColumn({ query: reportedFlagSql })
  reportedFlag: boolean;

  @VirtualColumn( {query : fileIdsSql} )
  fileIds?: string;

  firstFileId?: string;

  firstFileName?: string;

  firstFileCreateDate?: Date;

  @VirtualColumn( {query : filesCountSql} )
  numberOfFiles?: number;

  @OneToMany(() => SubmissionPeriodInfoView, submissionPeriodInfoView => submissionPeriodInfoView.submissionScheduleId)
  submissionPeriodInfoView?: SubmissionPeriodInfoView[];

  @OneToOne(returnsDataFileSummaryInfo)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  dataFileSummaryInfo?: Promise<DataFileSummaryInfo>;

  @OneToOne(returnsSubmissionPeriodLineItemView)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodLineItemView?: Promise<SubmissionPeriodLineItemView>;

}
import { DomainEntityService } from '../base/DomainEntityService';
import { Inventory } from './Inventory';
import { Service, Inject } from 'typedi';
import { FindOptions } from '../base/FindOptions';
import { Equal, In } from 'typeorm';
import { ConfigurationService } from '../configuration/ConfigurationService';
import { CustomerService } from '../customer/CustomerService';
import { PartnerService } from '../partner/PartnerService';
import { DynamicAttrsService } from '../attribute/DynamicAttrsService';
import { DataFileService } from '../datafile/DataFileService';
import { SubmissionPeriodService } from '../submission/SubmissionPeriodService';
import { InventoryQuantityService } from './InventoryQuantityService';
import { InventoryPriceService } from './InventoryPriceService';
import { Writer } from '../../writer/Writer';
import { InventoryInput } from './InventoryInput';
import { ServiceError } from '../base/ServiceError';
import { Operator } from '../base/filters/Operator';
import { AppDataSource } from '../../platform/DataSource';

@Service()
export class InventoryService extends DomainEntityService<Inventory> {
  @Inject()
  protected configurationService: ConfigurationService;

  @Inject()
  protected customerService: CustomerService;

  @Inject()
  protected partnerService: PartnerService;

  @Inject()
  protected dynamicAttrsService: DynamicAttrsService;

  @Inject()
  protected dataFileService: DataFileService;

  @Inject()
  protected submissionPeriodService: SubmissionPeriodService;

  @Inject()
  protected inventoryQuantityService: InventoryQuantityService;

  @Inject()
  protected inventoryPriceService: InventoryPriceService;

  @Inject('Writer')
  protected writer: Writer;

  constructor() {
    super(Inventory);
  }

  getServiceName(): string {
    return 'Inventory';
  }

  async findNeedCorrectionInventory(
    customerId: string,
    partnerId: string,
    options: FindOptions = new FindOptions()
  ): Promise<Inventory[]> {
    const customer = await this.customerService.findOneById(customerId);
    const partner = await this.partnerService.findOneById(partnerId);
    const { offset, limit, filters = {}, sort } = options;

    if (!customer || (!partner && !PartnerService.isAll(partnerId))) {
      return Promise.resolve([]);
    }
    const config = await this.configurationService.getConfiguration(customerId);
    const queueName = config.get('needCorrectionInventoryQueueName', 'resign');
    const maxAge = config.get('nmiMaxAgeInDays', '30');

    const query = this.repository
      .createQueryBuilder('inv')
      .leftJoinAndSelect(
        'inv.dynamicAttrs',
        'da',
        '"da"."ATTRIBUTE_TYPE" = \'IL\' and "da"."CUSTOMER_SID" = "inv"."CUSTOMER_SID"'
      )
      .innerJoinAndSelect(
        'inv.dataFile',
        'df',
        '"df"."CUSTOMER_SID" = "inv"."CUSTOMER_SID"'
      )
      .leftJoinAndSelect(
        'inv.submissionPeriod',
        'sp',
        '"sp"."CUSTOMER_SID" = "inv"."CUSTOMER_SID"'
      )      
      .leftJoinAndSelect(
        'sp.submissionSchedule',
        'ss',
        '"sp"."CUSTOMER_SID" = "ss"."CUSTOMER_SID"'
      )
      .leftJoinAndSelect('ss.dataType', 'dt')
      .leftJoinAndSelect(
        'sp.submissionPeriodLineItemView',
        'spli',
        '"sp"."CUSTOMER_SID" = "spli"."CUSTOMER_SID"'
      )
      .leftJoinAndSelect(
        'sp.dataFileSummaryInfo',
        'dfsi',
        '"sp"."CUSTOMER_SID" = "dfsi"."CUSTOMER_SID"'
      )
      .leftJoinAndMapOne(
        'inv.onHandQuantity',
        'inv.inventoryQuantities',
        'onHandInvQuantity',
        '"onHandInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'ON HAND\')'
      )
      .leftJoinAndMapOne(
        'inv.onOrderQuantity',
        'inv.inventoryQuantities',
        'onOrderInvQuantity',
        '"onOrderInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'ON ORDER\')'
      )
      .leftJoinAndMapOne(
        'inv.committedQuantity',
        'inv.inventoryQuantities',
        'committedInvQuantity',
        '"committedInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'COMMITTED\')'
      )
      .leftJoinAndMapOne(
        'inv.floatQuantity',
        'inv.inventoryQuantities',
        'floatInvQuantity',
        '"floatInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'FLOAT\')'
      )
      .leftJoinAndMapOne(
        'inv.backorderedQuantity',
        'inv.inventoryQuantities',
        'backorderedInvQuantity',
        '"backorderedInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'BACKORDERED\')'
      )
      .leftJoinAndMapOne(
        'inv.returnedQuantity',
        'inv.inventoryQuantities',
        'returnedInvQuantity',
        '"returnedInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'RETURNED\')'
      )
      .leftJoinAndMapOne(
        'inv.inTransitQuantity',
        'inv.inventoryQuantities',
        'inTransitInvQuantity',
        '"inTransitInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'TRANSIT\')'
      )
      .leftJoinAndMapOne(
        'inv.unitPrice',
        'inv.inventoryPrices',
        'invPrice',
        '"invPrice"."PRICE_TYPE_SID" = ( SELECT SID FROM PRICE_TYPE WHERE ' +
          'NAME = \'REPORTED_PRICE\') AND "invPrice"."DELETED" = 0'
      )
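      // only inventory lines present in the PEH_INV_QUEUE correction queue are returned (queue name filtered below)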
      .innerJoin(
        'PEH_INV_QUEUE',
        'piq',
        '"piq"."CLIENT_SKU" = "inv"."CLIENT_SKU" and "piq"."INV_LINE_ITEM_SID" = "inv"."SID" ' +
          'and "piq"."CUSTOMER_SID" = "inv"."CUSTOMER_SID"'
      )
      .offset(offset)
      .limit(limit);    

    // apply each quantity filter against its joined quantity alias
    let quantityFiltersMap = new Map();
    quantityFiltersMap.set('onOrderQuantity', 'onOrderInvQuantity');
    quantityFiltersMap.set('onHandQuantity', 'onHandInvQuantity');
    quantityFiltersMap.set('committedQuantity', 'committedInvQuantity');
    quantityFiltersMap.set('floatQuantity', 'floatInvQuantity');
    quantityFiltersMap.set('backorderedQuantity', 'backorderedInvQuantity');
    quantityFiltersMap.set('returnedQuantity', 'returnedInvQuantity');
    quantityFiltersMap.set('inTransitQuantity', 'inTransitInvQuantity');

    for (let [key, value] of quantityFiltersMap) {
      const quantityFilters = filters[key];
      delete filters[key];
      if (quantityFilters) {
        const quantityWhere =
          this.inventoryQuantityService.buildWhereExpression(
            quantityFilters,
            value
          );
        query.andWhere(quantityWhere);
      }
    }    

    // add unit price condition
    const unitPriceFilters = filters['unitPrice'];
    delete filters['unitPrice'];
    if (unitPriceFilters) {
      const unitPriceWhere = this.inventoryPriceService.buildWhereExpression(
        unitPriceFilters,
        'invPrice'
      );
      query.andWhere(unitPriceWhere);
    }    

    this.buildWhere(filters, query);
    query.andWhere('"inv"."CUSTOMER_SID" = ' + customer.sid)
    .andWhere('"inv"."INVENTORY_DATE" >= sysdate - ' + maxAge)
    .andWhere('"piq"."QUEUE_NAME" = \'' + queueName + "'")
    .andWhere( { 'deleted' : Equal(0) } );

    if (!PartnerService.isAll(partnerId)) {
      query.andWhere('"inv"."REPORTING_PARTNER_SID" = ' + partner.sid);
    }
    
    // add order by
    this.addOrderBys(query, sort);

    return query.getMany();
  }

  async updateInventory(
    customerId: string,
    partnerId: string,
    data: InventoryInput[]
  ): Promise<ServiceError[]> {
    let inventoryToCreate: Inventory[] = [];
    let sids: number[] = data.map((input: InventoryInput) => {
      return input.sid;
    });
    let errors: ServiceError[] = [];

    // Check if partner has access to ili
    let options: FindOptions = {
      offset: 0,
      limit: 1000,
      filters: {
        sid: {
          operator: Operator.IN,
          values: sids
        }
      }
    };
    let inventory: Inventory[] = await this.findNeedCorrectionInventory(
      customerId,
      partnerId,
      options
    );

    let sidsToUpdate: number[] = [];
    errors = sids.map((sid) => {
      if (inventory) {
        let s: Inventory = inventory.find((inv: Inventory) => {
          return inv.sid.toString() === sid.toString();
        });
        if (s) {
          sidsToUpdate.push(sid);
          return null;
        }
      }
      return new ServiceError(
        'INVENTORY_NOT_FOUND',
        `Inventory line sid : ${sid} not found`
      );
    });

    if (inventory && inventory.length > 0) {
      let invLineItems: Inventory[] = await this.repository
        .createQueryBuilder('inv')
        // load partner and gs number
        .innerJoinAndSelect('inv.reportingPartner', 'rp')
        .leftJoinAndSelect('rp.gsNumbers', 'gs')
        // load dynamic attrs
        .leftJoinAndSelect('inv.dynamicAttrs', 'da')
        .leftJoinAndMapOne(
          'inv.onHandQuantity',
          'inv.inventoryQuantities',
          'onHandInvQuantity',
          '"onHandInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'ON HAND\')'
        )
        .leftJoinAndMapOne(
          'inv.onOrderQuantity',
          'inv.inventoryQuantities',
          'onOrderInvQuantity',
          '"onOrderInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'ON ORDER\')'
        )
        .leftJoinAndMapOne(
          'inv.committedQuantity',
          'inv.inventoryQuantities',
          'committedInvQuantity',
          '"committedInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'COMMITTED\')'
        )
        .leftJoinAndMapOne(
          'inv.floatQuantity',
          'inv.inventoryQuantities',
          'floatInvQuantity',
          '"floatInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'FLOAT\')'
        )
        .leftJoinAndMapOne(
          'inv.backorderedQuantity',
          'inv.inventoryQuantities',
          'backorderedInvQuantity',
          '"backorderedInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'BACKORDERED\')'
        )
        .leftJoinAndMapOne(
          'inv.returnedQuantity',
          'inv.inventoryQuantities',
          'returnedInvQuantity',
          '"returnedInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'RETURNED\')'
        )
        .leftJoinAndMapOne(
          'inv.inTransitQuantity',
          'inv.inventoryQuantities',
          'inTransitInvQuantity',
          '"inTransitInvQuantity"."QUANTITY_TYPE_SID" = ( SELECT SID FROM QUANTITY_TYPE WHERE QUANTITY_TYPE = \'TRANSIT\')'
        )
        .leftJoinAndMapOne(
          'inv.unitPrice',
          'inv.inventoryPrices',
          'invPrice',
          '"invPrice"."PRICE_TYPE_SID" = ( SELECT SID FROM PRICE_TYPE WHERE NAME = \'REPORTED_PRICE\') AND "invPrice"."DELETED" = 0'
        )
        .where({ sid: In(sidsToUpdate) })
        .andWhere('"gs"."CUSTOMER_SID" = :customerSid', {
          customerSid: inventory[0].customerSid
        })
        .getMany();

      await Promise.all(
        invLineItems.map(async (invLineItem) => {
          // load serial numbers

          let input: InventoryInput = data.find((inv: InventoryInput) => {
            return inv.sid.toString() === invLineItem.sid.toString();
          });

          invLineItem = Object.assign({}, invLineItem, input);

          // add inv line to list to write to S3.
          inventoryToCreate.push(invLineItem);
        })
      );

      console.log(`Inventory to write to S3 : ${inventoryToCreate.length}`);
      // Write to S3
      if (inventoryToCreate.length > 0) {
        await this.writer.write(
          customerId,
          partnerId,
          this.getServiceName(),
          inventoryToCreate
        );
      }

      // It is safe to delete from PEH only after writing to S3: if the delete fails,
      // the inv line will still be removed from PEH when the JSON file on S3 is loaded into nucleus.
      // If we deleted first and the S3 write then failed, there would be no way to roll back the db update.
      await Promise.all(
        inventoryToCreate.map(async (invLineItem) => {
          try {
            // delete inventory line from PEH
            await AppDataSource.query(
              'delete from peh_inv_queue where inv_line_item_sid = :sid',
              [invLineItem.sid]
            );
            console.log(`Deleted line item : ${invLineItem.sid} from PEH`);
          } catch (e) {
            console.log(
              `Error while deleting from PEH for inv line : ${JSON.stringify(
                invLineItem
              )} for Customer : ${customerId}`
            );
            console.error(e);
            // Swallow the error: if the delete fails, the only downside is that the user will still see
            // the inv line until the file on S3 is loaded into nucleus.
          }
        })
      );
    }

    return errors;
  }
}
import { Inject, Service } from 'typedi';
import { DomainEntityService } from '../base/DomainEntityService';
import { DataFileSummaryInfo } from './DataFileSummaryInfo';
import { CustomerService } from '../customer/CustomerService';
import { SubmissionPeriodService } from '../submission/SubmissionPeriodService';

@Service()
export class DataFileSummaryInfoService extends DomainEntityService<DataFileSummaryInfo> {
  @Inject()
  protected customerService: CustomerService;

  @Inject()
  protected submissionPeriodService: SubmissionPeriodService;

  constructor() {
    super(DataFileSummaryInfo);
  }

  getServiceName() {
    return 'DataFileSummaryInfo';
  }

  async createDataFileSummaryInfo(
    custId: string,
    submissionPeriodSid: number,
    dataFileSid: number,
    numberOfPOSLines: number,
    numberOfInventoryLines: number
  ) {
    const cust = await this.customerService.findOneById(custId);

    await this.create({
      customerSid: cust.sid,
      submissionPeriodSid: submissionPeriodSid,
      dataFileSid: dataFileSid,
      numberOfPOSLines: numberOfPOSLines,
      numberOfInventoryLines: numberOfInventoryLines,
    });
  }

}
import { Entity, Column, ManyToOne, JoinColumn} from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import { returnsSubmissionPeriod } from '../submission/SubmissionPeriod';
import { SubmissionPeriod } from '../submission/SubmissionPeriod';

export const returnsDataFileSummaryInfo = () => DataFileSummaryInfo;
export const returnsSubmissionPeriods = (submissionPeriod) => submissionPeriod.filesData;

@Entity({ name: 'DATA_FILE_SUMMARY_INFO' })
export class DataFileSummaryInfo extends CustomerDomainEntity {

    @ManyToOne(() => SubmissionPeriod)
    @JoinColumn([
        { name: 'SUBMISSION_PERIOD_SID', referencedColumnName: 'sid' },
        { name: 'CUSTOMER_SID', referencedColumnName: 'customerSid' }
    ])
    submissionPeriod: SubmissionPeriod;

    @Column({name:'DATA_FILE_SID'})
    dataFileSid: number;

    @Column({name:'NUM_SLIS'})
    numberOfPOSLines: number;

    @Column({name:'NUM_ILIS'})
    numberOfInventoryLines: number;
    
    // Specify that submissionPeriodSid can be null
    @Column({ name: 'SUBMISSION_PERIOD_SID', nullable: true })
    submissionPeriodSid: number;
}

import { Entity, Column, ManyToOne, JoinColumn} from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import { returnsSubmissionPeriod } from '../submission/SubmissionPeriod';

export const returnsDataFileSummaryInfo = () => DataFileSummaryInfo;
export const returnsSubmissionPeriods = (submissionPeriod) => submissionPeriod.filesData;

@Entity({ name: 'DATA_FILE_SUMMARY_INFO' })
export class DataFileSummaryInfo extends CustomerDomainEntity {

    @ManyToOne(returnsSubmissionPeriod, returnsSubmissionPeriods)
    @JoinColumn([
        { name: 'SUBMISSION_PERIOD_SID', referencedColumnName: 'sid' },
        { name: 'CUSTOMER_SID', referencedColumnName: 'customerSid' }
    ])
    submissionPeriodSid: number;

    @Column({name:'DATA_FILE_SID'})
    dataFileSid: number;

    @Column({name:'NUM_SLIS'})
    numberOfPOSLines: number;

    @Column({name:'NUM_ILIS'})
    numberOfInventoryLines: number;
    
}

import { Entity, Column, JoinColumn, OneToOne, VirtualColumn, OneToMany } from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import {
  returnsSubmissionPeriodLineItemView,
  SubmissionPeriodLineItemView
} from './SubmissionPeriodLineItemView';
import {
  reportedFlagSql,
  statusSql,
  fileIdsSql,
  filesCountSql,
  filesDataSql
} from './SubmissionPeriodSql';
import {
  returnsSubmissionSchedule,
  SubmissionSchedule
} from './SubmissionSchedule';
import { ServiceUser, returnsServiceUser } from '../user/ServiceUser';
import { DataFileSummaryInfo, returnsDataFileSummaryInfo } from '../datafile/DataFileSummaryInfo';

export const returnsSubmissionPeriod = () => SubmissionPeriod;
export const dataFileSummaryInfoInverseSide = (dataFileSummaryInfo) =>
  dataFileSummaryInfo.submissionPeriod;

@Entity({ name: 'SUBMISSION_PERIOD' })
export class SubmissionPeriod extends CustomerDomainEntity {
  @Column({ name: 'EXPECTED_DATE' })
  expectedDate: Date;

  @Column({ name: 'PERIOD_START_DATE' })
  periodStartDate: Date;

  @Column({ name: 'PERIOD_END_DATE' })
  periodEndDate: Date;

  @Column({ name: 'SUBMISSION_SCHEDULE_SID', select: false })
  submissionScheduleSid: number;

  @Column({ name: 'ON_TIME_OVERRIDE' })
  onTimeOverride: boolean;

  @Column({ name: 'NO_DATA' })
  noData: boolean;

  @Column({ name: 'NO_DATA_REASON' })
  noDataReason: string;

  @Column({ name: 'NO_DATA_CREATE_DATE' })
  noDataCreateDate: Date;

  @Column({ name: 'NO_DATA_SERVICE_USER_SID', select: false })
  noDataServiceUserSid: number;

  @OneToOne(returnsServiceUser)
  @JoinColumn({ name: 'NO_DATA_SERVICE_USER_SID' })
  noDataServiceUser: Promise<ServiceUser>;

  @Column({ name: 'TRACKING_LEVEL' })
  trackingLevel: string;

  @Column({ name: 'EXPECTED_DAY' })
  expectedDay: number;

  @Column({ name: 'WORKING_DAYS' })
  workingDays: string;

  @Column({ name: 'IS_IN_PERIOD_REPORTER' })
  isInPeriodReporter: number;

  @Column({ name: 'DELETED' })
  deleted: boolean;

  @OneToOne(returnsSubmissionSchedule)
  @JoinColumn({ name: 'SUBMISSION_SCHEDULE_SID' })
  submissionSchedule?: Promise<SubmissionSchedule>;

  @VirtualColumn({ query: statusSql })
  status?: string;

  @VirtualColumn({ query: reportedFlagSql })
  reportedFlag: boolean;

  @VirtualColumn( {query : fileIdsSql} )
  fileIds?: string;

  firstFileId?: string;

  firstFileName?: string;

  firstFileCreateDate?: Date;

  @VirtualColumn( {query : filesCountSql} )
  numberOfFiles?: number;

  @OneToMany(returnsDataFileSummaryInfo, dataFileSummaryInfoInverseSide)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  filesData?: Promise<DataFileSummaryInfo>;

  @OneToOne(returnsSubmissionPeriodLineItemView)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodLineItemView?: Promise<SubmissionPeriodLineItemView>;

}
export const SubmissionDef = `
  type Query {
    submissionPeriods(
      offset: Float, 
      limit: Float,
      filters: SubmissionPeriodFilters,
      sort: SubmissionPeriodSort
    ): [SubmissionPeriod]
    submissionSchedules(
      offset: Float, 
      limit: Float,
      filters: SubmissionScheduleFilters, 
      sort: SubmissionScheduleSort
    ): [SubmissionSchedule]
    submissionScheduleNotifications(
      submissionScheduleSid: ID,
      offset: Float,
      limit: Float,
      filters: SubmissionScheduleNotificationFilters,
      sort: SubmissionScheduleNotificationSort
    ): [SubmissionScheduleNotification]
  }

  type Mutation {
    markNoData(data: [NoDataInput]): [MutationResponse] @auth(object: SubmissionPeriod)
  }

  type SubmissionPeriod {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    expectedDate: Date
    periodStartDate: Date
    periodEndDate: Date
    noData: Boolean
    noDataReason: String
    noDataCreateDate: Date
    onTimeOverride: Boolean
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Float
    trackingLevel: String
    submissionSchedule: SubmissionSchedule
    status: String
    reportedFlag: Boolean
    numberOfFiles: Float
    filesData: DataFileSummaryInfo
    submissionPeriodLineItemView: SubmissionPeriodLineItemView
    noDataServiceUser: ServiceUser
  }

  type SubmissionPeriodLineItemView {
    salesLineItemCount: Float
    invLineItemCount: Float
    earliestFileSubmissionDate: Date
  }

  
  type DataFileSummaryInfo{
    numberOfPOSLines: Float
    numberOfInventoryLines: Float
  }
  
  type SubmissionSchedule {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    dataType: DataType    
    reportingPartner: Partner
    periodRule: String
    name: String
    startDate: Date
    endDate: Date
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Boolean
    weekOfMonth: Float
    monthOfQuarter: Float
  }

  type DataType {
    sid: ID
    createDate: Date
    updateDate: Date
    type: String
  }

  type SubmissionScheduleNotification {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    submissionScheduleSid: ID
    notificationType: NotificationType
    serviceUser: ServiceUser
  }

  enum NotificationType {
    PARSE_SUCCESS
    LATE
    PARSE_FAIL
    EXPECTED
  }

  input SubmissionPeriodFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    expectedDate: DateFilter
    periodStartDate: DateFilter
    periodEndDate: DateFilter
    submissionSchedule: SubmissionScheduleFilters
    status: StringFilter
    reportedFlag: BooleanFilter
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewFilter
    numberOfFiles: NumberFilter
  }
  
  input SubmissionPeriodLineItemViewFilter {
    salesLineItemCount: NumberFilter
    invLineItemCount: NumberFilter
    earliestFileSubmissionDate: DateFilter
  }

  input SubmissionScheduleFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    reportingPartner: PartnerFilter
    name: StringFilter
    dataType: DataTypeFilter
    periodRule: StringFilter
    expectedDay: StringFilter
    workingDays: StringFilter
    startDate: DateFilter
    endDate: DateFilter
    isInPeriodReporter: BooleanFilter
    weekOfMonth: NumberFilter
    monthOfQuarter: NumberFilter
  }

  input DataTypeFilter {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    type: StringFilter
  }

  input SubmissionScheduleNotificationFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    submissionScheduleSid: IDFilter
    notificationType: StringFilter
    serviceUser: ServiceUserFilters
  }

  input SubmissionPeriodSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    expectedDate: SortOption
    periodStartDate: SortOption
    periodEndDate: SortOption
    submissionSchedule: SubmissionScheduleSort
    status: SortOption
    reportedFlag: SortOption
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewSort
    numberOfFiles: SortOption
  }
  
  input SubmissionPeriodLineItemViewSort {
    salesLineItemCount: SortOption
    invLineItemCount: SortOption
    earliestFileSubmissionDate: SortOption
  }

  input SubmissionScheduleSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    customerSid: SortOption
    dataType: DataTypeSort
    reportingPartner: PartnerSort
    periodRule: SortOption
    name: SortOption
    startDate: SortOption
    endDate: SortOption
    expectedDay: SortOption
    workingDays: SortOption
    isInPeriodReporter: SortOption
    weekOfMonth: SortOption
    monthOfQuarter: SortOption
  }

  input DataTypeSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    type: SortOption
  }

  input SubmissionScheduleNotificationSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    notificationType: SortOption
    serviceUser: ServiceUserSort
  }

  input NoDataInput {
    sid: ID!
    noDataReason: String
  }
`;
ALTER TABLE your_table_name
RENAME COLUMN old_column_name TO new_column_name;
ALTER TABLE your_table_name
ADD COLUMN new_column_name INT;
export const deletedLinesSql = (alias: string) => {
  return `
  (SELECT sum(CNT) AS "DELETED_COUNT"
  FROM (
      SELECT 
        COUNT("sli"."SID") "CNT" 
      FROM 
        data_file "df1" 
          LEFT JOIN sales_line_item "sli"
              ON "sli"."DATA_FILE_SID" = "df1"."SID" 
                  AND "sli"."CUSTOMER_SID" = "df1"."CUSTOMER_SID" 
                  AND "sli"."DELETED" = 1 
                  AND "sli"."RESUBMITTED" = 0 
      WHERE "df1"."SID" = ${alias}."SID"
          AND "df1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID"
      GROUP BY "df1"."SID" 
      UNION ALL 
      SELECT 
        COUNT("ili"."SID") "CNT" 
      FROM 
        data_file "df1" 
          LEFT JOIN inv_line_item "ili"
              ON "ili"."DATA_FILE_SID" = "df1"."SID" 
                  AND "ili"."CUSTOMER_SID" = "df1"."CUSTOMER_SID" 
                  AND "ili"."DELETED" = 1 
                  AND "ili"."RESUBMITTED" = 0 
      WHERE 
        "df1"."SID" = ${alias}."SID"
        and "df1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID"
      group by 
        "df1"."SID"
    ))
`;
};
select data_file_sid,customer_sid from sales_line_item where deleted=1 and data_file_sid != 8423737 and data_file_sid!=8424107;

SELECT sum(cnt) AS DELETED_COUNT
  FROM (
      SELECT 
        COUNT(sli.sid) cnt 
      FROM 
        data_file df1 
          LEFT JOIN sales_line_item sli
              ON sli.data_file_sid = df1.sid 
                  AND sli.customer_sid = df1.customer_sid 
                  AND sli.DELETED = 1 
                  AND sli.RESUBMITTED = 0 
      WHERE df1.sid = :sid
          AND df1.customer_sid = :cs
      GROUP BY df1.sid 
      UNION ALL 
      SELECT 
        COUNT(ili.sid) cnt 
      FROM 
        data_file df1 
          LEFT JOIN inv_line_item ili
              ON ili.data_file_sid = df1.sid 
                  AND ili.customer_sid = df1.customer_sid 
                  AND ili.DELETED = 1 
                  AND ili.RESUBMITTED = 0 
      WHERE 
        df1.sid = :sid
        and df1.customer_sid = :cs
      group by 
        df1.sid)
registry=http://registry.npmjs.org/
//npm.pkg.github.com/:_authToken=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

{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Launch via NPM-thanuj",
            "type": "node",
            "request": "launch",
            "cwd": "${workspaceRoot}",
            "runtimeExecutable": "npm",
            "runtimeArgs": [
                "run",  "sam"
            ],
            "port": 5858,
            "env": {
                "NODE_PATH": "${cwd}",
                "DBHOST": "vi-cdm-tdbp03.aws.modeln.com",
                "DBSERVICE": "cdmsbx",
                "DBUSER": "nucleus",
                "DBPASS": "notFunnyYoke",
                "AWS_PROFILE":"qa",
                "AUTHORIZER": "DEV",
                "CACHE_STORE": "memory"
            }
        }
  
    ]
}
import { Entity, Column, JoinColumn, OneToOne, VirtualColumn } from 'typeorm';
import { CustomerDomainEntity } from '../base/CustomerDomainEntity';
import {
  returnsSubmissionPeriodLineItemView,
  SubmissionPeriodLineItemView
} from './SubmissionPeriodLineItemView';
import {
  reportedFlagSql,
  statusSql,
  filesCountSql
} from './SubmissionPeriodSql';
import {
  returnsSubmissionSchedule,
  SubmissionSchedule
} from './SubmissionSchedule';
import { ServiceUser, returnsServiceUser } from '../user/ServiceUser';

export const returnsSubmissionPeriod = () => SubmissionPeriod;

@Entity({ name: 'SUBMISSION_PERIOD' })
export class SubmissionPeriod extends CustomerDomainEntity {
  @Column({ name: 'EXPECTED_DATE' })
  expectedDate: Date;

  @Column({ name: 'PERIOD_START_DATE' })
  periodStartDate: Date;

  @Column({ name: 'PERIOD_END_DATE' })
  periodEndDate: Date;

  @Column({ name: 'SUBMISSION_SCHEDULE_SID', select: false })
  submissionScheduleSid: number;

  @Column({ name: 'ON_TIME_OVERRIDE' })
  onTimeOverride: boolean;

  @Column({ name: 'NO_DATA' })
  noData: boolean;

  @Column({ name: 'NO_DATA_REASON' })
  noDataReason: string;

  @Column({ name: 'NO_DATA_CREATE_DATE' })
  noDataCreateDate: Date;

  @Column({ name: 'NO_DATA_SERVICE_USER_SID', select: false })
  noDataServiceUserSid: number;

  @OneToOne(returnsServiceUser)
  @JoinColumn({ name: 'NO_DATA_SERVICE_USER_SID' })
  noDataServiceUser: Promise<ServiceUser>;

  @Column({ name: 'TRACKING_LEVEL' })
  trackingLevel: string;

  @Column({ name: 'EXPECTED_DAY' })
  expectedDay: number;

  @Column({ name: 'WORKING_DAYS' })
  workingDays: string;

  @Column({ name: 'IS_IN_PERIOD_REPORTER' })
  isInPeriodReporter: number;

  @Column({ name: 'DELETED' })
  deleted: boolean;

  @OneToOne(returnsSubmissionSchedule)
  @JoinColumn({ name: 'SUBMISSION_SCHEDULE_SID' })
  submissionSchedule?: Promise<SubmissionSchedule>;

  @VirtualColumn({ query: statusSql })
  status?: string;

  @VirtualColumn({ query: reportedFlagSql })
  reportedFlag: boolean;

  @VirtualColumn( {query : filesCountSql} )
  numberOfFiles: number;

  @OneToOne(returnsSubmissionPeriodLineItemView)
  @JoinColumn({ name: 'SID', referencedColumnName: 'submissionPeriodSid' })
  submissionPeriodLineItemView?: Promise<SubmissionPeriodLineItemView>;

}
export const statusSql = (alias: string) => {
     return `(select
             CASE WHEN ( ${alias}."ON_TIME_OVERRIDE" = 1) 
                  THEN 'On-time'
                  WHEN ( "spli1"."EARLIEST_FILE_SUBMISSION_DATE" < ${alias}."EXPECTED_DATE" )
                  THEN 'On-time'
                  WHEN ( ( ${alias}."NO_DATA" = 1 ) 
                         AND ( ${alias}."NO_DATA_CREATE_DATE" < ${alias}."EXPECTED_DATE"))
                  THEN 'On-time'
                  WHEN ( ${alias}."EXPECTED_DATE" > SYSTIMESTAMP)
                  THEN 'Pending'
                  ELSE 'Late'
             END
             from SUBMISSION_PERIOD "sp1"
             left join SUBMISSION_PERIOD_LINE_ITEM_V "spli1" on 
                                                     "spli1"."SUBMISSION_PERIOD_SID" = "sp1"."SID"
                                                     AND "spli1"."CUSTOMER_SID" = "sp1"."CUSTOMER_SID"
             where "sp1"."SID" = ${alias}."SID"
             AND "sp1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID")`;
};

export const reportedFlagSql = (alias: string) => {
     return `(select
             CASE WHEN MAX(${alias}."ON_TIME_OVERRIDE") = 1
                  THEN 1
                  WHEN MAX(${alias}."NO_DATA") = 1
                  THEN 1
                  WHEN COUNT("df"."ID") > 0
                  THEN 1
                  ELSE 0
             END
             from SUBMISSION_SCHEDULE "ss1"
             left join DATA_FILE_SUMMARY_INFO "dfsi" on 
                                              "dfsi"."SUBMISSION_PERIOD_SID" = ${alias}."SID"
                                              AND "dfsi"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID"
             left join DATA_TYPE "dt1" on "ss1"."DATA_TYPE_SID" = "dt1"."SID"
             left join DATA_FILE "df" on "dfsi"."CUSTOMER_SID" = "df"."CUSTOMER_SID"
                                 AND "dfsi"."DATA_FILE_SID" = "df"."SID"
                                 AND "df"."DATA_TYPE" = "dt1"."TYPE"  
             where "ss1"."SID" = ${alias}."SUBMISSION_SCHEDULE_SID"
             AND "ss1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID")`
};

export const fileIdsSql = (alias: string) => { 
    return `(select
             listagg("df"."ID",',') within group (ORDER BY "df"."CREATE_DATE")
             from SUBMISSION_SCHEDULE "ss1"
             left join DATA_FILE_SUMMARY_INFO "dfsi" on 
                                              "dfsi"."SUBMISSION_PERIOD_SID" = ${alias}."SID"
                                              AND "dfsi"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID"
             left join DATA_TYPE "dt1" on "ss1"."DATA_TYPE_SID" = "dt1"."SID"
             left join DATA_FILE "df" on "dfsi"."CUSTOMER_SID" = "df"."CUSTOMER_SID"
                                 AND "dfsi"."DATA_FILE_SID" = "df"."SID"
                                 AND "df"."DELETED" = 0
                                 AND "df"."DATA_TYPE" = "dt1"."TYPE"  
             where "ss1"."SID" = ${alias}."SUBMISSION_SCHEDULE_SID"
             AND "ss1"."CUSTOMER_SID" = ${alias}."CUSTOMER_SID")`
};

export const filesCountSql = (alias: string) => {
     return `(select
          count(df.id) as number_of_files
          from SUBMISSION_SCHEDULE ss1
          left join DATA_FILE_SUMMARY_INFO dfsi on 
                                           dfsi.SUBMISSION_PERIOD_SID =  ${alias}.SID
                                           AND dfsi.CUSTOMER_SID = ${alias}.CUSTOMER_SID
          left join DATA_TYPE dt1 on ss1.DATA_TYPE_SID = dt1.SID
          left join DATA_FILE df on dfsi.CUSTOMER_SID = df.CUSTOMER_SID
                              AND dfsi.DATA_FILE_SID = df.SID
                              AND df.DELETED = 0
                              AND df.DATA_TYPE = dt1.TYPE
          where ss1.SID = ${alias}.SUBMISSION_SCHEDULE_SID
          AND ss1.CUSTOMER_SID= ${alias}.CUSTOMER_SID)`
}

export const filesDataSql = (alias:string) => {
     return `(select
          df.id,df.file_name,df.record_count,dfsi.num_slis,dfsi.num_ilis
          from SUBMISSION_SCHEDULE ss1
          left join DATA_FILE_SUMMARY_INFO dfsi on 
                                           dfsi.SUBMISSION_PERIOD_SID = ${alias}.SID
                                           AND dfsi.CUSTOMER_SID = ${alias}.CUSTOMER_SID
          left join DATA_TYPE dt1 on ss1.DATA_TYPE_SID = dt1.SID
          left join DATA_FILE df on dfsi.CUSTOMER_SID = df.CUSTOMER_SID
                              AND dfsi.DATA_FILE_SID = df.SID
                              AND df.DELETED = 0
                              AND df.DATA_TYPE = dt1.TYPE
          where ss1.SID = ${alias}.SUBMISSION_SCHEDULE_SID
          AND ss1.CUSTOMER_SID= ${alias}.CUSTOMER_SID)`
}
export const SubmissionDef = `
  type Query {
    submissionPeriods(
      offset: Float, 
      limit: Float,
      filters: SubmissionPeriodFilters,
      sort: SubmissionPeriodSort
    ): [SubmissionPeriod]
    submissionSchedules(
      offset: Float, 
      limit: Float,
      filters: SubmissionScheduleFilters, 
      sort: SubmissionScheduleSort
    ): [SubmissionSchedule]
    submissionScheduleNotifications(
      submissionScheduleSid: ID,
      offset: Float,
      limit: Float,
      filters: SubmissionScheduleNotificationFilters,
      sort: SubmissionScheduleNotificationSort
    ): [SubmissionScheduleNotification]
  }

  type Mutation {
    markNoData(data: [NoDataInput]): [MutationResponse] @auth(object: SubmissionPeriod)
  }

  type SubmissionPeriod {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    expectedDate: Date
    periodStartDate: Date
    periodEndDate: Date
    noData: Boolean
    noDataReason: String
    noDataCreateDate: Date
    onTimeOverride: Boolean
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Float
    trackingLevel: String
    submissionSchedule: SubmissionSchedule
    status: String
    reportedFlag: Boolean
    numberOfFiles: Float
    submissionPeriodLineItemView: SubmissionPeriodLineItemView
  }

  type SubmissionPeriodLineItemView {
    salesLineItemCount: Float
    invLineItemCount: Float
    earliestFileSubmissionDate: Date
  }
  
  type SubmissionSchedule {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    dataType: DataType    
    reportingPartner: Partner
    periodRule: String
    name: String
    startDate: Date
    endDate: Date
    expectedDay: String
    workingDays: String
    isInPeriodReporter: Boolean
    weekOfMonth: Float
    monthOfQuarter: Float
  }

  type DataType {
    sid: ID
    createDate: Date
    updateDate: Date
    type: String
  }

  type SubmissionScheduleNotification {
    sid: ID
    createDate: Date
    updateDate: Date
    customerSid: ID
    submissionScheduleSid: ID
    notificationType: NotificationType
    serviceUser: ServiceUser
  }

  enum NotificationType {
    PARSE_SUCCESS
    LATE
    PARSE_FAIL
    EXPECTED
  }

  input SubmissionPeriodFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    expectedDate: DateFilter
    periodStartDate: DateFilter
    periodEndDate: DateFilter
    submissionSchedule: SubmissionScheduleFilters
    status: StringFilter
    reportedFlag: BooleanFilter
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewFilter
  }
  
  input SubmissionPeriodLineItemViewFilter {
    salesLineItemCount: NumberFilter
    invLineItemCount: NumberFilter
    earliestFileSubmissionDate: DateFilter
  }

  input SubmissionScheduleFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    reportingPartner: PartnerFilter
    name: StringFilter
    dataType: DataTypeFilter
    periodRule: StringFilter
    expectedDay: StringFilter
    workingDays: StringFilter
    startDate: DateFilter
    endDate: DateFilter
    isInPeriodReporter: BooleanFilter
    weekOfMonth: NumberFilter
    monthOfQuarter: NumberFilter
  }

  input DataTypeFilter {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    type: StringFilter
  }

  input SubmissionScheduleNotificationFilters {
    sid: IDFilter
    createDate: DateFilter
    updateDate: DateFilter
    customerSid: IDFilter
    submissionScheduleSid: IDFilter
    notificationType: StringFilter
    serviceUser: ServiceUserFilters
  }

  input SubmissionPeriodSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    expectedDate: SortOption
    periodStartDate: SortOption
    periodEndDate: SortOption
    submissionSchedule: SubmissionScheduleSort
    status: SortOption
    reportedFlag: SortOption
    submissionPeriodLineItemView: SubmissionPeriodLineItemViewSort
  }
  
  input SubmissionPeriodLineItemViewSort {
    salesLineItemCount: SortOption
    invLineItemCount: SortOption
    earliestFileSubmissionDate: SortOption
  }

  input SubmissionScheduleSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    customerSid: SortOption
    dataType: DataTypeSort
    reportingPartner: PartnerSort
    periodRule: SortOption
    name: SortOption
    startDate: SortOption
    endDate: SortOption
    expectedDay: SortOption
    workingDays: SortOption
    isInPeriodReporter: SortOption
    weekOfMonth: SortOption
    monthOfQuarter: SortOption
  }

  input DataTypeSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    type: SortOption
  }

  input SubmissionScheduleNotificationSort {
    sid: SortOption
    createDate: SortOption
    updateDate: SortOption
    notificationType: SortOption
    serviceUser: ServiceUserSort
  }

  input NoDataInput {
    sid: ID!
    noDataReason: String
  }
`;
(select
             df.id,df.file_name,df.record_count,dfsi.num_slis,dfsi.num_ilis
             from SUBMISSION_SCHEDULE ss1
             left join DATA_FILE_SUMMARY_INFO dfsi on 
                                              dfsi.SUBMISSION_PERIOD_SID = :spsid
                                              AND dfsi.CUSTOMER_SID = :spcustomerSid
             left join DATA_TYPE dt1 on ss1.DATA_TYPE_SID = dt1.SID
             left join DATA_FILE df on dfsi.CUSTOMER_SID = df.CUSTOMER_SID
                                 AND dfsi.DATA_FILE_SID = df.SID
                                 AND df.DELETED = 0
                                 AND df.DATA_TYPE = dt1.TYPE
             where ss1.SID = :spsssid
             AND ss1.CUSTOMER_SID= :spcustomersid);--spsid is the input
select SUBMISSION_PERIOD_SID,dfsi.customer_sid,sp.submission_schedule_sid from DATA_FILE_SUMMARY_INFO dfsi right join submission_period sp on dfsi.SUBMISSION_PERIOD_SID = sp.sid order by SUBMISSION_PERIOD_SID asc;
select ss.sid as ss_sid,sp.sid as sp_sid,ss.customer_sid from submission_period sp join submission_schedule ss on ss.sid = sp.submission_schedule_sid;
(select
             count(df.id) as number_of_files
             from SUBMISSION_SCHEDULE ss1
             left join DATA_FILE_SUMMARY_INFO dfsi on 
                                              dfsi.SUBMISSION_PERIOD_SID = :spsid
                                              AND dfsi.CUSTOMER_SID = :spcustomerSid
             left join DATA_TYPE dt1 on ss1.DATA_TYPE_SID = dt1.SID
             left join DATA_FILE df on dfsi.CUSTOMER_SID = df.CUSTOMER_SID
                                 AND dfsi.DATA_FILE_SID = df.SID
                                 AND df.DELETED = 0
                                 AND df.DATA_TYPE = dt1.TYPE
             where ss1.SID = :spsssid
             AND ss1.CUSTOMER_SID= :spcustomersid);
/********* tables *************/

CREATE TABLE xsequences (
	id      int IDENTITY(1,1) NOT NULL,
	code    varchar(20) COLLATE SQL_Latin1_General_CP1_CI_AS NOT NULL,
	prefix  varchar(20) COLLATE SQL_Latin1_General_CP1_CI_AS DEFAULT '' NOT NULL,
	digits  int DEFAULT 1 NOT NULL,
	currsec int DEFAULT 0 NOT NULL,
	PRIMARY KEY (id)
);

CREATE INDEX ndx_seq_code_prf ON xsequences(code, prefix);

CREATE TABLE HISDBTST.dbo.xsequences_format (
	id      int IDENTITY(1,1) NOT NULL,
	code    varchar(20) COLLATE SQL_Latin1_General_CP1_CI_AS NOT NULL,
	prefix  varchar(20) COLLATE SQL_Latin1_General_CP1_CI_AS DEFAULT '' NOT NULL,
	digits  int DEFAULT 1 NOT NULL,
	PRIMARY KEY (id)
);

CREATE INDEX ndx_seq_fmt_code ON xsequences_format(code);




/************************ p_sequence: Sequence generator ***********/
/*
code            prefix          digits (zero filled)
--------------  --------------  ------
PATRECORD       yyMM-           4
EMERGENCY       "EM"yyMMdd-     3
OUTPATIENT      "OP"yyMMdd-     3
INPATIENT       "IP"yyMMdd-     3
REQUIREMENT     "RQ"yyMMdd-     3
*/

CREATE procedure [dbo].p_sequence(
	@code   varchar(20), 
	@date   datetime, 
	@result varchar(100) output
)
as
begin
	declare @currsec int;
	declare @prefix varchar(20);
	declare @digits int;

	select @prefix=prefix,
	        @digits=digits
	from xsequences_format sf 
	where code=@code;

	set @prefix = format(isnull(@date,getdate()), @prefix);
	
	insert into xsequences(code, prefix, currsec)
	select @code, @prefix, 0
	WHERE NOT EXISTS(SELECT ID FROM xsequences WHERE code=@code and prefix=@prefix);	

	update dbo.xsequences 
	set currsec=currsec + 1, @currsec = currsec + 1
	WHERE code=@code and prefix=@prefix;

	set @result = isnull(@prefix,'') + dbo.lpad(@currsec, @digits);
end;
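
/* Usage sketch (illustrative, not part of the original snippet): assumes the
   EMERGENCY row from the format table above is present in xsequences_format
   (prefix '"EM"yyMMdd-', digits 3) and that the dbo.lpad helper called by
   p_sequence exists. */
DECLARE @no varchar(100);

-- first call for that date seeds the counter and returns e.g. 'EM240115-001'
EXEC dbo.p_sequence @code = 'EMERGENCY', @date = '20240115', @result = @no OUTPUT;
SELECT @no AS RecordNo;

-- a second call for the same date increments the counter: 'EM240115-002'
EXEC dbo.p_sequence @code = 'EMERGENCY', @date = '20240115', @result = @no OUTPUT;
SELECT @no AS RecordNo;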



/********************* trigger example **************************/

CREATE TRIGGER [dbo].t_patient_record  ON  [dbo].patient 
for insert not for replication 
AS 
BEGIN
	SET NOCOUNT ON;
	
	declare @RecordNo varchar(30)
	declare @Date datetime = GETDATE();

	-- select @Date = Creation from inserted;

    /***/
	exec p_sequence 'RECORD', @Date, @RecordNo output;
	/***/
	
	update patient set RecordNo=@RecordNo where PatientId = (select PatientId from inserted)
END
SELECT

  LEFT(CONVERT(varchar,[REQ ITEM CHECKIN DATETIME], 120),16) as วันที่รับแล็บ
     ,LEFT(CONVERT(varchar,[ORDER DATETIME], 120),16) as วันที่สั่ง
     ,[HN]
     ,[LN]
     ,[FULLNAME] as [ชื่อ-สกุล]
     ,[YEAR] as ปี
     ,[SEX] as เพศ
     ,[WARD NAME] as จุดที่สั่ง
     ,[REQ ITEM GROUP CODE] as CODE
     ,[RES ITEM NAME] as รายการตรวจ
     ,[RES ITEM RESULT] as ผลการตรวจ
     ,[RES ITEM STATE] as state
     ,[ORDER COMMENT] as comment



  FROM [LAB_DB].[dbo].[view_lab_statistic_Result_List]

WHERE 
  [ORDER INACTIVE] ='n' 
     AND ([REQ ITEM GROUP CODE] ='SP' or [REQ ITEM GROUP CODE] ='PA')
     AND ([REQ STATE] ='a' or [REQ STATE] ='u')
	 
	 
	 --[REQ ITEM CHECKIN DATETIME]
	 --Checkin
CASE WHEN "%CH201%" = "<10" AND "%CH202%" = "<10" THEN "<30"
     WHEN "%CH201%" = "<10" AND "%CH202%" <> "" THEN "<30" 
	 WHEN "%CH201%" = "10" AND "%CH202%" = "<10" THEN "50"    
     WHEN "%CH201%" = "10" AND "%CH202%" = "10" THEN "100" 
	 WHEN "%CH201%" = "10" AND "%CH202%" = "50" THEN "20" 
	 WHEN "%CH201%" = "10" AND "%CH202%" = "100" THEN "10" 
	 WHEN "%CH201%" = "10" AND "%CH202%" = "200" THEN "5" 
	 WHEN "%CH201%" = "10" AND "%CH202%" = "300" THEN "3" 
	 WHEN "%CH201%" = "10" AND "%CH202%" = ">300" THEN "<30" 
	 WHEN "%CH201%" = "30" AND "%CH202%" = "<10" THEN "150" 
	 WHEN "%CH201%" = "30" AND "%CH202%" = "10" THEN "300" 
	 WHEN "%CH201%" = "30" AND "%CH202%" = "50" THEN "60" 
	 WHEN "%CH201%" = "30" AND "%CH202%" = "100" THEN "30"
	 WHEN "%CH201%" = "30" AND "%CH202%" = "200" THEN "15"
	 WHEN "%CH201%" = "30" AND "%CH202%" = "300" THEN "10"
	 WHEN "%CH201%" = "30" AND "%CH202%" = ">300" THEN "<30"
	 WHEN "%CH201%" = "80" AND "%CH202%" = "<10" THEN ">300"
	 WHEN "%CH201%" = "80" AND "%CH202%" = "10" THEN ">300"
	 WHEN "%CH201%" = "80" AND "%CH202%" = "50" THEN "160"
	 WHEN "%CH201%" = "80" AND "%CH202%" = "100" THEN "80"
	 WHEN "%CH201%" = "80" AND "%CH202%" = "200" THEN "40"
	 WHEN "%CH201%" = "80" AND "%CH202%" = "300" THEN "27"
	 WHEN "%CH201%" = "80" AND "%CH202%" = "<30" THEN "<30"
	 WHEN "%CH201%" = "150" AND "%CH202%" = "<10" THEN ">300"
	 WHEN "%CH201%" = "150" AND "%CH202%" = "10" THEN ">300"
	 WHEN "%CH201%" = "150" AND "%CH202%" = "50" THEN "300"
	 WHEN "%CH201%" = "150" AND "%CH202%" = "100" THEN "150"
	 WHEN "%CH201%" = "150" AND "%CH202%" = "200" THEN "75"
	 WHEN "%CH201%" = "150" AND "%CH202%" = "300" THEN "50"
	 WHEN "%CH201%" = "150" AND "%CH202%" = ">300" THEN "<30"
	 WHEN "%CH201%" = ">150" AND "%CH202%" <> "" THEN ">300"
 ELSE "N/A" END
 [SOURCE CODE]

[WARD]

[PRIORITY]

[PATIENT TYPE CODE]

[YEAR]

[SEX]

[REQ ITEM ORDER DATETIME]
declare @value decimal(10,2)
set @value = (select 
CASE WHEN %CH003% = 0 THEN 0 
WHEN {AGE,YEAR} > 130.0 THEN 0 
WHEN {AGE,YEAR} < 18.0 THEN ((0.41 * {HEIGHT}) / %CH003%) 
WHEN {SEX} = "M" AND %CH003% <= 0.9 THEN ((141 * (POWER((%CH003% / 0.9), -0.411))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "M" AND %CH003% > 0.9 THEN ((141 * (POWER((%CH003% / 0.9), -1.209))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "F" AND %CH003% <= 0.7 THEN ((144 * (POWER((%CH003% / 0.7), -0.329))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "F" AND %CH003% > 0.7 THEN ((144 * (POWER((%CH003% / 0.7), -1.209))) * POWER(0.993, {AGE,YEAR})) ELSE 0 END)

SELECT CASE WHEN @value > 90.0 THEN "Stage G1"      
WHEN @value >= 60.00 AND @value <= 89.99 THEN "Stage G2"      
WHEN @value >= 45.00 AND @value <= 59.99 THEN "Stage G3a"      
WHEN @value >= 30.00 AND @value <= 44.99 THEN "Stage G3b"      
WHEN @value >= 15.00 AND @value <= 29.99 THEN "Stage G4"      
WHEN @value <  15.00 THEN "Stage G5"      
ELSE "N/A" END
taskkill /F /IM  I-CN-HIS.exe /T


D:

cd  D:\LisInterface\HIS\i-CN-HIS XML

start i-CN-HIS.exe
CREATE TABLE SECUENCIA(
	ID INTEGER PRIMARY KEY,
	CODIGO VARCHAR(60) NOT NULL,
	PREFIJO VARCHAR(60),
	DESCRIPCION VARCHAR(255) NOT NULL,
	NUMERO INTEGER DEFAULT 0 NOT NULL
);

CREATE SEQUENCE SECUENCIA_ID_GEN;

CREATE TRIGGER T_SECUENCIA_BI
FOR SECUENCIA BEFORE INSERT
AS
BEGIN
  IF (NEW.ID IS NULL) THEN
      NEW.ID = GEN_ID(SECUENCIA_ID_GEN, 1);
END;


CREATE OR ALTER PROCEDURE SP_GENERA_SECUENCIA(CODIGO VARCHAR(60)) 
RETURNS (
	SECUENCIA INTEGER
)
AS
DECLARE SEC INTEGER;
BEGIN
  UPDATE SECUENCIA 
  	SET NUMERO = NUMERO + 1 
  WHERE CODIGO = :CODIGO
  RETURNING NUMERO INTO :SECUENCIA;

  IF (SECUENCIA IS NULL) THEN
  BEGIN
  	INSERT INTO SECUENCIA(CODIGO, DESCRIPCION, NUMERO) VALUES (:CODIGO, :CODIGO, 1);
  	SECUENCIA = 1;
  END
  SUSPEND;
END;

-- Example usage
/*
 * SELECT SECUENCIA FROM SP_GENERA_SECUENCIA('PRUEBA');
 * or .. NEW.SECUENCIA = (SELECT SECUENCIA FROM SP_GENERA_SECUENCIA('PRUEBA'))
 */
dotnet ef dbcontext scaffold "Server=[SERVIDOR]\[BANCO];Database=[BASE];Integrated Security=True;" Microsoft.EntityFrameworkCore.SqlServer -o ModelosEfCore --use-database-names --no-pluralize --force --data-annotations
//เด็กคอม www.dek-com.com

const ACCESS_TOKEN = "ACCESS_TOKEN";

const bot = new LineBotSdk.client(ACCESS_TOKEN);

function doPost(e) { bot.call(e, callback) };

function callback(e) {
  if (e.message.type == "text") {
    bot.replyMessage(e, [bot.textMessage(bard(e.message.text))]);
  }
};

//เด็กคอม www.dek-com.com

function bard(prompt) {
  var promptEN = LanguageApp.translate(prompt, 'th', 'en'); // addition point 1: translate the Thai prompt to English

  var api_key = "API KEY"; // Bard/PaLM 2 (text-bison) API key
  var url = "https://generativelanguage.googleapis.com/v1beta2/models/text-bison-001:generateText?key=" + api_key;

  var headers = {
    "Content-Type": "application/json"
  };

  var requestBody = {
    "prompt": {
      "text": promptEN
    }
  };

  var options = {
    "method": "POST",
    "headers": headers,
    "payload": JSON.stringify(requestBody)
  };

  var response = UrlFetchApp.fetch(url, options);
  var data = JSON.parse(response.getContentText());
  var output = data.candidates[0].output;

  return LanguageApp.translate(output, 'en', 'th'); // addition point 2: translate the answer back to Thai
}

//เด็กคอม www.dek-com.com (remember to re-deploy the script after making these changes 😊)

-- Kill all running queries
SELECT pg_cancel_backend(pid) FROM pg_stat_activity WHERE state = 'active' and pid <> pg_backend_pid();

-- Kill specific query
SELECT * FROM pg_stat_activity WHERE state = 'active';
SELECT pg_terminate_backend(<PID>);
SELECT column_name, data_type
--SELECT CONCAT(column_name, ',')
FROM   information_schema.columns
WHERE  table_name = 'foo'
ORDER  BY ordinal_position;
$start = microtime(true);
for($i = 0; $i < 3000; $i++)
{
    mysqli_query($res, "insert into app__debuglog VALUE (null,now(), 'msg : $i','callstack','user','debug_speed','vars')");
}
$end = microtime(true);
echo "Took " . ($end - $start) . " s\n";
SELECT setval('"table_name_id_seq"', (SELECT MAX(id) FROM table_name)+1);
CREATE VIEW SEQUENCE_VIEW AS
  SELECT NAME
       , current_value AS CURRVAL
       , current_value + increment AS NEXTVAL
  FROM sys.sequences;
UPDATE dbo.tbl_lab_setup_Source_Definition
SET Source_Definition_Name = CASE

			WHEN Source_Definition_Code = '000'	  THEN		'ไม่ทราบฝ่าย'
			WHEN Source_Definition_Code = '001'   THEN      'ความดัน'
			WHEN Source_Definition_Code = '002'   THEN      'เบาหวาน'
			WHEN Source_Definition_Code = '003'   THEN      'โรคปอด'
			WHEN Source_Definition_Code = '004'   THEN      'จุดซักประวัติ VIP'
			WHEN Source_Definition_Code = '005'   THEN      'ฝ่ายทันตสาธารณสุข'
			WHEN Source_Definition_Code = '006'   THEN      'หน้าห้องฉุกเฉิน'
			WHEN Source_Definition_Code = '007'   THEN      'ห้อง LAB'
			WHEN Source_Definition_Code = '008'   THEN      'ฝ่าย Computer'
			WHEN Source_Definition_Code = '009'   THEN      'ตึกผู้ป่วยใน'
			WHEN Source_Definition_Code = '010'   THEN      'จุดซักประวัติOPD1'
			WHEN Source_Definition_Code = '011'   THEN      'ห้อง Emergency Room (ER)'
			WHEN Source_Definition_Code = '012'   THEN      'ฝ่าย X-Ray'
			WHEN Source_Definition_Code = '013'   THEN      'ฝ่ายเวชระเบียน'
			WHEN Source_Definition_Code = '014'   THEN      'ห้องตรวจโรค'
			WHEN Source_Definition_Code = '015'   THEN      'CUPS'
			WHEN Source_Definition_Code = '016'   THEN      'ห้องจ่ายเงินผู้ป่วยนอก'
			WHEN Source_Definition_Code = '017'   THEN      'ห้องคลอด'
			WHEN Source_Definition_Code = '018'   THEN      'ห้องจ่ายกลาง'
			WHEN Source_Definition_Code = '019'   THEN      'ห้องตรวจโรค1'
			WHEN Source_Definition_Code = '020'   THEN      'ห้องตรวจโรค2'
			WHEN Source_Definition_Code = '021'   THEN      'ห้องตรวจโรค 3'
			WHEN Source_Definition_Code = '022'   THEN      'ห้องงานสุขศึกษาประชาสัมพันธ์'
			WHEN Source_Definition_Code = '023'   THEN      'จุดชักประวัติ ARV'
			WHEN Source_Definition_Code = '024'   THEN      'โรงครัว'
			WHEN Source_Definition_Code = '025'   THEN      'ห้องทำหัตถการผู้ป่วยนอก'
			WHEN Source_Definition_Code = '026'   THEN      'จุดซักประวัติOPD2'
			WHEN Source_Definition_Code = '027'   THEN      'ห้องตรวจโรค 4'
			WHEN Source_Definition_Code = '028'   THEN      'ห้องตรวจโรค 5'
			WHEN Source_Definition_Code = '029'   THEN      'ห้องตรวจโรค 6'
			WHEN Source_Definition_Code = '030'   THEN      'ห้องจ่ายยาผู้ป่วยนอก'
			WHEN Source_Definition_Code = '031'   THEN      'จุดเรียกคิวสูตินารีเวช'
			WHEN Source_Definition_Code = '032'   THEN      'ห้องตรวจสูตินารีเวช'
			WHEN Source_Definition_Code = '033'   THEN      'ห้องตรวจโรคหัวใจ'
			WHEN Source_Definition_Code = '034'   THEN      'ห้องตรวจสุขภาพเด็กดี'
			WHEN Source_Definition_Code = '035'   THEN      'งานป้องกันโรค'
			WHEN Source_Definition_Code = '036'   THEN      'ห้องตรวจครรภ์'
			WHEN Source_Definition_Code = '037'   THEN      'ห้องตรวจจิตเวช'
			WHEN Source_Definition_Code = '038'   THEN      'NB (ทารกแรกเกิด)'
			WHEN Source_Definition_Code = '039'   THEN      'ห้องตรวจโรค7'
			WHEN Source_Definition_Code = '040'   THEN      'ห้องฉีดยา'
			WHEN Source_Definition_Code = '041'   THEN      'แพทย์แผนไทย'
			WHEN Source_Definition_Code = '042'   THEN      'กายภาพบำบัด'
			WHEN Source_Definition_Code = '043'   THEN      'Asthma'
			WHEN Source_Definition_Code = '044'   THEN      'ห้องตรวจโรค9'
			WHEN Source_Definition_Code = '045'   THEN      'ห้องตรวจโรค ARV'
			WHEN Source_Definition_Code = '046'   THEN      'HHC'
			WHEN Source_Definition_Code = '047'   THEN      'วัยทอง'
			WHEN Source_Definition_Code = '048'   THEN      'ฝ่ายส่งเสริมสุขภาพ'
			WHEN Source_Definition_Code = '049'   THEN      'ห้องตรวจโรค10'
			WHEN Source_Definition_Code = '050'   THEN      'จุดเรียกคิวคลีกนิคพิเศษ'
			WHEN Source_Definition_Code = '051'   THEN      'จุดเรียกคิว OPD'
			WHEN Source_Definition_Code = '052'   THEN      'Exit Nurse'
			WHEN Source_Definition_Code = '053'   THEN      'ห้องฟ้าใส'
			WHEN Source_Definition_Code = '054'   THEN      'ห้องผ่าตัด'
			WHEN Source_Definition_Code = '055'   THEN      'ห้องตรวจโรค8'
			WHEN Source_Definition_Code = '056'   THEN      'จุดซักประวัติตรวจสุขภาพประจำปี'
			WHEN Source_Definition_Code = '057'   THEN      'คลินิกผู้สูงอายุ'
			WHEN Source_Definition_Code = '058'   THEN      'ห้องจ่ายยาคลินิกพิเศษ'
			WHEN Source_Definition_Code = '059'   THEN      'ห้องตรวจธาลัสซีเมียในเด็ก'
			WHEN Source_Definition_Code = '060'   THEN      'คลิกนิคไข้หวัด'
			WHEN Source_Definition_Code = '061'   THEN      'Bloodbank'
			WHEN Source_Definition_Code = '062'   THEN      'จุดซักประวัติกุมารเวชกรรม'
			WHEN Source_Definition_Code = '063'   THEN      'จุดเรียกคิวกุมารเวชกรรม'
			WHEN Source_Definition_Code = '064'   THEN      'ห้องอัลตราชาวด์'
			WHEN Source_Definition_Code = '065'   THEN      'ห้องตรวจคลินิกพิเศษ1'
			WHEN Source_Definition_Code = '066'   THEN      'ห้องตรวจคลินิกพิเศษ2'
			WHEN Source_Definition_Code = '067'   THEN      'ห้องตรวจคลินิกพิเศษ3'
			WHEN Source_Definition_Code = '068'   THEN      'ห้องตรวจคลินิกพิเศษ4'
			WHEN Source_Definition_Code = '069'   THEN      'ห้องตรวจคลินิกพิเศษ5'
			WHEN Source_Definition_Code = '070'   THEN      'ห้องตรวจคลินิกพิเศษ6'
			WHEN Source_Definition_Code = '071'   THEN      'ห้องตรวจคลินิกพิเศษ7'
			WHEN Source_Definition_Code = '072'   THEN      'ห้องตรวจคลินิกพิเศษ8'
			WHEN Source_Definition_Code = '073'   THEN      'จุดเรียกคิวเบาหวาน+ความดัน'
			WHEN Source_Definition_Code = '074'   THEN      'จุดซักประวัติผู้ป่วยนอก2(ผู้ป่วยนัด)'
			WHEN Source_Definition_Code = '075'   THEN      'จุดซักประวัติผู้ป่วยนอก3(ผู้ป่วยมีไข้)'
			WHEN Source_Definition_Code = '076'   THEN      'คลินิกลดพุง'
			WHEN Source_Definition_Code = '077'   THEN      'จุดเรียกคิวผู้สูงอายุ'
			WHEN Source_Definition_Code = '078'   THEN      'Teen friendly'
			WHEN Source_Definition_Code = '079'   THEN      'ห้องตรวจส่งเสริม'
			WHEN Source_Definition_Code = '080'   THEN      'ตึกเด็ก'
			WHEN Source_Definition_Code = '081'   THEN      'ตึกชาย'
			WHEN Source_Definition_Code = '082'   THEN      'ตึกหญิง'
			WHEN Source_Definition_Code = '083'   THEN      'ตึกสูติ-นารีเวช'
			WHEN Source_Definition_Code = '084'   THEN      'ตึกศัลยกรรม'
			WHEN Source_Definition_Code = '085'   THEN      'จุดเรียกคิวศัลยกรรม'
			WHEN Source_Definition_Code = '086'   THEN      'จุดเรียกคิวศัลยกรรมเด็ก'
			WHEN Source_Definition_Code = '087'   THEN      '70 ปีไม่มีคิว'
			WHEN Source_Definition_Code = '088'   THEN      'ห้องวางแผนครอบครัว'
			WHEN Source_Definition_Code = '089'   THEN      'โรคเรื้อรัง'
			WHEN Source_Definition_Code = '090'   THEN      'ห้องตรวจผู้ป่วยCAPD'
			WHEN Source_Definition_Code = '091'   THEN      'ห้องตรวจผู้ป่วยHD'
			WHEN Source_Definition_Code = '092'   THEN      'จุดซักประวัติไตวายเรื้อรัง'
			WHEN Source_Definition_Code = '093'   THEN      'คลินิคพบแพทย์ส่งเสริม'
			WHEN Source_Definition_Code = '094'   THEN      'คลินิคWarfarin'
			WHEN Source_Definition_Code = '095'   THEN      'กลุ่มเสี่ยง-ทางเดินหายใจ'
			WHEN Source_Definition_Code = '096'   THEN      'ศัลยกรรมกระดูก'
			WHEN Source_Definition_Code = '097'   THEN      'ตึกผู้ป่วยพิเศษชั้น6'
			WHEN Source_Definition_Code = '098'   THEN      'จุดซักประวัติธาลัสซีเมียเด็ก'
			WHEN Source_Definition_Code = '099'   THEN      'คลินิกฝังเข็ม'
			WHEN Source_Definition_Code = '100'   THEN      'ERเสริม'
			WHEN Source_Definition_Code = '101'   THEN      'ตึกศัลยกรรมกระดูก'
			WHEN Source_Definition_Code = '102'   THEN      'ตึกICU'
			WHEN Source_Definition_Code = '103'   THEN      'ห้องผ่าตัดเล็ก'
			WHEN Source_Definition_Code = '104'   THEN      'จุดเรียกคิวConsultนอกเวลา'
			WHEN Source_Definition_Code = '105'   THEN      'ห้องให้คำปรึกษา'
			WHEN Source_Definition_Code = '106'   THEN      'ตึกCOHORT'
			WHEN Source_Definition_Code = '107'   THEN      'คลิกนิกARI'
			WHEN Source_Definition_Code = '108'   THEN      'ARI ศูนย์พักคอย'
			WHEN Source_Definition_Code = '109'   THEN      'ห้องตรวจARI'
			WHEN Source_Definition_Code = '110'   THEN      'คลินิกกัญชา(พฤหัสบดี)'
			WHEN Source_Definition_Code = '111'   THEN      'ตึกผู้ป่วยพิเศษชั้น5'
			WHEN Source_Definition_Code = '112'   THEN      'จุดซักประวัติortho'
			WHEN Source_Definition_Code = '113'   THEN      'คลินิควัคซีน Covid 19'
			WHEN Source_Definition_Code = '114'   THEN      'ห้องตรวจเบาหวาน'
			WHEN Source_Definition_Code = '115'   THEN      'ห้องตรวจความดันโลหิตสูง'
			WHEN Source_Definition_Code = '116'   THEN      'ห้องตรวจโรคไตวายเรื้อรัง'
			WHEN Source_Definition_Code = '117'   THEN      'จุดซักประวัติCAPD'
			WHEN Source_Definition_Code = '118'   THEN      'จุดซักประวัติPalliative Care'
			WHEN Source_Definition_Code = '119'   THEN      'ห้องจ่ายยาTeleMedicine'
			WHEN Source_Definition_Code = '120'   THEN      'ตึกCOHORT2(ICUเก่า)'
			WHEN Source_Definition_Code = '121'   THEN      'ตึกCOHORT3(ชั้น6)'
			WHEN Source_Definition_Code = '122'   THEN      'ห้องยาศูนย์พักคอย'
			WHEN Source_Definition_Code = '123'   THEN      'ห้องจ่ายยา NCD'
			WHEN Source_Definition_Code = '124'   THEN      'ห้องจ่ายเงิน NCD'
			WHEN Source_Definition_Code = '125'   THEN      'ห้องจ่ายยาผู้ป่วยใน'
			WHEN Source_Definition_Code = '126'   THEN      'จุดซักประวัติคลินิควัคซีนCovid 19'
			WHEN Source_Definition_Code = '127'   THEN      'จุดซักประวัติ Self Isolation'
			WHEN Source_Definition_Code = '128'   THEN      'ห้องตรวจ Self Isolation'
			WHEN Source_Definition_Code = '129'   THEN      'จุดซักประวัติ Home Isolation'
			WHEN Source_Definition_Code = '130'   THEN      'ห้องตรวจ Home Isolation'
			WHEN Source_Definition_Code = '131'   THEN      'ห้องจำหน่ายผู้ป่วย Home Isolatio'
			WHEN Source_Definition_Code = '132'   THEN      'จุดซักประวัติERบ่ายเสริม'
			WHEN Source_Definition_Code = '133'   THEN      'ห้องตรวจสุขภาพจิตเด็กและวัยรุ่น'
			WHEN Source_Definition_Code = '134'   THEN      'จุดซักประวัติสุขภาพจิต'
			WHEN Source_Definition_Code = '135'   THEN      'จุดซักประวัติฝากครรภ์'
			WHEN Source_Definition_Code = '136'   THEN      'ห้องให้คำปรึกษาสุขภาพจิต'
			WHEN Source_Definition_Code = '137'   THEN      'ห้องรอฉีดยา'
			WHEN Source_Definition_Code = '138'   THEN      'ห้องตรวจพัฒนาการเด็ก'
			WHEN Source_Definition_Code = '139'   THEN      'CTscan'
			WHEN Source_Definition_Code = '999'   THEN      'กลับบ้าน'
			WHEN Source_Definition_Code = '01'    THEN      'ตึกชาย'
			WHEN Source_Definition_Code = '02'    THEN      'ตึกหญิง'
			WHEN Source_Definition_Code = '03'    THEN      'ตึกเด็ก'
			WHEN Source_Definition_Code = '05'    THEN      'ตึกคลอด'
			WHEN Source_Definition_Code = '06'    THEN      'ตึกผู้ป่วยพิเศษชั้น6'
			WHEN Source_Definition_Code = '07'    THEN      'ตึกสูติ-นารีเวช'
			WHEN Source_Definition_Code = '08'    THEN      'ตึกศัลยกรรม'
			WHEN Source_Definition_Code = '09'    THEN      'ตึกศัลยกรรมกระดูก'
			WHEN Source_Definition_Code = '10'    THEN      'ตึกICU'
			WHEN Source_Definition_Code = '11'    THEN      'ตึกติดเชื้อ'
			WHEN Source_Definition_Code = '12'    THEN      'ตึกผู้ป่วยพิเศษชั้น5'
			WHEN Source_Definition_Code = '13'    THEN      'ตึกCOHORT3(ชั้น6)'
			WHEN Source_Definition_Code = '14'    THEN      'ตึกCOHORT2(ICUเก่า)'
			WHEN Source_Definition_Code = '15'    THEN      'CIสังขะ_HI'
			WHEN Source_Definition_Code = '16'    THEN      'CIกระเทียม_KT'
			WHEN Source_Definition_Code = '17'    THEN      'CIทับทัน_TA'
			WHEN Source_Definition_Code = '18'    THEN      'CIเทพรักษา_TR'
			WHEN Source_Definition_Code = '19'    THEN      'CIขอนแตก_KK'
			WHEN Source_Definition_Code = '20'    THEN      'CIพระแก้ว_PK'
			WHEN Source_Definition_Code = '21'    THEN      'CIดม_DM'
			WHEN Source_Definition_Code = '22'    THEN      'CIสะกาด_SK'
			WHEN Source_Definition_Code = '23'    THEN      'CIตาคง_TK'
			WHEN Source_Definition_Code = '24'    THEN      'CIตาตุม+ตาแตรว_TT'
			WHEN Source_Definition_Code = '25'    THEN      'CIบ้านจารย์_BJ'
			WHEN Source_Definition_Code = '26'    THEN      'CIชบ_CP'
			WHEN Source_Definition_Code = '27'    THEN      'CIโดง_DO'
			WHEN Source_Definition_Code = '28'    THEN      'ตึกAKT'
			WHEN Source_Definition_Code = '29'    THEN      'ตึกCOHORT4'
			WHEN Source_Definition_Code = '30'    THEN      'ทารกแรกเกิด(Neonatal ward)'
			
			
	ELSE Source_Definition_Name -- If no condition matches, keep the original value
END,
Source_Group_Code = CASE

			WHEN Source_Definition_Code = '000'	  THEN	'OPD'
            WHEN Source_Definition_Code = '001'   THEN  'OPD'
            WHEN Source_Definition_Code = '002'   THEN  'OPD'
            WHEN Source_Definition_Code = '003'   THEN  'OPD'
            WHEN Source_Definition_Code = '004'   THEN  'OPD'
            WHEN Source_Definition_Code = '005'   THEN  'OPD'
            WHEN Source_Definition_Code = '006'   THEN  'OPD'
            WHEN Source_Definition_Code = '007'   THEN  'OPD'
            WHEN Source_Definition_Code = '008'   THEN  'OPD'
            WHEN Source_Definition_Code = '009'   THEN  'IPD'
            WHEN Source_Definition_Code = '010'   THEN  'OPD'
            WHEN Source_Definition_Code = '011'   THEN  'OPD'
            WHEN Source_Definition_Code = '012'   THEN  'OPD'
            WHEN Source_Definition_Code = '013'   THEN  'OPD'
            WHEN Source_Definition_Code = '014'   THEN  'OPD'
            WHEN Source_Definition_Code = '015'   THEN  'OPD'
            WHEN Source_Definition_Code = '016'   THEN  'OPD'
            WHEN Source_Definition_Code = '017'   THEN  'IPD'
            WHEN Source_Definition_Code = '018'   THEN  'OPD'
            WHEN Source_Definition_Code = '019'   THEN  'OPD'
            WHEN Source_Definition_Code = '020'   THEN  'OPD'
            WHEN Source_Definition_Code = '021'   THEN  'OPD'
            WHEN Source_Definition_Code = '022'   THEN  'OPD'
            WHEN Source_Definition_Code = '023'   THEN  'OPD'
            WHEN Source_Definition_Code = '024'   THEN  'OPD'
            WHEN Source_Definition_Code = '025'   THEN  'OPD'
            WHEN Source_Definition_Code = '026'   THEN  'OPD'
            WHEN Source_Definition_Code = '027'   THEN  'OPD'
            WHEN Source_Definition_Code = '028'   THEN  'OPD'
            WHEN Source_Definition_Code = '029'   THEN  'OPD'
            WHEN Source_Definition_Code = '030'   THEN  'OPD'
            WHEN Source_Definition_Code = '031'   THEN  'OPD'
            WHEN Source_Definition_Code = '032'   THEN  'OPD'
            WHEN Source_Definition_Code = '033'   THEN  'OPD'
            WHEN Source_Definition_Code = '034'   THEN  'OPD'
            WHEN Source_Definition_Code = '035'   THEN  'OPD'
            WHEN Source_Definition_Code = '036'   THEN  'OPD'
            WHEN Source_Definition_Code = '037'   THEN  'OPD'
            WHEN Source_Definition_Code = '038'   THEN  'IPD'
            WHEN Source_Definition_Code = '039'   THEN  'OPD'
            WHEN Source_Definition_Code = '040'   THEN  'OPD'
            WHEN Source_Definition_Code = '041'   THEN  'OPD'
            WHEN Source_Definition_Code = '042'   THEN  'OPD'
            WHEN Source_Definition_Code = '043'   THEN  'OPD'
            WHEN Source_Definition_Code = '044'   THEN  'OPD'
            WHEN Source_Definition_Code = '045'   THEN  'OPD'
            WHEN Source_Definition_Code = '046'   THEN  'OPD'
            WHEN Source_Definition_Code = '047'   THEN  'OPD'
            WHEN Source_Definition_Code = '048'   THEN  'OPD'
            WHEN Source_Definition_Code = '049'   THEN  'OPD'
            WHEN Source_Definition_Code = '050'   THEN  'OPD'
            WHEN Source_Definition_Code = '051'   THEN  'OPD'
            WHEN Source_Definition_Code = '052'   THEN  'OPD'
            WHEN Source_Definition_Code = '053'   THEN  'OPD'
            WHEN Source_Definition_Code = '054'   THEN  'IPD'
            WHEN Source_Definition_Code = '055'   THEN  'OPD'
            WHEN Source_Definition_Code = '056'   THEN  'OPD'
            WHEN Source_Definition_Code = '057'   THEN  'OPD'
            WHEN Source_Definition_Code = '058'   THEN  'OPD'
            WHEN Source_Definition_Code = '059'   THEN  'OPD'
            WHEN Source_Definition_Code = '060'   THEN  'OPD'
            WHEN Source_Definition_Code = '061'   THEN  'OPD'
            WHEN Source_Definition_Code = '062'   THEN  'OPD'
            WHEN Source_Definition_Code = '063'   THEN  'OPD'
            WHEN Source_Definition_Code = '064'   THEN  'OPD'
            WHEN Source_Definition_Code = '065'   THEN  'OPD'
            WHEN Source_Definition_Code = '066'   THEN  'OPD'
            WHEN Source_Definition_Code = '067'   THEN  'OPD'
            WHEN Source_Definition_Code = '068'   THEN  'OPD'
            WHEN Source_Definition_Code = '069'   THEN  'OPD'
            WHEN Source_Definition_Code = '070'   THEN  'OPD'
            WHEN Source_Definition_Code = '071'   THEN  'OPD'
            WHEN Source_Definition_Code = '072'   THEN  'OPD'
            WHEN Source_Definition_Code = '073'   THEN  'OPD'
            WHEN Source_Definition_Code = '074'   THEN  'OPD'
            WHEN Source_Definition_Code = '075'   THEN  'OPD'
            WHEN Source_Definition_Code = '076'   THEN  'OPD'
            WHEN Source_Definition_Code = '077'   THEN  'OPD'
            WHEN Source_Definition_Code = '078'   THEN  'OPD'
            WHEN Source_Definition_Code = '079'   THEN  'OPD'
            WHEN Source_Definition_Code = '080'   THEN  'IPD'
            WHEN Source_Definition_Code = '081'   THEN  'IPD'
            WHEN Source_Definition_Code = '082'   THEN  'IPD'
            WHEN Source_Definition_Code = '083'   THEN  'IPD'
            WHEN Source_Definition_Code = '084'   THEN  'IPD'
            WHEN Source_Definition_Code = '085'   THEN  'OPD'
            WHEN Source_Definition_Code = '086'   THEN  'OPD'
            WHEN Source_Definition_Code = '087'   THEN  'OPD'
            WHEN Source_Definition_Code = '088'   THEN  'OPD'
            WHEN Source_Definition_Code = '089'   THEN  'OPD'
            WHEN Source_Definition_Code = '090'   THEN  'OPD'
            WHEN Source_Definition_Code = '091'   THEN  'OPD'
            WHEN Source_Definition_Code = '092'   THEN  'OPD'
            WHEN Source_Definition_Code = '093'   THEN  'OPD'
            WHEN Source_Definition_Code = '094'   THEN  'OPD'
            WHEN Source_Definition_Code = '095'   THEN  'OPD'
            WHEN Source_Definition_Code = '096'   THEN  'IPD'
            WHEN Source_Definition_Code = '097'   THEN  'IPD'
            WHEN Source_Definition_Code = '098'   THEN  'OPD'
            WHEN Source_Definition_Code = '099'   THEN  'OPD'
            WHEN Source_Definition_Code = '100'   THEN  'OPD'
            WHEN Source_Definition_Code = '101'   THEN  'IPD'
            WHEN Source_Definition_Code = '102'   THEN  'IPD'
            WHEN Source_Definition_Code = '103'   THEN  'IPD'
            WHEN Source_Definition_Code = '104'   THEN  'OPD'
            WHEN Source_Definition_Code = '105'   THEN  'OPD'
            WHEN Source_Definition_Code = '106'   THEN  'IPD'
            WHEN Source_Definition_Code = '107'   THEN  'OPD'
            WHEN Source_Definition_Code = '108'   THEN  'OPD'
            WHEN Source_Definition_Code = '109'   THEN  'OPD'
            WHEN Source_Definition_Code = '110'   THEN  'OPD'
            WHEN Source_Definition_Code = '111'   THEN  'IPD'
            WHEN Source_Definition_Code = '112'   THEN  'OPD'
            WHEN Source_Definition_Code = '113'   THEN  'OPD'
            WHEN Source_Definition_Code = '114'   THEN  'OPD'
            WHEN Source_Definition_Code = '115'   THEN  'OPD'
            WHEN Source_Definition_Code = '116'   THEN  'OPD'
            WHEN Source_Definition_Code = '117'   THEN  'OPD'
            WHEN Source_Definition_Code = '118'   THEN  'OPD'
            WHEN Source_Definition_Code = '119'   THEN  'OPD'
            WHEN Source_Definition_Code = '120'   THEN  'IPD'
            WHEN Source_Definition_Code = '121'   THEN  'IPD'
            WHEN Source_Definition_Code = '122'   THEN  'OPD'
            WHEN Source_Definition_Code = '123'   THEN  'OPD'
            WHEN Source_Definition_Code = '124'   THEN  'OPD'
            WHEN Source_Definition_Code = '125'   THEN  'OPD'
            WHEN Source_Definition_Code = '126'   THEN  'OPD'
            WHEN Source_Definition_Code = '127'   THEN  'OPD'
            WHEN Source_Definition_Code = '128'   THEN  'OPD'
            WHEN Source_Definition_Code = '129'   THEN  'OPD'
            WHEN Source_Definition_Code = '130'   THEN  'OPD'
            WHEN Source_Definition_Code = '131'   THEN  'OPD'
            WHEN Source_Definition_Code = '132'   THEN  'OPD'
            WHEN Source_Definition_Code = '133'   THEN  'OPD'
            WHEN Source_Definition_Code = '134'   THEN  'OPD'
            WHEN Source_Definition_Code = '135'   THEN  'OPD'
            WHEN Source_Definition_Code = '136'   THEN  'OPD'
            WHEN Source_Definition_Code = '137'   THEN  'OPD'
            WHEN Source_Definition_Code = '138'   THEN  'OPD'
            WHEN Source_Definition_Code = '139'   THEN  'OPD'
            WHEN Source_Definition_Code = '999'   THEN  'OPD'
            WHEN Source_Definition_Code = '01'    THEN  'IPD'
            WHEN Source_Definition_Code = '02'    THEN  'IPD'
            WHEN Source_Definition_Code = '03'    THEN  'IPD'
            WHEN Source_Definition_Code = '05'    THEN  'IPD'
            WHEN Source_Definition_Code = '06'    THEN  'IPD'
            WHEN Source_Definition_Code = '07'    THEN  'IPD'
            WHEN Source_Definition_Code = '08'    THEN  'IPD'
            WHEN Source_Definition_Code = '09'    THEN  'IPD'
            WHEN Source_Definition_Code = '10'    THEN  'IPD'
            WHEN Source_Definition_Code = '11'    THEN  'IPD'
            WHEN Source_Definition_Code = '12'    THEN  'IPD'
            WHEN Source_Definition_Code = '13'    THEN  'IPD'
            WHEN Source_Definition_Code = '14'    THEN  'IPD'
            WHEN Source_Definition_Code = '15'    THEN  'IPD'
            WHEN Source_Definition_Code = '16'    THEN  'IPD'
            WHEN Source_Definition_Code = '17'    THEN  'IPD'
            WHEN Source_Definition_Code = '18'    THEN  'IPD'
            WHEN Source_Definition_Code = '19'    THEN  'IPD'
            WHEN Source_Definition_Code = '20'    THEN  'IPD'
            WHEN Source_Definition_Code = '21'    THEN  'IPD'
            WHEN Source_Definition_Code = '22'    THEN  'IPD'
            WHEN Source_Definition_Code = '23'    THEN  'IPD'
            WHEN Source_Definition_Code = '24'    THEN  'IPD'
            WHEN Source_Definition_Code = '25'    THEN  'IPD'
            WHEN Source_Definition_Code = '26'    THEN  'IPD'
            WHEN Source_Definition_Code = '27'    THEN  'IPD'
            WHEN Source_Definition_Code = '28'    THEN  'IPD'
            WHEN Source_Definition_Code = '29'    THEN  'IPD'
            WHEN Source_Definition_Code = '30'    THEN  'IPD'
	ELSE Source_Group_Code -- If no condition matches, keep the original value
END

WHERE Source_Definition_Code IN (
    '000','001','002','003','004','005','006','007','008','009','010','011','012','013','014','015','016','017','018','019','020','021','022','023','024','025','026','027','028','029','030','031','032','033','034','035','036','037','038','039','040','041','042','043','044','045','046','047','048','049','050','051','052','053','054','055','056','057','058','059','060','061','062','063','064','065','066','067','068','069','070','071','072','073','074','075','076','077','078','079','080','081','082','083','084','085','086','087','088','089','090','091','092','093','094','095','096','097','098','099','100','101','102','103','104','105','106','107','108','109','110','111','112','113','114','115','116','117','118','119','120','121','122','123','124','125','126','127','128','129','130','131','132','133','134','135','136','137','138','139','999','01','02','03','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30' -- รายการ Source_Definition_Code ที่ต้องการอัปเดต
    -- Source_Definition_Code values to update are listed above; add further codes here as needed
	
);
-- Select columns from the joined tables
SELECT
  t.term_id AS CategoryID,
  -- Select term_id from wp_terms as CategoryID
  tax.parent AS ParentCategoryID,
  -- Select parent from wp_term_taxonomy as ParentCategoryID
  t.name AS CategoryName,
  -- Select name from wp_terms as CategoryName
  t.slug AS CategorySlug  -- Select slug from wp_terms as CategorySlug
  -- From the wp_term_taxonomy table
FROM
  wp_term_taxonomy tax
  -- Join with the wp_terms table where the term_id matches
INNER JOIN
  wp_terms t
ON
  tax.term_id = t.term_id
  -- Only get rows where the taxonomy column is 'category'
WHERE
  tax.taxonomy = 'category'
nohup python3.8 -m streamlit run Streamlit_Launch.py --server.port 2111 &
drop table if exists schema.table_name;
select
    t.schemaname as db_schema,
    c.relname as table_name,
    t.tableowner as table_owner,
    c.relcreationtime as creationtime,
    *
from
    pg_catalog.pg_class_info c
    left join pg_catalog.pg_namespace n ON c.relnamespace = n.oid
    left join pg_catalog.pg_tables t on c.relname = t.tablename
where
    1 = 1
    --and t.tableowner like '%user_id'
    --and relname like '%revenues%'
;
SELECT 
  indexname AS index_name,
  tablename AS table_name,
  indexdef AS index_definition
FROM 
  pg_indexes
WHERE 
  schemaname = 'public'; -- Replace 'public' with the desired schema name if the index is in a different schema
CREATE DATABASE "VeneeredIncorporated"
    WITH
    OWNER = postgres
    ENCODING = 'UTF8'
    LC_COLLATE = 'English_India.1252'
    LC_CTYPE = 'English_India.1252'
    TABLESPACE = pg_default
    CONNECTION LIMIT = -1;

SELECT  short_code
FROM    domain1
EXCEPT
SELECT  short_code
FROM    domain2
SELECT  d1.short_code
FROM    domain1 d1
LEFT JOIN
        domain2 d2
ON      d2.short_code = d1.short_code
WHERE   d2.short_code IS NULL
UPDATE `wp_postmeta` 
SET `meta_value` = ROUND(`meta_value` * 1.24, 2) 
WHERE meta_key LIKE '%_price%' 
AND (meta_value > 0 or `meta_value` != '')
AND `post_id` IN (
    SELECT `ID` 
    FROM `wp_posts` 
    WHERE `post_type` = 'product' 
    AND `post_status` = 'publish' 
    AND `ID` = `post_id`
);
CREATE OR ALTER PROCEDURE SP_SPLIT_INTO_WORDS (A_TEXT VARCHAR(32000), A_DELS VARCHAR(100) default ',.<>/?;:''"[]{}`~!@#$%^&*()-_=+\|/', A_SPECIAL CHAR(1) default ' ')
RETURNS (
	WORD VARCHAR(50), POSIC integer
)
AS
DECLARE norder integer;
BEGIN

	-- Aux SP, used only in oltp_data_filling.sql to fill the PATTERNS table
	-- with miscellaneous combinations of words to be used in SIMILAR TO testing.
	
	norder = 0;

	for
	    with recursive
	    j as( -- loop #1: transform the list of delimiters to rows
	        select s,1 i, substring(s from 1 for 1) del
	        from(
	          select replace(:a_dels,:a_special,'') s
	          from rdb$database
	        )
	        
	        UNION ALL
	        
	        select s, i+1, substring(s from i+1 for 1)
	        from j
	        where substring(s from i+1 for 1)<>''
	    )
	 
	    ,d as(
	        select :a_text s, :a_special sp from rdb$database
	    )
	    ,e as( -- loop #2: replace each delimiter with `space`
	        select d.s, replace(d.s, j.del, :a_special) s1, j.i, j.del
	        from d join j on j.i=1
	 
	        UNION ALL
	 
	        select e.s, replace(e.s1, j.del, :a_special) s1, j.i, j.del
	        from e
	        -- nb: on old builds of 2.5 this raises a 'column unknown: e.i' error,
	        -- e.g: WI-V2.5.2.26540 (letter from Alexey Kovyazin, 24.08.2014 14:34)
	        join j on j.i = e.i + 1
	    )
	    ,f as(
	        select s1 from e order by i desc rows 1
	    )
	    
	    ,r as ( -- loop #3: split the text into single words
	        select iif(t.k>0, substring(t.s from t.k+1 ), t.s) s,
	             iif(t.k>0,position( del, substring(t.s from t.k+1 )),-1) k,
	             t.i,
	             t.del,
	             iif(t.k>0,left(t.s, t.k-1),t.s) word
	        from(
	          select f.s1 s, d.sp del, position(d.sp, s1) k, 0 i from f cross join d
	        )t
	 
	        UNION ALL
	 
	        select iif(r.k>0, substring(r.s from r.k+1 ), r.s) s,
	             iif(r.k>0,position(r.del, substring(r.s from r.k+1 )),-1) k,
	             r.i+1,
	             r.del,
	             iif(r.k>0,left(r.s, r.k-1),r.s) word
	        from r
	        where r.k>=0
	    )
	    select word from r where word>''
	    INTO word
	do
	BEGIN
		posic = norder;
    	suspend;
    	norder = norder + 1;
    end
end;

CREATE TABLE alumno_srch_nombre(
	id integer NOT NULL,
	valor varchar(255) NOT NULL,
	orden integer
);

CREATE INDEX ndx_alum_srch_nombre ON alumno_srch_nombre(id, valor);

CREATE OR ALTER TRIGGER t_alumno_biu_ndx
FOR alumno 
BEFORE INSERT OR UPDATE 
AS
BEGIN
	DELETE FROM alumno_srch_nombre
	WHERE id = NEW.id;

	INSERT INTO alumno_srch_nombre
	SELECT NEW.id, word, posic FROM SP_SPLIT_INTO_WORDS(NEW.apenom);
END;

SELECT * FROM (
SELECT a.*,
  CASE 
	  WHEN a.apenom LIKE replace(:INPUT,' ','%') || '%' THEN 0
	  ELSE 1
  END NIVEL
FROM alumno a
INNER JOIN (
	SELECT id, count(*) cnt
	FROM (
		SELECT id, word, min(orden) orden 
		FROM SP_SPLIT_INTO_WORDS(:input) x
		INNER JOIN ALUMNO_SRCH_NOMBRE asn ON asn.valor LIKE word || '%'
		GROUP BY id, word
	)
	GROUP BY id
	HAVING count(*) = (SELECT count(*) FROM SP_SPLIT_INTO_WORDS(:input))
) b ON b.id=a.id
) ORDER BY NIVEL;


--- OTHER EXAMPLE:

SELECT a.* FROM alumno a
INNER JOIN (
	SELECT id, count(*) cant FROM alumno
	INNER JOIN SP_SPLIT_INTO_WORDS(:input) X ON 
		ALUMNO.APELLIDOS LIKE X.WORD||'%'
	  		OR ALUMNO.NOMBRES LIKE X.WORD||'%'
	  		OR ALUMNO.CEDULA LIKE X.WORD||'%'
	GROUP BY id
	HAVING count(*) = (SELECT count(*) FROM SP_SPLIT_INTO_WORDS(:input))
) x ON a.id = x.id 
UNION 
SELECT a.* FROM ALUMNO a
where a.cedula LIKE :input || '%'
https://apex.oracle.com/pls/apex/r/apex_pm/ut/menu-popup
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
-- CREATING TABLES BEFORE IMPORTING RAW DATA


CREATE TABLE t_aus (
	category VARCHAR(100),
	reported_loss VARCHAR(100),
	total_reports VARCHAR(100),
	reports_with_loss VARCHAR(100),
	loss_from_total_reports VARCHAR(100),
	percentage_change VARCHAR(100)

);

CREATE TABLE t_state(
	state VARCHAR(100),
	category VARCHAR(100),
	reported_loss VARCHAR(100),
	total_reports VARCHAR(100),
	reports_with_loss VARCHAR(100),
	loss_from_total_reports VARCHAR(100),
	percentage_change VARCHAR(100)
);

CREATE TABLE t_age (
	age VARCHAR(100),
	amount_loss VARCHAR(100),

	reported_loss VARCHAR (100)
);

CREATE TABLE t_gender(
	gender VARCHAR(100),
	amount_loss VARCHAR(100),
	percentage VARCHAR(100)

);

CREATE TABLE t_contact (
	contact_method VARCHAR(100),
	amount_lost VARCHAR(100),
	reported_loss VARCHAR(100)
);


-- Creating duplicate tables for Cleaning/Altering

CREATE TABLE t_aus_copy AS
SELECT * FROM t_aus;

CREATE TABLE t_state_copy AS
SELECT * FROM t_state;

CREATE TABLE t_age_copy AS
SELECT * FROM t_age;

CREATE TABLE t_gender_copy AS
SELECT * FROM t_gender;

CREATE TABLE t_contact_copy AS
SELECT * FROM t_contact;

-- DATA CLEANING: t_Aus_copy Table

	-- Dropping unnecessary columns

	ALTER TABLE t_aus
	DROP COLUMN percentage_change;

	-- Removing Symbols

	UPDATE t_aus
	SET reported_loss = REPLACE(reported_loss, '$','');

	UPDATE t_aus
	SET reported_loss = REPLACE(reported_loss,',','');

	UPDATE t_aus
	SET total_reports = REPLACE(total_reports,',','');

	UPDATE t_aus
	SET reports_with_loss = REPLACE(reports_with_loss,',','');

	UPDATE t_aus
	SET loss_from_total_reports = REPLACE(loss_from_total_reports,',','');


-- Dropping unnecessary columns

ALTER TABLE t_aus
DROP COLUMN loss_from_total_reports;

ALTER TABLE t_aus
DROP COLUMN percentage_change;

	-- Finding & Replacing missing values

	SELECT *
	FROM t_aus
	WHERE category IS NULL AND reported_loss IS NULL AND total_reports IS NULL AND reports_with_loss IS NULL AND loss_from_total_reports IS NULL;
	-- No columns have missing values

-- Converting Data Types From String To Integers for t_aus

ALTER TABLE t_aus
ALTER COLUMN reported_loss TYPE integer
USING (CASE WHEN reported_loss ~ '^\d+$' THEN reported_loss::integer ELSE NULL END);

ALTER TABLE t_aus
ALTER COLUMN total_reports TYPE integer
USING (CASE WHEN total_reports ~ '^\d+$' THEN total_reports::integer ELSE NULL END);

ALTER TABLE t_aus
ALTER COLUMN reports_with_loss TYPE integer
USING (CASE WHEN reports_with_loss ~ '^\d+$' THEN reports_with_loss::integer ELSE NULL END);

-- Converting Data Types From String To Integers for t_state

ALTER TABLE t_state
ALTER COLUMN reported_loss TYPE integer
USING (CASE WHEN reported_loss ~ '^\d+$' THEN reported_loss::integer ELSE NULL END);

ALTER TABLE t_state
ALTER COLUMN total_reports TYPE integer
USING (CASE WHEN total_reports ~ '^\d+$' THEN total_reports::integer ELSE NULL END);

ALTER TABLE t_state
ALTER COLUMN reports_with_loss TYPE integer
USING (CASE WHEN reports_with_loss ~ '^\d+$' THEN reports_with_loss::integer ELSE NULL END);
/****** Script for SelectTopNRows command from SSMS  ******/
SELECT 
      [LN]
      ,[HN]
      ,[FULLNAME]
     , [YEAR]

  
      ,[SEX]
      ,[BIRTHDATE]
    
      ,[WARD NAME]
      ,[PATIENT TYPE NAME]
      ,[DOCTOR NAME]
  
      ,[AN]
      ,[VN]
     ,[ORDER DATETIME]
       ,[IREQ_LAST_CHK_DT] as 'Checkin datetime'

      ,[RES ITEM NAME]
      ,[RES ITEM RESULT]

      ,[RES ITEM REPORT DATETIME]
      ,[RES ITEM REPORT STAFF NAME]
      ,[RES ITEM APPROVE DATETIME]
      ,[RES ITEM APPROVE STAFF NAME] 

--      ,[IREQ_LAST_APP_DT]
  FROM [LAB_DB].[dbo].[view_lab_statistic_Result_List]
  where [RES ITEM STATE] = 'A' and  HN in (SELECT  distinct HN
  FROM [LAB_DB].[dbo].[view_lab_statistic_Result_List]
  where [RES ITEM CODE] in ('IM1429','IM1430','IM1433','IM1448') and ([IREQ_LAST_CHK_DT] Between '@dt1' and '@dt2')) and [RES ITEM CODE] in ('IM1429','IM1430','IM1433','IM1448')
  
-- The purpose of this query is to join all 'daily' information into a single query for ease of reading and comprehension.
-- In this query, the 'daily intensities' table is the primary table, joined with the daily calories and daily steps tables.
SELECT
  *
FROM 
  `capstone-project-track-1.intensities_data.daily_intensities` AS daily_intensities -- activity/intensity table
JOIN
  `capstone-project-track-1.calories_data.daily_calories`AS daily_calories -- calories table
ON 
  daily_intensities.Id = daily_calories.Id
AND
  daily_intensities.ActivityDay = daily_calories.ActivityDay
JOIN
  `capstone-project-track-1.steps_data.daily_steps`AS daily_steps -- steps table
ON 
  daily_intensities.Id = daily_steps.Id
AND
  daily_intensities.ActivityDay = daily_steps.ActivityDay
ORDER BY
  daily_intensities.ActivityDay
-- What has been the avg. death rate by race?
SELECT
    STUB_NAME, STUB_LABEL, YEAR, ESTIMATE
FROM
    death_rates
WHERE
    STUB_LABEL NOT LIKE '%years%' -- exclude age bracket descriptions
AND
    STUB_NAME LIKE '%and%' -- include all categories except 'sex only'
-- What has been the avg. estimated death rate by sex since 1950?
SELECT
    STUB_LABEL, YEAR, ESTIMATE
FROM
    death_rates
WHERE
    STUB_LABEL IN ('Male', 'Female')
GROUP BY
    STUB_LABEL, YEAR
-- What is the average estimated death rate by sex & age since 1950?
SELECT
    STUB_LABEL, ESTIMATE, YEAR
FROM
    death_rates
WHERE
    STUB_NAME = 'Sex and age'
GROUP BY
    STUB_LABEL, YEAR
In the query =>
donnee.flag_rafraichissement    AS histoLectureFlashRaw,
donnee.envoi_auto               AS envoiAutoEnergyRaw,
  
import org.springframework.beans.factory.annotation.Value;    


    @Value("#{target.envoiAutoEnergyRaw==null?null:target.envoiAutoEnergyRaw== 1}")
    Boolean isEnvoiAutoEnergy();

    @Value("#{target.histoLectureFlashRaw == 1}")
    boolean getHistoLectureFlash();
SELECT *
FROM all_source
WHERE REGEXP_LIKE(text, '(^|\W)Your Text Here($|\W)', 'i')
AND owner = 'LLV3';
BEGIN
  DBMS_MVIEW.REFRESH('MV_MINIBAR_ITEM');
END;
/
WITH geocoded AS (
    SELECT 
        *,
        {{ address_to_coordinates( 'ADDRESS_LINE_ONE' ) }} AS METADATA

    FROM {{ ref('test_address__mock_data') }}

    WHERE
        ( 
            address_line_one NOT IN {{ var('mask_data_pattern') }} 
            AND
            address_line_two NOT IN {{ var('mask_data_pattern') }} 
        )
        OR
        (
            address_line_one IS NULL 
            AND
            address_line_two IS NULL
        )
)
// 
// 24-8-65
if {RequestInfo.Report_By} = 'มัทนาวดี' then  'จพ.วิทยา'
else if {RequestInfo.Report_By} = 'สายทิพย์ ' then  'จพ.งาน'
else if {RequestInfo.Report_By} = 'ทนพ.ญ. ญาณิศา0' then  'Laboratory'
else if {RequestInfo.Report_By} = 'เข็มทอง' then  'Laboratory'
else if {RequestInfo.Report_By} = 'ทนพ.ญ.วัลดี  ' then  'Laboratory'
else if {RequestInfo.Report_By} = 'นายขวัญชัย สม0' then  'นักเทคนิคการแพทย์ชำนาญการ'
else if {RequestInfo.Report_By} = 'จริยา อิน' then  'นักเทคนิคการแพทย์'
else if {RequestInfo.Report_By} = 'administrator' then  'Administrator LIS'
insert into analytics.dbt_klee.mock_orders (order_id, status, created_at, updated_at) 
values
    (1, 'delivered', '2020-01-01', '2020-01-04'),
    (2, 'shipped', '2020-01-02', '2020-01-04'),
    (3, 'shipped', '2020-01-03', '2020-01-04'),
    (4, 'processed', '2020-01-04', '2020-01-04');
create or replace transient table analytics.dbt_klee.mock_orders(
    order_id integer,
    status varchar(100),
    created_at date,
    updated_at date
)
MINUTE( DATETIME(1970,1,1,0,
 CASE 
  WHEN CAST(Nb of days as NUMBER) <= 1 THEN Hour
  WHEN CAST(Nb of days as NUMBER) >= 180 THEN ((MONTH(Date)-MONTH(PARSE_DATE("%Y%m%d",Min date)))+((YEAR(Date)-YEAR(PARSE_DATE("%Y%m%d",Min date)))*12))  
  WHEN CAST(Nb of days as NUMBER) >= 56 THEN (((YEAR(Date)-1)*52)+IF(WEEK(Date) IN (52,53) AND MONTH(Date) = 1, 0, WEEK(Date)))-(((YEAR(PARSE_DATE("%Y%m%d",Min date))-1)*52)+IF(WEEK(PARSE_DATE("%Y%m%d",Min date)) IN (52,53), 0, WEEK(PARSE_DATE("%Y%m%d",Min date))))
  ELSE DATE_DIFF(Date,PARSE_DATE("%Y%m%d",Min date))
 END
 ,0))
<!-- TeamViewer Logo (generated at https://www.teamviewer.com) -->
<div style="position:relative; width:234px; height:60px;">
    <a href="https://www.teamviewer.com/link/?url=842558&id=1682695569731" style="text-decoration:none;">
        <img src="https://static.teamviewer.com/resources/badges/teamviewer_badge_flat4.png" alt="Download TeamViewer Remote Control" title="Download TeamViewer Remote Control" border="0" width="234" height="60" />

    </a>
</div>
SELECT table_name, column_name, comments
FROM all_col_comments
WHERE owner = 'LLV3'--Schema name
AND (table_name LIKE '%recipe%' OR column_name LIKE '%recipe%' OR comments LIKE '%recipe%')
OR (table_name LIKE '%element%' OR column_name LIKE '%element%' OR comments LIKE '%element%')
OR (table_name LIKE '%RCP%' OR column_name LIKE '%RCP%' OR comments LIKE '%RCP%')
ORDER BY table_name, column_name;


--How to know your SCHEMA
SELECT sys_context('USERENV', 'CURRENT_SCHEMA') as schema_name
FROM dual;
declare @value decimal(10,2)
set @value = (select 
CASE WHEN %CH003% = 0 THEN 0 
WHEN {AGE,YEAR} > 130.0 THEN 0 
WHEN {AGE,YEAR} < 18.0 THEN ((0.41 * {HEIGHT}) / %CH003%) 
WHEN {SEX} = "M" AND %CH003% <= 0.9 THEN ((141 * (POWER((%CH003% / 0.9), -0.411))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "M" AND %CH003% > 0.9 THEN ((141 * (POWER((%CH003% / 0.9), -1.209))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "F" AND %CH003% <= 0.7 THEN ((144 * (POWER((%CH003% / 0.7), -0.329))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "F" AND %CH003% > 0.7 THEN ((144 * (POWER((%CH003% / 0.7), -1.209))) * POWER(0.993, {AGE,YEAR})) ELSE 0 END)

SELECT CASE WHEN {AGE,YEAR} < 18.0 THEN "-"
WHEN @value >= 90.0 THEN "Stage 1"      
WHEN @value >= 60.0 AND @value <= 89.99 THEN "Stage 2"      
WHEN @value >= 45.0 AND @value <= 59.99 THEN "Stage 3a"      
WHEN @value >= 30.0 AND @value <= 44.99 THEN "Stage 3b"      
WHEN @value >= 15.0 AND @value <= 29.99 THEN "Stage 4"      
WHEN @value <  15.0  AND @value >= 0.0 THEN "Stage 5"      
ELSE "" END
declare @value decimal(10,2)
set @value = (select 
CASE WHEN %CH003% = 0 THEN 0 
WHEN {AGE,YEAR} > 130.0 THEN 0 
WHEN {AGE,YEAR} < 18.0 THEN ((0.41 * {HEIGHT}) / %CH003%) 
WHEN {SEX} = "M" AND %CH003% <= 0.9 THEN ((141 * (POWER((%CH003% / 0.9), -0.411))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "M" AND %CH003% > 0.9 THEN ((141 * (POWER((%CH003% / 0.9), -1.209))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "F" AND %CH003% <= 0.7 THEN ((144 * (POWER((%CH003% / 0.7), -0.329))) * POWER(0.993, {AGE,YEAR})) 
WHEN {SEX} = "F" AND %CH003% > 0.7 THEN ((144 * (POWER((%CH003% / 0.7), -1.209))) * POWER(0.993, {AGE,YEAR})) ELSE 0 END)

SELECT CASE WHEN @value >= 90.0 THEN "Stage 1"      
WHEN @value >= 60.0 AND @value <= 89.99 THEN "Stage 2"      
WHEN @value >= 45.0 AND @value <= 59.99 THEN "Stage 3a"      
WHEN @value >= 30.0 AND @value <= 44.99 THEN "Stage 3b"      
WHEN @value >= 15.0 AND @value <= 29.99 THEN "Stage 4"      
WHEN @value <  15.0  AND @value >= 0.0 THEN "Stage 5"      
ELSE "" END
BEGIN
	SET NOCOUNT ON;
	
	--
	declare @LN nvarchar(20);
	declare @Testcode nvarchar(10);
	declare @Data nvarchar(MAX);
	declare @Result nvarchar(MAX);


	select @LN = LN from inserted;
	select @Result = Result from inserted;
	select @Testcode = Result_Test_Code from inserted;

	--

	if (@Testcode = 'UA001')
	BEGIN
	delete tbl_lab_concate_test where LN = @LN
	INSERT INTO tbl_lab_concate_test VALUES (@LN,'','N');   
    END

	if (@Testcode = 'URT203' and @Result is not null) -- Bacteria
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+'Bacteria = '+@Result	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT203'
    END

	if (@Testcode = 'URT203' and @Result is null) -- Bacteria
	BEGIN
	
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT203'
    END


	if (@Testcode = 'URT200' and @Result is not null) --Hyaline Cast
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',Hyaline Cast = '+@Result+'/LPF'	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT200'
    END

	if (@Testcode = 'URT200' and @Result is  null) --Hyaline Cast
	BEGIN
	
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT200'
    END

	
	if (@Testcode = 'URT204' and @Result is not null) -- Calcium Oxalate
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',CaOx monohydrate = '+@Result+'/HPF'	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT204'
    END
	
	if (@Testcode = 'URT204' and @Result is null) -- Calcium Oxalate
	BEGIN
	
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT204'
    END 

	if (@Testcode = 'URT205' and @Result is not null) -- CaOx Dihydrate
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',CaOx Dihydrate = '+@Result+'/HPF'	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT205'
    END
	
	if (@Testcode = 'URT205' and @Result is null) -- CaOx Dihydrate
	BEGIN
	
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT205'
    END
	
	if (@Testcode = 'URT209' and @Result is not null) -- Triple Phosphate Crystals
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',Triple Phosphate Crystals = '+@Result+'/HPF'	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT209'
    END
	
	if (@Testcode = 'URT209' and @Result is null) --Triple Phosphate Crystals
	BEGIN
	
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT209'
    END

	--if (@Testcode = 'URT202' and @Result is not null) -- None Sq.EPI.
	--BEGIN
	
 --   update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',None Sq.EPI. = '+@Result+'/HPF'	where LN = @LN
	--update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT202'
 --   END
	
	--if (@Testcode = 'URT202' and @Result is null) --None Sq.EPI.
	--BEGIN
	
	--update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT202'
 --   END

	if (@Testcode = 'URT201' and @Result is not null and @Result <> '0') -- PAT
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',PAT = '+@Result	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT201'
    END

	if (@Testcode = 'URT201' and @Result is null) -- PAT
	BEGIN

	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT201'
    END



	if (@Testcode = 'URT208' and @Result is not null) -- Mucous Thread
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',Mucous = '+@Result	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT208'
    END

	if (@Testcode = 'URT208' and @Result is null) -- Mucous Thread
	BEGIN

	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT208'
    END

	if (@Testcode = 'URT071' and @Result is not null) -- Amorphous
	BEGIN
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',Amorphous = '+@Result	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT071'
    END

	if (@Testcode = 'URT071' and @Result is null) -- Amorphous
	BEGIN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT071'
    END

	if (@Testcode = 'URT207' and @Result is not null) -- Yeast cell
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',Yeast cell = '+@Result	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT207'
    END

	if (@Testcode = 'URT207' and @Result is null) -- Yeast cell
	BEGIN

	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT207'
    END

	if (@Testcode = 'URT230' and @Result is not null) -- Uric acid Crystal
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',Uric acid Crystal = '+@Result	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT230'
    END

	if (@Testcode = 'URT230' and @Result is null) -- Uric acid Crystal
	BEGIN

	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT230'
    END

	if (@Testcode = 'URT231' and @Result is not null) -- Spermatozoa
	BEGIN
	
    update 	tbl_lab_concate_test set LN = @LN,Temp_Data = Temp_Data+',Spermatozoa = '+@Result	where LN = @LN
	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT231'
    END

	if (@Testcode = 'URT231' and @Result is null) -- Spermatozoa
	BEGIN

	update 	tbl_lab_information_ExportToHIS set CanDelete = 'Y'	where LN = @LN and Result_Test_Code = 'URT231'
    END

	if (@Testcode = 'URT500') -- Other
	BEGIN
	 if (@Result = '-')
	 update B set B.Result = A.Temp_Data
	 from tbl_lab_concate_test A, tbl_lab_information_ExportToHIS B
	 where A.LN = B.LN and B.Result_Test_Code = 'URT500'

	 if (@Result <> '-')
	 update B set B.Result = A.Temp_Data+','+@Result
	 from tbl_lab_concate_test A, tbl_lab_information_ExportToHIS B
	 where A.LN = B.LN and B.Result_Test_Code = 'URT500'

    update 	tbl_lab_concate_test set Inactive = 'Y'	where LN = @LN

    END
END
load data infile '/home/cloudera/Desktop/shared_local/flipkart_dataset.csv'
into table flipkart_tab
fields terminated by ',' enclosed by '"'
lines terminated by '\n'
(category_1,category_2,category_3,title,product_rating,selling_price,mrp,seller_name,seller_rating,description,highlights,image_links);
import sqlite3
JList = []

class MyJournal:
    
    def __init__(self,id,name):
        self.id = id
        self.name = name
        
    def description(self):
        return "Journal number: " + str(self.id) + " has the name " + self.name

conn = sqlite3.connect('academic_papers_populated.db')

cursor = conn.cursor()


for row in cursor.execute('''SELECT * FROM Journal;'''):

    JList.append(MyJournal(row[0],row[1]))

cursor.close()
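A minimal usage sketch for the snippet above, assuming JList has been populated by the loop and that the first two columns of Journal are the journal id and name:

for journal in JList:
    print(journal.description())   # e.g. "Journal number: 1 has the name ..."

conn.close()   # close the connection once the journal list has been built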
import sqlite3
conn = sqlite3.connect('academic_papers_populated.db')

def apaReference(ArticleId):
    SQL = '''SELECT AuthorNumber, LastName, Initials, Year, Title, Name  
             FROM Article as p, Journal as j, Author as a, Article_Authors as b
             WHERE p.JournalID = j.JournalID
             AND p.ArticleID = b.ArticleID
             AND b.AuthorID = a.AuthorID
             AND p.Articleid = :id
             ORDER BY AuthorNumber;'''

    cursor = conn.cursor()
    record = cursor.execute(SQL,{'id':ArticleId}).fetchall()
    cursor.close()
    if len(record) ==0:
        raise Exception("Invalid Article")
    else:
        ref = ''
        count = 0
        for row in record:
            ref = ref + row[1]+', '+row[2]
            count += 1
            if count < len(record):
                if count + 1 < len(record):
                    ref = ref +', '
                else:
                    ref = ref +', & '
        ref = ref + ' (' + str(record[0][3]) + ') '+ record[0][4]+ '. ' +record[0][5]+'.'
        return ref
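A hedged usage example for apaReference; the article id below is illustrative only and assumes a matching row exists in academic_papers_populated.db:

try:
    print(apaReference(1))   # e.g. "Lastname, I., & Other, J. (2001) Some title. Some journal."
except Exception as err:
    print(err)               # "Invalid Article" is raised when the id matches no rows

conn.close()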
import sqlite3

# Create a connection to the database
conn = sqlite3.connect('Northwind2020.db')

# Create a cursor object to execute SQL queries
cursor = conn.cursor()

# Retrieve the number of unique suppliers that have discontinued products
query = '''
SELECT COUNT(DISTINCT SupplierId)
FROM Product
WHERE IsDiscontinued = 1
'''

cursor.execute(query)
num_discontinued_suppliers = cursor.fetchone()[0]

# Display an appropriate message based on the number of discontinued suppliers
if num_discontinued_suppliers >= 20:
    print("20 or more suppliers have discontinued products.")
elif num_discontinued_suppliers <= 0:
    print("All products are available.")
else:
    print("Less than 20 suppliers have discontinued products.")

# Close the database connection
conn.close()




import sqlite3
conn = sqlite3.connect('Northwind2020.db')
SQL = '''SELECT COUNT(*) AS NoMore
         FROM
         (SELECT Distinct(S.Id)
         FROM Supplier S, Product P
         WHERE S.Id == P.SupplierId
         AND P.IsDiscontinued = 1
         GROUP BY S.CompanyName) '''

cursor = conn.cursor()
cursor.execute(SQL)
answer = cursor.fetchone()

if answer[0] is not None:
    if int(answer[0]) >= 20:
        print("More than 20 suppliers have discontinued products.")
    else:
        print("Less than 20 suppliers have discontinued products.")
else:
    print("All products are available.")
    
cursor.close()
from datetime import datetime

currentyear = 2021
currentmonth = 2
currentday = 10

def validate_age(DOB):
    current_date = datetime(currentyear, currentmonth, currentday)
    day, month, year = map(int, DOB.split('-'))
    dob_date = datetime(year, month, day)
    age_in_years = current_date.year - dob_date.year - ((current_date.month, current_date.day) < (dob_date.month, dob_date.day))
    return age_in_years



currentyear = 2021
currentmonth = 2
currentday = 10

def validate_age(DOB):
    #your code goes here
    b_year = int(DOB[-4:])
    b_month = int(DOB[-7:-5])
    b_day = int(DOB[:2])
    
    age = currentyear - b_year
    if b_month > currentmonth:
        age -= 1
    elif b_month == currentmonth:
        if b_day > currentday:
            age -= 1
    
    return int(age)
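Both definitions above take the date of birth as a DD-MM-YYYY string and return a whole-year age relative to the hard-coded reference date of 10-02-2021; a small sketch with an illustrative DOB value:

sample_dob = '09-02-2003'         # hypothetical date of birth, DD-MM-YYYY
print(validate_age(sample_dob))   # 18 with either implementation: the 18th birthday falls the day before the reference date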
import sqlite3

# Connect to the database
conn = sqlite3.connect('BookCollection.db')

# Define the SQL query
query = '''
SELECT Authors.Surname, Authors.Name, Books.Title
FROM Authors
INNER JOIN Books ON Authors.AuthorID = Books.AuthorID
ORDER BY Authors.Surname, Books.Title
'''

# Execute the query and print the results
cursor = conn.cursor()
for row in cursor.execute(query):
    print(row)

# Close the database connection
conn.close()


import sqlite3
conn = sqlite3.connect('BookCollection.db')

cursor = conn.cursor()
for row in cursor.execute('''SELECT Surname, Name, Title
                             FROM Authors, Books
                             WHERE Authors.AuthorID = Books.AuthorID
                             ORDER BY Surname, Name, Title;'''):
    print(row)
cursor.close()
SQL = '''SELECT O.OrderNumber, O.OrderDate, TotalAmount, O.CustomerID
         FROM [Order] O, Customer C
         WHERE O.CustomerID = C.Id
         AND C.Id = :custID'''

for row in cursor.execute(SQL, {'custID':inputid}):
    print(row)
cursor.close()

import sqlite3
conn = sqlite3.connect('Northwind2020.db')

def CustOrderCountDetails(CustomerID):
    cursor = conn.cursor()
    cursor.execute("SELECT FirstName, LastName, COUNT(*) FROM Customer JOIN [Order] ON Customer.Id = [Order].CustomerId WHERE Customer.Id = ? GROUP BY Customer.Id", (CustomerID,))
    result = cursor.fetchone()
    conn.close()
    return f"Customer {result[0]} {result[1]} had a total of {result[2]} orders"





import sqlite3
conn = sqlite3.connect('Northwind2020.db')
    
def CustOrderCountDetails(CustomerID):
    # Your code goes here
    cursor = conn.cursor()

    SQL = '''SELECT FirstName, lastname, COUNT([Order].Id) as OrderCount
    From Customer, [Order]
    WHERE customer.id = CustomerId
    and customer.id = :Cid
    GROUP BY FirstName, LastName'''


    result = cursor.execute(SQL,{'Cid': CustomerID}).fetchone()
    cursor.close()
    return "Customer " + result[0] + " " + result[1] + " had a total of " + str(result[2]) + " orders"
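A usage sketch for the function above; the customer id is an illustrative value and assumes such a customer exists in Northwind2020.db:

print(CustOrderCountDetails(1))   # e.g. "Customer <FirstName> <LastName> had a total of N orders"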
SELECT
	tbl_lab_setup_Test.[Test_Code],
	tbl_lab_setup_Test.Test_Name,
	[Sex_Description],
	[Age_Text],
	[Test_Reference_Text],
	[Unit_Title],
	[Test_Reference_Cri1],
	[Test_Reference_Cri2]
FROM
	[LAB_DB].[dbo].[view_lab_setup_Test_Numeric_Reference]
JOIN tbl_lab_setup_Test ON [view_lab_setup_Test_Numeric_Reference].Test_Code = tbl_lab_setup_Test.Test_Code
WHERE
	Test_Reference_Id IS NOT NULL
AND tbl_lab_setup_Test.Test_Inactive = '0'
ORDER BY
	tbl_lab_setup_Test.Test_Code,
	Test_Reference_Id
SELECT
    planname,
    '{"c":"baz"}'::JSON,
    json_build_object(planname,1,'bar',2)
FROM bplaene
LIMIT 10
;

-- Complex nested JSON with arrays
SELECT json_build_object(
               'trans_id', t.trans_id,
               'user_data', json_build_object(
                       'user_id', t.user_id,
                       'user_username', t.user_username,
                       'user_full_name', t.user_full_name,
                       'user_street', t.user_street,
                       'user_postal_code', t.user_postal_code,
                       'user_additional_info', t.user_additional_info,
                       'user_country', t.user_country,
                       'user_vat_number', t.user_vat_number),
               'order_data', json_build_object(
                       'order_date', t.order_date,
                       'order_sum', t.order_sum,
                       'order_vat', t.order_vat,
                       'order_invoice_nr', t.order_invoice_nr
                   ),
               'locations',
               (SELECT json_agg(row_to_json(locations))
                FROM (SELECT l.address,
                             l.project_title,
                             (SELECT json_agg(row_to_json(f))
                              FROM (SELECT layername,
                                           data
                                    FROM sales.features) f) features

                      FROM sales.locations l) locations)
           ) transaction
FROM sales.transactions t
SELECT 
    req.session_id
    , req.total_elapsed_time AS duration_ms
    , req.cpu_time AS cpu_time_ms
    , req.total_elapsed_time - req.cpu_time AS wait_time
    , req.logical_reads
    , REPLACE (REPLACE (SUBSTRING (ST.text, (req.statement_start_offset/2) + 1, 
       ((CASE statement_end_offset
           WHEN -1
           THEN DATALENGTH(ST.text)  
           ELSE req.statement_end_offset
         END - req.statement_start_offset)/2) + 1) , CHAR(10), ' '), CHAR(13), ' ')  AS statement_text  
FROM sys.dm_exec_requests AS req
    CROSS APPLY sys.dm_exec_sql_text(req.sql_handle) AS ST
ORDER BY total_elapsed_time DESC;
with pg_tx as (
SELECT
  account_id,
  sum(value.amount)/100 as tx_amount
FROM `reby-cloud.analytics_reby_v1_eu.pg_transactions_json`
where type like '%top_up'
and date(created_at) >= date_sub(current_date, INTERVAL 6 MONTH)
group by 1
),

account as (
  select
    acc.id,
    acc.balance__amount/100 as balance_amount,
    acc.updated_at,
    tx.account_id,
    tx.tx_amount
  from `reby-cloud.analytics_reby_v1_eu.pg_account` acc
  left join pg_tx tx on acc.id = tx.account_id

),
join_user as (
select
  acc.*,
  cu.user_id,
  cu.company_id
from account acc
  left join `reby-cloud.reby_marketing_eu.pg_company_user` cu
  on acc.id = cu.account_id
where acc.account_id is not null
and acc.balance_amount > 0
and cu.company_id = 'c_3qteetfhxjshx4j54111'
),

join_sa as (
select
  u.*,
  fmr.service_area
from join_user u
left join `reby-cloud.reby_marketing_eu.users_servicearea_first_mv_ride` fmr
  on u.user_id = fmr.user_id
)

select service_area, sum(balance_amount) as amount from join_sa group by 1
;
SELECT SalesPerson, [Oranges] AS Oranges, [Pickles] AS Pickles
FROM 
   ( SELECT SalesPerson, Product, SalesAmount
     FROM ProductSales 
   ) ps
PIVOT
   ( SUM (SalesAmount)
     FOR Product IN ( [Oranges], [Pickles])
   ) AS pvt
UPDATE [LAB_DB_DES].[dbo].[tbl_lab_setup_Test]
SET [Specimen_Type_Code] ='98'

--SELECT* FROM tbl_lab_setup_Test
WHERE
	([Test_His_Code] IN ('i39','i40','i44','i46','i50','i51','i55','i58','i61','i63','i65','i71','i72','i73','i93','i102','i622','i623','i626','i637','i638','i652','i656','i657','i658','i659','i668','i669','i670','i671','i163','i676','i683','i686','i437','i182','i184','i440','i441','i703','i455','i502','i505','i507','i509'))
/*use "sa" user*/

/*backup*/
BACKUP DATABASE [servicedesk] TO DISK = N'/var/opt/mssql/data/SDDB20230221.BAK';

/*restore*/
ALTER DATABASE [servicedesk] SET OFFLINE WITH ROLLBACK IMMEDIATE;
RESTORE DATABASE [servicedesk] FROM DISK = N'/var/opt/mssql/data/SDDB20230221.BAK' WITH REPLACE;

/*go online after restore*/
ALTER DATABASE [servicedesk] SET ONLINE;
CREATE TABLE Employee
(
	ID				INT,
	FirstName		VARCHAR(50),
	LastName		VARCHAR(50),
	DepartmentID	INT,
	ReportToID		INT
)
CREATE Table Departments
(
	ID				INT,
	DepartmentName	VARCHAR(50)
)
INSERT INTO Employee VALUES (1, 'F1', 'L1', 1, NULL);
INSERT INTO Employee VALUES (2, 'F2', 'L2', 2, 1);
INSERT INTO Employee VALUES (3, 'F3', 'L3', 1, 2);
INSERT INTO Employee VALUES (4, 'F4', 'L4', 2, 3);
INSERT INTO Employee VALUES (5, 'F5', 'L5', 1, NULL);
INSERT INTO Employee VALUES (6, 'F6', 'L6', 2, 1);
INSERT INTO Employee VALUES (7, 'F7', 'L7', 1, 2);
INSERT INTO Departments VALUES ( 1 , 'D1');
INSERT INTO Departments VALUES ( 2 , 'D2');
INSERT INTO Departments VALUES ( 3 , 'D3');
SELECT *
FROM Employee
SELECT *
FROM Departments
;WITH cte AS
(
SELECT ID,firstname,lastname,departmentid,reporttoid
FROM Employee WHERE FirstName='F2' -- Change this
UNION ALL
SELECT a.ID,a.firstname,a.lastname,a.departmentid,a.reporttoid
FROM Employee a
INNER JOIN cte b ON a.ReportToID=b.Id
),cte1 AS 
(
SELECT CTE.iD AS EmployeeId
	,e.Id AS ManagerId
	,cte.FirstName AS EmployeeFirstName
	,cte.LastName AS EmployeeLastName
	,e.FirstName AS ManagerFirstName
	,e.LastName AS ManagerLastName
	,cte.DepartmentId  AS EmpDepId--Emp
	,e.DepartmentId AS MgrDepId-- Mgr
FROM cte
	CROSS APPLY Employee e
WHERE cte.Reporttoid=e.ID
),cte2 AS 
(
SELECT cte.*
	,d1.DepartmentName AS EmployeeDepartmentName
FROM cte1 AS cte
	LEFT JOIN Departments AS d1
		ON cte.EmpDepId=d1.ID
)
SELECT cte2.EmployeeId
	,cte2.ManagerId
	,cte2.EmployeeFirstName
	,cte2.EmployeeLastName
	,cte2.ManagerFirstName
	,cte2.ManagerLastName
	,cte2.EmployeeDepartmentName
	,d2.DepartmentName AS ManagerDepartmentName
FROM cte2		
	LEFT JOIN Departments AS d2
		ON cte2.MgrDepId=d2.Id
<div style ="overflow-x: scroll;overflow-y: scroll;height:500px;">
CASE WHEN "%HIV1%" = "Non-Reactive" 
       and "%HIV2%" <> "Reactive" 
       and "%HIV3%" <> "Reactive" then "Negative" 
     WHEN "%HIV1%" = "Reactive" 
       and "%HIV2%" <> "Non-Reactive" 
       and "%HIV3%" <> "Non-Reactive" then "Positive" 
	else CASE WHEN "%HIV2%" = "Reactive" and "%HIV3%" = "Reactive" then "Positive" 
		else "Inconclusive" 
		end 
end
/*
https://codebeautify.org/sqlformatter/y23e67982
*/
//var ig$ = apex.region('uiCount').widget();
var gridView = apex.region('uiCount').call('getViews').grid;

if ($v('P111_ITYPE') === 'DE') {  //Hide columns when DE type is selected

    gridView.view$.grid('hideColumn', 'EXPIRYDT');
    gridView.view$.grid('hideColumn', 'PUOM');
    gridView.view$.grid('hideColumn', 'PUOMDES');
    gridView.view$.grid('hideColumn', 'COUNT_QTY_PUOM');
    gridView.view$.grid('hideColumn', 'SUOM');
    gridView.view$.grid('hideColumn', 'SUOMDES');
    gridView.view$.grid('showColumn', 'MENUCODE');
    gridView.view$.grid('showColumn', 'MNU_DESCRIPTION');
    gridView.view$.grid('showColumn', 'DESCRIPTION');

}else{

    gridView.view$.grid('showColumn', 'EXPIRYDT');
    gridView.view$.grid('showColumn', 'PUOM');
    gridView.view$.grid('showColumn', 'PUOMDES');
    gridView.view$.grid('showColumn', 'COUNT_QTY_PUOM');
    gridView.view$.grid('showColumn', 'SUOM');
    gridView.view$.grid('showColumn', 'SUOMDES');
    gridView.view$.grid('hideColumn', 'MENUCODE');
    gridView.view$.grid('hideColumn', 'MNU_DESCRIPTION');
    gridView.view$.grid('hideColumn', 'DESCRIPTION');

}
using System;
using System.Runtime.InteropServices;

public class NetworkSharedDrive
    {
        #region Consts
        const int RESOURCE_CONNECTED = 0x00000001;
        const int RESOURCE_GLOBALNET = 0x00000002;
        const int RESOURCE_REMEMBERED = 0x00000003;

        const int RESOURCETYPE_ANY = 0x00000000;
        const int RESOURCETYPE_DISK = 0x00000001;
        const int RESOURCETYPE_PRINT = 0x00000002;

        const int RESOURCEDISPLAYTYPE_GENERIC = 0x00000000;
        const int RESOURCEDISPLAYTYPE_DOMAIN = 0x00000001;
        const int RESOURCEDISPLAYTYPE_SERVER = 0x00000002;
        const int RESOURCEDISPLAYTYPE_SHARE = 0x00000003;
        const int RESOURCEDISPLAYTYPE_FILE = 0x00000004;
        const int RESOURCEDISPLAYTYPE_GROUP = 0x00000005;

        const int RESOURCEUSAGE_CONNECTABLE = 0x00000001;
        const int RESOURCEUSAGE_CONTAINER = 0x00000002;


        const int CONNECT_INTERACTIVE = 0x00000008;
        const int CONNECT_PROMPT = 0x00000010;
        const int CONNECT_REDIRECT = 0x00000080;
        const int CONNECT_UPDATE_PROFILE = 0x00000001;
        const int CONNECT_COMMANDLINE = 0x00000800;
        const int CONNECT_CMD_SAVECRED = 0x00001000;

        const int CONNECT_LOCALDRIVE = 0x00000100;
        #endregion

        #region Errors
        const int NO_ERROR = 0;

        const int ERROR_ACCESS_DENIED = 5;
        const int ERROR_ALREADY_ASSIGNED = 85;
        const int ERROR_BAD_DEVICE = 1200;
        const int ERROR_BAD_NET_NAME = 67;
        const int ERROR_BAD_PROVIDER = 1204;
        const int ERROR_CANCELLED = 1223;
        const int ERROR_EXTENDED_ERROR = 1208;
        const int ERROR_INVALID_ADDRESS = 487;
        const int ERROR_INVALID_PARAMETER = 87;
        const int ERROR_INVALID_PASSWORD = 1216;
        const int ERROR_MORE_DATA = 234;
        const int ERROR_NO_MORE_ITEMS = 259;
        const int ERROR_NO_NET_OR_BAD_PATH = 1203;
        const int ERROR_NO_NETWORK = 1222;

        const int ERROR_BAD_PROFILE = 1206;
        const int ERROR_CANNOT_OPEN_PROFILE = 1205;
        const int ERROR_DEVICE_IN_USE = 2404;
        const int ERROR_NOT_CONNECTED = 2250;
        const int ERROR_OPEN_FILES = 2401;

        private struct ErrorClass
        {
            public int num;
            public string message;
            public ErrorClass(int num, string message)
            {
                this.num = num;
                this.message = message;
            }
        }

        private static ErrorClass[] ERROR_LIST = new ErrorClass[] {
        new ErrorClass(ERROR_ACCESS_DENIED, "Error: Access Denied"), 
        new ErrorClass(ERROR_ALREADY_ASSIGNED, "Error: Already Assigned"), 
        new ErrorClass(ERROR_BAD_DEVICE, "Error: Bad Device"), 
        new ErrorClass(ERROR_BAD_NET_NAME, "Error: Bad Net Name"), 
        new ErrorClass(ERROR_BAD_PROVIDER, "Error: Bad Provider"), 
        new ErrorClass(ERROR_CANCELLED, "Error: Cancelled"), 
        new ErrorClass(ERROR_EXTENDED_ERROR, "Error: Extended Error"), 
        new ErrorClass(ERROR_INVALID_ADDRESS, "Error: Invalid Address"), 
        new ErrorClass(ERROR_INVALID_PARAMETER, "Error: Invalid Parameter"), 
        new ErrorClass(ERROR_INVALID_PASSWORD, "Error: Invalid Password"), 
        new ErrorClass(ERROR_MORE_DATA, "Error: More Data"), 
        new ErrorClass(ERROR_NO_MORE_ITEMS, "Error: No More Items"), 
        new ErrorClass(ERROR_NO_NET_OR_BAD_PATH, "Error: No Net Or Bad Path"), 
        new ErrorClass(ERROR_NO_NETWORK, "Error: No Network"), 
        new ErrorClass(ERROR_BAD_PROFILE, "Error: Bad Profile"), 
        new ErrorClass(ERROR_CANNOT_OPEN_PROFILE, "Error: Cannot Open Profile"), 
        new ErrorClass(ERROR_DEVICE_IN_USE, "Error: Device In Use"), 
        new ErrorClass(ERROR_EXTENDED_ERROR, "Error: Extended Error"), 
        new ErrorClass(ERROR_NOT_CONNECTED, "Error: Not Connected"), 
        new ErrorClass(ERROR_OPEN_FILES, "Error: Open Files"), 
    };

        private static string getErrorForNumber(int errNum)
        {
            foreach (ErrorClass er in ERROR_LIST)
            {
                if (er.num == errNum) return er.message;
            }
            return "Error: Unknown, " + errNum;
        }
        #endregion

        [DllImport("Mpr.dll")]
        private static extern int WNetUseConnection(
            IntPtr hwndOwner,
            NETRESOURCE lpNetResource,
            string lpPassword,
            string lpUserID,
            int dwFlags,
            string lpAccessName,
            string lpBufferSize,
            string lpResult
            );

        [DllImport("Mpr.dll")]
        private static extern int WNetCancelConnection2(
            string lpName,
            int dwFlags,
            bool fForce
            );

        [StructLayout(LayoutKind.Sequential)]
        private class NETRESOURCE
        {
            public int dwScope = 0;
            public int dwType = 0;
            public int dwDisplayType = 0;
            public int dwUsage = 0;
            public string lpLocalName = "";
            public string lpRemoteName = "";
            public string lpComment = "";
            public string lpProvider = "";
        }


        public static string connectToRemote(string remoteUNC, string username, string password)
        {
            return connectToRemote(remoteUNC, username, password, false);
        }

        public static string connectToRemote(string remoteUNC, string username, string password, bool promptUser)
        {
            NETRESOURCE nr = new NETRESOURCE();
            nr.dwType = RESOURCETYPE_DISK;
            nr.lpRemoteName = remoteUNC;

            int ret;
            if (promptUser)
                ret = WNetUseConnection(IntPtr.Zero, nr, "", "", CONNECT_INTERACTIVE | CONNECT_PROMPT, null, null, null);
            else
                ret = WNetUseConnection(IntPtr.Zero, nr, password, username, 0, null, null, null);

            if (ret == NO_ERROR) return null;
            return getErrorForNumber(ret);
        }

        public static string disconnectRemote(string remoteUNC)
        {
            int ret = WNetCancelConnection2(remoteUNC, CONNECT_UPDATE_PROFILE, false);
            if (ret == NO_ERROR) return null;
            return getErrorForNumber(ret);
        }
    }
}
--all times - all museums

select 
	
	--distinct tablewithcounts.project_name
	tablewithcounts.freq_val, count(tablewithcounts.id)
	

--tablewithcounts.id, tablewithcounts.project_name, tablewithcounts.metric_name, tablewithcounts."time", tablewithcounts."Day", tablewithcounts."Hour", tablewithcounts.value, tablewithcounts.freq_val, tablewithcounts.display_value --, geometry_id

from
(
SELECT
	id, project_name, metric_name, "time", value, display_value, geometry_id,
	round(cast(value as numeric), 1) as freq_val,
	extract(hour from "time") as "Hour",
	to_char("time", 'Day') as "Day"
	FROM public.metrics_app_timeandgeometric_historic
	--where project_name in ('Lasswade')
	where 
	(
	project_name = 'Roman_Baths'
	--or project_name = 'Mauricewood_School'
	--or project_name = 'Eaglesham_Primaryschool'
	or
	project_name in (
			'Roman_Baths'
			)
	)
	and metric_name = 'temp'
	and (value >= 0 and value < 50)
	and display_value is NULL
) as tablewithcounts	


-- where 
-- --tablewithcounts."Day" not in ('Saturday ', 'Sunday   ')
-- tablewithcounts."Day" not like '%Saturday%'
-- and tablewithcounts."Day" not like '%Sunday%'
-- and tablewithcounts."Hour" >= 8 and tablewithcounts."Hour" < 16

-- and tablewithcounts.time not between '2021-08-01' and '2021-08-17'
-- and tablewithcounts.time not between '2021-09-20' and '2021-09-20'
-- and tablewithcounts.time not between '2021-10-18' and '2021-10-25'
-- and tablewithcounts.time not between '2021-12-23' and '2022-01-10'
-- and tablewithcounts.time not between '2022-02-14' and '2022-02-18'
-- and tablewithcounts.time not between '2022-04-11' and '2022-04-22'
-- and tablewithcounts.time not between '2022-05-02' and '2022-05-02'
-- and tablewithcounts.time not between '2022-05-23' and '2022-05-23'
-- and tablewithcounts.time not between '2022-07-01' and '2022-07-31'
-- and tablewithcounts.time not between '2022-08-01' and '2022-08-16'
-- and tablewithcounts.time not between '2022-09-16' and '2022-09-19'
-- and tablewithcounts.time not between '2022-10-17' and '2022-10-24'
-- and tablewithcounts.time not between '2022-12-21' and '2023-01-04'
-- and tablewithcounts.time not between '2023-02-13' and '2023-02-17'
-- and tablewithcounts.time not between '2023-04-03' and '2023-04-14'
-- and tablewithcounts.time not between '2023-05-01' and '2023-05-01'
-- and tablewithcounts.time not between '2023-05-22' and '2023-05-22'
-- and tablewithcounts.time not between '2023-06-29' and '2023-07-31'


group by tablewithcounts.freq_val
order by tablewithcounts.freq_val

--limit 1000
alter table ams_eq_inc_fund_stats enable row movement;
flashback table ams_eq_inc_fund_stats to timestamp to_timestamp('2023-01-31 07:30:00', 'YYYY-MM-DD HH:MI:SS');
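-- Optional follow-up, not part of the original snippet: once the flashback has completed,
-- row movement can be switched back off if it is not otherwise needed.
alter table ams_eq_inc_fund_stats disable row movement;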
function(options) {
    options.defaultGridOptions = {
        columns: [{
            ITYPE: {
                heading: "IType",
                width: 50,
                alignment: "start",
                headingAlignment: "start",
                canSort: true
            },
            NAMED: {
                heading: "ItemName",
                width: 200,
                alignment: "start",
                headingAlignment: "start",
                canSort: true
            },
            MNU_DESCRIPTION: {
                heading: "Section",
                width: 120,
                alignment: "start",
                headingAlignment: "start",
                canSort: true
            },
            PUOM_COST: {
                heading: "Cost",
                width: 50,
                alignment: "end",
                headingAlignment: "end",
                canSort: true
            },
            PUOMD: {
                heading: "PurchaseUnit",
                width: 70,
                alignment: "start",
                headingAlignment: "start",
                canSort: true
            },
            SUOMD: {
                heading: "SalesUnit",
                width: 70,
                alignment: "start",
                headingAlignment: "start",
                canSort: true           
            },
            CONRATIO: {
                heading: "Conratio",
                width: 70,
                alignment: "end",
                headingAlignment: "end",
                canSort: true
            }
        }]

    };

    return options;

}
 EXTRACT (MONTH FROM OPENDT) = 11
 AND EXTRACT (YEAR FROM OPENDT) = 2022
-- remove references to the deprecated value
UPDATE job SET job_status = 'running' WHERE job_status = 'waiting';

-- rename the existing type
ALTER TYPE status_enum RENAME TO status_enum_old;

-- create the new type
CREATE TYPE status_enum AS ENUM('queued', 'running', 'done');

-- update the columns to use the new type
ALTER TABLE job ALTER COLUMN job_status TYPE status_enum USING job_status::text::status_enum;
-- if you get an error, see bottom of post (a common case is sketched below)

-- remove the old type
DROP TYPE status_enum_old;
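-- If the ALTER COLUMN step fails with "default for column cannot be cast automatically",
-- the usual fix is to drop the default, convert the column, then restore the default.
-- Minimal sketch, assuming the column's default was 'queued':
ALTER TABLE job ALTER COLUMN job_status DROP DEFAULT;
ALTER TABLE job ALTER COLUMN job_status TYPE status_enum USING job_status::text::status_enum;
ALTER TABLE job ALTER COLUMN job_status SET DEFAULT 'queued';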
select 
  tbl_lab_setup_Test.Test_Code
  ,tbl_lab_setup_Test.Test_Name
  ,count(tbl_lab_setup_Test.Test_Name) as Count
  from [tbl_lab_log_Staff_Action]
  join view_lab_information_Request on [tbl_lab_log_Staff_Action].LSA_LN = view_lab_information_Request.IREQ_LN
  join tbl_lab_setup_Test on [tbl_lab_log_Staff_Action].[LSA_TEST_CODE] = tbl_lab_setup_Test.Test_Code
  where [LSA_TYPE] = 'ResRerun'
  group by tbl_lab_setup_Test.Test_Code, tbl_lab_setup_Test.Test_Name
select tmptable.project_name, tmptable.geometry_name, tmptable.geometry_id, tmptable.diff --, tmptable.lost_for_seconds

from 
(SELECT 	
 	project_name,
	geometry_name, 
 	geometry_id,
 	max(time) as time, 
 	(now()-max(time)) as diff,
 	(extract(epoch from (now()-max(time)))) as lost_for_seconds
 	FROM public.metrics_app_timeandgeometric as a
	join public.metrics_app_geometry as b on a.geometry_id = b.id
	where project_name in ('Lasswade', 
						   'Mauricewood_School',
						   'Leconfield_House',
						   'Roman_Baths'
						   )-- PROJECT_NAME
-- 	and (geometry_name like '%_iaq'
-- 		 or geometry_name like '%_count'
-- 		 or geometry_name like '%_sound')
 	and metric_name not in ('Occupancy', 'Utilisation', 'PlannedOccupancy', 'PlannedUtilisation', 
						   	'peopleCount', 'inCountTotal', 'outCountTotal'
						   )
 	and sensor_id IS NOT NULL
 	and geometry_name not like 'Window%' 
 	and geometry_name not like 'Desk%'
 	--and value != 0
	group by project_name, geometry_name, geometry_id
	--order by time desc
) as tmptable

where tmptable.lost_for_seconds > 1*60*60 --HOUR*MINUTES*SECONDS
	--and tmptable.geometry_name not like 'Window%'
order by tmptable.project_name asc, tmptable.lost_for_seconds desc
SELECT COUNT (1)
  FROM pti
 WHERE EXTRACT (MONTH FROM oarslogdt) = 11 
 AND EXTRACT (YEAR FROM oarslogdt) = 2022
Select OrderID = 
    Case 
        When OrderID = 1 Then 'Customer1'
        When OrderID In (2, 3) Then 'Customer2'
        Else 'Unknown Customer'
    End 
From OrdersPlaced
SELECT 
    pg_terminate_backend(pid) 
FROM 
    pg_stat_activity 
WHERE 
    -- don't kill my own connection!
    pid <> pg_backend_pid()
    -- don't kill the connections to other databases
    AND datname = 'database_name'
    ;
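-- Hedged variant of the query above: additionally skip busy sessions and only terminate
-- connections that are currently idle (pg_stat_activity.state, PostgreSQL 9.2+).
SELECT
    pg_terminate_backend(pid)
FROM
    pg_stat_activity
WHERE
    pid <> pg_backend_pid()
    AND datname = 'database_name'
    AND state = 'idle';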
use LAB_DB_GEE
UPDATE tbl_lab_setup_Test_Control
SET Test_Text_Keyin_Flag = '1';
UPDATE tbl_lab_setup_Test
SET Test_Inactive = '1',
 Test_His_Code = 'XXX',
 Test_Checkup_Code = '' 

DELETE FROM tbl_lab_configuration_Interface
WHERE CFGI_NAME=N''

DELETE FROM  tbl_lab_setup_Source_Group 
WHERE Source_Group_Code=''

DELETE FROM tbl_lab_setup_Source_Definition 
WHERE Source_Definition_Code <>'000000'

DELETE FROM tbl_lab_management_Users 
WHERE ([User_ID]<>N'admin')


UPDATE  tbl_lab_setup_Test_Group
SET Test_Group_Report_Form_Path=N'',
Test_Group_Inactive='1'


UPDATE tbl_lab_setup_Test_Control
SET Test_Default_Value_Flag='0'

UPDATE tbl_lab_setup_Test_Control 
set Test_Text_Keyin_Flag = '1'
 select comments from user_col_comments where table_name = 'PTI' and column_name = 'DISCOUNT'
CREATE FUNCTION dbo.ToDateTime2 ( @Ticks bigint )
  RETURNS datetime2
AS
BEGIN
    DECLARE @DateTime datetime2 = '00010101';
    SET @DateTime = DATEADD( DAY, @Ticks / 864000000000, @DateTime );
    SET @DateTime = DATEADD( SECOND, ( @Ticks % 864000000000) / 10000000, @DateTime );
    RETURN DATEADD( NANOSECOND, ( @Ticks % 10000000 ) * 100, @DateTime );
END
CREATE FUNCTION dbo.ToTicks ( @DateTime datetime2 )
  RETURNS bigint
AS
BEGIN
    DECLARE @Days bigint = DATEDIFF( DAY, '00010101', cast( @DateTime as date ) );
    DECLARE @Seconds bigint = DATEDIFF( SECOND, '00:00', cast( @DateTime as time( 7 ) ) );
    DECLARE @Nanoseconds bigint = DATEPART( NANOSECOND, @DateTime );
    RETURN  @Days * 864000000000 + @Seconds * 10000000 + @Nanoseconds / 100;
END
CREATE FUNCTION dbo.ToTicks ( @DateTime datetime2 )
  RETURNS bigint
AS
BEGIN
 
    RETURN DATEDIFF_BIG( microsecond, '00010101', @DateTime ) * 10 +
           ( DATEPART( NANOSECOND, @DateTime ) % 1000 ) / 100;
END
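-- Minimal usage sketch (assumption: both functions above have been created in dbo):
-- convert a datetime2 value to .NET-style ticks and back; the round trip should be exact
-- at datetime2(7)'s 100ns precision.
DECLARE @now datetime2 = SYSUTCDATETIME();
SELECT  dbo.ToTicks(@now)                  AS Ticks,
        dbo.ToDateTime2(dbo.ToTicks(@now)) AS RoundTripped,
        @now                               AS Original;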
--working hours - all offices

select 
	
	--distinct tablewithcounts.project_name
	tablewithcounts.freq_val, count(tablewithcounts.id)
	

--tablewithcounts.id, tablewithcounts.project_name, tablewithcounts.metric_name, tablewithcounts."time", tablewithcounts."Day", tablewithcounts."Hour", tablewithcounts.value, tablewithcounts.freq_val, tablewithcounts.display_value --, geometry_id

from
(
SELECT
	id, project_name, metric_name, "time", value, display_value, geometry_id,
	round(cast(value as numeric), 1) as freq_val,
	extract(hour from "time") as "Hour",
	to_char("time", 'Day') as "Day"
	FROM public.metrics_app_timeandgeometric_historic
	--where project_name in ('Lasswade')
	where 
	(
	project_name = 'Leconfield_House'
	)
	and metric_name = 'temp'
	and (value >= 0 and value < 50)
	and display_value is NULL
) as tablewithcounts	


where 
--tablewithcounts."Day" not in ('Saturday ', 'Sunday   ')
tablewithcounts."Day" not like '%Saturday%'
and tablewithcounts."Day" not like '%Sunday%'
and tablewithcounts."Hour" >= 8 and tablewithcounts."Hour" < 16

and tablewithcounts.time not between '2022-01-01' and '2022-01-01'
and tablewithcounts.time not between '2022-01-02' and '2022-01-02'
and tablewithcounts.time not between '2022-01-03' and '2022-01-03'
and tablewithcounts.time not between '2022-01-04' and '2022-01-04'
and tablewithcounts.time not between '2022-03-17' and '2022-03-17'
and tablewithcounts.time not between '2022-04-15' and '2022-04-15'
and tablewithcounts.time not between '2022-04-18' and '2022-04-18'
and tablewithcounts.time not between '2022-05-02' and '2022-05-02'
and tablewithcounts.time not between '2022-06-02' and '2022-06-02'
and tablewithcounts.time not between '2022-06-03' and '2022-06-03'
and tablewithcounts.time not between '2022-07-12' and '2022-07-12'
and tablewithcounts.time not between '2022-08-01' and '2022-08-01'
and tablewithcounts.time not between '2022-08-29' and '2022-08-29'
and tablewithcounts.time not between '2022-11-30' and '2022-11-30'
and tablewithcounts.time not between '2022-12-25' and '2022-12-25'
and tablewithcounts.time not between '2022-12-26' and '2022-12-26'
and tablewithcounts.time not between '2022-12-27' and '2022-12-27'
and tablewithcounts.time not between '2023-01-01' and '2023-01-01'
and tablewithcounts.time not between '2023-01-02' and '2023-01-02'
and tablewithcounts.time not between '2023-01-03' and '2023-01-03'
and tablewithcounts.time not between '2023-03-17' and '2023-03-17'
and tablewithcounts.time not between '2023-04-07' and '2023-04-07'
and tablewithcounts.time not between '2023-04-10' and '2023-04-10'
and tablewithcounts.time not between '2023-05-01' and '2023-05-01'
and tablewithcounts.time not between '2023-05-29' and '2023-05-29'
and tablewithcounts.time not between '2023-07-12' and '2023-07-12'
and tablewithcounts.time not between '2023-08-07' and '2023-08-07'
and tablewithcounts.time not between '2023-08-28' and '2023-08-28'
and tablewithcounts.time not between '2023-11-30' and '2023-11-30'
and tablewithcounts.time not between '2023-12-25' and '2023-12-25'
and tablewithcounts.time not between '2023-12-26' and '2023-12-26'

group by tablewithcounts.freq_val
order by tablewithcounts.freq_val

--limit 1000
--all times - all offices

select 
	
	--distinct tablewithcounts.project_name
	tablewithcounts.freq_val, count(tablewithcounts.id)
	

--tablewithcounts.id, tablewithcounts.project_name, tablewithcounts.metric_name, tablewithcounts."time", tablewithcounts."Day", tablewithcounts."Hour", tablewithcounts.value, tablewithcounts.freq_val, tablewithcounts.display_value --, geometry_id

from
(
SELECT
	id, project_name, metric_name, "time", value, display_value, geometry_id,
	round(cast(value as numeric), 1) as freq_val,
	extract(hour from "time") as "Hour",
	to_char("time", 'Day') as "Day"
	FROM public.metrics_app_timeandgeometric_historic
	--where project_name in ('Lasswade')
	where 
	(
	project_name = 'Lasswade'
	or project_name = 'Mauricewood_School'
	or project_name = 'Eaglesham_Primaryschool'
	or
	project_name in (
			'Lasswade',
			--'Roman_Baths',
			'Mauricewood_School',
			--'Leconfield_House',
			'Inverkeithing_Nursery',
			'Windmill',
			'Dunfermline_Highschool',
			'Kinross_Primaryschool',
			'Pitlochry_Highschool',
			'Tulloch_Primaryschool',
			'Viewlands_Primaryschool',
			'Busby_Nursery',
			'Carlibar_Primaryschool',
			'Eaglesham_Elc',
			'Eaglesham_Primaryschool',
			'Glen_Family_Centre',
			'Mearns_Castle_Highschool',
			'Williamwood_Highschool',
			'Glenwood_Family_Centre',
			'Isobel_Mair',
			'Maidenhill_Primaryschool',
			'Olm_Primaryschool',
			'Thornliebank_Primaryschool',
			'St_Lukes_Primaryschool',
			'Abroath_Academy',
			'Abroath_Highschool',
			'Eassie_Primaryschool',
			'Maisondue_Primaryschool',
			'Northmuir_Primaryschool',
			'St_Margarets_Primaryschool',
			'Strathmartine_Primaryschool',
			'Tealing_Primaryschool',
			'Websters_Highschool'
			)
	)
	and metric_name = 'temp'
	and (value >= 0 and value < 50)
	and display_value is NULL
) as tablewithcounts	


-- where 
-- --tablewithcounts."Day" not in ('Saturday ', 'Sunday   ')
-- tablewithcounts."Day" not like '%Saturday%'
-- and tablewithcounts."Day" not like '%Sunday%'
-- and tablewithcounts."Hour" >= 8 and tablewithcounts."Hour" < 16

-- and tablewithcounts.time not between '2021-08-01' and '2021-08-17'
-- and tablewithcounts.time not between '2021-09-20' and '2021-09-20'
-- and tablewithcounts.time not between '2021-10-18' and '2021-10-25'
-- and tablewithcounts.time not between '2021-12-23' and '2022-01-10'
-- and tablewithcounts.time not between '2022-02-14' and '2022-02-18'
-- and tablewithcounts.time not between '2022-04-11' and '2022-04-22'
-- and tablewithcounts.time not between '2022-05-02' and '2022-05-02'
-- and tablewithcounts.time not between '2022-05-23' and '2022-05-23'
-- and tablewithcounts.time not between '2022-07-01' and '2022-07-31'
-- and tablewithcounts.time not between '2022-08-01' and '2022-08-16'
-- and tablewithcounts.time not between '2022-09-16' and '2022-09-19'
-- and tablewithcounts.time not between '2022-10-17' and '2022-10-24'
-- and tablewithcounts.time not between '2022-12-21' and '2023-01-04'
-- and tablewithcounts.time not between '2023-02-13' and '2023-02-17'
-- and tablewithcounts.time not between '2023-04-03' and '2023-04-14'
-- and tablewithcounts.time not between '2023-05-01' and '2023-05-01'
-- and tablewithcounts.time not between '2023-05-22' and '2023-05-22'
-- and tablewithcounts.time not between '2023-06-29' and '2023-07-31'


group by tablewithcounts.freq_val
order by tablewithcounts.freq_val

--limit 1000
SELECT id, project_name, metric_name, "time", value, display_value, geometry_id
	FROM public.metrics_app_timeandgeometric
	where project_name = 'Lasswade'
	and metric_name in ('peopleCount', 'Occupancy', 'Utilisation')
	and geometry_id in (1210, 5127)
	and display_value is NULL
	and time > '2022-12-08'
	order by time, metric_name
 select comments from user_col_comments where table_name = 'PTI' and column_name = 'DISCOUNT'


COMMENT ON COLUMN item.shelf_life
   IS 'the number of days for which an item remains usable, fit for consumption, or saleable.'
SELECT CONCAT('#',LPAD(CONV(ROUND(RAND()*16777215),10,16),6,0)) AS color;


UPDATE `frm_staff` SET `profile_colour` = (SELECT CONCAT('#',LPAD(CONV(ROUND(RAND()*16777215),10,16),6,0)))
-- INCOUNT - OUTCOUNT

-- 1 - inCount to inCount_1
UPDATE public.metrics_app_timeandgeometric_historic
	SET metric_name= 'inCount_1'
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'inCount'




-- 2 - outCount to inCount
UPDATE public.metrics_app_timeandgeometric_historic
	SET metric_name= 'inCount'
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'outCount'




-- 3 - inCount_1 to outCount
UPDATE public.metrics_app_timeandgeometric_historic
	SET metric_name= 'outCount'
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'inCount_1'



-- INCOUNTTOTAL - OUTCOUNTTOTAL

-- 4 - inCountTotal to inCountTotal_1
UPDATE public.metrics_app_timeandgeometric_historic
	SET metric_name= 'inCountTotal_1'
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'inCountTotal'




-- 5 - outCountTotal to inCountTotal
UPDATE public.metrics_app_timeandgeometric_historic
	SET metric_name= 'inCountTotal'
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'outCountTotal'




-- 6 - inCountTotal_1 to outCountTotal
UPDATE public.metrics_app_timeandgeometric_historic
	SET metric_name= 'outCountTotal'
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'inCountTotal_1'





-- PEOPLECOUNT

-- 7 - peopleCount to peopleCount * -1
UPDATE public.metrics_app_timeandgeometric_historic
	
	--SET metric_name= 'peopleCount'
	SET value = value * -1
	
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'peopleCount'
	



-- 8 - peopleCount to abs(peopleCount)
UPDATE public.metrics_app_timeandgeometric_historic
	
	--SET metric_name= 'peopleCount'
	SET value = abs(value * 1)
	
	WHERE 
	project_name = 'Roman_Baths'
	and geometry_id in (
						2371,
						2375,
						2377,
						2376
						)
	--and time >= '2022-12-02' 
	and time < '2022-12-01'
	and metric_name = 'peopleCount'
	and value = 0
	

	
	
	
	
-- non-working hours - all schools

select 
	
	--distinct tablewithcounts.project_name
	tablewithcounts.freq_val, count(tablewithcounts.id)
	

--tablewithcounts.id, tablewithcounts.project_name, tablewithcounts.metric_name, tablewithcounts."time", tablewithcounts."Day", tablewithcounts."Hour", tablewithcounts.value, tablewithcounts.freq_val, tablewithcounts.display_value --, geometry_id

from
(
SELECT
	id, project_name, metric_name, "time", value, display_value, geometry_id,
	round(cast(value as numeric), 0) as freq_val,
	extract(hour from "time") as "Hour",
	to_char("time", 'Day') as "Day"
	FROM public.metrics_app_timeandgeometric_historic
	--where project_name in ('Lasswade')
	where 
	(
	project_name = 'Lasswade'
	or project_name = 'Mauricewood_School'
	or project_name = 'Eaglesham_Primaryschool'
	or
	project_name in (
			'Lasswade',
			--'Roman_Baths',
			'Mauricewood_School',
			--'Leconfield_House',
			'Inverkeithing_Nursery',
			'Windmill',
			'Dunfermline_Highschool',
			'Kinross_Primaryschool',
			'Pitlochry_Highschool',
			'Tulloch_Primaryschool',
			'Viewlands_Primaryschool',
			'Busby_Nursery',
			'Carlibar_Primaryschool',
			'Eaglesham_Elc',
			'Eaglesham_Primaryschool',
			'Glen_Family_Centre',
			'Mearns_Castle_Highschool',
			'Williamwood_Highschool',
			'Glenwood_Family_Centre',
			'Isobel_Mair',
			'Maidenhill_Primaryschool',
			'Olm_Primaryschool',
			'Thornliebank_Primaryschool',
			'St_Lukes_Primaryschool',
			'Abroath_Academy',
			'Abroath_Highschool',
			'Eassie_Primaryschool',
			'Maisondue_Primaryschool',
			'Northmuir_Primaryschool',
			'St_Margarets_Primaryschool',
			'Strathmartine_Primaryschool',
			'Tealing_Primaryschool',
			'Websters_Highschool'
			)
	)
	and metric_name = 'temp'
	and (value >= 0 and value < 50)
	and display_value is NULL
) as tablewithcounts	


where 
-- --tablewithcounts."Day" not in ('Saturday ', 'Sunday   ')

-- WEEKENDS
(
(tablewithcounts."Day" in ('Saturday ', 'Sunday   ')
and 
(tablewithcounts."Hour" >= 0 and tablewithcounts."Hour" < 24)
)
-- WEEKDAYS (MON-THU)
or
(
(tablewithcounts."Day" not in ('Saturday ', 'Sunday   '))
and 
(tablewithcounts."Hour" >= 0 and tablewithcounts."Hour" < 8
or tablewithcounts."Hour" >= 16 and tablewithcounts."Hour" < 24)
)
-- FRIDAYS
or
(
(tablewithcounts."Day" in ('Friday   '))
and
(tablewithcounts."Hour" >= 0 and tablewithcounts."Hour" < 8
or tablewithcounts."Hour" >= 12 and tablewithcounts."Hour" < 24)
)
)

-- HOLIDAYS
or
(
(
tablewithcounts.time between '2021-08-01' and '2021-08-18' -- '2021-08-17'
or tablewithcounts.time between '2021-09-20' and '2021-09-21' -- '2021-09-20'
or tablewithcounts.time between '2021-10-18' and '2021-10-26' -- '2021-10-25'
or tablewithcounts.time between '2021-12-23' and '2022-01-11' -- '2022-01-10'
or tablewithcounts.time between '2022-02-14' and '2022-02-19' -- '2022-02-18'
or tablewithcounts.time between '2022-04-11' and '2022-04-23' -- '2022-04-22'
or tablewithcounts.time between '2022-05-02' and '2022-05-03' -- '2022-05-02'
or tablewithcounts.time between '2022-05-23' and '2022-05-24' -- '2022-05-23'
or tablewithcounts.time between '2022-07-01' and '2022-08-01' -- '2022-07-31'
or tablewithcounts.time between '2022-08-01' and '2022-08-17' -- '2022-08-16'
or tablewithcounts.time between '2022-09-16' and '2022-09-20' -- '2022-09-19'
or tablewithcounts.time between '2022-10-17' and '2022-10-25' -- '2022-10-24'
or tablewithcounts.time between '2022-12-21' and '2023-01-05' -- '2023-01-04'
or tablewithcounts.time between '2023-02-13' and '2023-02-18' -- '2023-02-17'
or tablewithcounts.time between '2023-04-03' and '2023-04-15' -- '2023-04-14'
or tablewithcounts.time between '2023-05-01' and '2023-05-02' -- '2023-05-01'
or tablewithcounts.time between '2023-05-22' and '2023-05-23' -- '2023-05-22'
or tablewithcounts.time between '2023-06-29' and '2023-08-01' -- '2023-07-31'
)
and
(tablewithcounts."Hour" >= 0 and tablewithcounts."Hour" < 24)
)


group by tablewithcounts.freq_val
order by tablewithcounts.freq_val

--order by tablewithcounts.time
--limit 10000
--working hours - all schools

select 
	
	--distinct tablewithcounts.project_name
	tablewithcounts.freq_val, count(tablewithcounts.id)
	

--tablewithcounts.id, tablewithcounts.project_name, tablewithcounts.metric_name, tablewithcounts."time", tablewithcounts."Day", tablewithcounts."Hour", tablewithcounts.value, tablewithcounts.freq_val, tablewithcounts.display_value --, geometry_id

from
(
SELECT
	id, project_name, metric_name, "time", value, display_value, geometry_id,
	round(cast(value as numeric), 0) as freq_val,
	extract(hour from "time") as "Hour",
	to_char("time", 'Day') as "Day"
	FROM public.metrics_app_timeandgeometric_historic
	--where project_name in ('Lasswade')
	where 
	(
	project_name = 'Lasswade'
	or project_name = 'Mauricewood_School'
	or project_name = 'Eaglesham_Primaryschool'
	or
	project_name in (
			'Lasswade',
			--'Roman_Baths',
			'Mauricewood_School',
			--'Leconfield_House',
			'Inverkeithing_Nursery',
			'Windmill',
			'Dunfermline_Highschool',
			'Kinross_Primaryschool',
			'Pitlochry_Highschool',
			'Tulloch_Primaryschool',
			'Viewlands_Primaryschool',
			'Busby_Nursery',
			'Carlibar_Primaryschool',
			'Eaglesham_Elc',
			'Eaglesham_Primaryschool',
			'Glen_Family_Centre',
			'Mearns_Castle_Highschool',
			'Williamwood_Highschool',
			'Glenwood_Family_Centre',
			'Isobel_Mair',
			'Maidenhill_Primaryschool',
			'Olm_Primaryschool',
			'Thornliebank_Primaryschool',
			'St_Lukes_Primaryschool',
			'Abroath_Academy',
			'Abroath_Highschool',
			'Eassie_Primaryschool',
			'Maisondue_Primaryschool',
			'Northmuir_Primaryschool',
			'St_Margarets_Primaryschool',
			'Strathmartine_Primaryschool',
			'Tealing_Primaryschool',
			'Websters_Highschool'
			)
	)
	and metric_name = 'temp'
	and (value >= 0 and value < 50)
	and display_value is NULL
) as tablewithcounts	


where 
--tablewithcounts."Day" not in ('Saturday ', 'Sunday   ')
tablewithcounts."Day" not like '%Saturday%'
and tablewithcounts."Day" not like '%Sunday%'
and tablewithcounts."Hour" >= 8 and tablewithcounts."Hour" < 16

and tablewithcounts.time not between '2021-08-01' and '2021-08-17'
and tablewithcounts.time not between '2021-09-20' and '2021-09-20'
and tablewithcounts.time not between '2021-10-18' and '2021-10-25'
and tablewithcounts.time not between '2021-12-23' and '2022-01-10'
and tablewithcounts.time not between '2022-02-14' and '2022-02-18'
and tablewithcounts.time not between '2022-04-11' and '2022-04-22'
and tablewithcounts.time not between '2022-05-02' and '2022-05-02'
and tablewithcounts.time not between '2022-05-23' and '2022-05-23'
and tablewithcounts.time not between '2022-07-01' and '2022-07-31'
and tablewithcounts.time not between '2022-08-01' and '2022-08-16'
and tablewithcounts.time not between '2022-09-16' and '2022-09-19'
and tablewithcounts.time not between '2022-10-17' and '2022-10-24'
and tablewithcounts.time not between '2022-12-21' and '2023-01-04'
and tablewithcounts.time not between '2023-02-13' and '2023-02-17'
and tablewithcounts.time not between '2023-04-03' and '2023-04-14'
and tablewithcounts.time not between '2023-05-01' and '2023-05-01'
and tablewithcounts.time not between '2023-05-22' and '2023-05-22'
and tablewithcounts.time not between '2023-06-29' and '2023-07-31'


group by tablewithcounts.freq_val
order by tablewithcounts.freq_val

--limit 1000
SELECT DISTINCT Category, creationDate
FROM MonitoringJob
ORDER BY CreationDate DESC
CREATE TABLE identity_types (
    transaction_id UUID NOT NULL REFERENCES transaction_doctor(id) ON DELETE CASCADE,
    appointment_id UUID NOT NULL REFERENCES appointment(id) ON DELETE CASCADE,
    PRIMARY KEY (transaction_id, appointment_id),
    additional_attribute_1 TEXT NULL,
    additional_attribute_2 TEXT NULL,
    additional_attribute_3 TEXT NULL,
    additional_attribute_4 TEXT NULL,
    additional_attribute_5 TEXT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

CREATE OR REPLACE FUNCTION trigger_set_timestamp()
    RETURNS TRIGGER AS $$
BEGIN
    NEW.updated_at = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER set_timestamp
    BEFORE UPDATE ON identity_types
    FOR EACH ROW
EXECUTE PROCEDURE trigger_set_timestamp();
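-- Illustrative only (placeholder UUID, not a real key): any UPDATE against identity_types
-- now refreshes updated_at automatically via the BEFORE UPDATE trigger above.
UPDATE identity_types
SET additional_attribute_1 = 'reviewed'
WHERE transaction_id = '00000000-0000-0000-0000-000000000000';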
mysql> SELECT CONCAT('#',LPAD(CONV(ROUND(RAND()*16777215),10,16),6,0)) AS color;
+---------+
| color   |
+---------+
| #0E74A9 |
+---------+
1 row in set (0.00 sec)
DROP TABLE EMP
DROP TABLE DEPT
DROP TABLE BONUS
DROP TABLE SALGRADE
DROP TABLE DUMMY

CREATE TABLE EMP
(EMPNO NUMERIC(4) NOT NULL,
ENAME VARCHAR(10),
JOB VARCHAR(9),
MGR NUMERIC(4),
HIREDATE DATETIME,
SAL NUMERIC(7, 2),
COMM NUMERIC(7, 2),
DEPTNO NUMERIC(2))

INSERT INTO EMP VALUES
(7369, 'SMITH', 'CLERK', 7902, '17-DEC-1980', 800, NULL, 20)
INSERT INTO EMP VALUES
(7499, 'ALLEN', 'SALESMAN', 7698, '20-FEB-1981', 1600, 300, 30)
INSERT INTO EMP VALUES
(7521, 'WARD', 'SALESMAN', 7698, '22-FEB-1981', 1250, 500, 30)
INSERT INTO EMP VALUES
(7566, 'JONES', 'MANAGER', 7839, '2-APR-1981', 2975, NULL, 20)
INSERT INTO EMP VALUES
(7654, 'MARTIN', 'SALESMAN', 7698, '28-SEP-1981', 1250, 1400, 30)
INSERT INTO EMP VALUES
(7698, 'BLAKE', 'MANAGER', 7839, '1-MAY-1981', 2850, NULL, 30)
INSERT INTO EMP VALUES
(7782, 'CLARK', 'MANAGER', 7839, '9-JUN-1981', 2450, NULL, 10)
INSERT INTO EMP VALUES
(7788, 'SCOTT', 'ANALYST', 7566, '09-DEC-1982', 3000, NULL, 20)
INSERT INTO EMP VALUES
(7839, 'KING', 'PRESIDENT', NULL, '17-NOV-1981', 5000, NULL, 10)
INSERT INTO EMP VALUES
(7844, 'TURNER', 'SALESMAN', 7698, '8-SEP-1981', 1500, 0, 30)
INSERT INTO EMP VALUES
(7876, 'ADAMS', 'CLERK', 7788, '12-JAN-1983', 1100, NULL, 20)
INSERT INTO EMP VALUES
(7900, 'JAMES', 'CLERK', 7698, '3-DEC-1981', 950, NULL, 30)
INSERT INTO EMP VALUES
(7902, 'FORD', 'ANALYST', 7566, '3-DEC-1981', 3000, NULL, 20)
INSERT INTO EMP VALUES
(7934, 'MILLER', 'CLERK', 7782, '23-JAN-1982', 1300, NULL, 10)

CREATE TABLE DEPT
(DEPTNO NUMERIC(2),
DNAME VARCHAR(14),
LOC VARCHAR(13) )

INSERT INTO DEPT VALUES (10, 'ACCOUNTING', 'NEW YORK')
INSERT INTO DEPT VALUES (20, 'RESEARCH', 'DALLAS')
INSERT INTO DEPT VALUES (30, 'SALES', 'CHICAGO')
INSERT INTO DEPT VALUES (40, 'OPERATIONS', 'BOSTON')

CREATE TABLE BONUS
(ENAME VARCHAR(10),
JOB VARCHAR(9),
SAL NUMERIC,
COMM NUMERIC)

CREATE TABLE SALGRADE
(GRADE NUMERIC,
LOSAL NUMERIC,
HISAL NUMERIC)

INSERT INTO SALGRADE VALUES (1, 700, 1200)
INSERT INTO SALGRADE VALUES (2, 1201, 1400)
INSERT INTO SALGRADE VALUES (3, 1401, 2000)
INSERT INTO SALGRADE VALUES (4, 2001, 3000)
INSERT INTO SALGRADE VALUES (5, 3001, 9999)

CREATE TABLE DUMMY
(DUMMY NUMERIC)

INSERT INTO DUMMY VALUES (0)
---------------------Example 1
SELECT
    hits.page.searchKeyword as searchKeyword,
    COUNT(hits.page.searchKeyword) AS HIT_COUNT,
    COUNT(DISTINCT sessions.fullVisitorId) AS USER_COUNT,
    COUNT(DISTINCT CONCAT(sessions.fullVisitorId, CAST(sessions.visitStartTime AS STRING))) AS SESSION_COUNT
FROM
    `digicce.142980449.ga_sessions_*` sessions,
UNNEST(sessions.hits) as hits
WHERE
    _TABLE_SUFFIX BETWEEN '20200701' AND '20211001'
    -- _TABLE_SUFFIX = '20210401'
    AND hits.page.hostname = "www.pearson.com"
    AND REGEXP_CONTAINS(hits.page.pagePath, r"\/pearsonplus")
    -- AND REGEXP_CONTAINS(hits.page.pagePath, r"^\/(en-us|(store\/(en-us|p)))")
    AND hits.page.searchKeyword is not null
GROUP BY
    hits.page.searchKeyword
ORDER BY
    HIT_COUNT DESC
            
            
---------------------Example 2            
SELECT
    date,
    -- clientId,
    sessions.fullVisitorId AS fullVisitorId,
    CONCAT(sessions.fullVisitorId, CAST(sessions.visitStartTime AS STRING)) as visitIdentifier,
    hits.eventInfo.eventCategory as hitEventCategory,
    hits.eventInfo.eventAction as hitEventAction,
    hits.eventInfo.eventLabel as hitEventLabel,
    REGEXP_EXTRACT(hits.page.pagePath, r"^([^\?]+)") as pagePath,
    hits.page.searchKeyword as searchKeyword,
    hits.type as hitType,
    hits.hitNumber as hitNumber
    -- hits.product AS product,
    -- hits.transaction AS transaction,
    -- (SELECT MAX(cd.value) FROM UNNEST(hits.customDimensions) cd WHERE cd.index=13) as pageCategory
    -- (SELECT MAX(cd.value) FROM UNNEST(hits.customDimensions) cd WHERE cd.index=26) as countryLocale
    -- sessions.channelGrouping AS channelGrouping
FROM
    `digicce.142980449.ga_sessions_*` sessions,
    UNNEST(sessions.hits) as hits
WHERE
    sessions.totals.visits > 0
    -- AND _TABLE_SUFFIX BETWEEN '20210101' AND '20210419'
    AND _TABLE_SUFFIX = '20210401'
    AND hits.page.hostname = "www.pearson.com"
    AND REGEXP_CONTAINS(hits.page.pagePath, r"^\/(en-us|(store\/(en-us|p)))")
    AND (
        (
            hits.type = "PAGE"
        ) OR (
            hits.type = "EVENT" 
            AND hits.eventInfo.eventCategory IN ('ecommerce')
            AND hits.eventInfo.eventAction IN ('purchase')
        )
    )
ORDER BY
    fullVisitorId,
    visitIdentifier,
    hitNumber
--all times - all schools

select 
	
	--distinct tablewithcounts.project_name
	tablewithcounts.freq_val, count(tablewithcounts.id)
	

--tablewithcounts.id, tablewithcounts.project_name, tablewithcounts.metric_name, tablewithcounts."time", tablewithcounts."Day", tablewithcounts."Hour", tablewithcounts.value, tablewithcounts.freq_val, tablewithcounts.display_value --, geometry_id

from
(
SELECT
	id, project_name, metric_name, "time", value, display_value, geometry_id,
	round(cast(value as numeric), 1) as freq_val,
	extract(hour from "time") as "Hour",
	to_char("time", 'Day') as "Day"
	FROM public.metrics_app_timeandgeometric_historic
	--where project_name in ('Lasswade')
	where 
	(
	project_name = 'Lasswade'
	or project_name = 'Mauricewood_School'
	or project_name = 'Eaglesham_Primaryschool'
	or
	project_name in (
			'Lasswade',
			--'Roman_Baths',
			'Mauricewood_School',
			--'Leconfield_House',
			'Inverkeithing_Nursery',
			'Windmill',
			'Dunfermline_Highschool',
			'Kinross_Primaryschool',
			'Pitlochry_Highschool',
			'Tulloch_Primaryschool',
			'Viewlands_Primaryschool',
			'Busby_Nursery',
			'Carlibar_Primaryschool',
			'Eaglesham_Elc',
			'Eaglesham_Primaryschool',
			'Glen_Family_Centre',
			'Mearns_Castle_Highschool',
			'Williamwood_Highschool',
			'Glenwood_Family_Centre',
			'Isobel_Mair',
			'Maidenhill_Primaryschool',
			'Olm_Primaryschool',
			'Thornliebank_Primaryschool',
			'St_Lukes_Primaryschool',
			'Abroath_Academy',
			'Abroath_Highschool',
			'Eassie_Primaryschool',
			'Maisondue_Primaryschool',
			'Northmuir_Primaryschool',
			'St_Margarets_Primaryschool',
			'Strathmartine_Primaryschool',
			'Tealing_Primaryschool',
			'Websters_Highschool'
			)
	)
	and metric_name = 'temp'
	and (value >= 0 and value < 50)
	and display_value is NULL
) as tablewithcounts	


-- where 
-- --tablewithcounts."Day" not in ('Saturday ', 'Sunday   ')
-- tablewithcounts."Day" not like '%Saturday%'
-- and tablewithcounts."Day" not like '%Sunday%'
-- and tablewithcounts."Hour" >= 8 and tablewithcounts."Hour" < 16

-- and tablewithcounts.time not between '2021-08-01' and '2021-08-17'
-- and tablewithcounts.time not between '2021-09-20' and '2021-09-20'
-- and tablewithcounts.time not between '2021-10-18' and '2021-10-25'
-- and tablewithcounts.time not between '2021-12-23' and '2022-01-10'
-- and tablewithcounts.time not between '2022-02-14' and '2022-02-18'
-- and tablewithcounts.time not between '2022-04-11' and '2022-04-22'
-- and tablewithcounts.time not between '2022-05-02' and '2022-05-02'
-- and tablewithcounts.time not between '2022-05-23' and '2022-05-23'
-- and tablewithcounts.time not between '2022-07-01' and '2022-07-31'
-- and tablewithcounts.time not between '2022-08-01' and '2022-08-16'
-- and tablewithcounts.time not between '2022-09-16' and '2022-09-19'
-- and tablewithcounts.time not between '2022-10-17' and '2022-10-24'
-- and tablewithcounts.time not between '2022-12-21' and '2023-01-04'
-- and tablewithcounts.time not between '2023-02-13' and '2023-02-17'
-- and tablewithcounts.time not between '2023-04-03' and '2023-04-14'
-- and tablewithcounts.time not between '2023-05-01' and '2023-05-01'
-- and tablewithcounts.time not between '2023-05-22' and '2023-05-22'
-- and tablewithcounts.time not between '2023-06-29' and '2023-07-31'


group by tablewithcounts.freq_val
order by tablewithcounts.freq_val

--limit 1000
CASE WHEN "%IM0951%" = "Reactive" and "%IM0952%" = "-" and "%IM095%" = "-" THEN "Positive" 
WHEN "%IM0951%" = "-" and "%IM0952%" = "Reactive" and "%IM095%" = "-" THEN "Positive" 
WHEN "%IM0951%" = "-" and "%IM0952%" = "-" and "%IM095%" = "Reactive" THEN "Positive" 
WHEN "%IM0951%" = "Reactive" and "%IM0952%" = "Reactive" and "%IM095%" = "-" THEN "Positive" 
WHEN "%IM0951%" = "Reactive" and "%IM0952%" = "-" and "%IM095%" = "Reactive" THEN "Positive" 
WHEN "%IM0951%" = "-" and "%IM0952%" = "Reactive" and "%IM095%" = "Reactive" THEN "Positive" 
WHEN "%IM0951%" = "Reactive" and "%IM0952%" = "Reactive" and "%IM095%" = "Reactive" THEN "Positive" 
WHEN "%IM0951%" = "Non-reactive" and "%IM0952%" = "-" and "%IM095%" = "-" THEN "Negative" 
WHEN "%IM0951%" = "-" and "%IM0952%" = "Non-reactive" and "%IM095%" = "-" THEN "Negative" 
WHEN "%IM0951%" = "-" and "%IM0952%" = "-" and "%IM095%" = "Non-reactive" THEN "Negative" 
WHEN "%IM0951%" = "Non-reactive" and "%IM0952%" = "Non-reactive" and "%IM095%" = "-" THEN "Negative" 
WHEN "%IM0951%" = "Non-reactive" and "%IM0952%" = "-" and "%IM095%" = "Non-reactive" THEN "Negative" 
WHEN "%IM0951%" = "-" and "%IM0952%" = "Non-reactive" and "%IM095%" = "Non-reactive" THEN "Negative" 
WHEN "%IM0951%" = "Non-reactive" and "%IM0952%" = "Non-reactive" and "%IM095%" = "Non-reactive" THEN "Negative" 
ELSE "Inconclusive" END
SELECT
	lab_items_sub_group_list.lab_items_sub_group_code,
	lab_items_sub_group.lab_items_sub_group_name,
	lab_items.ecode,
	lab_items.lab_items_name,
	lab_items.lab_items_code,
	lab_items.display_order,
	lab_items.lab_items_normal_value,
	lab_items.possible_value,
	lab_items.lab_items_unit
FROM
	lab_items_sub_group_list
	INNER JOIN
	lab_items
	ON
		lab_items.lab_items_code = lab_items_sub_group_list.lab_items_code
	INNER JOIN
	lab_items_sub_group
	ON
		lab_items_sub_group_list.lab_items_sub_group_code = lab_items_sub_group.lab_items_sub_group_code
WHERE
	-- lab_items_sub_group_list.lab_items_sub_group_code IN ('248','249','250')
	lab_items_sub_group_list.lab_items_sub_group_code ='250'
ORDER BY
	lab_items_sub_group_code ASC,
	lab_items.display_order ASC

/*
SELECT
	lab_items_sub_group_list.lab_items_sub_group_code,
	lab_items_sub_group.lab_items_sub_group_name,
	lab_items.ecode,
	lab_items.lab_items_name,
	lab_items.lab_items_code,
	lab_items.display_order,
	lab_items.lab_items_normal_value,
	lab_items.possible_value,
	lab_items.lab_items_unit
FROM
	lab_items_sub_group_list
	INNER JOIN
	lab_items
	ON
		lab_items.lab_items_code = lab_items_sub_group_list.lab_items_code
	INNER JOIN
	lab_items_sub_group
	ON
		lab_items_sub_group_list.lab_items_sub_group_code = lab_items_sub_group.lab_items_sub_group_code
WHERE
	lab_items_sub_group_list.lab_items_sub_group_code
	in(
	'247'
			)
ORDER BY
	lab_items_sub_group_code ASC,
	lab_items.display_order ASC

*/
/*
SELECT
lab_items_sub_group_list.lab_items_code,
lab_items.lab_items_code,
lab_items_sub_group_list.lab_items_sub_group_code,
lab_items.lab_items_name,
lab_items.ecode,
lab_items.display_order
FROM
lab_items_sub_group_list
INNER JOIN lab_items ON lab_items.lab_items_code = lab_items_sub_group_list.lab_items_code
WHERE
lab_items_sub_group_list.lab_items_sub_group_code = ---80
ORDER BY
lab_items.display_order ASC
*/
SELECT
lab_items_sub_group_list.lab_items_code,
lab_items.lab_items_code,
lab_items_sub_group_list.lab_items_sub_group_code,
lab_items.lab_items_name,
lab_items.ecode,
lab_items.display_order
FROM
lab_items_sub_group_list
INNER JOIN lab_items ON lab_items.lab_items_code = lab_items_sub_group_list.lab_items_code
WHERE
lab_items_sub_group_list.lab_items_sub_group_code = ---80
ORDER BY
lab_items.display_order ASC
CASE
WHEN "%IM0951%" = "Reactive" THEN
    "Positive"
WHEN "%IM0952%" = "Reactive" THEN
    "Positive"
WHEN "%IM095%" = "Reactive" THEN
    "Positive"
WHEN "%IM0951%" = "Non-reactive" THEN
    "Negative"
WHEN "%IM0952%" = "Non-reactive" THEN
    "Negative"
WHEN "%IM095%" = "Non-reactive" THEN
    "Negative"
ELSE
    "Inconclusive"
END
CASE
    WHEN %HM014% >= 0 AND %HM014% < 50.0 THEN "Markedly decreased"
    WHEN %HM014% >= 50.0 AND %HM014% <= 100.0 THEN "Decreased"
    WHEN %HM014% >= 100.001 AND %HM014% <= 150.0 THEN "Slightly decreased"
    WHEN %HM014% >= 150.001 AND %HM014% <= 450.0 THEN "Adequate"
    WHEN %HM014% >= 450.001 AND %HM014% <= 500.0 THEN "Slightly increased"
    WHEN %HM014% > 500.001 THEN "Markedly increased"
    ELSE "N/A"
END
$post = Post::find(1);
$newPost = $post->replicate();
$newPost->created_at = Carbon::now();
$newPost->save();
SELECT ct.ID, 
       ISNULL(NULLIF(ct.LaunchDate, ''), null) [LaunchDate]
FROM   [dbo].[CustomerTable] ct
select @var1 = avg(someColumn), @var2 = avg(otherColumn) 
from theTable
UPDATE 
    t1
SET 
    t1.c1 = t2.c2,
    t1.c2 = expression,
    ...   
FROM 
    t1
    [INNER | LEFT] JOIN t2 ON join_predicate
WHERE 
    where_predicate;
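-- Concrete sketch of the template above with made-up table and column names
-- (products / price_staging are assumptions, not tables from this collection).
UPDATE
    p
SET
    p.unit_price = s.new_price
FROM
    products p
    INNER JOIN price_staging s ON s.product_id = p.product_id
WHERE
    s.effective_date <= GETDATE();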
create or replace trigger "AL_ITEM_UI"
AFTER
INSERT OR UPDATE on AL
FOR EACH ROW

BEGIN
 IF INSERTING THEN
      INSERT INTO ITEM   ( itemid,
                            itype,
                            named,
                            price,
                            puom,
                            suom,
                            puomdes,
                            suomdes,
                            sources,
                            component,
                            supplier,
                            parentid,
                            section,
                            inactive,
                            notes,
                            rcp_recalc,
                            conratio,
                            rtsb,
                            currency,
                            suom_convert,
                            cost_method
							)
							
   VALUES 				 (  :NEW.ID
						   , :NEW.itype
						   , :NEW.NAME
						   , :NEW.PRICE
						   , :NEW.PUOM
						   , :NEW.SUOM
						   , :NEW.PUOMDES
						   , :NEW.SUOMDES
						   , :NEW.SOURCER
						   , :NEW.COMPONENT
						   , :NEW.SUPPLIER
						   , :NEW.FROMID
						   , :NEW.SECTIONC
						   , :NEW.INACTIVE
						   , :NEW.NOTES
						   , :NEW.RCP_RECALC
						   , :NEW.CONRATIO
						   , :NEW.RTSB
						   , :NEW.CURRENCY
                           , :NEW.SUOMCONV
                           , :NEW.COST_METHOD
					       );
 ELSIF UPDATING THEN
      UPDATE ITEM
         SET               
                            itype=		:NEW.itype,
                            named=		:NEW.NAME,
                            price=		:NEW.PRICE,
                            puom=		:NEW.PUOM,
                            suom=		:NEW.SUOM,
                            puomdes=	:NEW.PUOMDES,
                            suomdes=	:NEW.SUOMDES,
                            sources=	:NEW.SOURCER,
                            component=	:NEW.COMPONENT,
                            supplier=	:NEW.SUPPLIER,
                            parentid=	:NEW.FROMID,
                            section=	:NEW.SECTIONC,
                            inactive=	:NEW.INACTIVE,
                            notes=		:NEW.NOTES,
                            rcp_recalc=	:NEW.RCP_RECALC,
                            conratio =	:NEW.CONRATIO,
                            rtsb =		:NEW.RTSB,
                            currency=	:NEW.CURRENCY,
                            cost_method=:NEW.COST_METHOD
         WHERE itemid = :NEW.ID;  -- limit the update to the mirrored row
       END IF;

END;
SELECT CAST(dtDateTime as Date) As TheDate, count(*) as TotalRecs, 
COUNT(CASE WHEN nDBWTS_EffluentFlow<0 THEN 1 END) AS CountNegatives, 
Min(nDBWTS_EffluentFlow) as MinFlow, Max(nDBWTS_EffluentFlow) as MaxFlow, 
SUM(nDBWTS_EffluentFlow) as TotAllFlow, 
AVG(nDBWTS_EffluentFlow) as AvgAllFlow, 
AVG(nDBWTS_EffluentFlow)*0.001 * 60 * 1440 as AllEnvirSum, 
SUM(CASE WHEN nDBWTS_EffluentFlow>=0 THEN nDBWTS_EffluentFlow END) as TotPosFlow,
AVG(CASE WHEN nDBWTS_EffluentFlow>=0 THEN nDBWTS_EffluentFlow END) AS AvgPosFlow,
AVG(CASE WHEN nDBWTS_EffluentFlow>=0 THEN nDBWTS_EffluentFlow END)*0.001 * 60 * 1440  AS PosEnvirSum
FROM [EnvolvData].[dbo].[Environmental]
WHERE CAST(dtDateTime AS Date) BETWEEN '2022-09-01' AND '2022-09-20'
GROUP BY CAST(dtDateTime as Date)
ORDER BY CAST(dtDateTime as Date) ASC
SELECT duration_seconds,
       SUM(duration_seconds) OVER (ORDER BY start_time) AS running_total
  FROM tutorial.dc_bikeshare_q1_2012
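-- Hedged follow-on: the same running total restarted for each terminal, assuming the
-- tutorial table also exposes a start_terminal column.
SELECT start_terminal,
       duration_seconds,
       SUM(duration_seconds) OVER
           (PARTITION BY start_terminal ORDER BY start_time) AS running_total
  FROM tutorial.dc_bikeshare_q1_2012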
sudo apt update
sudo apt install mysql-server
sudo service mysql start
sudo mysql_secure_installation
sudo mysql -u root 

In mysql console:
DROP USER 'root'@'localhost';
CREATE USER 'root'@'%' IDENTIFIED BY 'YOURPASSWORD';
GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' WITH GRANT OPTION;
FLUSH PRIVILEGES;
CREATE TABLE  ITEM_TEST 
   (	ITEMID NUMBER(6,0) NOT NULL ENABLE, 
	ITYPE VARCHAR2(2 CHAR) NOT NULL ENABLE, 
	NAMED VARCHAR2(50), 
	PRICE NUMBER(8,2), 
	PUOM_COST NUMBER(8,2), 
	SUOM_CONVERT NUMBER(12,6), 
	PUOM NUMBER(7,2), 
	SUOM NUMBER(7,2), 
	PUOMDES CHAR(1), 
	SUOMDES CHAR(1), 
	SOURCE CHAR(1), 
	MINIBAR CHAR(1), 
	COMPONENT CHAR(1), 
	SUPPLIER NUMBER(6,0), 
	LOT_SIZE NUMBER(6,0), 
	PARENTID NUMBER(5,0), 
	SECTION CHAR(1), 
	INACTIVE CHAR(1), 
	NOTES VARCHAR2(600), 
	REORDER NUMBER(4,0), 
	OARSLOGDT TIMESTAMP (6), 
	UPDATEDT TIMESTAMP (0) WITH LOCAL TIME ZONE, 
	UPDATEBY NUMBER(6,0), 
	VINTAGE NUMBER(4,0), 
	RCP_RECALC NUMBER(8,2), 
	BRAND VARCHAR2(10), 
	MENUCODE VARCHAR2(5), 
	VARIETY CHAR(1), 
	LDES VARCHAR2(200), 
	APPROVEDBY NUMBER(5,0), 
	 CONSTRAINT ITEM_TEST_PK PRIMARY KEY (ITEMID)
   )
SELECT agent_code,COUNT(agent_code),MAX(ord_amount) 
FROM orders 
GROUP BY agent_code 
HAVING MAX(ord_amount) IN(500,800,2000);
Select * from Opportunity
Select * from Contract
INSERT INTO country (iso, name, nicename, iso3, numcode, phonecode) VALUES
('AF', 'AFGHANISTAN', 'Afghanistan', 'AFG', 4, 93),
('AL', 'ALBANIA', 'Albania', 'ALB', 8, 355),
('DZ', 'ALGERIA', 'Algeria', 'DZA', 12, 213),
('AS', 'AMERICAN SAMOA', 'American Samoa', 'ASM', 16, 1684),
('AD', 'ANDORRA', 'Andorra', 'AND', 20, 376),
('AO', 'ANGOLA', 'Angola', 'AGO', 24, 244),
('AI', 'ANGUILLA', 'Anguilla', 'AIA', 660, 1264),
('AQ', 'ANTARCTICA', 'Antarctica', 'ATA', 10, 0),
('AG', 'ANTIGUA AND BARBUDA', 'Antigua and Barbuda', 'ATG', 28, 1268),
('AR', 'ARGENTINA', 'Argentina', 'ARG', 32, 54),
('AM', 'ARMENIA', 'Armenia', 'ARM', 51, 374),
('AW', 'ARUBA', 'Aruba', 'ABW', 533, 297),
('AU', 'AUSTRALIA', 'Australia', 'AUS', 36, 61),
('AT', 'AUSTRIA', 'Austria', 'AUT', 40, 43),
('AZ', 'AZERBAIJAN', 'Azerbaijan', 'AZE', 31, 994),
('BS', 'BAHAMAS', 'Bahamas', 'BHS', 44, 1242),
('BH', 'BAHRAIN', 'Bahrain', 'BHR', 48, 973),
('BD', 'BANGLADESH', 'Bangladesh', 'BGD', 50, 880),
('BB', 'BARBADOS', 'Barbados', 'BRB', 52, 1246),
('BY', 'BELARUS', 'Belarus', 'BLR', 112, 375),
('BE', 'BELGIUM', 'Belgium', 'BEL', 56, 32),
('BZ', 'BELIZE', 'Belize', 'BLZ', 84, 501),
('BJ', 'BENIN', 'Benin', 'BEN', 204, 229),
('BM', 'BERMUDA', 'Bermuda', 'BMU', 60, 1441),
('BT', 'BHUTAN', 'Bhutan', 'BTN', 64, 975),
('BO', 'BOLIVIA', 'Bolivia', 'BOL', 68, 591),
('BA', 'BOSNIA AND HERZEGOVINA', 'Bosnia and Herzegovina', 'BIH', 70, 387),
('BW', 'BOTSWANA', 'Botswana', 'BWA', 72, 267),
('BV', 'BOUVET ISLAND', 'Bouvet Island', 'BVT', 74, 0),
('BR', 'BRAZIL', 'Brazil', 'BRA', 76, 55),
('IO', 'BRITISH INDIAN OCEAN TERRITORY', 'British Indian Ocean Territory', 'IOT', 86, 246),
('BN', 'BRUNEI DARUSSALAM', 'Brunei Darussalam', 'BRN', 96, 673),
('BG', 'BULGARIA', 'Bulgaria', 'BGR', 100, 359),
('BF', 'BURKINA FASO', 'Burkina Faso', 'BFA', 854, 226),
('BI', 'BURUNDI', 'Burundi', 'BDI', 108, 257),
('KH', 'CAMBODIA', 'Cambodia', 'KHM', 116, 855),
('CM', 'CAMEROON', 'Cameroon', 'CMR', 120, 237),
('CA', 'CANADA', 'Canada', 'CAN', 124, 1),
('CV', 'CAPE VERDE', 'Cape Verde', 'CPV', 132, 238),
('KY', 'CAYMAN ISLANDS', 'Cayman Islands', 'CYM', 136, 1345),
('CF', 'CENTRAL AFRICAN REPUBLIC', 'Central African Republic', 'CAF', 140, 236),
('TD', 'CHAD', 'Chad', 'TCD', 148, 235),
('CL', 'CHILE', 'Chile', 'CHL', 152, 56),
('CN', 'CHINA', 'China', 'CHN', 156, 86),
('CX', 'CHRISTMAS ISLAND', 'Christmas Island', 'CXR', 162, 61),
('CC', 'COCOS (KEELING) ISLANDS', 'Cocos (Keeling) Islands', NULL, NULL, 672),
('CO', 'COLOMBIA', 'Colombia', 'COL', 170, 57),
('KM', 'COMOROS', 'Comoros', 'COM', 174, 269),
('CG', 'CONGO', 'Congo', 'COG', 178, 242),
('CD', 'CONGO, THE DEMOCRATIC REPUBLIC OF THE', 'Congo, the Democratic Republic of the', 'COD', 180, 242),
('CK', 'COOK ISLANDS', 'Cook Islands', 'COK', 184, 682),
('CR', 'COSTA RICA', 'Costa Rica', 'CRI', 188, 506),
('CI', 'COTE D''IVOIRE', 'Cote D''Ivoire', 'CIV', 384, 225),
('HR', 'CROATIA', 'Croatia', 'HRV', 191, 385),
('CU', 'CUBA', 'Cuba', 'CUB', 192, 53),
('CY', 'CYPRUS', 'Cyprus', 'CYP', 196, 357),
('CZ', 'CZECHIA', 'Czech Republic', 'CZE', 203, 420),
('DK', 'DENMARK', 'Denmark', 'DNK', 208, 45),
('DJ', 'DJIBOUTI', 'Djibouti', 'DJI', 262, 253),
('DM', 'DOMINICA', 'Dominica', 'DMA', 212, 1767),
('DO', 'DOMINICAN REPUBLIC', 'Dominican Republic', 'DOM', 214, 1),
('EC', 'ECUADOR', 'Ecuador', 'ECU', 218, 593),
('EG', 'EGYPT', 'Egypt', 'EGY', 818, 20),
('SV', 'EL SALVADOR', 'El Salvador', 'SLV', 222, 503),
('GQ', 'EQUATORIAL GUINEA', 'Equatorial Guinea', 'GNQ', 226, 240),
('ER', 'ERITREA', 'Eritrea', 'ERI', 232, 291),
('EE', 'ESTONIA', 'Estonia', 'EST', 233, 372),
('ET', 'ETHIOPIA', 'Ethiopia', 'ETH', 231, 251),
('FK', 'FALKLAND ISLANDS (MALVINAS)', 'Falkland Islands (Malvinas)', 'FLK', 238, 500),
('FO', 'FAROE ISLANDS', 'Faroe Islands', 'FRO', 234, 298),
('FJ', 'FIJI', 'Fiji', 'FJI', 242, 679),
('FI', 'FINLAND', 'Finland', 'FIN', 246, 358),
('FR', 'FRANCE', 'France', 'FRA', 250, 33),
('GF', 'FRENCH GUIANA', 'French Guiana', 'GUF', 254, 594),
('PF', 'FRENCH POLYNESIA', 'French Polynesia', 'PYF', 258, 689),
('TF', 'FRENCH SOUTHERN TERRITORIES', 'French Southern Territories', 'ATF', 260, 0),
('GA', 'GABON', 'Gabon', 'GAB', 266, 241),
('GM', 'GAMBIA', 'Gambia', 'GMB', 270, 220),
('GE', 'GEORGIA', 'Georgia', 'GEO', 268, 995),
('DE', 'GERMANY', 'Germany', 'DEU', 276, 49),
('GH', 'GHANA', 'Ghana', 'GHA', 288, 233),
('GI', 'GIBRALTAR', 'Gibraltar', 'GIB', 292, 350),
('GR', 'GREECE', 'Greece', 'GRC', 300, 30),
('GL', 'GREENLAND', 'Greenland', 'GRL', 304, 299),
('GD', 'GRENADA', 'Grenada', 'GRD', 308, 1473),
('GP', 'GUADELOUPE', 'Guadeloupe', 'GLP', 312, 590),
('GU', 'GUAM', 'Guam', 'GUM', 316, 1671),
('GT', 'GUATEMALA', 'Guatemala', 'GTM', 320, 502),
('GN', 'GUINEA', 'Guinea', 'GIN', 324, 224),
('GW', 'GUINEA-BISSAU', 'Guinea-Bissau', 'GNB', 624, 245),
('GY', 'GUYANA', 'Guyana', 'GUY', 328, 592),
('HT', 'HAITI', 'Haiti', 'HTI', 332, 509),
('HM', 'HEARD ISLAND AND MCDONALD ISLANDS', 'Heard Island and Mcdonald Islands', 'HMD', 334, 0),
('VA', 'HOLY SEE (VATICAN CITY STATE)', 'Holy See (Vatican City State)', 'VAT', 336, 39),
('HN', 'HONDURAS', 'Honduras', 'HND', 340, 504),
('HK', 'HONG KONG', 'Hong Kong', 'HKG', 344, 852),
('HU', 'HUNGARY', 'Hungary', 'HUN', 348, 36),
('IS', 'ICELAND', 'Iceland', 'ISL', 352, 354),
('IN', 'INDIA', 'India', 'IND', 356, 91),
('ID', 'INDONESIA', 'Indonesia', 'IDN', 360, 62),
('IR', 'IRAN, ISLAMIC REPUBLIC OF', 'Iran, Islamic Republic of', 'IRN', 364, 98),
('IQ', 'IRAQ', 'Iraq', 'IRQ', 368, 964),
('IE', 'IRELAND', 'Ireland', 'IRL', 372, 353),
('IL', 'ISRAEL', 'Israel', 'ISR', 376, 972),
('IT', 'ITALY', 'Italy', 'ITA', 380, 39),
('JM', 'JAMAICA', 'Jamaica', 'JAM', 388, 1876),
('JP', 'JAPAN', 'Japan', 'JPN', 392, 81),
('JO', 'JORDAN', 'Jordan', 'JOR', 400, 962),
('KZ', 'KAZAKHSTAN', 'Kazakhstan', 'KAZ', 398, 7),
('KE', 'KENYA', 'Kenya', 'KEN', 404, 254),
('KI', 'KIRIBATI', 'Kiribati', 'KIR', 296, 686),
('KP', 'KOREA, DEMOCRATIC PEOPLE''S REPUBLIC OF', 'Korea, Democratic People''s Republic of', 'PRK', 408, 850),
('KR', 'KOREA, REPUBLIC OF', 'Korea, Republic of', 'KOR', 410, 82),
('KW', 'KUWAIT', 'Kuwait', 'KWT', 414, 965),
('KG', 'KYRGYZSTAN', 'Kyrgyzstan', 'KGZ', 417, 996),
('LA', 'LAO PEOPLE''S DEMOCRATIC REPUBLIC', 'Lao People''s Democratic Republic', 'LAO', 418, 856),
('LV', 'LATVIA', 'Latvia', 'LVA', 428, 371),
('LB', 'LEBANON', 'Lebanon', 'LBN', 422, 961),
('LS', 'LESOTHO', 'Lesotho', 'LSO', 426, 266),
('LR', 'LIBERIA', 'Liberia', 'LBR', 430, 231),
('LY', 'LIBYAN ARAB JAMAHIRIYA', 'Libyan Arab Jamahiriya', 'LBY', 434, 218),
('LI', 'LIECHTENSTEIN', 'Liechtenstein', 'LIE', 438, 423),
('LT', 'LITHUANIA', 'Lithuania', 'LTU', 440, 370),
('LU', 'LUXEMBOURG', 'Luxembourg', 'LUX', 442, 352),
('MO', 'MACAO', 'Macao', 'MAC', 446, 853),
('MK', 'NORTH MACEDONIA', 'North Macedonia', 'MKD', 807, 389),
('MG', 'MADAGASCAR', 'Madagascar', 'MDG', 450, 261),
('MW', 'MALAWI', 'Malawi', 'MWI', 454, 265),
('MY', 'MALAYSIA', 'Malaysia', 'MYS', 458, 60),
('MV', 'MALDIVES', 'Maldives', 'MDV', 462, 960),
('ML', 'MALI', 'Mali', 'MLI', 466, 223),
('MT', 'MALTA', 'Malta', 'MLT', 470, 356),
('MH', 'MARSHALL ISLANDS', 'Marshall Islands', 'MHL', 584, 692),
('MQ', 'MARTINIQUE', 'Martinique', 'MTQ', 474, 596),
('MR', 'MAURITANIA', 'Mauritania', 'MRT', 478, 222),
('MU', 'MAURITIUS', 'Mauritius', 'MUS', 480, 230),
('YT', 'MAYOTTE', 'Mayotte', 'MYT', 175, 269),
('MX', 'MEXICO', 'Mexico', 'MEX', 484, 52),
('FM', 'MICRONESIA, FEDERATED STATES OF', 'Micronesia, Federated States of', 'FSM', 583, 691),
('MD', 'MOLDOVA, REPUBLIC OF', 'Moldova, Republic of', 'MDA', 498, 373),
('MC', 'MONACO', 'Monaco', 'MCO', 492, 377),
('MN', 'MONGOLIA', 'Mongolia', 'MNG', 496, 976),
('MS', 'MONTSERRAT', 'Montserrat', 'MSR', 500, 1664),
('MA', 'MOROCCO', 'Morocco', 'MAR', 504, 212),
('MZ', 'MOZAMBIQUE', 'Mozambique', 'MOZ', 508, 258),
('MM', 'MYANMAR', 'Myanmar', 'MMR', 104, 95),
('NA', 'NAMIBIA', 'Namibia', 'NAM', 516, 264),
('NR', 'NAURU', 'Nauru', 'NRU', 520, 674),
('NP', 'NEPAL', 'Nepal', 'NPL', 524, 977),
('NL', 'NETHERLANDS', 'Netherlands', 'NLD', 528, 31),
('AN', 'NETHERLANDS ANTILLES', 'Netherlands Antilles', 'ANT', 530, 599),
('NC', 'NEW CALEDONIA', 'New Caledonia', 'NCL', 540, 687),
('NZ', 'NEW ZEALAND', 'New Zealand', 'NZL', 554, 64),
('NI', 'NICARAGUA', 'Nicaragua', 'NIC', 558, 505),
('NE', 'NIGER', 'Niger', 'NER', 562, 227),
('NG', 'NIGERIA', 'Nigeria', 'NGA', 566, 234),
('NU', 'NIUE', 'Niue', 'NIU', 570, 683),
('NF', 'NORFOLK ISLAND', 'Norfolk Island', 'NFK', 574, 672),
('MP', 'NORTHERN MARIANA ISLANDS', 'Northern Mariana Islands', 'MNP', 580, 1670),
('NO', 'NORWAY', 'Norway', 'NOR', 578, 47),
('OM', 'OMAN', 'Oman', 'OMN', 512, 968),
('PK', 'PAKISTAN', 'Pakistan', 'PAK', 586, 92),
('PW', 'PALAU', 'Palau', 'PLW', 585, 680),
('PS', 'PALESTINIAN TERRITORY, OCCUPIED', 'Palestinian Territory, Occupied', NULL, NULL, 970),
('PA', 'PANAMA', 'Panama', 'PAN', 591, 507),
('PG', 'PAPUA NEW GUINEA', 'Papua New Guinea', 'PNG', 598, 675),
('PY', 'PARAGUAY', 'Paraguay', 'PRY', 600, 595),
('PE', 'PERU', 'Peru', 'PER', 604, 51),
('PH', 'PHILIPPINES', 'Philippines', 'PHL', 608, 63),
('PN', 'PITCAIRN', 'Pitcairn', 'PCN', 612, 0),
('PL', 'POLAND', 'Poland', 'POL', 616, 48),
('PT', 'PORTUGAL', 'Portugal', 'PRT', 620, 351),
('PR', 'PUERTO RICO', 'Puerto Rico', 'PRI', 630, 1787),
('QA', 'QATAR', 'Qatar', 'QAT', 634, 974),
('RE', 'REUNION', 'Reunion', 'REU', 638, 262),
('RO', 'ROMANIA', 'Romania', 'ROU', 642, 40),
('RU', 'RUSSIAN FEDERATION', 'Russian Federation', 'RUS', 643, 7),
('RW', 'RWANDA', 'Rwanda', 'RWA', 646, 250),
('SH', 'SAINT HELENA', 'Saint Helena', 'SHN', 654, 290),
('KN', 'SAINT KITTS AND NEVIS', 'Saint Kitts and Nevis', 'KNA', 659, 1869),
('LC', 'SAINT LUCIA', 'Saint Lucia', 'LCA', 662, 1758),
('PM', 'SAINT PIERRE AND MIQUELON', 'Saint Pierre and Miquelon', 'SPM', 666, 508),
('VC', 'SAINT VINCENT AND THE GRENADINES', 'Saint Vincent and the Grenadines', 'VCT', 670, 1784),
('WS', 'SAMOA', 'Samoa', 'WSM', 882, 684),
('SM', 'SAN MARINO', 'San Marino', 'SMR', 674, 378),
('ST', 'SAO TOME AND PRINCIPE', 'Sao Tome and Principe', 'STP', 678, 239),
('SA', 'SAUDI ARABIA', 'Saudi Arabia', 'SAU', 682, 966),
('SN', 'SENEGAL', 'Senegal', 'SEN', 686, 221),
('RS', 'SERBIA', 'Serbia', 'SRB', 688, 381),
('SC', 'SEYCHELLES', 'Seychelles', 'SYC', 690, 248),
('SL', 'SIERRA LEONE', 'Sierra Leone', 'SLE', 694, 232),
('SG', 'SINGAPORE', 'Singapore', 'SGP', 702, 65),
('SK', 'SLOVAKIA', 'Slovakia', 'SVK', 703, 421),
('SI', 'SLOVENIA', 'Slovenia', 'SVN', 705, 386),
('SB', 'SOLOMON ISLANDS', 'Solomon Islands', 'SLB', 90, 677),
('SO', 'SOMALIA', 'Somalia', 'SOM', 706, 252),
('ZA', 'SOUTH AFRICA', 'South Africa', 'ZAF', 710, 27),
('GS', 'SOUTH GEORGIA AND THE SOUTH SANDWICH ISLANDS', 'South Georgia and the South Sandwich Islands', 'SGS', 239, 0),
('ES', 'SPAIN', 'Spain', 'ESP', 724, 34),
('LK', 'SRI LANKA', 'Sri Lanka', 'LKA', 144, 94),
('SD', 'SUDAN', 'Sudan', 'SDN', 736, 249),
('SR', 'SURINAME', 'Suriname', 'SUR', 740, 597),
('SJ', 'SVALBARD AND JAN MAYEN', 'Svalbard and Jan Mayen', 'SJM', 744, 47),
('SZ', 'SWAZILAND', 'Swaziland', 'SWZ', 748, 268),
('SE', 'SWEDEN', 'Sweden', 'SWE', 752, 46),
('CH', 'SWITZERLAND', 'Switzerland', 'CHE', 756, 41),
('SY', 'SYRIAN ARAB REPUBLIC', 'Syrian Arab Republic', 'SYR', 760, 963),
('TW', 'TAIWAN, PROVINCE OF CHINA', 'Taiwan, Province of China', 'TWN', 158, 886),
('TJ', 'TAJIKISTAN', 'Tajikistan', 'TJK', 762, 992),
('TZ', 'TANZANIA, UNITED REPUBLIC OF', 'Tanzania, United Republic of', 'TZA', 834, 255),
('TH', 'THAILAND', 'Thailand', 'THA', 764, 66),
('TL', 'TIMOR-LESTE', 'Timor-Leste', 'TLS', 626, 670),
('TG', 'TOGO', 'Togo', 'TGO', 768, 228),
('TK', 'TOKELAU', 'Tokelau', 'TKL', 772, 690),
('TO', 'TONGA', 'Tonga', 'TON', 776, 676),
('TT', 'TRINIDAD AND TOBAGO', 'Trinidad and Tobago', 'TTO', 780, 1868),
('TN', 'TUNISIA', 'Tunisia', 'TUN', 788, 216),
('TR', 'TURKEY', 'Turkey', 'TUR', 792, 90),
('TM', 'TURKMENISTAN', 'Turkmenistan', 'TKM', 795, 993),
('TC', 'TURKS AND CAICOS ISLANDS', 'Turks and Caicos Islands', 'TCA', 796, 1649),
('TV', 'TUVALU', 'Tuvalu', 'TUV', 798, 688),
('UG', 'UGANDA', 'Uganda', 'UGA', 800, 256),
('UA', 'UKRAINE', 'Ukraine', 'UKR', 804, 380),
('AE', 'UNITED ARAB EMIRATES', 'United Arab Emirates', 'ARE', 784, 971),
('GB', 'UNITED KINGDOM', 'United Kingdom', 'GBR', 826, 44),
('US', 'UNITED STATES', 'United States', 'USA', 840, 1),
('UM', 'UNITED STATES MINOR OUTLYING ISLANDS', 'United States Minor Outlying Islands', 'UMI', 581, 1),
('UY', 'URUGUAY', 'Uruguay', 'URY', 858, 598),
('UZ', 'UZBEKISTAN', 'Uzbekistan', 'UZB', 860, 998),
('VU', 'VANUATU', 'Vanuatu', 'VUT', 548, 678),
('VE', 'VENEZUELA', 'Venezuela', 'VEN', 862, 58),
('VN', 'VIET NAM', 'Viet Nam', 'VNM', 704, 84),
('VG', 'VIRGIN ISLANDS, BRITISH', 'Virgin Islands, British', 'VGB', 92, 1284),
('VI', 'VIRGIN ISLANDS, U.S.', 'Virgin Islands, U.s.', 'VIR', 850, 1340),
('WF', 'WALLIS AND FUTUNA', 'Wallis and Futuna', 'WLF', 876, 681),
('EH', 'WESTERN SAHARA', 'Western Sahara', 'ESH', 732, 212),
('YE', 'YEMEN', 'Yemen', 'YEM', 887, 967),
('ZM', 'ZAMBIA', 'Zambia', 'ZMB', 894, 260),
('ZW', 'ZIMBABWE', 'Zimbabwe', 'ZWE', 716, 263),
('ME', 'MONTENEGRO', 'Montenegro', 'MNE', 499, 382),
('XK', 'KOSOVO', 'Kosovo', 'XKX', 0, 383),
('AX', 'ALAND ISLANDS', 'Aland Islands', 'ALA', '248', '358'),
('BQ', 'BONAIRE, SINT EUSTATIUS AND SABA', 'Bonaire, Sint Eustatius and Saba', 'BES', '535', '599'),
('CW', 'CURACAO', 'Curacao', 'CUW', '531', '599'),
('GG', 'GUERNSEY', 'Guernsey', 'GGY', '831', '44'),
('IM', 'ISLE OF MAN', 'Isle of Man', 'IMN', '833', '44'),
('JE', 'JERSEY', 'Jersey', 'JEY', '832', '44'),
('BL', 'SAINT BARTHELEMY', 'Saint Barthelemy', 'BLM', '652', '590'),
('MF', 'SAINT MARTIN', 'Saint Martin', 'MAF', '663', '590'),
('SX', 'SINT MAARTEN', 'Sint Maarten', 'SXM', '534', '1'),
('SS', 'SOUTH SUDAN', 'South Sudan', 'SSD', '728', '211');
SELECT colonne1, colonne2, REPLACE(colonne3, 'exemple insulte', 'CENSURE')
FROM table
##sql query with replace function
#syntax
UPDATE tableName
SET  column_name = REPLACE(column_name, 'fromStringValue', 'toStringValue');

#Example 
Update  tbl_employee
Set designation = REPLACE(designation, 'SEO', 'Developer');
from os import environ, remove
from pathlib import Path

from dotenv import find_dotenv, load_dotenv
from sqlalchemy import MetaData
from sqlalchemy_schemadisplay import create_schema_graph

load_dotenv(find_dotenv())
DB_NAME = environ.get("DB_NAME")
DB_PASS = environ.get("DB_PASS")
DB_USER = environ.get("DB_USER")
DB_HOST = environ.get("DB_HOST")
SQLALCHEMY_DATABASE_URL = (
    f"postgresql://{DB_USER}:{DB_PASS}@{DB_HOST}:5432/{DB_NAME}"
)


if __name__ == "__main__":
    FILE_NAME = 'ERD.png'
    try:
        remove(FILE_NAME)  # 'remove' is imported from os above
    except FileNotFoundError:
        pass
    graph = create_schema_graph(metadata=MetaData(
        SQLALCHEMY_DATABASE_URL))
    graph.write_png(FILE_NAME)
return $model->where('created_at', '>=', date('Y-m-d').' 00:00:00');
drop table ManImp_002_working

select [ID], [DT], 
  CASE WHEN WS='NA' THEN NULL ELSE CAST(WS AS FLOAT) END AS [WS], 
  CASE WHEN WD='NA' THEN NULL ELSE CAST(WD AS FLOAT) END AS [WD], 
  CASE WHEN AT='NA' THEN NULL ELSE CAST([AT] AS FLOAT) END AS [AT],  
  CASE WHEN RH='NA' THEN NULL ELSE CAST(RH AS FLOAT) END AS [RH], 
  [SampleType], [RecCount], [DT_End] into ManImp_002_working 
from ManImp_002

ALTER TABLE [dbo].ManImp_002_working ADD  CONSTRAINT [PK_MI2_ID] PRIMARY KEY CLUSTERED 
(
	[ID] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]

CREATE NONCLUSTERED INDEX [IX_MI2_DT] ON [dbo].[ManImp_002_working]
(
	[DT] ASC
)WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, SORT_IN_TEMPDB = OFF, DROP_EXISTING = OFF, ONLINE = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
GO


ALTER TABLE [dbo].ManImp_002_working ADD  CONSTRAINT [CNSTRT_MI2_SampleType]  DEFAULT ('Seconds') FOR [SampleType]
GO

ALTER TABLE [dbo].ManImp_002_working ADD  CONSTRAINT [CNSTRT_MI2_RecCount]  DEFAULT ((1)) FOR [RecCount]
GO

SELECT DT as TheDate, Count(*) as TheCount
From ManImp
Group By DT 
Having count(*)>1
Order by TheCount DESC
delete
FROM ManImp
WHERE ID NOT IN
(
    SELECT MAX(ID)
	FROM ManImp
	GROUP BY DT
)
BULK INSERT ManualImport
FROM 'C:\temp\Second_Data-2022-09-15.csv'
WITH
(
    FIRSTROW = 2, -- as 1st one is header
    FIELDTERMINATOR = ',',  --CSV field delimiter
    ROWTERMINATOR = '\n',   --rows are terminated by a newline
    TABLOCK
)

SELECT MIN(DT) as MinDate, MAX(DT) as MaxDate, Count(DT) as [RowCount]
FROM ManualImport
ALTER TABLE DopingTest
ADD(
  CONSTRAINT fk_RaceIDDT
    FOREIGN KEY (RaceID, HorseID)
    REFERENCES Results(RaceID, HorseID)
);
/*
Show all connections to the database
*/

SELECT
	datname, pid, usename, client_addr, client_port,
    query_start, state_change, state, query
FROM
	pg_stat_activity
WHERE
	datname = '<database name HERE>'
ORDER BY
	state_change DESC;
SELECT
  table_schema,
  table_name
FROM
  information_schema.tables
WHERE
  table_schema NOT IN ('pg_catalog', 'information_schema')
  AND table_type = 'BASE TABLE';
DECLARE @selectsql nvarchar(4000);
DECLARE @cnt int;

SET @selectsql = N'SELECT @cnt = COUNT(*) FROM Vwbckup';
EXEC sp_executesql @selectsql, N'@cnt int OUTPUT', @cnt = @cnt OUTPUT;
Select SUM(CASE When CPayment='Cash' Then CAmount Else 0 End ) as CashPaymentAmount,
       SUM(CASE When CPayment='Check' Then CAmount Else 0 End ) as CheckPaymentAmount
from TableOrderPayment
Where ( CPayment='Cash' Or CPayment='Check' ) AND CDate<=SYSDATETIME() and CStatus='Active';
select 
  case greatest(col1,col2,col3,col4) 
    when col1 then 'col1:' || col1
    when col2 then 'col2:' || col2
    when col3 then 'col3:' || col3
    when col4 then 'col4:' || col4
    else null
  end as greatestcolumnname
from mytable;
SELECT MAX(to_number(regexp_substr(cv.CODEMNEMONIC, '[0-9]*$')))
  FROM v_codevalue_web_codes_full cv
 WHERE cv.CODEVALUENAME LIKE 'Alipay%'
   AND cv.DOMAINNAME LIKE 'ClgSys';
# connection name = any name we choose
#   e.g. connection name = Mysql_connection
# connection method = Standard (TCP/IP)
# host name = localhost, port = 3306
# username = root (this works only for a local server)
# password = root (for a local server we must supply the root password)
# test connection
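
The same parameters can also be used for a programmatic connection; a minimal Python sketch, assuming the pymysql package is installed and a local MySQL server with the root/root credentials listed above:

import pymysql

# hypothetical connection mirroring the Workbench parameters above
connection = pymysql.connect(
    host="localhost",
    port=3306,
    user="root",
    password="root",
)

try:
    with connection.cursor() as cursor:
        cursor.execute("SELECT VERSION()")  # rough equivalent of "test connection"
        print(cursor.fetchone())
finally:
    connection.close()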
select 44652, '4/1/2022', dateadd(d,44652,'1899-12-30'), DateDiff(dd, '1899-12-30', '4/1/2022')
CREATE OR ALTER FUNCTION DATE_TO_CHAR(TDATE TIMESTAMP, FORMAT VARCHAR(100))
RETURNS VARCHAR(100)
AS
DECLARE SDATE VARCHAR(100);
DECLARE DD VARCHAR(2);
DECLARE MM VARCHAR(2);
DECLARE YY VARCHAR(4);
DECLARE HH VARCHAR(2);
DECLARE MI VARCHAR(2);
DECLARE SS VARCHAR(2);
DECLARE XFORMAT VARCHAR(100);
DECLARE AM VARCHAR(2);
BEGIN
	XFORMAT = UPPER(FORMAT);
	SDATE = CAST(TDATE AS VARCHAR(100));
	YY = SUBSTRING(SDATE FROM 1 FOR 4);
	MM = SUBSTRING(SDATE FROM 6 FOR 2);
	DD = SUBSTRING(SDATE FROM 9 FOR 2);
	HH = SUBSTRING(SDATE FROM 12 FOR 2);
	MI = SUBSTRING(SDATE FROM 15 FOR 2);
	SS = SUBSTRING(SDATE FROM 18 FOR 2);
	XFORMAT = REPLACE(XFORMAT, 'YYYY', YY);
	XFORMAT = REPLACE(XFORMAT, 'MM', MM);
	XFORMAT = REPLACE(XFORMAT, 'DD', DD);
	XFORMAT = REPLACE(XFORMAT, 'YY', SUBSTRING(YY FROM 3 FOR 2));
	XFORMAT = REPLACE(XFORMAT, 'HH24', HH);
	AM = 'AM';

	IF (HH='12') THEN
	BEGIN
		AM = 'M';
		IF (MI > '00') THEN 
		BEGIN
			AM='PM';
		END
	END
	
	IF (HH='00') THEN 
	BEGIN
		HH='12';
		AM='AM';
	END
	
	IF (HH>'12') THEN
	BEGIN
		HH = TRIM(CAST(CAST(HH AS INTEGER)-12 AS VARCHAR(2)));
		IF (CHAR_LENGTH(HH)<2) THEN 
		BEGIN
			HH='0'||HH;
		END
		AM='PM';
	END
	XFORMAT = REPLACE(XFORMAT, 'HH12', HH);
	XFORMAT = REPLACE(XFORMAT, 'HH', HH);
	XFORMAT = REPLACE(XFORMAT, 'MI', MI);
	XFORMAT = REPLACE(XFORMAT, 'SS', SS);
	
	RETURN XFORMAT;
END;

/* Examples:

    SELECT DATE_TO_CHAR(CURRENT_TIMESTAMP, 'DD/MM/YYYY HH24:MI:SS') FROM RDB$DATABASE;
    SELECT DATE_TO_CHAR(CURRENT_TIMESTAMP, 'DD/MM/YYYY HH12:MI AM') FROM RDB$DATABASE;
    SELECT DATE_TO_CHAR(CURRENT_TIMESTAMP, 'DD/MM/YY HH24:MI') FROM RDB$DATABASE;
    -- still incomplete...
*/
CREATE OR ALTER PROCEDURE date_range(
    startdate date, 
    enddate date, 
    interv integer DEFAULT 1, 
	unit varchar(6) DEFAULT 'DAY', 
	fromtime time DEFAULT null, 
	totime time DEFAULT null
) RETURNS (dateval timestamp)
AS
BEGIN
    dateval = startdate;
    while (dateval < enddate) do
    BEGIN
	    IF ((:fromtime IS NULL OR cast(:dateval AS time) >= :fromtime)
		  AND (:totime IS NULL OR cast(:dateval AS time) < :totime)) THEN 
		BEGIN 
			suspend;
		END 
		
  		SELECT 
	  		CASE  
		  		WHEN :unit = 'MINUTE' THEN dateadd(MINUTE, :interv, :dateval)
		  		WHEN :unit = 'HOUR' THEN dateadd(HOUR, :interv, :dateval)
		  		WHEN :unit = 'YEAR' THEN dateadd(YEAR, :interv, :dateval)
		  		WHEN :unit = 'MONTH' THEN dateadd(MONTH, :interv, :dateval)
		  		else dateadd(DAY, :interv, :dateval)
			END 
		FROM RDB$DATABASE 
		INTO :dateval;
    END
END;

/*
Example: ranges from today to next 10 days, from 8:00am to 6:00pm, in fraction of 30 minutes:

SELECT 
    cast(dateval AS date) TheDate, 
    CAST(dateval AS time) FromTime, 
    CAST(dateadd(MINUTE, 30, dateval) AS time) ToTime 
FROM date_range(
    current_date, ---- initial date
    current_date + 10, ---- next 10 days
    30, 'MINUTE', ---- fraction
    time '08:00', time '18:00') --- from time, to time
*/
select 
  DATE_PART('year', AGE('2012-03-05', '2010-04-01')) AS years,
  DATE_PART('month', AGE('2012-03-05', '2010-04-01')) AS months,
  DATE_PART('day', AGE('2012-03-05', '2010-04-01')) AS days;
CREATE OR ALTER FUNCTION AGE(DFROM TIMESTAMP, DTO TIMESTAMP, CODED BOOLEAN = FALSE)
RETURNS VARCHAR(30)
AS
DECLARE y varchar(3);
DECLARE m varchar(2);
DECLARE d varchar(2);
BEGIN
	y = CASE 
			WHEN datediff(year, :DFROM, :DTO) <> datediff(day, :DFROM, :DTO)/365
				THEN datediff(year, :DFROM, :DTO)-1
			ELSE datediff(year, :DFROM, :DTO)
		END;
	m = CASE 
			WHEN datediff(year, :DFROM, :DTO) <> datediff(day, :DFROM, :DTO)/365
				AND datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)<0
				THEN datediff(month, dateadd(year, datediff(year, :DFROM, :DTO)-1, :DFROM), :DTO)-1
			WHEN datediff(year, :DFROM, :DTO) <> datediff(day, :DFROM, :DTO)/365
				AND datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)>=0
				THEN datediff(month, dateadd(year, datediff(year, :DFROM, :DTO)-1, :DFROM), :DTO)
			WHEN datediff(year, :DFROM, :DTO) = datediff(day, :DFROM, :DTO)/365
				AND datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)<0
				THEN datediff(month, dateadd(year, datediff(year, :DFROM, :DTO), :DFROM), :DTO)-1
			ELSE datediff(month, dateadd(year, datediff(year, :DFROM, :DTO), :DFROM), :DTO)
		END;
	d = CASE 
			WHEN datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)<0
				THEN datediff(day, dateadd(month, datediff(month, :DFROM, :DTO)-1, :DFROM), :DTO)
			ELSE datediff(day, dateadd(month, datediff(month, :DFROM, :DTO), :DFROM), :DTO)
		END;
	RETURN 
		CASE 
			WHEN :CODED THEN lpad(Y,3,'0')||'-'||lpad(m,2,'0')||'-'||lpad(d,2,'0')
			ELSE Y||'y '||m||'m '||d||'d' 
		END;
END;
#Backup

gbak -b -v -user SYSDBA -password "masterkey" D:\database.FDB E:\database.fbk

#Restore

gbak -c -user SYSDBA -password masterkey E:\database.fbk E:\database_restore.fdb
docker run -v /home/marco:/backup --rm svarcoe/mssql-scripter mssql-scripter -S 172.18.0.3 -d CMUCE -U sa -P CMuce1970@ --schema-and-data -f /backup/mssql-scripter-CMUCE.sql

# BACKUP: 
BACKUP DATABASE [YourDB] TO  DISK = N'C:\xxxxx or /var/opt/mssql/backup/YourDB.bak'
WITH NOFORMAT, NOINIT, NAME = N'YourDB-Full Database Backup',
SKIP, NOREWIND, NOUNLOAD, STATS = 10
GO

# RESTORE:
sqlcmd -S localhost -U SA

RESTORE DATABASE YourDB
FROM DISK = '/var/opt/mssql/backup/YourDB.bak'
WITH MOVE 'YourDB' TO '/var/opt/mssql/data/YourDB.mdf',
MOVE 'YourDB_Log' TO '/var/opt/mssql/data/YourDB_Log.ldf'
GO
select * from 
( 
    select 
        sql_id, 
        elapsed_time, 
        executions, 
        sql_text 
    from v$sql  
    ORDER BY elapsed_time desc
) 
where rownum < 11;
docker run -d -e ACCEPT_EULA=Y -e "SA_PASSWORD=P@ssW0rd" -p 1433:1433 \
  --restart unless-stopped \
  -v /var/opt/mssql/data:/var/opt/mssql/data \
  -v /tmp/:/backups/ \
  --name sqlserver \
  mcr.microsoft.com/mssql/server

#backup:

# /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P P@ssW0rd -Q "BACKUP DATABASE [dbname] TO DISK = N'/tmp/dbname-full.bak' WITH NOFORMAT, NOINIT, NAME = 'dbname-bak-full', SKIP, NOREWIND, NOUNLOAD, STATS = 10"

# /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P P@ssW0rd -Q "BACKUP LOG [dbname] TO DISK = N'/tmp/dbname-log.bak' WITH NOFORMAT, NOINIT, NAME = N'dbname-bak-log', NOSKIP, NOREWIND, NOUNLOAD, STATS = 5"

#restore:

# /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P P@ssW0rd -Q "RESTORE DATABASE [dbname] FROM DISK = N'/tmp/dbname-full.bak' WITH FILE = 1, NOUNLOAD, REPLACE, NORECOVERY, STATS = 5"

# /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P P@ssW0rd -Q "RESTORE LOG [dbname] FROM DISK = N'/var/opt/mssql/data/dbname-log.bak'"


#create login myuser with password ='strongPass';
#create user myuser for login myuser;
#ALTER LOGIN [myuser] enable;
#Increase timeout and max_children:

/etc/php/7.0/fpm/php.ini  =>   default_socket_timeout = 60000
/etc/php/7.0/fpm/pool.d/www.conf  =>   pm.max_children = 20
/etc/php/7.0/fpm/pool.d/www.conf  =>   request_terminate_timeout = 60000

#Increase the timeout in /etc/nginx/nginx.conf:
keepalive_timeout 65000;

#Afterwards, restart php-fpm and nginx:

sudo service php7.0-fpm restart
sudo service nginx restart
export ORACLE_SID=$1
export NLS_LANG=AMERICAN_AMERICA.WE8ISO8859P9
export USUARIO=system/org24h
export PATHBACKUP=/respaldo/o24/export
export FILENAME=CMLGDB`date +%d%m%Y%H%M`.DMP
export FILENAMELOG=CMLGDB`date +%d%m%Y%H%M`.log
echo  $PATHBACKUP

rm $PATHBACKUP/*.* -rf

if [ -a $PATHBACKUP ] ; then
	expdp $USUARIO FULL=yes DUMPFILE=dpump_dir1:$FILENAME LOGFILE=dpump_dir1:$FILENAMELOG
	#exp $USUARIO file=$PATHBACKUP/$FILENAME full=yes compress=yes indexes=no consistent=yes log=$PATHBACKUP/$FILENAMELOG
else
	echo "ERROR: Export no encontro el directorio de Respaldo"
	exit 1
fi
alter session set "_ORACLE_SCRIPT"=true;

CREATE USER SISTEMAS IDENTIFIED BY las36horas;

GRANT CREATE TABLE TO SISTEMAS;
GRANT CONNECT TO SISTEMAS;
GRANT CTXAPP TO SISTEMAS;
GRANT RESOURCE TO SISTEMAS;
GRANT CREATE ANY CONTEXT TO SISTEMAS;
GRANT CREATE ANY SYNONYM TO SISTEMAS;
GRANT CREATE VIEW TO SISTEMAS;
GRANT DROP ANY CONTEXT TO SISTEMAS;
GRANT QUERY REWRITE TO SISTEMAS;
GRANT EXECUTE ON SYS.DBMS_AQ_BQVIEW TO SISTEMAS;
GRANT EXECUTE ON SYS.DBMS_LOB TO SISTEMAS;
GRANT EXECUTE ON SYS.UTL_RAW TO SISTEMAS;
DROP TABLESPACE MYSPACENAME INCLUDING contents;

CREATE TABLESPACE MYSPACENAME 
   DATAFILE 'ts1_filename.dbf' 
   SIZE 2000m 
   autoextend on NEXT 1000m maxsize unlimited;
   
 alter tablespace MYSPACENAME coalesce;
#use oracle user from system:

sqlplus "/ as sysdba"

SQL> ALTER USER SYS IDENTIFIED BY [password]; 
SQL> ALTER USER SYSTEM IDENTIFIED BY [password];
CREATE OR ALTER PROCEDURE SPLIT(ASTRLIST VARCHAR(32000))
returns (
  STR VARCHAR(255)
)
as
  declare variable StrList varchar(32000);
  declare variable CommaPos integer;
  declare variable StrVal varchar(10);
begin
  StrList = AStrList || '';
  CommaPos = Position(',', StrList);

  while (CommaPos > 0) do
  begin
    StrVal = Trim(SubString(StrList from 1 for CommaPos - 1));

    if (Char_Length(StrVal) > 0) then
    begin
      STR = StrVal;
      suspend;
    end

    if (Char_Length(StrList) > CommaPos) then
      StrList = SubString(StrList from CommaPos + 1);
    else
      StrList = '';

    CommaPos = Position(',', StrList);
  end

  StrList = Trim(StrList);

  if (Char_Length(StrList) > 0) then
  begin
    begin
      STR = StrList;
      suspend;
    end
  end
end;

/* use:

SELECT *
FROM CITY
WHERE COD_CITY IN (SELECT STR FROM Split('ABC, DEF, GH, IJK, LM, NOP'))

*/
CREATE OR ALTER PROCEDURE GETINTEGERLIST(AINTEGERLIST VARCHAR(32000))
returns (
  ID integer
)
as
  declare variable IntegerList varchar(32000);
  declare variable CommaPos integer;
  declare variable IntegerVal varchar(10);
begin
  IntegerList = AIntegerList || ' ';
  CommaPos = Position(',', IntegerList);

  while (CommaPos > 0) do
  begin
    IntegerVal = Trim(SubString(IntegerList from 1 for CommaPos - 1));

    if (Char_Length(IntegerVal) > 0) then
    begin
      if (IntegerVal similar to '[0-9]*') then
      begin
        ID = Cast(IntegerVal as integer);
        suspend;
      end
    end

    if (Char_Length(IntegerList) > CommaPos) then
      IntegerList = SubString(IntegerList from CommaPos + 1);
    else
      IntegerList = '';

    CommaPos = Position(',', IntegerList);
  end

  IntegerList = Trim(IntegerList);

  if (Char_Length(IntegerList) > 0) then
  begin
    if (IntegerList similar to '[0-9]*') then
    begin
      ID = Cast(IntegerList as integer);
      suspend;
    end
  end
end;

/* use:

SELECT *
FROM CITY
WHERE ID_CITY IN (SELECT ID FROM GetIntegerList('1, 2, 12, 45, 75, 45'))

*/
sudo mkdir -p /your/custom/path/oracle-19c/oradata/
sudo chmod -R 777 /your/custom/path/

docker run -d --name oracle19db \
  -p 1521:1521 \
  -e ORACLE_SID=ORCL \
  -e ORACLE_PDB=ORCLDB \
  -e ORACLE_PWD=Oracle123 \
  -e ORACLE_CHARSET=AL32UTF8 \
  -v /your/custom/path/oracle-19c/oradata:/opt/oracle/oradata \
  banglamon/oracle193db:19.3.0-ee

# Charset Value: WE8MSWIN1252, AL16UTF8

# ALTER SESSION SET NLS_DATE_FORMAT = 'RRRR-MM-DD';
# ALTER SESSION SET NLS_TIME_FORMAT = 'HH24:MI:SS';
# ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'RRRR-MM-DD HH24:MI:SS';
# ALTER SESSION SET NLS_TIME_TZ_FORMAT = 'HH24:MI:SS TZR';
# ALTER SESSION SET NLS_TIMESTAMP_TZ_FORMAT = 'RRRR-MM-DD HH24:MI:SS TZR';

# docker exec -it oracle19db bash -c "source /home/oracle/.bashrc; sqlplus /nolog"
# connect sys as sysdba;

# alter session set "_ORACLE_SCRIPT"=true;
# create user sistemas identified by las36horas;
# GRANT CONNECT, RESOURCE, DBA TO sistemas;
# GRANT UNLIMITED TABLESPACE TO sistemas;
CREATE TABLE GAP(
    COUNTER INTEGER PRIMARY KEY
);

insert into gap (counter) values (1);
insert into gap (counter) values (2);
--::::: 3
--::::: 4
insert into gap (counter) values (5);
insert into gap (counter) values (6);
--::::: 7
insert into gap (counter) values (8);
insert into gap (counter) values (9);
insert into gap (counter) values (10);



--===== here is the trick:

SELECT 
	list(CASE 
		WHEN N2-N1-2 > 0 THEN (N1+1) || '-'|| (N2-1)
		ELSE CAST(N1+1 AS VARCHAR(10))
	END) Missing
FROM (
SELECT a.counter N1, 
  (SELECT counter FROM gap WHERE counter > a.counter rows 1) N2 
FROM gap a
) B
WHERE N2-N1 > 1;



-- results:

Missing
=======
3-4,7
alter role <username> set search_path = <schema1>, ..., <scheman>, public;
WITH RECURSIVE PADRE AS (
	SELECT
		COD_CTA,
		DES_CTA
	FROM
		CATALOGO
	WHERE
		COD_CTA=:COD_CTA
	UNION ALL
		SELECT
			e.COD_CTA,
			e.DES_CTA
		FROM
			CATALOGO e
		INNER JOIN padre p ON POSITION(e.COD_CTA, p.COD_CTA) = 1
		  AND p.COD_CTA > COD_CTA
		ROWS 1
) SELECT
	DISTINCT cod_cta
FROM
	PADRE
WHERE COD_CTA < :COD_CTA
<?php

//===========notifier.sql:
    
/*
CREATE OR REPLACE FUNCTION public.notify_channel()
RETURNS trigger
AS $function$
  BEGIN
	  PERFORM pg_notify('channel_name', row_to_json(NEW)::text);
	  RETURN NULL;
  END;
$function$
LANGUAGE plpgsql;

CREATE TRIGGER trigger_on_insert AFTER INSERT ON mytable
FOR EACH ROW EXECUTE PROCEDURE notify_channel();
*/

set_time_limit(0);

//-- using PDO:

$db = new PDO(
    'pgsql:dbname=dbname host=host port=5432;options=--application_name=APPLICATION_NAME',
    'user',
    'password',
    [
        PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION,
        PDO::ATTR_DEFAULT_FETCH_MODE => PDO::FETCH_ASSOC,
    ]
);

$db->exec('LISTEN channel_name');

while (true) {
    while ($result = $db->pgsqlGetNotify(PDO::FETCH_ASSOC, 30000)) {
        echo json_encode($result).PHP_EOL;
    }
}

//-- using pg_connect:
//<?php

include '../conn.php';
set_time_limit(0);
ob_end_clean();
pg_query($conn, 'LISTEN table_changed;');

while(true){
    
    $notify = pg_get_notify($conn);
    
	if (!$notify) {
        echo json_encode(array('result'=>false, 'data'=>'No messages')).PHP_EOL;
        ob_flush();
        flush();
        sleep(1);
	} else {
        echo json_encode(array('result'=>true, 'pid' => pg_get_pid($conn), 'data' => $notify)).PHP_EOL;
	}
}

---$> psql: user my_user and database my_original_db must exist:

SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity 
WHERE pg_stat_activity.datname = 'my_original_db' AND pid <> pg_backend_pid();

CREATE DATABASE my_new_db WITH TEMPLATE my_original_db OWNER my_user;
-- Example table:

create table patient (
    id serial primary key,
    firstname varchar(100),
    lastname varchar(100)
);

insert into patient(firstname, lastname) values ('MARCO ANTONIO', 'PEREZ SANDERS');

-- Create a tsvector field from the text columns (preferably maintained by insert/update triggers):
alter table patient add tsv_name tsvector;

-- initially fill field.
update patient set tsv_name = to_tsvector(firstname || ' ' || lastname);

-- create index for fast search:
create index tsv_name_idx on patient using gin(tsv_name);

-- then you can search as:
select * from patient where tsv_name @@ to_tsquery('MARCO & PEREZ');
select * from patient where tsv_name @@ to_tsquery('MARCO & SANDERS');
select * from patient where tsv_name @@ to_tsquery('ANTONIO & PEREZ');

select * from m_paciente 
where tsv_nombre @@ to_tsquery(array_to_string(string_to_array('LEIRY SEVERINO',' '),':* & ')||':*');

-- other:

select * from patient where fullname ~ all(array['(?=^LEI.*|\s+LEI\w*)','(?=^ME.*|\s+ME\w*)']);
WITH cte AS (
   SELECT *
   FROM   m_paciente mp 
   WHERE  apellido like 'P%'
   )
SELECT *
FROM  (
   TABLE  cte
   ORDER  BY apellido, nombre
   LIMIT  10
   OFFSET 190
   ) sub
RIGHT  JOIN (SELECT count(*) FROM cte) c(full_count) ON true;
select id, id2, debito, credito, 
  sum(debito-credito) over (partition BY id ORDER BY id, id2) saldo
from 
(
	select m.id, 0 id2, importe debito, 0 credito
		from m_cxc m 
		where m.id=:id
	union all 
	select c.id_m_cxc id, c.id id2, debito, credito
		from d_cxc c 
  		where c.id_m_cxc=:id
) a
SELECT procpid, age(clock_timestamp(), query_start), usename, current_query 
FROM pg_stat_activity 
WHERE current_query != '<IDLE>' AND current_query NOT ILIKE '%pg_stat_activity%' 
ORDER BY query_start desc;

-- show running queries (9.2)
SELECT pid, age(clock_timestamp(), query_start), usename, query 
FROM pg_stat_activity 
WHERE query != '<IDLE>' AND query NOT ILIKE '%pg_stat_activity%' 
ORDER BY query_start desc;

-- kill running query
SELECT pg_cancel_backend(procpid);

-- kill idle query
SELECT pg_terminate_backend(procpid);

-- vacuum command
VACUUM (VERBOSE, ANALYZE);

-- queries currently running for all database users (excludes idle sessions)
select * from pg_stat_activity where current_query not like '<%';

-- all database users
select * from pg_user;

-- all databases and their sizes
select datname, pg_size_pretty(pg_database_size(datname))
from pg_database
order by pg_database_size(datname) desc;

-- cache hit rates (should not be less than 0.99)
SELECT sum(heap_blks_read) as heap_read, sum(heap_blks_hit)  as heap_hit, (sum(heap_blks_hit) - sum(heap_blks_read)) / sum(heap_blks_hit) as ratio
FROM pg_statio_user_tables;

-- table index usage rates (should not be less than 99%)
SELECT relname, 100 * idx_scan / (seq_scan + idx_scan) percent_of_times_index_used, n_live_tup rows_in_table
FROM pg_stat_user_tables 
ORDER BY n_live_tup DESC;

-- how many indexes are in cache
SELECT sum(idx_blks_read) as idx_read, sum(idx_blks_hit)  as idx_hit, (sum(idx_blks_hit) - sum(idx_blks_read)) / sum(idx_blks_hit) as ratio
FROM pg_statio_user_indexes;

-- Dump database on remote host to file
$ pg_dump -U username -h hostname databasename > dump.sql

-- Import dump into existing database
$ psql -d newdb -f dump.sql

--(On 9.2+): Queries running more than 2 minutes
SELECT now() - query_start as "runtime", usename, datname, waiting, state, query
  FROM  pg_stat_activity
  WHERE now() - query_start > '2 minutes'::interval
 ORDER BY runtime DESC;

-- 
select relname,last_vacuum, last_autovacuum, last_analyze, last_autoanalyze from pg_stat_user_tables;

--
select relname, n_dead_tup, last_vacuum, last_autovacuum from 
pg_catalog.pg_stat_all_tables
where n_dead_tup > 0 and relname = 'table1' order by n_dead_tup desc;

--Tables and views used by a given view:
with recursive view_tree(parent_schema, parent_obj, child_schema, child_obj, ind, ord) as 
(
  select vtu_parent.view_schema, vtu_parent.view_name, 
    vtu_parent.table_schema, vtu_parent.table_name, 
    '', array[row_number() over (order by view_schema, view_name)]
  from information_schema.view_table_usage vtu_parent
  where vtu_parent.view_schema = '<SCHEMA NAME>' and vtu_parent.view_name = '<VIEW NAME>'
  union all
  select vtu_child.view_schema, vtu_child.view_name, 
    vtu_child.table_schema, vtu_child.table_name, 
    vtu_parent.ind || '  ', 
    vtu_parent.ord || (row_number() over (order by view_schema, view_name))
  from view_tree vtu_parent, information_schema.view_table_usage vtu_child
  where vtu_child.view_schema = vtu_parent.child_schema 
  and vtu_child.view_name = vtu_parent.child_obj
) 
select tree.ind || tree.parent_schema || '.' || tree.parent_obj 
  || ' depends on ' || tree.child_schema || '.' || tree.child_obj txt, tree.ord
from view_tree tree
order by ord;


--Check the size (as in disk space) of all databases:
SELECT d.datname AS Name, pg_catalog.pg_get_userbyid(d.datdba) AS Owner,
  CASE WHEN pg_catalog.has_database_privilege(d.datname, 'CONNECT')
    THEN pg_catalog.pg_size_pretty(pg_catalog.pg_database_size(d.datname)) 
    ELSE 'No Access' 
  END AS SIZE 
FROM pg_catalog.pg_database d 
ORDER BY 
  CASE WHEN pg_catalog.has_database_privilege(d.datname, 'CONNECT') 
    THEN pg_catalog.pg_database_size(d.datname)
    ELSE NULL 
  END;

--Check the size (as in disk space) of each table:
SELECT nspname || '.' || relname AS "relation",
   pg_size_pretty(pg_total_relation_size(C.oid)) AS "total_size"
 FROM pg_class C
 LEFT JOIN pg_namespace N ON (N.oid = C.relnamespace)
 WHERE nspname NOT IN ('pg_catalog', 'information_schema')
   AND C.relkind <> 'i'
   AND nspname !~ '^pg_toast'
 ORDER BY pg_total_relation_size(C.oid) DESC;

--Script to kill all running connections of a current database
SELECT pg_terminate_backend(pg_stat_activity.pid)
FROM pg_stat_activity
WHERE datname = current_database()  
  AND pid <> pg_backend_pid();
<?php

/*==========  install at ubuntu: ==============

sudo apt install php-sybase


//==================================

append this into /etc/freetds/freetds.conf:
(192.168.0.250:2638 is an example:)

//==================================

sudo nano /etc/freetds/freetds.conf

[connName]
    host = 192.168.0.250  
    port = 2638
    tds version = 7.4
    database = MYDB
    
//======  php: */

$connName = "connName";
$user = "myuser";
$pass = "mypass";

$st="dblib:host=$connName;charset=iso8859-1";
$conn = new PDO($st,$user,$pass);

/* $conn->prepare($sql_query);
...etc
*/
select regexp_matches('FV{YY}{MM}{DD}-{UU}-{###}', '\{([A-Za-z#]+)\}', 'g');
create table <tablename>_log(
    id serial primary key,
    idx integer,
    antes json,
    despues json,
    id_m_usuario integer,
    fecha_registro timestamp default now(),
    tipo char(1) default 'U',
    unico varchar(36)
);

create function f_<tablename>_log()
returns trigger
LANGUAGE plpgsql as
$body$
begin
    case tg_op
        when 'INSERT' then
            insert into <tablename>_log (idx, despues, tipo)
            select new.id, row_to_json(new), 'I';
            return new;
        when 'UPDATE' then
            if ( <conditions, e.g. old.campo1 is distinct from new.campo1> ) then
                insert into <tablename>_log (idx, antes, despues, tipo)
                select new.id, row_to_json(old), row_to_json(new), 'U';
            end if;
            return new;
        when 'DELETE' then
            insert into <tablename>_log (idx, antes, tipo)
            select old.id, row_to_json(old), 'D';
            return old;
    end case;
end;
$body$;

create trigger t_<tablename>_log
before insert or update or delete
on <tablename>
for each row execute procedure f_<tablename>_log();
select id, name from table 
where name ilike all(string_to_array(replace('%'||TRIM('JOHN DOE')||'%',' ','%,%'), ','))

-- 1) it converts spaces into commas (,)
-- 2) it converts the resulting comma-separated string into an array
-- 3) it applies ilike to each element (word) of that array

-- It finds (for example) name = 'DOE, JHON' in the table
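
-- Walk-through for 'JOHN DOE' (illustrative, assuming a "name" column):
--   '%'||trim('JOHN DOE')||'%'      -> '%JOHN DOE%'
--   replace(..., ' ', '%,%')        -> '%JOHN%,%DOE%'
--   string_to_array(..., ',')       -> {'%JOHN%','%DOE%'}
--   name ilike all(...)             -> name must contain both JOHN and DOE, in any order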
<?php 

$sql = 'select fielda, fieldb, fieldc from table1
order by field, fieldb, fieldc
union all
select field1, field2, field3 from table2
order by field1, field2 desc, field3 asc';


print_r($sql2 = invert_order($sql));


function invert_order($str){
    $re = '/(?<=order\sby\s)(.*)(?=)/mi';

    preg_match_all($re, $str, $matches, PREG_SET_ORDER, 0);
    
    $mat = $matches[sizeof($matches)-1]; //-- get only last order field list
    $mat=$mat[sizeof($mat)-1];
    $mat=explode(",",$mat);
    
    
    for($i=0; $i<sizeof($mat); $i++){   //-- reverse each pair of order field/direction 
        $duet = preg_split("/\s+/", trim($mat[$i]));
        if (sizeof($duet)<2) $duet[1]="";
        switch(strtolower($duet[1])) {
            case "desc":
                $duet[1] = "asc";
                break;
            default:
                $duet[1] = "desc";
                break;
        }
        $mat[$i] = implode(" ",$duet);
    }
    
    $re2 = '/(order\s+by\s+.*)*$/i';    //-- replace last order by with new inverted order by:
    $subst = "order by ".implode(", ",$mat);
    $result = preg_replace($re2, "", $str);
    return $result . " $subst";
}


?>
SELECT *, count(*) OVER() AS full_count
FROM   tbl
WHERE  /* whatever */
ORDER  BY col1
LIMIT  ?
OFFSET ?
create function f_thetable_biud()
returns trigger as
$body$
--declare vars
begin
    -- new, old
    -- tg_op = INSERT,DELETE,UPDATE
    return new/old;
end;
$body$ language plpgsql;

create trigger t_thetable_biud
before/after insert or update or delete
on thetable
for each row execute procedure f_thetable_biud();
<?php

// sudo apt install php7.0-sybase


header("content-type: text/plain; charset=iso-8859-1");
ini_set("display_errors","1");
error_reporting(E_ALL);

$host="localhost";
$db="test";
$uid="sa";
$pwd="mypassword";

$query = "select top 10 * from testtable";

$conn = new PDO( "dblib:host=$host:1433;dbname=$db;", $uid, $pwd);
$stmt = $conn->prepare( $query );
$stmt->execute();

while ($r=$stmt->fetch(PDO::FETCH_ASSOC)){
	print_r($r);
}

?>
SELECT n.nspname AS schema_name
      ,p.proname AS function_name
      ,pg_get_functiondef(p.oid) AS func_def
      ,pg_get_function_arguments(p.oid) AS args
      ,pg_get_function_result(p.oid) AS result
FROM   pg_proc p
JOIN   pg_namespace n ON n.oid = p.pronamespace
WHERE  p.proname ILIKE 'func_name%';
select * from json_to_recordset( (
  select array_to_json(array_agg(row_to_json(t)))
    from (
      select field1, field2, field3,... from mytable
    ) t
)) as x(
  "field1" integer,
  "field2" integer,
  "field3" text,
  :
)

--- example: json text to recordset:
select * from json_to_recordset((select detalles from d_factura_caja where id=4216)::json) as x(
  "seq" integer,
  "padre" integer,
  "tipo" text,
  "idx" integer,
  "nivel" integer,
  "descripcion" text,
  "cantidad" numeric(6,2),
  "presentacion" text,
  "precio_unitario" numeric(15,2),
  "precio" numeric(15,2),
  "porc_impuesto" numeric(6,2),
  "impuesto" numeric,
  "neto"numeric
)
SELECT TRUNC (SYSDATE - ROWNUM) dt
  FROM DUAL CONNECT BY ROWNUM < :ndates;
  
/* from now on */
SELECT TRUNC (SYSDATE + ROWNUM - 2) dt
  FROM DUAL CONNECT BY ROWNUM < 100
#just install samba with

$ sudo apt install samba

#and go to this file:

$ sudo nano /etc/samba/smb.conf

#and just at the bottom add these lines:

    [share]
    comment = Ubuntu File Server Share
    path = /path/to/the/folder  #for example /home/user_name/public
    browsable = yes
    guest ok = yes
    read only = no
    create mask = 0755

#restart the samba service

$ sudo service smbd restart
$ sudo service nmbd restart
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
requires:

1) Install Linux dependences (search for specific linux distro instructions for this):
python-dev, python-pip, freetds-dev, freetds-bin, libaio1

2) Install instantclient-basic-lite
follow these instructions:

http://www.oracle.com/technetwork/database/features/linuxx86-64soft-092277.html?printOnly=1
(go to "Installation of ZIP files" section at the bottom of document)

3) Install python includes:

sudo -H pip install cx_Oracle
sudo -H pip install pymssql

"""

import cx_Oracle
import pymssql

""" ====== let's connect to Oracle DB Server ====="""

orcl_host = "host1"
orcl_port = 1521
orcl_user = "user1"
orcl_pwd  = "password1"
orcl_dbn  = "service_name"

connstr = orcl_user+"/"+orcl_pwd+"@"+orcl_host+":"+str(orcl_port)+"/"+orcl_dbn
orcl = cx_Oracle.connect(connstr)

#If everything is correct we will see an object printed:

print(orcl)

"""===== let's connect to sqlsvr: ====="""

sql_host = "host2"
sql_user = "user2"
sql_pwd  = "password2"
sql_dbn  = "database_name"

conexion_sql = pymssql.connect(sql_host, sql_user, sql_pwd, sql_dbn)

#If everything is correct we will see an object printed:

print(conexion_sql)

---Sql:
    
CREATE EXTENSION dblink;

---Then you can use it like this:
    
select * from dblink('dbname=otherdbname','select field1, field2 from otherdb_table') as foreign_table(field1 varchar, field2 integer)
# let's create a backup from remote postgresql database using pg_dump:
#
#   pg_dump -h [host address] -Fc -o -U [database user] <database name> > [dump file]
#
# later it could be restored at the same remote server using:
#
#   sudo -u postgres pg_restore -C mydb_backup.dump
#
#Ex:

pg_dump -h 67.8.78.10 -p 5432 -Fc -o -U myuser mydb > mydb_backup.dump

pg_restore -C mydb_backup.dump



#complete (all databases and objects)

pg_dumpall -U myuser -h 67.8.78.10 -p 5432 --clean --file=mydb_backup.dump


#restore from pg_dumpall --clean:

psql -f mydb_backup.dump postgres #it doesn't matter which db you select here
select mnu_code from lu_mnu_code where mnu_code = :mnu_code

DECLARE
   l_exist        CHAR (1);
   l_code_change   CHAR (1);
BEGIN
   SELECT CASE
             WHEN EXISTS (SELECT *
                            FROM lu_mnu_code
                           WHERE NVL (mnu_code, 'null') = :mnu_code
                         ) THEN 'Y'
             ELSE 'N'
          END
     INTO l_exist
     FROM DUAL;

   IF l_exist = 'Y' THEN
      RETURN FALSE;
   ELSE
      RETURN TRUE;
   END IF;
END;
/* SQL Command within postgres: */

SELECT now() - pg_postmaster_start_time() uptime

/* using psql command line utility deployed with Postgres: */
 
$> psql -c "SELECT now() - pg_postmaster_start_time() uptime"
SELECT tablo.columnName FROM 
(SELECT ROW_NUMBER()OVER(Order By columnName)indexer, 
 * FROM tabloName)tablo WHERE tablo.indexer = @sayac
select to_char(to_date('1/10/2011','mm/dd/yyyy'),'mm-dd-yyyy') from dual
SELECT COUNT(*) 
FROM <Table_Name>
WHERE SITETYPE = 'condition'
SELECT DISTINCT <Column_name>
FROM <table_name>
SELECT Id_card, COUNT(Id_card)
FROM user_application_tbl
GROUP BY Id_card
HAVING COUNT(Id_card) > 1
create or replace table `reby-cloud.analytics_reby_v1_eu.pg_topups_json`
as

select
  id,
  created_at,
  gateway,
  gateway_transaction_id,
  stripe_charge_id,
  array(select as struct value_amount as amount, value_currency as currency) as value,
  array(select as struct original_value_amount as amount, original_value_currency as currency) as original_value,
  user_id,
  company_id,
  braintree_transaction_id
from `reby-cloud.staging_eu.pg_topups_stg`
ALTER TABLE DAYSUM
  MODIFY V_TOT_REVN NUMBER GENERATED ALWAYS AS (ROUND(NVL("RM_REVN",0)+NVL("FB_REVN",0)+NVL("RM_UPGRADES",0)+NVL("OTHER_REVN",0)+NVL("DIVE_SERV",0)+NVL("DIVE_EQUIP",0),2)) VIRTUAL
SELECT MAX(to_number(regexp_substr(cv.CODEMNEMONIC, '[0-9]*$')))
ROUND(NVL("RM_REVN",0)+NVL("FB_REVN",0)+NVL("RM_UPGRADES",0)+NVL("OTHER_REVN",0)+NVL("DIVE_SERV",0)+NVL("DIVE_EQUIP",0),2)
apex.item( "P368_DATE" ).hide();
-- ==================================================================
-- sys.indexes.type 0 => heap - i.e. no Primary Key 
--					1 => clustered index
--					2 => non-clustered index
-- ==================================================================

SELECT	o.object_id
        , schemaname = OBJECT_SCHEMA_NAME(o.object_id)
		, tablename = o.NAME
		, PrimaryKey = CASE WHEN pk.object_id IS NULL THEN 'No Primary Key' ELSE '' END 
		, ClusteredIndexes = CASE WHEN ci.object_id IS NULL THEN 'No Clustered Index' ELSE '' END
		, NonClusteredIndexes = CASE WHEN nci.object_id IS NULL THEN 'No Non-Clustered Index' ELSE ''  END
		, [RowCount] = (SELECT SUM(p.Rows) from sys.partitions AS p where p.object_id=o.object_id) / (1+COUNT(ci.object_id)+COUNT(nci.object_id))
		, [IndexCount] = COUNT(ci.object_id)+COUNT(nci.object_id)
FROM sys.objects o
LEFT OUTER JOIN sys.indexes nci ON nci.object_id = o.object_id and nci.type=2
LEFT OUTER JOIN sys.indexes ci ON ci.object_id = o.object_id and ci.type=1
LEFT OUTER JOIN sys.indexes pk ON o.object_id = pk.object_id AND pk.is_primary_key = 1
WHERE o.Type='U' AND ((pk.object_id IS NULL) OR (ci.object_id IS NULL) OR (nci.object_id IS NULL))

GROUP BY o.object_id,
         OBJECT_SCHEMA_NAME(o.object_id), 
         o.NAME, 
		 CASE WHEN pk.object_id IS NULL THEN 'No Primary Key' ELSE '' END, 
		 CASE WHEN nci.object_id IS NULL THEN 'No Non-Clustered Index' ELSE '' END,
		 CASE WHEN ci.object_id IS NULL THEN 'No Clustered Index' ELSE '' END

ORDER BY TableName ASC
SELECT 
     schemaname = OBJECT_SCHEMA_NAME(o.object_id)
    ,tablename = o.NAME
FROM sys.objects o
INNER JOIN sys.indexes i ON i.OBJECT_ID = o.OBJECT_ID
-- tables that are heaps without any nonclustered indexes
WHERE (
        o.type = 'U'
        AND o.OBJECT_ID NOT IN (
            SELECT OBJECT_ID
            FROM sys.indexes
            WHERE index_id > 0
            )
        )
        --    OR
        -- table that have a clustered index without any nonclustered indexes
        --(o.type='U' 
        --        AND o.OBJECT_ID NOT IN (
        --    SELECT OBJECT_ID 
        --        FROM sys.indexes 
        --        WHERE index_id>1))  
set global sql_mode = 'ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,ERROR_FOR_DIVISION_BY_ZERO,NO_ENGINE_SUBSTITUTION';
create or replace table `reby-cloud.reby_marketing_eu.user_360` as 
select
  created_at,
  meta_user_id,
  type,
  concat(
    'amount:',if(amount is null,'na',cast(amount as string)),'; ',
    'minutes:',if(minutes is null,'na',cast(minutes as string)),'; ',
    'service_area:',if(service_area is null, 'na',service_area),'; ',
    'vehicle_type:',if(vehicle_type is null, 'na',vehicle_type),'; ',
    'app_used:',if(app_promoting_co is null, 'na',app_promoting_co),'; ',
    'vehicle_co:',if(owner_co is null,'na',owner_co),'; ',
    'topup_through:',if(topup_trough_co is null, 'na',topup_trough_co)
  ) as details
from `reby-cloud.analytics_reby_v1_eu.py_ridestatus_combined` pyrsc
where type not in ('vehicle_reservation')

union all

SELECT
  timestamp(created_at) as created_at,
  user_id as meta_user_id,
  'cx-conversation' as type,
  concat (
    'conversation_id:',conversation_id,'; ',
    'conact_reason1:',if(cantact_reason_level1 is null, 'na',cantact_reason_level1),'; ',
    'conact_reason2:',if(cantact_reason_level2 is null, 'na',cantact_reason_level2)
  ) as details
FROM `reby-cloud.kustomer_eu.conversation_messages_combination` 

union all

select
  created_at,
  user_id as meta_user_id,
  'user-created' as type,
  concat ('company:',co.name) as details
from `reby-cloud.reby_marketing_eu.pg_company_user` cu
  left join `reby-cloud.analytics_reby_v1_eu.pg_company` co on cu.company_id = co.id
select w2.id,w2.value, w.name from workflowstepfieldexecution w2
         inner join workflowstepfield w on w2.workflowstepfield_id = w.id
         where w.name like '%%' AND
        workflowstepexecution_id in
        (select id from workflowstepexecution where workflowdefinitionexecution_id in  (select id from workflowdefinitionexecution
         where workflowexecution_id = (select id from workflowexecution where ordernumber like '%002619%' limit 1)));
select registrationnum,idcpicregistration from professionaldata where id =
(select  professionaldata_id from account where id =
(select senderaccount_id from workflowexecution where ordernumber like '%010598%' limit 1));
select w2.id,w2.value from workflowstepfieldexecution w2
         inner join workflowstepfield w on w2.workflowstepfield_id = w.id
         where w.name like '%MATRICULA%' AND
        workflowstepexecution_id in
        (select id from workflowstepexecution where workflowdefinitionexecution_id in  (select id from workflowdefinitionexecution
         where workflowexecution_id = (select id from workflowexecution where ordernumber like '%010598%' limit 1)));
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `analytics_reby_v1_eu.__TABLES__` --where table_id = 'table_id'
union all
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff,TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `reby_demand_eu.__TABLES__` --where table_id = 'table_id'
union all
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff,TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `ridestatus_post_gdpr.__TABLES__` --where table_id = 'table_id'
union all
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff,TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `reby_fin_eu.__TABLES__` --where table_id = 'table_id'
union all
SELECT *, TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),HOUR) as hour_diff,TIMESTAMP_DIFF(current_timestamp,TIMESTAMP_MILLIS(last_modified_time),MINUTE) as minutes_diff
FROM `reby_marketing_eu.__TABLES__` --where table_id = 'table_id'
SELECT col1, col2
FROM table
ORDER BY col1, col2;
insert into WORKQUEUE (ID, facilitycode, workaction, description)
  values ((select max(ID)+1 from WORKQUEUE), 'J', 'II', 'TESTVALUES')
# from google.cloud import bigquery
# client = bigquery.Client()
# project = client.project
# dataset_ref = bigquery.DatasetReference(project, 'my_dataset')
# filepath = 'path/to/your_file.csv'

# Retrieves the destination table and checks the length of the schema
table_id = "my_table"
table_ref = dataset_ref.table(table_id)
table = client.get_table(table_ref)
print("Table {} contains {} columns.".format(table_id, len(table.schema)))

# Configures the load job to append the data to the destination table,
# allowing field addition
job_config = bigquery.LoadJobConfig()
job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND
job_config.schema_update_options = [
    bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION
]
# In this example, the existing table contains only the 'full_name' column.
# 'REQUIRED' fields cannot be added to an existing schema, so the
# additional column must be 'NULLABLE'.
job_config.schema = [
    bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
    bigquery.SchemaField("age", "INTEGER", mode="NULLABLE"),
]
job_config.source_format = bigquery.SourceFormat.CSV
job_config.skip_leading_rows = 1

with open(filepath, "rb") as source_file:
    job = client.load_table_from_file(
        source_file,
        table_ref,
        location="US",  # Must match the destination dataset location.
        job_config=job_config,
    )  # API request

job.result()  # Waits for table load to complete.
print(
    "Loaded {} rows into {}:{}.".format(
        job.output_rows, table_ref.dataset_id, table_ref.table_id
    )
)

# Checks the updated length of the schema
table = client.get_table(table)
print("Table {} now contains {} columns.".format(table_id, len(table.schema)))
CREATE TABLE #Procesos (
    SPID        INT,
    Status      VARCHAR(255),
    Login       VARCHAR(255),
    HostName    VARCHAR(255),
    BlkBy       VARCHAR(255),
    DBName      VARCHAR(255),
    Command     VARCHAR(255),
    CPUTime     INT,
    DiskIO      INT,
    LastBatch   VARCHAR(255),
    ProgramName VARCHAR(255),
    SPID2       INT,
    REQUESTID   INT
)

INSERT INTO #Procesos (SPID, Status, Login, HostName, BlkBy, DBName, Command, CPUTime, DiskIO, LastBatch, ProgramName, SPID2, REQUESTID)
EXEC SP_WHO2

SELECT  *
    FROM #Procesos

DROP TABLE #Procesos
SELECT 
    t.NAME AS TableName,
    s.Name AS SchemaName,
    p.rows,
    SUM(a.total_pages) * 8 AS TotalSpaceKB, 
    CAST(ROUND(((SUM(a.total_pages) * 8) / 1024.00), 2) AS NUMERIC(36, 2)) AS TotalSpaceMB,
    SUM(a.used_pages) * 8 AS UsedSpaceKB, 
    CAST(ROUND(((SUM(a.used_pages) * 8) / 1024.00), 2) AS NUMERIC(36, 2)) AS UsedSpaceMB, 
    (SUM(a.total_pages) - SUM(a.used_pages)) * 8 AS UnusedSpaceKB,
    CAST(ROUND(((SUM(a.total_pages) - SUM(a.used_pages)) * 8) / 1024.00, 2) AS NUMERIC(36, 2)) AS UnusedSpaceMB
FROM 
    sys.tables t
INNER JOIN      
    sys.indexes i ON t.OBJECT_ID = i.object_id
INNER JOIN 
    sys.partitions p ON i.object_id = p.OBJECT_ID AND i.index_id = p.index_id
INNER JOIN 
    sys.allocation_units a ON p.partition_id = a.container_id
LEFT OUTER JOIN 
    sys.schemas s ON t.schema_id = s.schema_id
WHERE 
    t.NAME NOT LIKE 'dt%' 
    AND t.is_ms_shipped = 0
    AND i.OBJECT_ID > 255 
GROUP BY 
    t.Name, s.Name, p.Rows
ORDER BY 
    TotalSpaceMB DESC, t.Name
SELECT      COLUMN_NAME AS 'ColumnName'
            ,TABLE_NAME AS  'TableName'
FROM        INFORMATION_SCHEMA.COLUMNS
WHERE       COLUMN_NAME LIKE '%MyName%'
ORDER BY    TableName
            ,ColumnName;
library(DBI)
connection <- dbConnect(RMySQL::MySQL(),
                 dbname = "name",
                 host = "adress.amazonaws.com",
                 port = number,
                 user = "id",
                 password = "pw")
info <- dbGetQuery(connection, "SELECT column1 FROM database WHERE argument = something")

info
WITH
T AS (   SELECT CAST(CONCAT ('$', IIF(TRY_CAST([Key] AS int) IS NOT NULL, CONCAT ('[', [Key], ']'), '.' + [Key])) AS nvarchar(MAX)) AS Path
              , [Key]
              , Value
              , Type
              , 1                                                                                                                   Lvl
         FROM OPENJSON (@json_doc)
         UNION ALL
         SELECT CAST(CONCAT (T.Path, IIF(TRY_CAST(O.[Key] AS int) IS NOT NULL, CONCAT ('[', O.[Key], ']'), '.' + O.[Key])) AS nvarchar(MAX))
              , O.[Key]
              , O.Value
              , O.Type
              , T.Lvl + 1
         FROM T
             CROSS APPLY OPENJSON (T.Value) O
         WHERE T.Type IN ( 4, 5 ))
SELECT Path, T.[Key], T.Value, T.Type, T.Lvl FROM T;
select
  *
from
  db1.SomeTable a
    inner join 
  db2.SomeTable b on b.SomeColumn = a.SomeColumn;
SELECT *
FROM tbl_name
WHERE 
(id_field IN ('value1', 'value2', 'value3') OR id_field IS NULL)
INSERT `reby-cloud.bq_logs.bigquery_usage_datepart` 
SELECT
    timestamp AS Date,
    resource.labels.project_id AS ProjectId,
    protopayload_auditlog.serviceName AS ServiceName,
    protopayload_auditlog.methodName AS MethodName,
    protopayload_auditlog.status.code AS ErrorCode,
    protopayload_auditlog.status.message AS ErrorMessage,
    protopayload_auditlog.authenticationInfo.principalEmail AS UserId,
    logName AS JobId,
    JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.query") AS Query,
     JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.destinationTable") AS DestinationTableId,
    CAST(JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobStats.queryStats.totalBilledBytes") AS INT64) AS BillableBytes,
    (CAST(JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobStats.queryStats.totalBilledBytes") AS INT64) / 1099511627776) * 6 AS TotalCost,
    1 AS QueryCount,
     CASE
    WHEN JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.destinationTable") LIKE '%anon%' THEN 'Query'
    ELSE 'ETL'
  END AS JobType,
  ARRAY_TO_STRING(ARRAY(
    SELECT
      DISTINCT x
    FROM
      UNNEST(ARRAY_CONCAT(REGEXP_EXTRACT_ALL(JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.query"),r"(?i)\s+(?:FROM|JOIN)\s+([^\s\(]+\.[^\s]+)") ) ) AS x
    ORDER BY
      x),', ') AS QueryTables,
      ARRAY_TO_STRING(ARRAY(
    SELECT
      DISTINCT x
    FROM
      UNNEST(ARRAY_CONCAT(REGEXP_EXTRACT_ALL( REGEXP_REPLACE( JSON_EXTRACT_SCALAR(protopayload_auditlog.metadataJson,"$.jobChange.job.jobConfig.queryConfig.query"), r"(?i)\s+(z_+pivot_[a-z0-9_.]+)", ""),r"(?i)\s+(?:WHERE|AND|OR|ON)\s+(?:\s|\(|CAST|`)*([a-z0-9_.]+)(?:AND)?") ) ) AS x
    ORDER BY
      x),', ') AS QueryWhereColumns
  FROM
    `reby-cloud.bq_logs.cloudaudit_googleapis_com_data_access`
  WHERE
    protopayload_auditlog.serviceName = 'bigquery.googleapis.com' and DATE(timestamp) >= '2021-11-28' # Change date to start of gap
  and DATE(timestamp) <= DATE_ADD(CURRENT_DATE(), INTERVAL -1 DAY)
UPDATE photos
   SET caption = REPLACE(caption,'"','\'')
SELECT note as note_original, 

    REPLACE(
        REPLACE(
            REPLACE(
                REPLACE(
                    REPLACE(
                        REPLACE(
                            REPLACE(
                                REPLACE(
                                    REPLACE(
                                        REPLACE(
                                            REPLACE(
                                                REPLACE(
                                                    REPLACE(
                                                        REPLACE(
                                                            REPLACE(
                                                                REPLACE(
                                                                    REPLACE(
                                                                        REPLACE(
                                                                            REPLACE(
                                                                                REPLACE(
                                                                                    REPLACE(
                                                                                        REPLACE(
                                                                                            REPLACE(
                                                                                                REPLACE(
                                                                                                    REPLACE(
                                                                                                        REPLACE(
                                                                    REPLACE(
                                                                        REPLACE(
                                                                            REPLACE(
                                                                                REPLACE(
                                                                                    REPLACE(
                                                                                        REPLACE(
                                                                                            REPLACE(note, '\"', ''),
                                                                                        '.', ''),
                                                                                    '?', ''),
                                                                                '`', ''),
                                                                            '<', ''),
                                                                        '=', ''),
                                                                    '{', ''),
                                                                                                        '}', ''),
                                                                                                    '[', ''),
                                                                                                ']', ''),
                                                                                            '|', ''),
                                                                                        '\'', ''),
                                                                                    ':', ''),
                                                                                ';', ''),
                                                                            '~', ''),
                                                                        '!', ''),
                                                                    '@', ''),
                                                                '#', ''),
                                                            '$', ''),
                                                        '%', ''),
                                                    '^', ''),
                                                '&', ''),
                                            '*', ''),
                                        '_', ''),
                                    '+', ''),
                                ',', ''),
                            '/', ''),
                        '(', ''),
                    ')', ''),
                '-', ''),
            '>', ''),
        ' ', '-'),
    '--', '-') as note_changed FROM invheader
ALTER DATABASE [servername/databasename] MODIFY NAME = [servername/newdatabasename]
/* HERE I WILL CREATE THE TABLES */
CREATE DATABASE fejesus;

CREATE TABLE leccion (
    id_leccion INT NOT NULL AUTO_INCREMENT, 
    titulo VARCHAR(30),
    instructor VARCHAR(50),
    no_leccion INT,

    PRIMARY KEY(id_leccion),
    INDEX(no_leccion)
    
) ENGINE=INNODB;

DESCRIBE leccion;

CREATE TABLE preguntas (
    id_pregunta INT NOT NULL AUTO_INCREMENT,
    pregunta VARCHAR(200),
    verso VARCHAR(40),
    id_leccion INT NOT NULL,
    id_seccion INT NOT NULL,

    PRIMARY KEY (id_pregunta),
    INDEX(id_leccion),
    INDEX(id_seccion),

    FOREIGN KEY (id_leccion)
        REFERENCES leccion(id_leccion)
        ON UPDATE CASCADE ON DELETE RESTRICT
    
) ENGINE=INNODB;

DESCRIBE preguntas;

CREATE TABLE seccion ( 
    id_seccion INT NOT NULL AUTO_INCREMENT, 
    leccion_id INT NOT NULL, 
    titulo VARCHAR(100), 

    PRIMARY KEY(id_seccion), 
    INDEX (leccion_id), 
        
    FOREIGN KEY (leccion_id) 
        REFERENCES leccion(id_leccion) ON UPDATE CASCADE ON DELETE RESTRICT

) ENGINE=INNODB;

DESCRIBE seccion;
ORDER BY array_position(ARRAY['f', 'p', 'i', 'a']::varchar[], x_field)
SELECT add_continuous_aggregate_policy('conditions_summary_daily',
     start_offset => INTERVAL '1 month',
     end_offset => INTERVAL '1 day',
     schedule_interval => INTERVAL '1 hour');
CREATE MATERIALIZED VIEW conditions_summary_daily
WITH (timescaledb.continuous) AS
SELECT device,
   time_bucket(INTERVAL '1 day', time) AS bucket,
   AVG(temperature),
   MAX(temperature),
   MIN(temperature)
FROM conditions
GROUP BY device, bucket;
CREATE SEQUENCE public.hibernate_sequence INCREMENT 1 START 1 MINVALUE 1;
select id from workflowexecution where subject like '%FOZG-069338%';

update workflowexecution set state = 4, cancellationDate=CURRENT_TIMESTAMP where id = 34470;

update workflowdefinitionexecution set state = 4 , cancellationDate=CURRENT_TIMESTAMP where workflowexecution_id = (select id from workflowexecution where id = 34470);

update workflowstepexecution set state = 4 where workflowdefinitionexecution_id in (select id from workflowdefinitionexecution where workflowexecution_id = (select id from workflowexecution where id = 34470));
select id from workflowexecution where subject like '%OZLO-0907%';

update workflowexecution set state = 1, enddate = CURRENT_TIMESTAMP where id = 34168;

update workflowdefinitionexecution set state = 1, enddate = CURRENT_TIMESTAMP where workflowexecution_id = (select id from workflowexecution where id = 34168);

update workflowstepexecution set state = 1 where workflowdefinitionexecution_id in (select id from workflowdefinitionexecution where workflowexecution_id = (select id from workflowexecution where id = 34168)); 
SELECT *
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = N'Customers'
"""A sciprt to drop optimization runs based on a run_date"""
import os
from ams.db import sql_execute

connection_string = os.environ['DB_A']

uk_models = [
    'UKGR',
    'UKCO',
    'UKCB',
    'UKBG',
    'UKBAL'
]

uk_models_policies = [
    'UKGR',
    'UKCO',
    'UKCB',
    'UKBG',
    'UKBAL',
    'PUKBAL',
    'PUKBG',
    'PUKCB',
    'PUKCO',
    'PUKGR'
]

uk_asset_classes = [
    'EMD_GBP',
    'UK_LARGE',
    'UK_MID',
    'UK_GOV',
    'US_AGG_GBP_H',
    'US_SHORT_GBP',
    'EUR_AGG_XUK_GBP_H',
    'PACIFIC_XJP_GBP',
    'EM_GBP',
    'US_HY_GBP_H',
    'US_LC_GBP',
    'UK_SHORT_GOV',
    'US_SHORT_GBP_H',
    'UK_CORP',
    'US_SMALL_GBP',
    'EUR_AGG_XUK_GBP',
    'EMD_GBP_H',
    'UK_CASH',
    'EUR_XUK_GBP',
    'JAPAN_GBP',
    'US_HY_GBP',
    'UK_LINK',
    'UK_SHORT_CORP',
    'US_AGG_GBP'
]

muni_models = [
    'HBGMN',
    'HBGMT',
    'HBMN',
    'HBMT',
    'HCBMN',
    'HCBMT',
    'HCMN',
    'HCMT'
]

muni_models_policies = [
    'HBGMN',
    'HBGMT',
    'HBMN',
    'HBMT',
    'HCBMN',
    'HCBMT',
    'HCMN',
    'HCMT',
    'PMUNBA',
    'PMUNBG',
    'PMUNCB',
    'PMUNCO'
]

muni_asset_classes = [
    'NT_MUNI',
    'MUNI_AA',
    'MUNI_BB',
    'MUNI_B',
    'MUNI_CCC',
    'EAFE_AC_MUNI',
    'US_LC_MUNI',
    'US_DEF_MUNI',
    'US_SMID_MUNI',
    'EAFE_MUNI',
    'EAFE_SC_MUNI',
    'EM_MUNI',
    'EMD_MUNI',
    'INTL_AGG_MUNI',
    'EMLC_MUNI',
    'CASH_MUNI',
    'MA_MUNI',
    'MF_MUNI',
    'US_LONG_MUNI',
    'US_MID_MUNI',
    'US_SMALL_MUNI',
    'MUNI',
    'MUNI_HY',
    'MUNI_SHORT',
    'MUNI_AAA',
    'MUNI_A',
    'MUNI_BBB'
]

uk_models = tuple(uk_models)
uk_models_policies = tuple(uk_models_policies)
uk_asset_classes = tuple(uk_asset_classes)
muni_models = tuple(muni_models)
muni_models_policies = tuple(muni_models_policies)
muni_asset_classes = tuple(muni_asset_classes)

def delete_fi_cmas(run_date):
    """
    This function deletes SQL output from the script in the function name for a given run_date

    Args:
    run_date (str): The date to delete from the SQL tables
    """
    sql_execute(
        connection_string,
        f"""DELETE FROM ams_equilibrium_cma WHERE run_date = '{run_date}'"""
        )
    sql_execute(
        connection_string,
        f"""DELETE FROM ams_fi_cma WHERE run_date = '{run_date}'"""
        )
    sql_execute(
        connection_string,
        f"""DELETE FROM ams_fi_inputs WHERE run_date = '{run_date}'"""
        )


def delete_pre_opt(run_date):
    """
    This function deletes SQL output from the script in the function name for a given run_date

    Args:
    run_date (str): The date to delete from the SQL tables
    """
    sql_execute(connection_string,
                f"""DELETE FROM ams_asset_class_stats WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_distributions WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_matrices WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_model_stats WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_scenarios WHERE run_date = '{run_date}'""")


def delete_opt(run_date):
    """
    This function deletes SQL output from the script in the function name for a given run_date

    Args:
    run_date (str): The date to delete from the SQL tables
    """
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_weights_account WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_weights_policy WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_weights_rebal WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_stats_rebal WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_stats_account WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_stats_policy WHERE run_date = '{run_date}'""")


def delete_opt_to(run_date):
    """
    This function deletes SQL output from the script in the function name for a given run_date

    Args:
    run_date (str): The date to delete from the SQL tables
    """
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_to_weights_rebal WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_to_stats_rebal WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_to_weights_acct WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_to_stats_acct WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_to_weights_pol WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_to_stats_pol WHERE run_date = '{run_date}'""")


def delete_pre_opt_eqinc(run_date):
    """
    This function deletes SQL output from the script in the function name for a given run_date

    Args:
    run_date (str): The date to delete from the SQL tables
    """
    sql_execute(connection_string,
                f"""DELETE FROM ams_eq_inc_matrices WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_eq_inc_scenarios WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_eq_inc_fund_stats WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_eq_inc_coefficients WHERE run_date = '{run_date}'""")


def delete_opt_eqinc(run_date):
    """
    This function deletes SQL output from the script in the function name for a given run_date

    Args:
    run_date (str): The date to delete from the SQL tables
    """
    sql_execute(connection_string,
                f"""DELETE FROM ams_eqinc_ax_weights_rebal WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_eqinc_ax_stats_rebal WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_eqinc_ax_weights_account WHERE run_date = '{run_date}'""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_eqinc_ax_stats_account WHERE run_date = '{run_date}'""")


def delete_opt_uk(run_date):
    """
    This function deletes SQL output from the script in the function name for a given run_date

    Args:
    run_date (str): The date to delete from the SQL tables
    """
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_weights_account 
                WHERE run_date = '{run_date}' 
                AND model_code in 
                {repr(uk_models)}""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_weights_policy 
                WHERE run_date = '{run_date}' 
                AND model_code in 
                {repr(uk_models)}""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_weights_rebal 
                WHERE run_date = '{run_date}' 
                AND model_code in 
                {repr(uk_models)}""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_stats_rebal 
                WHERE run_date = '{run_date}' 
                AND model_code in 
                {repr(uk_models)}""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_stats_account 
                WHERE run_date = '{run_date}' 
                AND model_code in 
                {repr(uk_models)}""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_stats_policy 
                WHERE run_date = '{run_date}' 
                AND model_code in 
                {repr(uk_models)}""")


def delete_pre_opt_uk(run_date):
    """
    This function deletes SQL output from the script in the function name for a given run_date

    Args:
    run_date (str): The date to delete from the SQL tables
    """
    sql_execute(connection_string,
                f"""DELETE FROM ams_asset_class_stats 
                WHERE run_date = '{run_date}' 
                AND asset_class in 
                {repr(uk_asset_classes)}""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_distributions 
                WHERE run_date = '{run_date}' 
                AND asset_class in 
                {repr(uk_asset_classes)}""")
    sql_execute(connection_string,
    f"""DELETE FROM ams_matrices 
        WHERE run_date = '{run_date}' 
        AND (asset_class_1 in 
        {repr(uk_asset_classes)} 
        OR asset_class_2 in 
        {repr(uk_asset_classes)})""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_model_stats 
                WHERE run_date = '{run_date}' 
                AND model_code in 
                {repr(uk_models_policies)}""")
    sql_execute(connection_string,
        f"""DELETE FROM ams_scenarios 
        WHERE run_date = '{run_date}' 
        AND asset_class in 
        {repr(uk_asset_classes)}""")

def delete_opt_muni(run_date):
    """
    This function deletes SQL output from the script in the function name for a given run_date

    Args:
    run_date (str): The date to delete from the SQL tables
    """
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_weights_account 
                WHERE run_date = '{run_date}' 
                AND model_code in {repr(muni_models)}""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_weights_policy 
                WHERE run_date = '{run_date}' 
                AND model_code in {repr(muni_models)}""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_weights_rebal 
                WHERE run_date = '{run_date}' 
                AND model_code in {repr(muni_models)}""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_stats_rebal W
                HERE run_date = '{run_date}' 
                AND model_code in {repr(muni_models)}""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_stats_account 
                WHERE run_date = '{run_date}' 
                AND model_code in {repr(muni_models)}""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_axioma_stats_policy 
                WHERE run_date = '{run_date}' 
                AND model_code in {repr(muni_models)}""")


def delete_pre_opt_muni(run_date):
    """
    This function deletes SQL output from the script in the function name for a given run_date

    Args:
    run_date (str): The date to delete from the SQL tables
    """
    sql_execute(connection_string,
                f"""DELETE FROM ams_asset_class_stats 
                WHERE run_date = '{run_date}' 
                AND asset_class in 
                {repr(muni_asset_classes)}""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_distributions 
                WHERE run_date = '{run_date}' 
                AND asset_class in 
                {repr(muni_asset_classes)}""")
    sql_execute(connection_string, f"""DELETE FROM ams_matrices 
        WHERE run_date = '{run_date}' 
        AND (asset_class_1 in 
        {repr(muni_asset_classes)}
        OR asset_class_2 in 
        {repr(muni_asset_classes)})""")
    sql_execute(connection_string,
                f"""DELETE FROM ams_model_stats 
                WHERE run_date = '{run_date}' 
                AND model_code in 
                {repr(muni_models_policies)}""")
    sql_execute(connection_string, f"""DELETE FROM ams_scenarios 
        WHERE run_date = '{run_date}' 
        AND asset_class in 
        {repr(muni_asset_classes)}""")


def delete_raw_returns(run_date):
    """
    This function deletes SQL output from the script in the function name for a given run_date

    Args:
    run_date (str): The date to delete from the SQL tables
    """
    sql_execute(connection_string,
                f"""DELETE FROM ams_raw_returns WHERE time_series_date = '{run_date}'""")


def delete_fundamentals(run_date):
    """
    This function deletes SQL output from the script in the function name for a given run_date

    Args:
    run_date (str): The date to delete from the SQL tables
    """
    sql_execute(connection_string,
                f"""DELETE FROM ams_fundamentals WHERE run_date = '{run_date}'""")


def delete_equilibrium(run_date):
    """
    This function deletes SQL output from the script in the function name for a given run_date

    Args:
    run_date (str): The date to delete from the SQL tables
    """
    sql_execute(connection_string,
                f"""DELETE FROM ams_equilibrium_cma WHERE run_date = '{run_date}'""")


def delete_optimization_output(script, run_date):
    """
    This function deletes SQL output from the optimization script runs for a given run_date.
    List of Scripts [delete_raw_returns, delete_fundamentals, delete_equilibrium, delete_fi_cmas, 
    delete_pre_opt, delete_opt, delete_opt_to, delete_pre_opt_eqinc, delete_opt_eqinc, delete_opt_uk,
    delete_pre_opt_uk, delete_opt_muni, delete_pre_opt_muni]

    Args:
    script (str): The script that was run to delete from the SQL tables
    run_date (str): The date to delete from the SQL tables
    """
    run_funcs = {
        'delete_raw_returns', 
        'delete_fundamentals', 
        'delete_equilibrium', 
        'delete_fi_cmas', 
        'delete_pre_opt', 
        'delete_opt', 
        'delete_opt_to', 
        'delete_pre_opt_eqinc', 
        'delete_opt_eqinc', 
        'delete_opt_uk',
        'delete_pre_opt_uk', 
        'delete_opt_muni', 
        'delete_pre_opt_muni'
        }
    assert script in run_funcs, 'script not in run_funcs'
    func_return = globals()[script](run_date)
    return func_return


DELETE_DATE = '30-SEP-2023'
delete_optimization_output('delete_opt', DELETE_DATE)
 protected override void OnModelCreating(ModelBuilder builder)
        {
            base.OnModelCreating(builder);

            builder.Entity<InternetUsersLog>()
           .HasOne<AspNetBranchs>(sc => sc.Branch)
           .WithMany(s => s.Users)
           .HasForeignKey(sc => sc.Branch_Id);


        }
Scaffold-DbContext "Data Source=.;Initial Catalog=Name;Persist Security Info=True;User ID=Username;Password=@!n$p!r3;Integrated Security=True" Microsoft.EntityFrameworkCore.SqlServer -OutputDir Repository/Models -Context AccDbContext -ContextDir Repository -DataAnnotations -Force
SELECT Query, UserId, SUM(TotalCost) FROM `reby-cloud.bq_logs.bigquery_usage_datepart` 
WHERE DATE(Date) >= "2022-03-01" and DATE(Date) <= "2022-03-30" 
GROUP BY 1,2
ORDER BY 3 DESC
SELECT Table_Name, 
    Column_Name
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_CATALOG = 'EDM-PRD'
   AND COLUMN_NAME LIKE '%cost%';
SELECT (date '2022-01-09', date '2022-02-10') OVERLAPS
       (date '2022-02-10', date '2022-03-10');
DECLARE fecha_migracion TIMESTAMP;
SET fecha_migracion = '2021-06-16 00:52:53.131944 UTC';

drop table if exists misc_eu.user_consent;
create or replace table `reby-cloud.misc_eu.user_consent` as 
with 
first_user_session as (
    SELECT
    us.*,
    cu.id as cu_id,
    co.name as app_company_name,
    co.id as company_id,
    row_number() over(partition by cu.id order by us.created_at asc) as rank_
FROM `reby-cloud.analytics_reby_v1_eu.pg_user_session` us
LEFT JOIN `reby-cloud.reby_marketing_eu.pg_company_user` cu
    on us.company_id = cu.company_id and us.user_id = cu.user_id
left join `reby-cloud.analytics_reby_v1_eu.pg_company` co
    on cu.company_id = co.id
QUALIFY rank_ = 1
),
--t1 as (select * from UNNEST(['Avant','IberScot','TuCycle','Oiz','Reby']) as franquicias),
t1 as (select * from UNNEST(['c_3r2qzjfpj8tq2sq3pan1','c_3r4b2rhbvku3zfsmrej1','c_3r4b37b3t924p8j94uch','c_3r4b34725va8zzyete8h','c_3qteetfhxjshx4j54111']) as franquicias),
--t2 as (select * from UNNEST(['Avant','IberScot','TuCycle','Oiz','Reby']) as franquicias),
t2 as (select * from UNNEST(['c_3r2qzjfpj8tq2sq3pan1','c_3r4b2rhbvku3zfsmrej1','c_3r4b37b3t924p8j94uch','c_3r4b34725va8zzyete8h','c_3qteetfhxjshx4j54111']) as franquicias),
all_users as (select id from `reby-cloud.analytics_reby_v1_eu.pg_users_json`),

company_users as (
  select
    cu.id,
    cu.user_id,
    co.name as from_company,
    cu.created_at,
    cu.company_id,
    cu.is_deleted,
    consent.consent_from_reby
  from `reby-cloud.reby_marketing_eu.pg_company_user` cu
  left join `reby-cloud.analytics_reby_v1_eu.pg_consent` consent
      on cu.id = consent.company_user_id
  left join `reby-cloud.analytics_reby_v1_eu.pg_company` co
      on cu.company_id = co.id
),
cross_joins as (
SELECT t1.franquicias as from_co_id, t2.franquicias as to_co_id, all_users.id as user_id
FROM t1 CROSS JOIN t2 CROSS JOIN all_users group by 1,2,3),

cross_cus as (
select
  from_co_id,
  to_co_id,
  cu1.consent_from_reby,
  cj.user_id,
  cu1.id as from_co_cu_id,
  cu1.created_at as from_co_cu_id_created,
  cu2.id as to_co_cu_id,
  cu2.created_at as to_co_cu_id_created,
from cross_joins cj
left join company_users cu1 on cj.from_co_id = cu1.company_id and cj.user_id = cu1.user_id
left join company_users cu2 on cj.to_co_id = cu2.company_id and cj.user_id = cu2.user_id
),

filtered_users as (
select *
from cross_cus
where to_co_id = 'c_3qteetfhxjshx4j54111'
  and from_co_cu_id is not null
  and from_co_id != 'c_3qteetfhxjshx4j54111'
  and to_co_cu_id is not null
),

join_first_ride_reserve_session as (
select
  fu.*,
  frr.created_at as ts_frr,
  fs1.created_at as first_session_from_co,
--  fs2.created_at as first_session_to_co,
  if(from_co_cu_id_created <= fecha_migracion,'pre-migracion','post-migracion') as pre_post
from filtered_users fu
left join temp_eu.delete_first_ride_reserve frr 
  on frr.vehicle_company_id = fu.from_co_id and frr.app_co_id = fu.to_co_id and fu.user_id = frr.user_id
left join first_user_session fs1 on fs1.cu_id = fu.from_co_cu_id 
--left join first_user_session fs2 on fs2.cu_id = fu.to_co_cu_id
),

consent_ts as (
select
  jfrrs.*,
  case
    when pre_post = 'pre-migracion' then from_co_cu_id_created
    when pre_post = 'post-migracion' and first_session_from_co is null  and consent_from_reby is true then from_co_cu_id_created
    when pre_post = 'post-migracion' and first_session_from_co is not null and ts_frr is not null then ts_frr
    when pre_post = 'post-migracion' and first_session_from_co is not null and consent_from_reby is true and ts_frr is null then timestamp_add(greatest(to_co_cu_id_created,from_co_cu_id_created),INTERVAL 3 HOUR)
    else null
  end as consent_timestamp,
  row_number() over (partition by from_co_id,to_co_id,consent_from_reby,user_id) as rn_
from join_first_ride_reserve_session jfrrs
QUALIFY rn_ = 1
)

select
  *,
  case
    when consent_timestamp is not null and pre_post = 'pre-migracion' and abs(timestamp_diff(from_co_cu_id_created,first_session_from_co,HOUR))<3 then 'consent-from-co-app'
    when consent_timestamp is not null and pre_post = 'pre-migracion' then 'consent-from-reby-app'
    when consent_timestamp is not null and pre_post = 'post-migracion' then 'consent-from-reby-app'
  else 'other'
  end as consent_from
from consent_ts

--GET THE FIRST VEHICLE RESERVATION PER USER/APP/VEHICLE
create or replace table temp_eu.delete_first_ride_reserve as 

with first_reservations as (
    select
        vrh.created_at,
        vrh.user_id,
        vrh.company_vehicle_id as vehicle_company_id,
        vrh.company_id as app_co_id,
        co1.name as vehicle_company_name,
        co2.name as app_company_name,
        row_number() over (partition by vrh.user_id,vrh.company_vehicle_id,vrh.company_id order by vrh.created_at asc) as r_n
    from `reby-cloud.analytics_reby_v1_eu.pg_vehicle_reservation_history` vrh
    left join `reby-cloud.analytics_reby_v1_eu.pg_company` co1
    on vrh.company_vehicle_id = co1.id
    left join `reby-cloud.analytics_reby_v1_eu.pg_company` co2
    on vrh.company_vehicle_id = co2.id
    QUALIFY r_n = 1
),
--FINISH

--GET THE FIRST RIDE PER USER/APP/VEHICLE
rides as (
    select 
        id,
        created_at,
        user_id,
        if(vehicle_company_id is null,'c_3qteetfhxjshx4j54111',vehicle_company_id) as vehicle_company_id,
        if(company_id is null,'c_3qteetfhxjshx4j54111',company_id) as company_id
    from analytics_reby_v1_eu.pg_rides_json
),

first_ride_pre as (
    select
        r.id,
        r.user_id,
        r.created_at as ride_date,
        r.vehicle_company_id,
        r.company_id as app_co_id,
        co.name as vehicle_company_name,
        co2.name as app_company_name,
        cu.id as cu_id_vehicle,
        cu2.id as cu_id_app,
        --row_number() over (partition by r.user_id,r.vehicle_company_id order by r.created_at asc) as rank_
    from rides r
    left join `reby-cloud.analytics_reby_v1_eu.pg_company` co on r.vehicle_company_id = co.id
    left join `reby-cloud.analytics_reby_v1_eu.pg_company` co2 on r.company_id = co2.id
    left join `reby-cloud.reby_marketing_eu.pg_company_user` cu on r.user_id = cu.user_id and r.vehicle_company_id = cu.company_id
    left join `reby-cloud.reby_marketing_eu.pg_company_user` cu2 on r.user_id = cu2.user_id and r.company_id = cu2.company_id
),

first_ride_per_app_vehicle as (
select *, row_number() over (partition by user_id, app_company_name, vehicle_company_name order by ride_date asc) as rank_ from first_ride_pre qualify rank_ = 1
),
--FINISH

--GET FIRST DATE BETWEEN FIRST RIDE AND/OR FIRST RESERVATION
first_date_ride_reserv as (
    select
        user_id,
        vehicle_company_name,
        app_company_name,
        created_at,
        vehicle_company_id,
        app_co_id,
        row_number() over (partition by user_id, app_company_name, vehicle_company_name order by created_at asc) as rank_
        from(
            --UNION BETWEEN RIDES AND RESERVATIONS
            select
                user_id,
                vehicle_company_name,
                app_company_name,
                created_at,
                vehicle_company_id,
                app_co_id
            from first_reservations 
            UNION ALL
            select
                user_id,
                vehicle_company_name,
                app_company_name,
                ride_date as created_at,
                vehicle_company_id,
                app_co_id
            from first_ride_per_app_vehicle
        ) QUALIFY rank_=1
)

select * from first_date_ride_reserv
--FINISH
;

SELECT DISTINCT column_list
FROM table_list
  JOIN table ON join_condition
WHERE row_filter
GROUP BY column
HAVING group_filter
ORDER BY column
LIMIT count OFFSET offset;
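A concrete query following that clause order; the table and column names here (customers, orders, status) are hypothetical and only illustrate the template:

SELECT DISTINCT c.country, COUNT(*) AS order_count
FROM customers c
  JOIN orders o ON o.customer_id = c.id
WHERE o.status = 'shipped'
GROUP BY c.country
HAVING COUNT(*) > 10
ORDER BY order_count DESC
LIMIT 20 OFFSET 0;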
IF object_id('tempdb..#table') IS NOT NULL
BEGIN
   DROP TABLE #table
END

Create Table #table(
	[State] varchar(500),
	[City] varchar(500)
);


Insert into #table ([State], [City]) values ('OH', 'Toledo,Columbus');
Insert into #table ([State], [City]) values ('TN', 'Nashville,Memphis');



SELECT A.[State]
	,Split.a.value('.', 'VARCHAR(100)') AS String
FROM (
	SELECT [State]
		,CAST('<M>' + REPLACE([City], ',', '</M><M>') + '</M>' AS XML) AS String
	FROM #table
	) AS A
CROSS APPLY String.nodes('/M') AS Split(a);
where o.createdate >= dateadd(year, -1, current_date)
SELECT age, COUNT(*) FROM campers GROUP BY age ORDER BY COUNT(*) DESC;
SELECT age, COUNT(*) FROM campers GROUP BY age ORDER BY COUNT(*);
SELECT COUNT(*) FROM campers WHERE counselor="Ashley";
SELECT COUNT(column_name) FROM table_name;
SELECT 
    cu.id as user_id,
    if(date_diff(current_date,date(last_location_at),DAY) > 190,null,longitude) longitude,
    if(date_diff(current_date,date(last_location_at),DAY) > 190,null,latitude) latitude,
    last_location_at
FROM `reby-cloud.alvaro_misc.pg_user_last_location` ull
left join `reby-cloud.reby_marketing_eu.pg_company_user` cu on cu.company_id = ull.company_id and cu.user_id = ull.user_id
join `reby-cloud.temp_eu.200_users_aepd_avant` aepd on aepd.from_co_cu_id = cu.id
;
select
    uc.from_co_cu_id as user_id,
    uc.consent_timestamp,
    uc.consent_from
from `reby-cloud.misc_eu.user_consent` uc
join `reby-cloud.temp_eu.200_users_aepd_avant` aepd on aepd.from_co_cu_id = uc.from_co_cu_id
select * from (
select
    r.id,
    cu.id as user_id,
    r.created_at as tiempo_utc,
    r.minutes,
    r.distance/1000 as distancia_km,
    r.cost[offset(0)].amount as coste,
    r.path[safe_offset(0)].latitude as latitude_initial,
    r.path[safe_offset(0)].longitude as longitude_initial,
    r.path[safe_offset(array_length(path)-1)].latitude as latitude_final,
    r.path[safe_offset(array_length(path)-1)].longitude as longitude_final,
    r.starting_battery_level,
    r.ending_battery_level,
    co.name as vehicle_company,
    co2.name as app_company
from `reby-cloud.analytics_reby_v1_eu.pg_rides_json` r
left join `reby-cloud.analytics_reby_v1_eu.pg_company` co on r.vehicle_company_id = co.id
left join `reby-cloud.analytics_reby_v1_eu.pg_company` co2 on r.company_id = co2.id
left join `reby-cloud.reby_marketing_eu.pg_company_user` cu on r.company_id = cu.company_id and r.user_id = cu.user_id
join `reby-cloud.temp_eu.200_users_aepd_avant` aepd on aepd.from_co_cu_id = cu.id
--order by r.created_at asc
) where vehicle_company = 'Avant'
Select Count(*)
From   mySchema.myTable
Where  Cast(Col1 As NVarChar(128)) +
       Cast(Col2 As NVarChar(128)) +
       Cast(Coln As NVarChar(128)) Like '%?%'
INSERT INTO TableName (
  Field1,
  Field2,
  Field3
) VALUES (
  'Value1',
  22,
  GETDATE()
)
SELECT Suref, FORMAT(DateFrom, 'd', 'en-gb') AS 'Great Britain English format'
FROM [dbo].[BOCClientIndex - AdultServiceDetails]
WHERE [DateFrom] >= '01/01/2020'
Order by DateFrom
INNER JOIN:
is used when retrieving data from multiple
tables and returns only the rows that match in both tables.

LEFT OUTER JOIN:
is used when retrieving data from
multiple tables and returns every
left table record plus any matching right table records.

RIGHT OUTER JOIN:
is used when retrieving data from
multiple tables and returns every right
table record plus any matching left table records.

FULL OUTER JOIN:
is used when retrieving data from
multiple tables and returns records from both
tables, matching and non-matching.



INNER JOIN :
SELECT select_list From TableA A
Inner Join TableB B
On A.Key = B.Key


LEFT OUTER JOIN :
SELECT select_list From TableA A
Left Join TableB B
On A.Key = B.Key

(add WHERE B.Key IS NULL to return only the left table rows with no match)



RIGHT OUTER JOIN :
SELECT select_list From TableA A
Right Join TableB B
On A.Key = B.Key


FULL JOIN :
SELECT select_list From TableA A
FULL OUTER Join TableB B
On A.Key = B.Key
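
A minimal worked example contrasting the join types, using two hypothetical single-column temp tables (#A and #B are made-up names; T-SQL syntax assumed):

-- sample data: #A holds 1,2,3 and #B holds 2,3,4
CREATE TABLE #A (KeyCol INT);
CREATE TABLE #B (KeyCol INT);
INSERT INTO #A VALUES (1), (2), (3);
INSERT INTO #B VALUES (2), (3), (4);

-- INNER JOIN: only keys present in both tables -> 2 and 3
SELECT A.KeyCol, B.KeyCol FROM #A A INNER JOIN #B B ON A.KeyCol = B.KeyCol;

-- LEFT JOIN: every #A row, NULL where #B has no match -> (1,NULL), (2,2), (3,3)
SELECT A.KeyCol, B.KeyCol FROM #A A LEFT JOIN #B B ON A.KeyCol = B.KeyCol;

-- LEFT JOIN ... WHERE B.KeyCol IS NULL: #A rows with no match in #B -> 1
SELECT A.KeyCol FROM #A A LEFT JOIN #B B ON A.KeyCol = B.KeyCol WHERE B.KeyCol IS NULL;

-- FULL OUTER JOIN: all rows from both sides, matched where possible -> keys 1 through 4
SELECT A.KeyCol, B.KeyCol FROM #A A FULL OUTER JOIN #B B ON A.KeyCol = B.KeyCol;

DROP TABLE #A;
DROP TABLE #B;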

-- which vehicle log points fall within 1/10 mile of a school?
SELECT * 
FROM
 (SELECT
        vehicle,
        date1,
        code,
        metric,
        safe_cast(lon as FLOAT64) as lon,
        safe_cast(lat as FLOAT64) as lat,
        ST_GeogPoint(safe_cast(lon as FLOAT64), safe_cast(lat as FLOAT64)) AS point,
    FROM 
        `nytint-prd.gfx_nyc_fleet_logs.test_log_subset_3mo_geo`
    WHERE ABS(safe_cast(lat as FLOAT64)) < 90
    AND ABS(safe_cast(lon as FLOAT64)) < 180) as logs,
    `nytint-prd.gfx_nyc_fleet_logs.school_locations` as schools
WHERE ST_DWithin(
        ST_GeogPoint(safe_cast(schools.LONGITUDE as FLOAT64), safe_cast(schools.LATITUDE as FLOAT64)),
        logs.point,
        1609.34/10)
limit 100
-- how many stations within 1 mile range of each zip code?
SELECT
    zip_code AS zip,
    ANY_VALUE(zip_code_geom) AS polygon,
    COUNT(*) AS bike_stations
FROM
    `bigquery-public-data.new_york.citibike_stations` AS bike_stations,
    `bigquery-public-data.geo_us_boundaries.zip_codes` AS zip_codes
WHERE ST_DWithin(
         zip_codes.zip_code_geom,
         ST_GeogPoint(bike_stations.longitude, bike_stations.latitude),
         1609.34)
GROUP BY zip
ORDER BY bike_stations DESC
SELECT
  *,
  ST_GeogPoint(pLongitude, pLatitude) AS p
FROM
  mytable
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>bese - BuildExecutionSummaryEnd</Title>
      <Description>execution plan end</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
endofscript:

PRINT '--------------------------------------------------';
PRINT ' ';
PRINT 'elapsed time '+CONVERT(VARCHAR, GETDATE() - @started, 108);
PRINT ' ';
PRINT 'completed '+CONVERT(VARCHAR, GETDATE(), 108);
PRINT ' ';
PRINT '--------------------------------------------------';
PRINT '--------------------------------------------------';
PRINT '--------------------------------------------------';
]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>best - BuildExecutionSummaryTitle</Title>
      <Description>add title to execution plan</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
PRINT '----------------------------------------------------------------------------------------';
PRINT '----------------------------------------------------------------------------------------';
PRINT '----------------------------------------------------------------------------------------';
PRINT '     '
PRINT '----------------------------------------------------------------------------------------';
PRINT '----------------------------------------------------------------------------------------';
PRINT '----------------------------------------------------------------------------------------';
]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>bess - BuildExecutionSummaryStep</Title>
      <Description>execution plan step</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
PRINT ' ';
PRINT '--------------------------------------------------';
PRINT ' ';
SET @stepdescription = '';
SET @executionsummary = @executionsummary + CHAR(13) + ' - ' + @stepdescription;
PRINT CONVERT(VARCHAR(50), @step) + ') ' + @stepdescription;
PRINT ' ';
IF @step = @step
BEGIN
	--------------------------------------------------------
	--START

	SELECT 1;

	--END
	--------------------------------------------------------
END
PRINT ' ';
PRINT 'END ' + CONVERT(VARCHAR(50), @step) + ') ' + @stepdescription + ' : ' + CONVERT(VARCHAR, GETDATE() - @started, 108);
SET @step += 1;
PRINT ' ';
PRINT '--------------------------------------------------';
]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>bess - BuildExecutionSummary</Title>
      <Description>execution plan start</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
PRINT '--------------------------------------------';
PRINT '--Execution Summary-------------------------';
PRINT '--------------------------------------------';
PRINT '
-  
';
PRINT '--------------------------------------------';
PRINT '--END Execution Summary---------------------';
PRINT '--------------------------------------------';
PRINT '--------------------------------------------------';
PRINT '--------------------------------------------------';
PRINT '--------------------------------------------------';
DECLARE
       @started          DATETIME
     , @step             INT
     , @stepdescription  VARCHAR(500)
	 , @executionsummary VARCHAR(max);

SET @started = GETDATE();
SET @step = 1;
SET @stepdescription='';
SET @executionsummary='';
PRINT 'started '+CONVERT(VARCHAR, @started, 108);

PRINT ' ';
PRINT '--------------------------------------------------';


]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets xmlns="http://schemas.microsoft.com/VisualStudio/2005/CodeSnippet">
  <CodeSnippet Format="1.0.0">
    <Header>
      <Title>besg - BuildExecutionSummaryGenerate</Title>
      <Description>prints out the generated execution plan</Description>
      <Author>Rick M</Author>
      <Shortcut />
      <SnippetTypes>
        <SnippetType>Expansion</SnippetType>
      </SnippetTypes>
    </Header>
    <Snippet>
      <Code Language="SQL"><![CDATA[
PRINT '-------------------------------------------------------';
PRINT '--Generated Execution Summary--------------------------';
PRINT '-------------------------------------------------------';
PRINT @executionsummary;
PRINT '----------------------------------------------------------';
PRINT '--END Generated Execution Summary-------------------------';
PRINT '----------------------------------------------------------';
]]></Code>
    </Snippet>
  </CodeSnippet>
</CodeSnippets>
IF EXISTS (
    SELECT * FROM sysobjects WHERE id = object_id(N'Split_Int') 
    AND xtype IN (N'FN', N'IF', N'TF')
)
    DROP FUNCTION [dbo].[Split_Int]
GO

SET ANSI_NULLS ON
GO

SET QUOTED_IDENTIFIER ON
GO

CREATE FUNCTION [dbo].[Split_Int]
       (
         @InputList VARCHAR(8000) = '',
         @Delimiter NVARCHAR(1) = ','
       )
RETURNS @List TABLE
       (
         [Index] INT NOT NULL
                        IDENTITY(0, 1)
                        PRIMARY KEY,
         [Value] INT NULL
       )
AS BEGIN




	DECLARE @Item varchar(8000)
    WHILE CHARINDEX(@Delimiter, @InputList, 0) <> 0
          BEGIN
                SELECT  @Item = RTRIM(LTRIM(SUBSTRING(@InputList, 1,
                                                       CHARINDEX(@Delimiter, @InputList, 0)
                                                       - 1))) ,
                        @InputList = RTRIM(LTRIM(SUBSTRING(@InputList,
                                                            CHARINDEX(@Delimiter, @InputList, 0)
                                                            + LEN(@Delimiter),
                                                            LEN(@InputList))));

							
 
                IF LEN(@Item) > 0 
                   INSERT   INTO @List
                            SELECT  @Item
          END

    IF LEN(@InputList) > 0 
       INSERT   INTO @List
                SELECT  @InputList -- Put the last item in
   

RETURN
END



GO



select * from dbo.Split_Int('1,2,3,4',',')


select * from dbo.Split_Int('	','	')
SELECT      c.name  AS 'ColumnName'
            ,t.name AS 'TableName'
FROM        sys.columns c
JOIN        sys.tables  t   ON c.object_id = t.object_id
WHERE       c.name LIKE '%MyName%'
ORDER BY    TableName
            ,ColumnName;
DECLARE @ClarionDate INT = 47563
DECLARE @SqlDateTime DATETIME 

-- Convert the clarion DATE into and SQL DateTime
SET @SqlDateTime = DateAdd(day, @ClarionDate  - 4, '1801-01-01') 

SELECT @SqlDateTime AS 'SQL Date Time'

-- Now convert it back from and SQL DateTime to a Clarion Date
SET @ClarionDate = DateDiff(day, DateAdd(day, -4, '1801-01-01'), @SqlDateTime)
SELECT @ClarionDate AS 'Clarion Date'
DECLARE @Object AS INT;
DECLARE @ResponseText AS VARCHAR(8000);
DECLARE @Body AS VARCHAR(8000) = 
'{
    "what": 1,
    "ever": "you",
    "need": "to send as the body"
}'  

EXEC sp_OACreate 'MSXML2.XMLHTTP', @Object OUT;
EXEC sp_OAMethod @Object, 'open', NULL, 'post','http://requestb.in/1h83e3n1', 'false'

EXEC sp_OAMethod @Object, 'setRequestHeader', null, 'Content-Type', 'application/json'
EXEC sp_OAMethod @Object, 'send', null, @body

EXEC sp_OAMethod @Object, 'responseText', @ResponseText OUTPUT
SELECT @ResponseText

EXEC sp_OADestroy @Object
BEGIN TRY
    DROP TABLE [Dim].[Date]
END TRY

BEGIN CATCH
    /*No Action*/
END CATCH

CREATE TABLE [Dim].[Date]
(
    [DateKey] INT primary key, 
    [Date] DATETIME,
    [FullDate] CHAR(10),-- Date in MM-dd-yyyy format
    [DayOfMonth] VARCHAR(2), -- Field will hold day number of Month
    [DaySuffix] VARCHAR(4), -- Apply suffix as 1st, 2nd ,3rd etc
    [DayName] VARCHAR(9), -- Contains name of the day, Sunday, Monday 
    [DayOfWeek] CHAR(1),-- First Day Sunday=1 and Saturday=7
    [DayOfWeekInMonth] VARCHAR(2), --1st Monday or 2nd Monday in Month
    [DayOfWeekInYear] VARCHAR(2),
    [DayOfQuarter] VARCHAR(3), 
    [DayOfYear] VARCHAR(3),
    [WeekOfMonth] VARCHAR(1),-- Week Number of Month 
    [WeekOfQuarter] VARCHAR(2), --Week Number of the Quarter
    [WeekOfYear] VARCHAR(2),--Week Number of the Year
    [Month] VARCHAR(2), --Number of the Month 1 to 12
    [MonthName] VARCHAR(9),--January, February etc
    [MonthOfQuarter] VARCHAR(2),-- Month Number belongs to Quarter
    [Quarter] CHAR(1),
    [QuarterName] VARCHAR(9),--First,Second..
    [Year] CHAR(4),-- Year value of Date stored in Row
    [YearName] CHAR(7), --CY 2012,CY 2013
    [MonthYear] CHAR(10), --Jan-2013,Feb-2013
    [MMYYYY] CHAR(6),
    [FirstDayOfMonth] DATE,
    [LastDayOfMonth] DATE,
    [FirstDayOfQuarter] DATE,
    [LastDayOfQuarter] DATE,
    [FirstDayOfYear] DATE,
    [LastDayOfYear] DATE,
    [IsHoliday] BIT,-- Flag 1=National Holiday, 0-No National Holiday
    [IsWeekday] BIT,-- 0=Week End ,1=Week Day
    [HolidayName] VARCHAR(50),--Name of Holiday in US
)
GO

--=========================================================================================
--Specify Start Date and End date here
--Value of Start Date Must be Less than Your End Date 
--=========================================================================================

DECLARE @StartDate DATETIME = '12/29/2014' --Starting value of Date Range
DECLARE @EndDate DATETIME = '01/01/2100' --End Value of Date Range

--Temporary Variables To Hold the Values During Processing of Each Date of Year
DECLARE
    @DayOfWeekInMonth INT,
    @DayOfWeekInYear INT,
    @DayOfQuarter INT,
    @WeekOfMonth INT,
    @CurrentYear INT,
    @CurrentMonth INT,
    @CurrentQuarter INT

/*Table Data type to store the day of week count for the month and year*/
DECLARE @DayOfWeek TABLE
(
    DOW INT,
    MonthCount INT,
    QuarterCount INT,
    YearCount INT
)

INSERT INTO @DayOfWeek VALUES (1, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (2, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (3, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (4, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (5, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (6, 0, 0, 0)
INSERT INTO @DayOfWeek VALUES (7, 0, 0, 0)

--Extract and assign various parts of Values from Current Date to Variable

DECLARE @CurrentDate AS DATETIME = @StartDate
SET @CurrentMonth = DATEPART(MM, @CurrentDate)
SET @CurrentYear = DATEPART(YY, @CurrentDate)
SET @CurrentQuarter = DATEPART(QQ, @CurrentDate)

/********************************************************************************************/
--Proceed only if Start Date(Current date) is less than End date you specified above

WHILE @CurrentDate < @EndDate
/*Begin day of week logic*/
BEGIN
    /*Check for Change in Month of the Current date if Month changed then 
    Change variable value*/
    IF @CurrentMonth != DATEPART(MM, @CurrentDate) 
    BEGIN
        UPDATE @DayOfWeek
        SET [MonthCount] = 0
        SET @CurrentMonth = DATEPART(MM, @CurrentDate)
    END

    /* Check for Change in Quarter of the Current date if Quarter changed then change 
        Variable value*/
    IF @CurrentQuarter != DATEPART(QQ, @CurrentDate)
    BEGIN
        UPDATE @DayOfWeek
        SET [QuarterCount] = 0
        SET @CurrentQuarter = DATEPART(QQ, @CurrentDate)
    END

    /* Check for Change in Year of the Current date if Year changed then change 
        Variable value*/
    IF @CurrentYear != DATEPART(YY, @CurrentDate)
    BEGIN
        UPDATE @DayOfWeek
        SET YearCount = 0
        SET @CurrentYear = DATEPART(YY, @CurrentDate)
    END

    -- Set values in table data type created above from variables
    UPDATE @DayOfWeek
    SET 
        MonthCount = MonthCount + 1,
        QuarterCount = QuarterCount + 1,
        YearCount = YearCount + 1
    WHERE DOW = DATEPART(DW, @CurrentDate)

    SELECT
        @DayOfWeekInMonth = MonthCount,
        @DayOfQuarter = QuarterCount,
        @DayOfWeekInYear = YearCount
    FROM @DayOfWeek
    WHERE DOW = DATEPART(DW, @CurrentDate)
    
/*End day of week logic*/


/* Populate Your Dimension Table with values*/
    
    INSERT INTO [Dim].[Date]
    SELECT
        
        CONVERT (char(8),@CurrentDate,112) as 'DateKey',
        @CurrentDate AS 'Date',
        CONVERT (char(10),@CurrentDate,101) as 'FullDate',
        DATEPART(DD, @CurrentDate) AS 'DayOfMonth',
        --Apply Suffix values like 1st, 2nd 3rd etc..
        CASE 
            WHEN DATEPART(DD,@CurrentDate) IN (11,12,13) THEN CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'th'
            WHEN RIGHT(DATEPART(DD,@CurrentDate),1) = 1 THEN CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'st'
            WHEN RIGHT(DATEPART(DD,@CurrentDate),1) = 2 THEN CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'nd'
            WHEN RIGHT(DATEPART(DD,@CurrentDate),1) = 3 THEN CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'rd'
            ELSE CAST(DATEPART(DD,@CurrentDate) AS VARCHAR) + 'th' 
        END AS 'DaySuffix',
        
        DATENAME(DW, @CurrentDate) AS 'DayName',
        DATEPART(DW, @CurrentDate) AS 'DayOfWeek',
        @DayOfWeekInMonth AS 'DayOfWeekInMonth',
        @DayOfWeekInYear AS 'DayOfWeekInYear',
        @DayOfQuarter AS 'DayOfQuarter',
        DATEPART(DY, @CurrentDate) AS 'DayOfYear',
        DATEPART(WW, @CurrentDate) + 1 - DATEPART(WW, CONVERT(VARCHAR, DATEPART(MM, @CurrentDate)) + '/1/' + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate))) AS 'WeekOfMonth',
        (DATEDIFF(DD, DATEADD(QQ, DATEDIFF(QQ, 0, @CurrentDate), 0), @CurrentDate) / 7) + 1 AS 'WeekOfQuarter',
        DATEPART(WW, @CurrentDate) AS 'WeekOfYear',
        DATEPART(MM, @CurrentDate) AS 'Month',
        DATENAME(MM, @CurrentDate) AS 'MonthName',
        CASE
            WHEN DATEPART(MM, @CurrentDate) IN (1, 4, 7, 10) THEN 1
            WHEN DATEPART(MM, @CurrentDate) IN (2, 5, 8, 11) THEN 2
            WHEN DATEPART(MM, @CurrentDate) IN (3, 6, 9, 12) THEN 3
        END AS 'MonthOfQuarter',
        DATEPART(QQ, @CurrentDate) AS 'Quarter',
        CASE DATEPART(QQ, @CurrentDate)
            WHEN 1 THEN 'First'
            WHEN 2 THEN 'Second'
            WHEN 3 THEN 'Third'
            WHEN 4 THEN 'Fourth'
        END AS 'QuarterName',
        DATEPART(YEAR, @CurrentDate) AS 'Year',
        'CY ' + CONVERT(VARCHAR, DATEPART(YEAR, @CurrentDate)) AS 'YearName',
        LEFT(DATENAME(MM, @CurrentDate), 3) + '-' + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate)) AS 'MonthYear',
        RIGHT('0' + CONVERT(VARCHAR, DATEPART(MM, @CurrentDate)),2) + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate)) AS 'MMYYYY',
        CONVERT(DATETIME, CONVERT(DATE, DATEADD(DD, - (DATEPART(DD, @CurrentDate) - 1), @CurrentDate))) AS 'FirstDayOfMonth',
        CONVERT(DATETIME, CONVERT(DATE, DATEADD(DD, - (DATEPART(DD, (DATEADD(MM, 1, @CurrentDate)))), DATEADD(MM, 1, @CurrentDate)))) AS 'LastDayOfMonth',
        DATEADD(QQ, DATEDIFF(QQ, 0, @CurrentDate), 0) AS 'FirstDayOfQuarter',
        DATEADD(QQ, DATEDIFF(QQ, -1, @CurrentDate), -1) AS 'LastDayOfQuarter',
        CONVERT(DATETIME, '01/01/' + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate))) AS 'FirstDayOfYear',
        CONVERT(DATETIME, '12/31/' + CONVERT(VARCHAR, DATEPART(YY, @CurrentDate))) AS 'LastDayOfYear',
        NULL AS 'IsHoliday',
        CASE DATEPART(DW, @CurrentDate)
            WHEN 1 THEN 0
            WHEN 2 THEN 1
            WHEN 3 THEN 1
            WHEN 4 THEN 1
            WHEN 5 THEN 1
            WHEN 6 THEN 1
            WHEN 7 THEN 0
        END AS 'IsWeekday',
        NULL AS 'HolidayName'

    SET @CurrentDate = DATEADD(DD, 1, @CurrentDate)
END
        
--============================================================================
-- Step 3.
-- Update Values of Holiday as per USA Govt. Declaration for National Holiday.
--============================================================================

/*Update HOLIDAY Field of USA In dimension*/
    /* New Years Day - January 1 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'New Year''s Day'
    WHERE [Month] = 1 AND [DayOfMonth] = 1

    /* Martin Luther King, Jr. Day - Third Monday in January starting in 1983 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Martin Luther King, Jr. Day'
    WHERE
        [Month] = 1 AND
        [DayOfWeek] = 'Monday' AND
        [Year] >= 1983 AND
        DayOfWeekInMonth = 3

    /* Valentine's Day - February 14 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Valentine''s Day'
    WHERE
        [Month] = 2 AND
        [DayOfMonth] = 14

    /* President's Day - Third Monday in February */
    UPDATE [Dim].[Date]
        SET HolidayName = 'President''s Day'
    WHERE
        [Month] = 2 AND
        [DayOfWeek] = 'Monday' AND
        [DayOfWeekInMonth] = 3

    /* Saint Patrick's Day */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Saint Patrick''s Day'
    WHERE
        [Month] = 3 AND
        [DayOfMonth] = 17

    /* Memorial Day - Last Monday in May */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Memorial Day'
    FROM [Dim].[Date]
    WHERE DateKey IN 
    (
        SELECT
            MAX(DateKey)
        FROM [Dim].[Date]
        WHERE
            [MonthName] = 'May' AND
            [DayOfWeek] = 'Monday'
        GROUP BY
            [Year],
            [Month]
    )

    /* Mother's Day - Second Sunday of May */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Mother''s Day'
    WHERE
        [Month] = 5 AND
        [DayOfWeek] = 'Sunday' AND
        [DayOfWeekInMonth] = 2

    /* Father's Day - Third Sunday of June */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Father''s Day'
    WHERE
        [Month] = 6 AND
        [DayOfWeek] = 'Sunday' AND
        [DayOfWeekInMonth] = 3

    /* Independence Day */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Independence Day'
    WHERE [Month] = 7 AND [DayOfMonth] = 4

    /* Labor Day - First Monday in September */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Labor Day'
    FROM [Dim].[Date]
    WHERE DateKey IN 
    (
        SELECT
            MIN(DateKey)
        FROM [Dim].[Date]
        WHERE
            [MonthName] = 'September' AND
            [DayOfWeek] = 'Monday'
        GROUP BY
            [Year],
            [Month]
    )

    /* Columbus Day - Second MONDAY in October */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Columbus Day'
    WHERE
        [Month] = 10 AND
        [DayOfWeek] = 'Monday' AND
        [DayOfWeekInMonth] = 2

    /* Halloween - 10/31 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Halloween'
    WHERE
        [Month] = 10 AND
        [DayOfMonth] = 31

    /* Veterans Day - November 11 */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Veterans Day'
    WHERE
        [Month] = 11 AND
        [DayOfMonth] = 11
    
    /* Thanksgiving - Fourth THURSDAY in November */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Thanksgiving Day'
    WHERE
        [Month] = 11 AND
        [DayOfWeek] = 'Thursday' AND
        [DayOfWeekInMonth] = 4

    /* Christmas */
    UPDATE [Dim].[Date]
        SET HolidayName = 'Christmas Day'
    WHERE [Month] = 12 AND
          [DayOfMonth]  = 25
    
    /* Election Day - The first Tuesday after the first Monday in November */
    BEGIN
    DECLARE @Holidays TABLE
    (
        [ID] INT IDENTITY(1,1),
        [DateID] INT,
        [Week] TINYINT,
        [Year] CHAR(4),
        [Day] CHAR(2)
    )

        INSERT INTO @Holidays([DateID], [Year], [Day])
            SELECT
                [DateKey],
                [Year],
                [DayOfMonth] 
            FROM [Dim].[Date]
            WHERE
                [Month] = 11 AND 
                [DayOfWeek] = 'Monday'
            ORDER BY
                [Year],
                [DayOfMonth]

        DECLARE @CNTR INT,
                @POS INT,
                @STARTYEAR INT,
                @ENDYEAR INT,
                @MINDAY INT

        SELECT @CURRENTYEAR = MIN([Year])
             , @STARTYEAR = MIN([Year])
             , @ENDYEAR = MAX([Year])
        FROM @Holidays

        WHILE @CURRENTYEAR <= @ENDYEAR
        BEGIN
            SELECT @CNTR = COUNT([Year])
            FROM @Holidays
            WHERE [Year] = @CURRENTYEAR

            SET @POS = 1

            WHILE @POS <= @CNTR
            BEGIN
                SELECT @MINDAY = MIN(DAY)
                FROM @Holidays
                WHERE
                    [Year] = @CURRENTYEAR AND
                    [Week] IS NULL

                UPDATE @Holidays
                    SET [Week] = @POS
                WHERE
                    [Year] = @CURRENTYEAR AND
                    [Day] = @MINDAY

                SELECT @POS = @POS + 1
            END

            SELECT @CURRENTYEAR = @CURRENTYEAR + 1
        END

        UPDATE [Dim].[Date]
            SET HolidayName  = 'Election Day'
        FROM [Dim].[Date] DT
            JOIN @Holidays HL ON (HL.DateID + 1) = DT.DateKey
        WHERE
            [Week] = 1
    END
    --set flag for USA holidays in Dimension
    UPDATE [Dim].[Date]
        SET IsHoliday = CASE WHEN HolidayName IS NULL THEN 0
                                WHEN HolidayName IS NOT NULL THEN 1 END

/*****************************************************************************************/

/* Add Fiscal Calendar columns into table DimDate */

ALTER TABLE [Dim].[Date] ADD
    [FiscalDayOfYear] VARCHAR(3),
    [FiscalWeekOfYear] VARCHAR(3),
    [FiscalMonth] VARCHAR(2), 
    [FiscalQuarter] CHAR(1),
    [FiscalQuarterName] VARCHAR(9),
    [FiscalYear] CHAR(4),
    [FiscalYearName] CHAR(7),
    [FiscalMonthYear] CHAR(10),
    [FiscalMMYYYY] CHAR(6),
    [FiscalFirstDayOfMonth] DATE,
    [FiscalLastDayOfMonth] DATE,
    [FiscalFirstDayOfQuarter] DATE,
    [FiscalLastDayOfQuarter] DATE,
    [FiscalFirstDayOfYear] DATE,
    [FiscalLastDayOfYear] DATE

GO

/***************************************************************************
The following section needs to be populated for defining the fiscal calendar
***************************************************************************/

DECLARE
    @dtFiscalYearStart SMALLDATETIME = 'December 29, 2014',
    @FiscalYear INT = 2015,
    @LastYear INT = 2100,
    @FirstLeapYearInPeriod INT = 2012

/*****************************************************************************************/

DECLARE
    @iTemp INT,
    @LeapWeek INT,
    @CurrentDate DATETIME,
    @FiscalDayOfYear INT,
    @FiscalWeekOfYear INT,
    @FiscalMonth INT,
    @FiscalQuarter INT,
    @FiscalQuarterName VARCHAR(10),
    @FiscalYearName VARCHAR(7),
    @LeapYear INT,
    @FiscalFirstDayOfYear DATE,
    @FiscalFirstDayOfQuarter DATE,
    @FiscalFirstDayOfMonth DATE,
    @FiscalLastDayOfYear DATE,
    @FiscalLastDayOfQuarter DATE,
    @FiscalLastDayOfMonth DATE

/*Holds the years that have a 4-5-5 pattern (an extra leap week) in the last quarter*/

DECLARE @LeapTable TABLE (leapyear INT)

/*TABLE to contain the fiscal year calendar*/

DECLARE @tb TABLE
(
    [PeriodDate] DATETIME,
    [FiscalDayOfYear] VARCHAR(3),
    [FiscalWeekOfYear] VARCHAR(3),
    [FiscalMonth] VARCHAR(2), 
    [FiscalQuarter] VARCHAR(1),
    [FiscalQuarterName] VARCHAR(9),
    [FiscalYear] VARCHAR(4),
    [FiscalYearName] VARCHAR(7),
    [FiscalMonthYear] VARCHAR(10),
    [FiscalMMYYYY] VARCHAR(6),
    [FiscalFirstDayOfMonth] DATE,
    [FiscalLastDayOfMonth] DATE,
    [FiscalFirstDayOfQuarter] DATE,
    [FiscalLastDayOfQuarter] DATE,
    [FiscalFirstDayOfYear] DATE,
    [FiscalLastDayOfYear] DATE
)

/*Populate the table with all leap years*/

SET @LeapYear = @FirstLeapYearInPeriod
WHILE (@LeapYear < @LastYear)
    BEGIN
        INSERT INTO @leapTable VALUES (@LeapYear)
        SET @LeapYear = @LeapYear + 6
    END

/*Initialize parameters before the loop*/

SET @CurrentDate = @dtFiscalYearStart
SET @FiscalDayOfYear = 1
SET @FiscalWeekOfYear = 1
SET @FiscalMonth = 1
SET @FiscalQuarter = 1
SET @FiscalWeekOfYear = 1

IF (EXISTS (SELECT * FROM @LeapTable WHERE @FiscalYear = leapyear))
    BEGIN
        SET @LeapWeek = 1
    END
    ELSE
    BEGIN
        SET @LeapWeek = 0
    END

/*******************************************************************************************/

/* Loop on days in interval*/

WHILE (DATEPART(yy,@CurrentDate) <= @LastYear)
BEGIN
    
/*SET fiscal Month*/
    SELECT @FiscalMonth = CASE
        /* Use this section for a 4-5-4 calendar.
        Every leap year the result will be a 4-5-5.
        WHEN @FiscalWeekOfYear BETWEEN 1 AND 4 THEN 1 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 5 AND 9 THEN 2 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 10 AND 13 THEN 3 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 14 AND 17 THEN 4 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 18 AND 22 THEN 5 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 23 AND 26 THEN 6 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 27 AND 30 THEN 7 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 31 AND 35 THEN 8 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 36 AND 39 THEN 9 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 40 AND 43 THEN 10 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 44 AND (48+@LeapWeek) THEN 11 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN (49+@LeapWeek) AND (52+@LeapWeek) THEN 12 /*4 weeks (5 weeks on leap year)*/
        */

        /*Use this section for a 4-4-5 calendar.  
        Every leap year the result will be a 4-5-5*/
        WHEN @FiscalWeekOfYear BETWEEN 1 AND 4 THEN 1 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 5 AND 8 THEN 2 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 9 AND 13 THEN 3 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 14 AND 17 THEN 4 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 18 AND 21 THEN 5 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 22 AND 26 THEN 6 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 27 AND 30 THEN 7 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 31 AND 34 THEN 8 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 35 AND 39 THEN 9 /*5 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 40 AND 43 THEN 10 /*4 weeks*/
        WHEN @FiscalWeekOfYear BETWEEN 44 AND (47+@leapWeek) THEN 11 /*4 weeks (5 weeks on leap year)*/
        WHEN @FiscalWeekOfYear BETWEEN (48 + @leapWeek) AND (52 + @leapWeek) THEN 12 /*5 weeks*/
        
    END

    /*SET Fiscal Quarter*/
    SELECT @FiscalQuarter = CASE 
        WHEN @FiscalMonth BETWEEN 1 AND 3 THEN 1
        WHEN @FiscalMonth BETWEEN 4 AND 6 THEN 2
        WHEN @FiscalMonth BETWEEN 7 AND 9 THEN 3
        WHEN @FiscalMonth BETWEEN 10 AND 12 THEN 4
    END
    
    SELECT @FiscalQuarterName = CASE 
        WHEN @FiscalMonth BETWEEN 1 AND 3 THEN 'First'
        WHEN @FiscalMonth BETWEEN 4 AND 6 THEN 'Second'
        WHEN @FiscalMonth BETWEEN 7 AND 9 THEN 'Third'
        WHEN @FiscalMonth BETWEEN 10 AND 12 THEN 'Fourth'
    END
    
    /*Set Fiscal Year Name*/
    SELECT @FiscalYearName = 'FY ' + CONVERT(VARCHAR, @FiscalYear)

    INSERT INTO @tb
    (
        PeriodDate,
        FiscalDayOfYear,
        FiscalWeekOfYear,
        FiscalMonth,
        FiscalQuarter,
        FiscalQuarterName,
        FiscalYear,
        FiscalYearName
    ) VALUES (
        @CurrentDate,
        @FiscalDayOfYear,
        @FiscalWeekOfYear,
        @FiscalMonth,
        @FiscalQuarter,
        @FiscalQuarterName,
        @FiscalYear,
        @FiscalYearName
    )

    /*SET next day*/
    SET @CurrentDate = DATEADD(dd, 1, @CurrentDate)
    SET @FiscalDayOfYear = @FiscalDayOfYear + 1
    SET @FiscalWeekOfYear = ((@FiscalDayOfYear-1) / 7) + 1


    IF (@FiscalWeekOfYear > (52+@LeapWeek))
    BEGIN
        /*Reset a new year*/
        SET @FiscalDayOfYear = 1
        SET @FiscalWeekOfYear = 1
        SET @FiscalYear = @FiscalYear + 1
        IF (EXISTS (SELECT * FROM @leapTable WHERE @FiscalYear = leapyear))
        BEGIN
            SET @LeapWeek = 1
        END
        ELSE
        BEGIN
            SET @LeapWeek = 0
        END
    END
END

/********************************************************************************************/

/*Set first and last days of the fiscal months*/
UPDATE @tb
SET
    FiscalFirstDayOfMonth = minmax.StartDate,
    FiscalLastDayOfMonth = minmax.EndDate
FROM
    @tb t,
    (
        SELECT
            FiscalMonth,
            FiscalQuarter,
            FiscalYear,
            MIN(PeriodDate) AS StartDate, 
            MAX(PeriodDate) AS EndDate
        FROM @tb
        GROUP BY
            FiscalMonth,
            FiscalQuarter,
            FiscalYear
    ) minmax

WHERE
    t.FiscalMonth = minmax.FiscalMonth AND
    t.FiscalQuarter = minmax.FiscalQuarter AND
    t.FiscalYear = minmax.FiscalYear 

/*Set first and last days of the fiscal quarters*/

UPDATE @tb
SET FiscalFirstDayOfQuarter = minmax.StartDate,
    FiscalLastDayOfQuarter = minmax.EndDate
FROM
    @tb t,
    (
        SELECT
            FiscalQuarter,
            FiscalYear,
            MIN(PeriodDate) as StartDate,
            MAX(PeriodDate) as EndDate
        FROM
            @tb
        GROUP BY
            FiscalQuarter,
            FiscalYear
    ) minmax
WHERE
    t.FiscalQuarter = minmax.FiscalQuarter AND
    t.FiscalYear = minmax.FiscalYear 

/*Set first and last days of the fiscal years*/

UPDATE @tb
SET
    FiscalFirstDayOfYear = minmax.StartDate,
    FiscalLastDayOfYear = minmax.EndDate
FROM
@tb t,
(
    SELECT FiscalYear, min(PeriodDate) as StartDate, max(PeriodDate) as EndDate
    FROM @tb
    GROUP BY FiscalYear
) minmax
WHERE
    t.FiscalYear = minmax.FiscalYear 

/*Set FiscalYearMonth*/
UPDATE @tb
SET
    FiscalMonthYear = 
        CASE FiscalMonth
        WHEN 1 THEN 'Jan'
        WHEN 2 THEN 'Feb'
        WHEN 3 THEN 'Mar'
        WHEN 4 THEN 'Apr'
        WHEN 5 THEN 'May'
        WHEN 6 THEN 'Jun'
        WHEN 7 THEN 'Jul'
        WHEN 8 THEN 'Aug'
        WHEN 9 THEN 'Sep'
        WHEN 10 THEN 'Oct'
        WHEN 11 THEN 'Nov'
        WHEN 12 THEN 'Dec'
        END + '-' + CONVERT(VARCHAR, FiscalYear)

/*Set FiscalMMYYYY*/
UPDATE @tb
SET
    FiscalMMYYYY = RIGHT('0' + CONVERT(VARCHAR, FiscalMonth),2) + CONVERT(VARCHAR, FiscalYear)

/********************************************************************************************/

UPDATE [Dim].[Date]
    SET FiscalDayOfYear = a.FiscalDayOfYear
      , FiscalWeekOfYear = a.FiscalWeekOfYear
      , FiscalMonth = a.FiscalMonth
      , FiscalQuarter = a.FiscalQuarter
      , FiscalQuarterName = a.FiscalQuarterName
      , FiscalYear = a.FiscalYear
      , FiscalYearName = a.FiscalYearName
      , FiscalMonthYear = a.FiscalMonthYear
      , FiscalMMYYYY = a.FiscalMMYYYY
      , FiscalFirstDayOfMonth = a.FiscalFirstDayOfMonth
      , FiscalLastDayOfMonth = a.FiscalLastDayOfMonth
      , FiscalFirstDayOfQuarter = a.FiscalFirstDayOfQuarter
      , FiscalLastDayOfQuarter = a.FiscalLastDayOfQuarter
      , FiscalFirstDayOfYear = a.FiscalFirstDayOfYear
      , FiscalLastDayOfYear = a.FiscalLastDayOfYear
FROM @tb a
    INNER JOIN [Dim].[Date] b ON a.PeriodDate = b.[Date]

/********************************************************************************************/

SELECT * FROM [Dim].[Date]
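
A quick sanity check on the fiscal columns populated above (a sketch; it assumes the [Dim].[Date] column names used in this script). A 4-4-5 calendar should show months containing 4 or 5 fiscal weeks.

-- Distinct fiscal weeks per fiscal month; expect a repeating 4-4-5 pattern (4-5-5 in leap years)
SELECT FiscalYear,
       FiscalMonth,
       COUNT(DISTINCT FiscalWeekOfYear) AS WeeksInMonth
FROM [Dim].[Date]
WHERE FiscalYear IS NOT NULL
GROUP BY FiscalYear, FiscalMonth
ORDER BY FiscalYear, CAST(FiscalMonth AS INT)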
-- rebuild all indexes online
ALTER INDEX ALL ON Table1
REBUILD WITH (ONLINE = ON);   
GO  
-- rebuild single index online
ALTER INDEX IX_IndexName ON Table1
REBUILD WITH (ONLINE = ON);   
GO  
-- all fragmented indexes on current db, % fragmentation > 30
SELECT a.index_id, OBJECT_NAME(a.object_id), name, avg_fragmentation_in_percent  
FROM sys.dm_db_index_physical_stats (DB_ID(DB_NAME()), 
      NULL, NULL, NULL, NULL) AS a  
     JOIN sys.indexes AS b 
     ON a.object_id = b.object_id AND a.index_id = b.index_id
where avg_fragmentation_in_percent > 30
order by avg_fragmentation_in_percent desc
GO 
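A common follow-up to the fragmentation list above is to generate the maintenance statements directly. This is only a sketch against the same DMVs; the 5% / 30% thresholds are conventional defaults, not requirements.

-- Emit REORGANIZE for moderate fragmentation and an online REBUILD above 30%
SELECT 'ALTER INDEX ' + QUOTENAME(b.name)
       + ' ON ' + QUOTENAME(OBJECT_SCHEMA_NAME(a.object_id)) + '.' + QUOTENAME(OBJECT_NAME(a.object_id))
       + CASE WHEN a.avg_fragmentation_in_percent > 30
              THEN ' REBUILD WITH (ONLINE = ON);'
              ELSE ' REORGANIZE;' END AS maintenance_sql
FROM sys.dm_db_index_physical_stats(DB_ID(), NULL, NULL, NULL, NULL) AS a
JOIN sys.indexes AS b ON a.object_id = b.object_id AND a.index_id = b.index_id
WHERE a.avg_fragmentation_in_percent > 5
  AND b.name IS NOT NULL
ORDER BY a.avg_fragmentation_in_percent DESC;
GO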
-- to understand who is doing what, alternative view/representation
SELECT
	CAST((SELECT qt.text FROM sys.dm_exec_sql_text(qs.sql_handle) AS qt FOR XML PATH('')) as xml) as query_text,
	qs.blocking_session_id,
	qs.start_time, 
	datediff(ss, qs.start_time, getdate()) as ExecutionTime_Seconds,
	getdate() as  CurrentDate,
	datediff(MINUTE, qs.start_time, getdate()) as ExecutionTime_Minutes,
	qs.session_id,
	qs.command,
	qs.status,
	qs.cpu_time, 
	qs.reads, 
	qs.writes, 
	qs.plan_handle,
	qp.query_plan,
	s.host_name, s.login_name, s.program_name,
	qs.wait_type, qs.open_transaction_count, qs.open_resultset_count, qs.row_count, qs.granted_query_memory, qs.transaction_isolation_level
	--,qs.*
FROM sys.dm_exec_requests AS qs
left join sys.dm_exec_sessions s on s.session_id = qs.session_id ---OUTER APPLY sys.dm_exec_sql_text(qs.sql_handle) AS qt
OUTER APPLY sys.dm_exec_query_plan(qs.plan_handle) AS qp
WHERE 	qs.session_id <> @@SPID
	and qs.command not in ('RESOURCE MONITOR', 'XE TIMER', 'XE DISPATCHER', 'LOG WRITER', 'LOCK MONITOR', 'TASK MANAGER', 'TASK MANAGER', 'CHECKPOINT', 'BRKR TASK', 'LAZY WRITER', 'SIGNAL HANDLER', 'TRACE QUEUE TASK', 'BRKR EVENT HNDLR', 'GHOST CLEANUP', 'RECOVERY WRITER', 'SYSTEM_HEALTH_MONITOR', 'RECEIVE', 'UNKNOWN TOKEN', 'FT FULL PASS', 'FT CRAWL MON')
	and isnull(s.program_name, '') <> 'SQL diagnostic manager Collection Service'
ORDER BY ExecutionTime_Minutes DESC;
/* CHECK SIZE OF DB OBJECTS */
SELECT TOP(10)
      o.[object_id]
    , obj = SCHEMA_NAME(o.[schema_id]) + '.' + o.name
    , o.[type]
    , i.total_rows
    , i.total_size
FROM sys.objects o
JOIN (
    SELECT
          i.[object_id]
        , total_size = CAST(SUM(a.total_pages) * 8. / 1024 AS DECIMAL(18,2))
        , total_rows = SUM(CASE WHEN i.index_id IN (0, 1) AND a.[type] = 1 THEN p.[rows] END)
    FROM sys.indexes i
    JOIN sys.partitions p ON i.[object_id] = p.[object_id] AND i.index_id = p.index_id
    JOIN sys.allocation_units a ON p.[partition_id] = a.container_id
    WHERE i.is_disabled = 0
        AND i.is_hypothetical = 0
    GROUP BY i.[object_id]
) i ON o.[object_id] = i.[object_id]
WHERE o.[type] IN ('V', 'U', 'S')
ORDER BY i.total_size DESC;
/* Monitor query plans */

SELECT
    highest_cpu_queries.plan_handle,  
    highest_cpu_queries.total_worker_time, 
    q.dbid, 
    q.objectid, 
    q.number, 
    q.encrypted, 
    q.[text] 
FROM 
    (SELECT TOP 50  
        qs.plan_handle,  
        qs.total_worker_time 
     FROM 
        sys.dm_exec_query_stats qs 
     ORDER BY qs.total_worker_time desc) AS highest_cpu_queries 
     CROSS APPLY sys.dm_exec_sql_text(plan_handle) AS q 
ORDER BY highest_cpu_queries.total_worker_time desc;
/* Find top 10 queries */

SELECT TOP 10 query_stats.query_hash AS "Query Hash", 
    SUM(query_stats.total_worker_time) / SUM(query_stats.execution_count) AS "Avg CPU Time",
    MIN(query_stats.statement_text) AS "Statement Text"
FROM 
    (SELECT QS.*, 
    SUBSTRING(ST.text, (QS.statement_start_offset/2) + 1,
    ((CASE statement_end_offset 
        WHEN -1 THEN DATALENGTH(st.text)
        ELSE QS.statement_end_offset END 
            - QS.statement_start_offset)/2) + 1) AS statement_text
     FROM sys.dm_exec_query_stats AS QS
     CROSS APPLY sys.dm_exec_sql_text(QS.sql_handle) as ST) as query_stats
GROUP BY query_stats.query_hash
ORDER BY 2 DESC;
GO
/* DMV to find useful indexes: */

PRINT 'Missing Indexes: '
PRINT 'The "improvement_measure" column is an indicator of the (estimated) improvement that might '
PRINT 'be seen if the index was created. This is a unitless number, and has meaning only relative '
PRINT 'the same number for other indexes. The measure is a combination of the avg_total_user_cost, '
PRINT 'avg_user_impact, user_seeks, and user_scans columns in sys.dm_db_missing_index_group_stats.'
PRINT ''
PRINT '-- Missing Indexes --'
SELECT CONVERT (varchar, getdate(), 126) AS runtime, 
  mig.index_group_handle, mid.index_handle, 
  CONVERT (decimal (28,1), migs.avg_total_user_cost * migs.avg_user_impact * (migs.user_seeks + migs.user_scans)) AS improvement_measure, 
  'CREATE INDEX missing_index_' + CONVERT (varchar, mig.index_group_handle) + '_' + CONVERT (varchar, mid.index_handle) 
  + ' ON ' + mid.statement 
  + ' (' + ISNULL (mid.equality_columns,'') 
    + CASE WHEN mid.equality_columns IS NOT NULL AND mid.inequality_columns IS NOT NULL THEN ',' ELSE '' END + ISNULL (mid.inequality_columns, '')
  + ')' 
  + ISNULL (' INCLUDE (' + mid.included_columns + ')', '') AS create_index_statement, 
  migs.*, mid.database_id, mid.[object_id]
FROM sys.dm_db_missing_index_groups mig
INNER JOIN sys.dm_db_missing_index_group_stats migs ON migs.group_handle = mig.index_group_handle
INNER JOIN sys.dm_db_missing_index_details mid ON mig.index_handle = mid.index_handle
WHERE CONVERT (decimal (28,1), migs.avg_total_user_cost * migs.avg_user_impact * (migs.user_seeks + migs.user_scans)) > 10
ORDER BY migs.avg_total_user_cost * migs.avg_user_impact * (migs.user_seeks + migs.user_scans) DESC
PRINT ''
GO
/* FIND UNUSED INDEXES - MIGHT AFFECT LOG WRITES */

SELECT o.name Object_Name,
i.name Index_name, 
i.Type_Desc
FROM sys.objects AS o
JOIN sys.indexes AS i
ON o.object_id = i.object_id 
LEFT OUTER JOIN 
sys.dm_db_index_usage_stats AS s 
ON i.object_id = s.object_id 
AND i.index_id = s.index_id
WHERE o.type = 'u'
-- Clustered and Non-Clustered indexes
AND i.type IN (1, 2) 
-- Indexes without stats, or indexes that have been updated but never used
AND ( (s.index_id IS NULL) OR
      (s.user_seeks = 0 AND s.user_scans = 0 AND s.user_lookups = 0) );
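If the goal is cleanup, the list above can be turned into DROP INDEX statements. A sketch only, and worth reviewing by hand: usage stats reset on restart, so an index that looks unused may still matter.

-- Generate DROP INDEX statements for non-clustered indexes with no recorded seeks, scans or lookups
SELECT 'DROP INDEX ' + QUOTENAME(i.name)
       + ' ON ' + QUOTENAME(SCHEMA_NAME(o.schema_id)) + '.' + QUOTENAME(o.name) + ';' AS drop_sql
FROM sys.objects AS o
JOIN sys.indexes AS i ON o.object_id = i.object_id
LEFT OUTER JOIN sys.dm_db_index_usage_stats AS s
       ON i.object_id = s.object_id AND i.index_id = s.index_id
WHERE o.type = 'U'
  AND i.type = 2                          -- non-clustered only
  AND i.is_primary_key = 0
  AND i.is_unique_constraint = 0
  AND (s.index_id IS NULL OR (s.user_seeks = 0 AND s.user_scans = 0 AND s.user_lookups = 0));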
/* STORED PROCEDURES AND FUNCTIONS EXECUTION TIME, COUNT AND AVERAGE */

SELECT DB_NAME(st.dbid) DBName
      ,OBJECT_SCHEMA_NAME(st.objectid,dbid) SchemaName
      ,OBJECT_NAME(st.objectid,dbid) StoredProcedure
      ,max(cp.usecounts) Execution_count
      ,sum(qs.total_worker_time) total_cpu_time
      ,sum(qs.total_worker_time) / (max(cp.usecounts) * 1.0)  avg_cpu_time
 
FROM sys.dm_exec_cached_plans cp join sys.dm_exec_query_stats qs on cp.plan_handle = qs.plan_handle
     CROSS APPLY sys.dm_exec_sql_text(cp.plan_handle) st
where DB_NAME(st.dbid) is not null and cp.objtype = 'proc'
group by DB_NAME(st.dbid),OBJECT_SCHEMA_NAME(objectid,st.dbid), OBJECT_NAME(objectid,st.dbid) 
order by sum(qs.total_worker_time) desc;
SELECT  creation_time 
        ,last_execution_time
        ,total_physical_reads
        ,total_logical_reads 
        ,total_logical_writes
        , execution_count
        , total_worker_time
        , total_elapsed_time
        , (total_elapsed_time / execution_count) avg_elapsed_time
        ,SUBSTRING(st.text, (qs.statement_start_offset/2) + 1,
         ((CASE statement_end_offset
          WHEN -1 THEN DATALENGTH(st.text)
          ELSE qs.statement_end_offset END
            - qs.statement_start_offset)/2) + 1) AS statement_text
FROM sys.dm_exec_query_stats AS qs
CROSS APPLY sys.dm_exec_sql_text(qs.sql_handle) st
WHERE execution_count > 10 -- filter out rare requests
ORDER BY total_elapsed_time / execution_count DESC;
SELECT DB_NAME(dbid) as "Database", COUNT(dbid) as "Number Of Open Connections",
loginame as LoginName
FROM sys.sysprocesses
WHERE dbid > 0
GROUP BY dbid, loginame;
-- Rows with ID existing in both a, b and c
-- JOIN is equivalent to INNER JOIN
SELECT a.ID, a.NAME, b.VALUE1, c.VALUE1 FROM table1 a 
  JOIN table2 b ON a.ID = b.ID
  JOIN table3 c ON a.ID = c.ID
WHERE a.ID >= 1000;
Joining 2 tables in SQL

SELECT X.Column_Name , Y.Column_Name2
FROM TABLES1_NAME X 
INNER JOIN TABLES2_NAME Y ON X.Primary_key = Y.Foreign_key;


--FOR EXAMPLE
--GET THE FIRST_NAME AND JOB_TITLE
--USE EMPLOYEES AND JOBS TABLE
--THE RELATIONSHIP IS JOB_ID

SELECT E.FIRST_NAME , J.JOB_TITLE
FROM EMPLOYEES E
INNER JOIN JOBS J ON J.JOB_ID = E.JOB_ID;

/*
For this challenge you need to create a simple SELECT statement that will return all columns from the products table, and join to the companies table so that you can return the company name.

products table schema:
id
name
isbn
company_id
price

companies table schema:
id
name

You should return all product fields as well as the company name as "company_name".
*/

SELECT products.*, companies.name AS company_name
FROM products
JOIN companies ON company_id = companies.id
SELECT * FROM INFORMATION_SCHEMA.Tables WHERE Table_Name LIKE '%sequence%'
SELECT      c.name  AS 'ColumnName'
            ,t.name AS 'TableName'
FROM        sys.columns c
JOIN        sys.tables  t   ON c.object_id = t.object_id
WHERE       c.name LIKE '%formulatypeid%'
ORDER BY    TableName
            ,ColumnName;
create materialized view MV1    
build immediate    
refresh fast on commit    
as     
     
create materialized view log on <table>    
with sequence, rowId (...)    
including new values    
     
create table <table_name> (    
        DateMonth Date not null,    
        DateYear Date not null,    
        Name varchar(256) not null,    
        Total integer not null);    
     
insert into <table_name> select ...;    
     
create trigger <trigger_name>    
after insert or update on <table>    
for each row     
declare     
     
begin       
end    


create materialized view GROUPBYMonthYearMuseum
build immediate
refresh FAST ON COMMIT
--enable query rewrite
as
SELECT Month, Year, ticket_type , SUM(num_tickets) as NumTickets,
 SUM(revenue) as TotRevenue
FROM museums_tickets mt, timedim t
WHERE mt.id_time = t.id_time
GROUP BY Month, Year, ticket_type;

CREATE MATERIALIZED VIEW LOG ON museums_tickets
WITH SEQUENCE, ROWID (id_time, ticket_type, num_tickets, Revenue)
INCLUDING NEW VALUES;
CREATE MATERIALIZED VIEW LOG ON TIMEDIM
WITH SEQUENCE, ROWID (id_time, Month, Year)
INCLUDING NEW VALUES;
---- Trigger Manually
CREATE TABLE VM1 (
DateMonth DATE NOT NULL,
DateYear INTEGER NOT NULL,
Ticket_Type VARCHAR(20) NOT NULL,
TOT_NumberOfTickets INTEGER,
TOT_Revenue INTEGER);

INSERT INTO VM1 (DateMonth, DateYear, Ticket_Type, TOT_NumberOfTickets,
TOT_Revenue)
(SELECT Month, Year, Ticket_Type,
SUM(NumberOfTickets), SUM(Revenue)
FROM museums_tickets mt, timedim t
WHERE mt.id_time = t.id_time
GROUP BY Month, Year, Ticket_Type);
create TRIGGER TriggerForViewVM1
AFTER INSERT ON museums_tickets
FOR EACH ROW
DECLARE
N NUMBER;
VAR_DateMonth DATE;
VAR_DateYear NUMBER;
BEGIN
UPDATE MS 
    SET    MS.NAME = OC.NAME
    FROM   OPENQUERY(WZ12,
      'select EMP, NAME from Test') OC 
   INNER JOIN [dbo].[TEST1] MS 
     ON OC.EMP = MS.EMP
MERGE `my-dataset.dimension_table` as MAIN using
`my-dataset.temporary_table` as TEMP
on MAIN.PRD_SK = TEMP.PRD_SK
when matched then
UPDATE SET
MAIN.PRD_CATEGORY = TEMP.PRD_CATEGORY
when not matched then
INSERT VALUES(TEMP.PRD_SK, TEMP.PRD_ID, TEMP.PRD_SK, TEMP.PRD_CATEGORY)
with account_session_ste as (
SELECT 
*
FROM `table`
where exists(select 1 from unnest(hits) h 
            where regexp_contains(h.page.hostname, r'signin.account.gov.uk'))
and _table_suffix = FORMAT_DATE('%Y%m%d', DATE_SUB(CURRENT_DATE(), INTERVAL 4 DAY))
)
-- group by 1,2)
select 
fullVisitorId AS user_id,
hits.page.hostname as hostname,   
hits.page.pagePath as pagePath,
hits.hitNumber as hit_number 
-- *
from account_session_ste, unnest(hits) as hits
DECLARE Counter INT64;

BEGIN TRANSACTION;
    SET Counter = 0;
    CREATE TEMP TABLE tmp
    (
    dte date,
    days INT64,
    users INT64
    );
    
    WHILE Counter < 28 DO
        INSERT INTO tmp

        with dte as (
            SELECT dte
            FROM unnest(generate_date_array(date('2021-02-01'), date('2021-12-30'))) dte
        ),
        ids as (
            SELECT clientId,
            PARSE_DATE("%Y%m%d", date) as dte
            FROM `govuk-bigquery-analytics.87773428.ga_sessions_2021*`
        WHERE NOT device.operatingSystem = "iOS"
        )

        SELECT dte.dte as dte, max(Counter) as days, count( distinct ids.clientId) AS users
        FROM dte, ids
        WHERE ids.dte BETWEEN DATE_SUB(dte.dte, INTERVAL Counter DAY) and dte.dte
        group by 1
        order by 1 asc;

        SET Counter = Counter + 1;

    END WHILE;
COMMIT TRANSACTION;
--select * from tmp


SELECT * FROM
  (SELECT * FROM tmp)
  PIVOT(SUM(users) FOR days IN (0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27))
  order by 1 asc
select "field1", "field2", count(*)
from "tableName"
group by "field1", "field2"
HAVING count(*) > 1
### MAKE DIR
mkdir split && cd split

### SPLIT FILE
split -l 1000 /<path>/output-google.sql /<path>/split/split-

### CHANGE EXTENSION
ls | xargs -I % mv % %.sql
CREATE DATABASE mydatabase CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
CREATE TABLE dbo.my_queue
(
    id UNIQUEIDENTIFIER NOT NULL PRIMARY KEY,
    created_at SMALLDATETIME NOT NULL,
    popped_at SMALLDATETIME NOT NULL,
    errors VARCHAR(MAX) NULL,
    payload NVARCHAR(MAX) NOT NULL
);
-- middle row of data_column (a quick median-style trick); note that most engines do not accept count(*)/2
-- as a LIMIT value, so compute the half-count separately or use PERCENTILE_CONT (shown further down)
select * from (select * from table order by data_column asc limit count(*)/2) 
order by data_column desc limit 1;
Private Sub Select_Sector()

Dim rs As DAO.Recordset

Dim RegEx As Object
Set RegEx = CreateObject("vbscript.regexp")

Dim qdef As QueryDef
Set qdef = getCurrentDb.QueryDefs("qry_Select_Sector")
qdef.Connect = CurrentDb.TableDefs("BOCClientIndex").Connect


RegEx.Pattern = "IIf\(\[ServiceStatus\]=3,30,20\)\)=([0-9]+)"
qdef.SQL = RegEx.Replace(qdef.SQL, "IIf([ServiceStatus]=3,30,20))=" & [Forms]![MainMenu_Services]![SelectedStatusIndicator])

RegEx.Pattern = "\(View_qryServiceProviderOrganisationalStructure\.SectorCode\)=([0-9]+)"
qdef.SQL = RegEx.Replace(qdef.SQL, "(View_qryServiceProviderOrganisationalStructure.SectorCode)=" & [Forms]![MainMenu_Services]![SectorCode])


'For Testing purposes only - Do not use in production code
Set rs = qdef.OpenRecordset

Dim i As Long
For i = 0 To rs.Fields.Count - 1
    Debug.Print rs.Fields(i).Name,
Next

rs.MoveFirst
Do Until rs.EOF
    Debug.Print
    For i = 0 To rs.Fields.Count - 1
        Debug.Print rs.Fields(i).value,
    Next
    rs.MoveNext
Loop
End Sub
  
create or replace function decode_url(url text)
  returns text as
$BODY$
DECLARE result text;
BEGIN
    if url isnull then
        return null;
    end if;

    BEGIN
        with str AS (
            select
                   case when url ~ '^%[0-9a-fA-F][0-9a-fA-F]'
                   then array['']
                   end
            || regexp_split_to_array(url, '(%[0-9a-fA-F][0-9a-fA-F])+', 'i') plain,

            array(select (regexp_matches(url, '((?:%[0-9a-fA-F][0-9a-fA-F])+)', 'gi'))[1]) encoded
            )

        select string_agg(plain[i] || coalesce(convert_from(decode(replace(encoded[i], '%',''), 'hex'), 'utf8'),''),'')
        from str, (select generate_series(1, array_upper(encoded, 1) + 2) i FROM str) serie
        into result;

    EXCEPTION WHEN OTHERS THEN
        raise notice 'failed: %', url;
        return url;
    END;

    return coalesce(result, url);

END;

$BODY$
  LANGUAGE plpgsql IMMUTABLE STRICT;
-- note: despite its name, this one-argument version checks for a PRIMARY KEY (same body as table_has_pk below)
CREATE OR REPLACE
FUNCTION table_has_column(tablename TEXT,
OUT res boolean)
 RETURNS boolean

AS $func$
-- DECLARE res boolean DEFAULT FALSE;

BEGIN
SELECT
    (count(constraint_name)>0)::boolean AS res
INTO
    res
FROM
    information_schema.table_constraints
WHERE
    table_name = tablename
    AND constraint_type = 'PRIMARY KEY';
END;

$func$ LANGUAGE plpgsql;

CREATE OR REPLACE
FUNCTION table_has_pk(tablename TEXT,
OUT res boolean) RETURNS boolean AS $func$ 
-- res is the OUT parameter, so no extra declaration is needed here

BEGIN
SELECT
    (count(constraint_name)>0)::boolean AS res
INTO
    res
FROM
    information_schema.table_constraints
WHERE
    table_name = tablename
    AND constraint_type = 'PRIMARY KEY';
END;

$func$ LANGUAGE plpgsql;

CREATE OR REPLACE
FUNCTION table_has_column(tablename TEXT,
columnname TEXT,
OUT res boolean)
 RETURNS boolean
 
AS $func$ 

BEGIN
SELECT
    (count(column_name) > 0)::boolean AS res
INTO
    res
FROM
    information_schema.columns
WHERE
    table_name = tablename
    AND column_name = columnname;
END;

$func$ LANGUAGE plpgsql;

CREATE OR REPLACE
FUNCTION table_has_columns(tablename TEXT,
VARIADIC columnname TEXT[]) RETURNS boolean AS $func$

DECLARE
  x TEXT;
  res boolean DEFAULT TRUE;

BEGIN
  FOREACH x IN ARRAY columnname LOOP
    EXIT WHEN res = FALSE;

    SELECT table_has_column(tablename, x)
    INTO res;
  END LOOP;

  RETURN res;
END;

$func$ LANGUAGE plpgsql;
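Example calls for the helpers above (a sketch; the person table and its columns are only hypothetical here):

select table_has_pk('person');                          -- does the table have a primary key?
select table_has_column('person', 'first_name');        -- does the column exist?
select table_has_columns('person', 'id', 'first_name'); -- do all listed columns exist?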
CREATE OR REPLACE FUNCTION
random_text(n_length INTEGER default 50)
RETURNS TEXT
LANGUAGE SQL
AS $$

select
string_agg(substr(characters, (random() * length(characters) + 0.5)::integer, 1), '') as random_word
from (values('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_-')) as symbols(characters)
-- length of word
join generate_series(1, n_length) on 1 = 1;

$$;
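Example call (the argument overrides the default length of 50):

select random_text(12);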
user = User.first

result = user.user_books.in_order_of(:status, %w[to_read currently_reading read])

=> #<ActiveRecord::Relation [#<UserBook id: 3, user_id: 1, status: "to_read">, #<UserBook id: 4, user_id: 1, status: "to_read">, #<UserBook id: 5, user_id: 1, status: "currently_reading">, #<UserBook id: 6, user_id: 1, status: "read">]>
ASCII -- Returns the equivalent ASCII value for a specific character.

CHAR_LENGTH -- Returns the character length of a string.

CHARACTER_LENGTH -- Same as CHAR_LENGTH.

CONCAT -- Adds expressions together, with a minimum of 2.

CONCAT_WS -- Adds expressions together, but with a separator between each value.

FIELD -- Returns an index value relative to the position of a value within a list of values.

FIND_IN_SET -- Returns the position of a string in a list of strings.

FORMAT -- When passed a number, returns that number formatted to include commas (eg 3,400,000).

INSERT -- Allows you to insert one string into another at a certain point, for a certain number of characters.

INSTR -- Returns the position of the first time one string appears within another.

LCASE -- Converts a string to lowercase.

LEFT -- Starting from the left, extracts the given number of characters from a string and returns them as another.

LENGTH -- Returns the length of a string, but in bytes.

LOCATE -- Returns the position of the first occurrence of one string within another.

LOWER -- Same as LCASE.

LPAD -- Left pads one string with another, to a specific length.

LTRIM -- Removes any leading spaces from the given string.

MID -- Extracts one string from another, starting from any position.

POSITION -- Returns the position of the first time one substring appears within another.

REPEAT -- Allows you to repeat a string a given number of times.

REPLACE -- Allows you to replace any instances of a substring within a string, with a new substring.

REVERSE	-- Reverses the string.

RIGHT -- Starting from the right, extracts the given number of characters from a string and returns them as another.

RPAD -- Right pads one string with another, to a specific length.

RTRIM -- Removes any trailing spaces from the given string.

SPACE -- Returns a string full of spaces equal to the amount you pass it.

STRCMP -- Compares 2 strings for differences.

SUBSTR -- Extracts one substring from another, starting from any position.

SUBSTRING -- Same as SUBSTR.

SUBSTRING_INDEX	-- Returns a substring from a string before the passed substring is found the number of times equals to the passed number.

TRIM --	Removes trailing and leading spaces from the given string. Same as if you were to run LTRIM and RTRIM together.

UCASE -- Converts a string to uppercase.

UPPER -- Same as UCASE.
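A few of the string functions above in one query; the customers table and its columns are only illustrative:

-- CONCAT_WS joins with a separator, LPAD zero-pads, SUBSTRING_INDEX splits on a delimiter, TRIM strips spaces
SELECT CONCAT_WS(' ', first_name, surname)  AS full_name,
       LPAD(id, 6, '0')                     AS padded_id,
       SUBSTRING_INDEX(email, '@', -1)      AS email_domain,
       TRIM('  hello  ')                    AS trimmed
FROM customers;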
BIT(size) -- A bit-value type with a default of 1. The allowed number of bits in a value is set via the size parameter, which can hold values from 1 to 64.

TINYINT(size) -- A very small integer with a signed range of -128 to 127, and an unsigned range of 0 to 255. Here, the size parameter specifies the maximum allowed display width, which is 255.

BOOL -- Essentially a quick way of setting the column to TINYINT with a size of 1. 0 is considered false, whilst 1 is considered true.

BOOLEAN	-- Same as BOOL.

SMALLINT(size) -- A small integer with a signed range of -32768 to 32767, and an unsigned range from 0 to 65535. Here, the size parameter specifies the maximum allowed display width, which is 255.

MEDIUMINT(size) -- A medium integer with a signed range of -8388608 to 8388607, and an unsigned range from 0 to 16777215. Here, the size parameter specifies the maximum allowed display width, which is 255.

INT(size) -- A medium integer with a signed range of -2147483648 to 2147483647, and an unsigned range from 0 to 4294967295. Here, the size parameter specifies the maximum allowed display width, which is 255.

INTEGER(size) -- Same as INT.

BIGINT(size) -- A large integer with a signed range of -9223372036854775808 to 9223372036854775807, and an unsigned range from 0 to 18446744073709551615. Here, the size parameter specifies the maximum allowed display width, which is 255.

FLOAT(p) -- A floating point number value. If the precision (p) parameter is between 0 to 24, then the data type is set to FLOAT(), whilst if it's from 25 to 53, the data type is set to DOUBLE(). This behaviour is to make the storage of values more efficient.

DOUBLE(size, d) -- A floating point number value where the total digits are set by the size parameter, and the number of digits after the decimal point is set by the d parameter.

DECIMAL(size, d) -- An exact fixed point number where the total number of digits is set by the size parameters, and the total number of digits after the decimal point is set by the d parameter.

DEC(size, d) -- Same as DECIMAL.
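A table definition touching several of the types above (the table and column names are only illustrative):

CREATE TABLE product_prices (
    id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY,
    is_active BOOL NOT NULL DEFAULT 1,      -- effectively TINYINT(1)
    quantity SMALLINT UNSIGNED NOT NULL,
    unit_price DECIMAL(10,2) NOT NULL,      -- exact fixed point: 10 digits total, 2 after the decimal
    weight_kg DOUBLE
);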
% -- Equates to zero or more characters.
-- Example: Find all customers with surnames ending in ‘ory’.
SELECT * FROM customers
WHERE surname LIKE '%ory';

_ -- Equates to any single character.
-- Example: Find all customers living in cities beginning with any 3 characters, followed by ‘vale’.
SELECT * FROM customers
WHERE city LIKE '___vale';

[charlist] -- Equates to any single character in the list.
-- Example: Find all customers with first names beginning with J, K or T.
SELECT * FROM customers
WHERE first_name LIKE '[jkt]%';
BIN -- Returns the given number in binary.

BINARY -- Returns the given value as a binary string.

CAST -- Converts one type into another.

COALESCE -- From a list of values, returns the first non-null value.

CONNECTION_ID -- For the current connection, returns the unique connection ID.

CONV -- Converts the given number from one numeric base system into another.

CONVERT -- Converts the given value into the given datatype or character set.

CURRENT_USER -- Returns the user and hostname which was used to authenticate with the server.

DATABASE -- Gets the name of the current database.

GROUP BY -- Used alongside aggregate functions (COUNT, MAX, MIN, SUM, AVG) to group the results.

HAVING -- Used in the place of WHERE with aggregate functions.

IF -- If the condition is true it returns a value, otherwise it returns another value.

IFNULL -- If the given expression equates to null, it returns the given value.

ISNULL -- If the expression is null, it returns 1, otherwise returns 0.

LAST_INSERT_ID -- For the last row which was added or updated in a table, returns the auto increment ID.

NULLIF -- Compares the 2 given expressions. If they are equal, NULL is returned, otherwise the first expression is returned.

SESSION_USER -- Returns the current user and hostname.

SYSTEM_USER -- Same as SESSION_USER.

USER -- Same as SESSION_USER.

VERSION -- Returns the current version of the MySQL powering the database.
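A few of the helpers above side by side (the orders table and its columns are only illustrative):

-- IFNULL substitutes a default, NULLIF turns a sentinel into NULL, COALESCE picks the first non-null value
SELECT IFNULL(discount, 0)              AS discount_or_zero,
       NULLIF(status, 'unknown')        AS status_or_null,
       COALESCE(shipped_at, ordered_at) AS best_known_date,
       DATABASE()                       AS current_db
FROM orders;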
ADDDATE -- Adds a date interval (eg: 10 DAY) to a date (eg: 20/01/20) and returns the result (eg: 20/01/30).

ADDTIME -- Adds a time interval (eg: 02:00) to a time or datetime (05:00) and returns the result (07:00).

CURDATE -- Gets the current date.

CURRENT_DATE -- Same as CURDATE.

CURRENT_TIME -- Gets the current time.

CURRENT_TIMESTAMP -- Gets the current date and time.

CURTIME -- Same as CURRENT_TIME.

DATE -- Extracts the date from a datetime expression.

DATEDIFF -- Returns the number of days between the 2 given dates.

DATE_ADD -- Same as ADDDATE.

DATE_FORMAT -- Formats the date to the given pattern.

DATE_SUB -- Subtracts a date interval (eg: 10 DAY) from a date (eg: 20/01/20) and returns the result (eg: 20/01/10).

DAY -- Returns the day for the given date.

DAYNAME -- Returns the weekday name for the given date.

DAYOFWEEK -- Returns the index for the weekday for the given date.

DAYOFYEAR -- Returns the day of the year for the given date.

EXTRACT -- Extracts from the date the given part (eg MONTH for 20/01/20 = 01).

FROM_DAYS -- Returns the date from the given numeric date value.

HOUR -- Returns the hour from the given time or datetime.

LAST_DAY -- Gets the last day of the month for the given date.

LOCALTIME -- Gets the current local date and time.

LOCALTIMESTAMP -- Same as LOCALTIME.

MAKEDATE -- Creates a date and returns it, based on the given year and number of days values.

MAKETIME -- Creates a time and returns it, based on the given hour, minute and second values.

MICROSECOND -- Returns the microsecond of a given time or datetime.

MINUTE -- Returns the minute of the given time or datetime.

MONTH -- Returns the month of the given date.

MONTHNAME -- Returns the name of the month of the given date.

NOW -- Same as LOCALTIME.

PERIOD_ADD -- Adds the given number of months to the given period.

PERIOD_DIFF -- Returns the difference between 2 given periods.

QUARTER -- Returns the year quarter for the given date.

SECOND -- Returns the second of a given time or datetime.

SEC_TO_TIME -- Returns a time based on the given seconds.

STR_TO_DATE -- Creates a date and returns it based on the given string and format.

SUBDATE -- Same as DATE_SUB.

SUBTIME -- Subtracts a time interval (eg: 02:00) from a time or datetime (05:00) and returns the result (03:00).

SYSDATE -- Same as LOCALTIME.

TIME -- Returns the time from a given time or datetime.

TIME_FORMAT -- Returns the given time in the given format.

TIME_TO_SEC -- Converts and returns a time into seconds.

TIMEDIFF -- Returns the difference between 2 given time/datetime expressions.

TIMESTAMP -- Returns the datetime value of the given date or datetime.

TO_DAYS -- Returns the total number of days that have passed from ‘00-00-0000’ to the given date.

WEEK -- Returns the week number for the given date.

WEEKDAY -- Returns the weekday number for the given date.

WEEKOFYEAR -- Returns the week number for the given date.

YEAR -- Returns the year from the given date.

YEARWEEK -- Returns the year and week number for the given date.
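Some of the date functions combined (the orders table is only illustrative):

-- DATEDIFF counts days between dates, DATE_ADD shifts a date, DATE_FORMAT renders it, LAST_DAY finds month end
SELECT DATEDIFF(CURDATE(), ordered_at)           AS days_since_order,
       DATE_ADD(ordered_at, INTERVAL 30 DAY)     AS payment_due,
       DATE_FORMAT(ordered_at, '%Y-%m-%d %H:%i') AS ordered_at_pretty,
       LAST_DAY(ordered_at)                      AS end_of_order_month
FROM orders;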
ABS -- Returns the absolute value of the given number.

ACOS -- Returns the arc cosine of the given number.

ASIN -- Returns the arc sine of the given number.

ATAN -- Returns the arc tangent of one or 2 given numbers.

ATAN2 -- Returns the arc tangent of 2 given numbers.

AVG -- Returns the average value of the given expression.

CEIL -- Returns the closest whole number (integer) upwards from a given decimal point number.

CEILING -- Same as CEIL.

COS -- Returns the cosine of a given number.

COT -- Returns the cotangent of a given number.

COUNT -- Returns the number of records returned by a SELECT query.

DEGREES -- Converts a radians value to degrees.

DIV -- Allows you to divide integers.

EXP -- Returns e to the power of the given number.

FLOOR -- Returns the closest whole number (integer) downwards from a given decimal point number.

GREATEST -- Returns the highest value in a list of arguments.

LEAST -- Returns the smallest value in a list of arguments.

LN -- Returns the natural logarithm of the given number.

LOG -- Returns the natural logarithm of the given number, or the logarithm of the given number to the given base.

LOG10 -- Does the same as LOG, but to base 10.

LOG2 -- Does the same as LOG, but to base 2.

MAX -- Returns the highest value from a set of values.

MIN -- Returns the lowest value from a set of values.

MOD -- Returns the remainder of the given number divided by the other given number.

PI -- Returns PI.

POW -- Returns the value of the given number raised to the power of the other given number.

POWER -- Same as POW.

RADIANS -- Converts a degrees value to radians.

RAND -- Returns a random number.

ROUND -- Rounds the given number to the given amount of decimal places.

SIGN -- Returns the sign of the given number.

SIN -- Returns the sine of the given number.

SQRT -- Returns the square root of the given number.

SUM -- Returns the value of the given set of values combined.

TAN -- Returns the tangent of the given number.

TRUNCATE -- Returns a number truncated to the given number of decimal places.
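And a few of the numeric functions in one aggregate query (the sales table is only illustrative):

-- ROUND/FLOOR/CEIL shape values, MOD gives a remainder, SUM/AVG/MIN/MAX aggregate per group
SELECT seller_id,
       ROUND(AVG(amount), 2) AS avg_amount,
       FLOOR(MIN(amount))    AS min_amount_floor,
       CEIL(MAX(amount))     AS max_amount_ceil,
       MOD(COUNT(*), 10)     AS count_mod_10,
       SUM(amount)           AS total
FROM sales
GROUP BY seller_id;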
with agg_ as (
SELECT
    ec.ride_id, 
    ev.ride_id as segment_received_ride,
    ev.user_id as segment_received_user_id,
    ev.full_cost,
    ev.service_area_name,
    ev.minutes,
    ev.company_id,
    ec.user_id,
    array_agg(struct(ec.time_publish,ec.phase) ORDER BY time_publish asc) as status
FROM `reby-cloud.temp_eu.email_comms` ec
LEFT JOIN `reby-safir.data_python.end_of_ride_email_v4_view` ev
    on ec.ride_id = ev.ride_id
group by 1,2,3,4,5,6,7,8
)

select *
    , (
        select as struct 
            time_publish,
            phase
        from unnest(status) 
        where phase = 'sent-to-segment'
        order by time_publish asc
        limit 1
    ).*
from agg_
order by time_publish desc
left_df.merge(right_df, on='user_id', how='left', indicator=True)
create or replace table `temp_eu.delete_stripe_refunds` as (
select * from (
with pre_process as (
    SELECT
		*
		FROM
		`reby-cloud.reby_stripe.py_stripe_preprocessed`
		--where id in (select id from`reby-cloud.temp_eu.delete_stripe` where amount_refunded > 0 )
		--where customer = 'cus_GorMSGP2VREHQs'
		where rn = 1
		order by customer,id,rn asc
)

select
    case when refunds.charge is not null and charges.type ='refund' then refunds.balance_transaction else charges.balance_transaction
        end as balance_transaction,
    case when refunds.charge is not null and charges.type ='refund' then refunds.created_at else charges.created_at
        end as created_at,
    charges.charge_id,
    charges.type,
    charges.paid,
    charges.amount as charge_amount,
    case when refunds.charge is not null and charges.type ='refund' then refunds.amount else null
        end as refund_amount,
    charges.customer,
    charges.user_id
from (
    select
        spp.balance_transaction,
        s.*,
        u.id as user_id,
        sa.service_area
    from `reby-cloud.reby_stripe.py_stripe_processed` s
    left join `reby-cloud.analytics_reby_v1_eu.pg_users_json` u
        on s.customer = u.payment_processor[safe_offset(0)].token
    left join `reby-cloud.reby_marketing_eu.users_servicearea_first_mv_ride` sa
        on u.id = sa.user_id
    left join pre_process spp
            on spp.id = s.charge_id
    --order by 1,2,3,4,5,6,7,8,9,10,11,12,13
    where date(s.created_at) between '2021-02-01' and '2021-09-03'
    --order by balance_transaction desc
    ) as charges
    left join `reby-cloud.reby_stripe.stripe_refunds_api` refunds on charges.charge_id = refunds.charge
    --where refunds.status = 'succeeded'
)
)
;
select
  st.*,
  str.created_at,
  str.balance_transaction,
  str.user_id,
  str.charge_amount 
 from `reby-cloud.temp_eu.delete_stripe_downloaded_transactions` st
 left join `reby-cloud.temp_eu.delete_stripe_refunds` str on st.balance_transaction_id = str.balance_transaction 
SELECT o.name, 
       ps.last_execution_time 
FROM   sys.dm_exec_procedure_stats ps 
INNER JOIN 
       sys.objects o 
       ON ps.object_id = o.object_id 
WHERE  DB_NAME(ps.database_id) = '' 
ORDER  BY 
       ps.last_execution_time DESC  
SELECT * FROM companies 
WHERE company_name SIMILAR TO '%(apple|google|microsoft)%';
with t1(col) as(
  select 1    from dual union all
  select 2    from dual union all
  select null from dual union all
  select 3    from dual
)
select *
  from t1
order by col asc nulls last;
select ord_num, agent_code, ord_date, ord_amount
from orders
where(agent_code, ord_amount) IN
(SELECT agent_code, MIN(ord_amount)
FROM orders 
GROUP BY agent_code);  
ALTER TABLE stars
ALTER COLUMN name TYPE varchar(50);
DECLARE @AnyDate DATETIME
SET @AnyDate = GETDATE()

SELECT @AnyDate AS 'Input Date',
  DATEADD(q, DATEDIFF(q, 0, @AnyDate), 0) 
                        AS 'Quarter Start Date',       
  DATEADD(d, -1, DATEADD(q, DATEDIFF(q, 0, @AnyDate) + 1, 0)) 
                        AS 'Quarter End Date'
DECLARE @Year DATE = '2013-01-01'
DECLARE @Quarter INT = 4;

SELECT  DATEADD(QUARTER, @Quarter - 1, @Year) ,
        DATEADD(DAY, -1, DATEADD(QUARTER,  @Quarter, @Year))
SELECT title, extract("year" from current_date) - "year" AS age
  FROM films
  ORDER BY age ASC;
psql (terminal commands)
- list all databases : \l
- list of all tables : \d
- list only the tables : \dt
- list all fields in a table : \d person (name of the table)
- import from external file : \i c:/postgrescourse/person.sql
Ex. \i /home/forge/forge33-5-16-23.sql


- connect to a database with psql and import an .sql file
> psql -h localhost -U postgres -d forgeprod -f C:\forgeprod.sql
https://kb.objectrocket.com/postgresql/how-to-run-an-sql-file-in-postgres-846

- access to database to query 
> psql -U postgres -d forgeprod

- datatypes info:
bigserial : auto-incrementing integer (backed by a sequence)
date : year,month,day



- create database : create database test;
- drop database : drop database test;
- drop table : drop table person;
- connect to a database ( two forms ) :  
\c test (or) psql -h localhost -p 5432 -U postgres test
- create table syntax : create table table_name ( column name + data type + constraints, if any ) 

- create table : 
create table person ( 
	id int,
	first_name varchar(50),
  	last_name varchar(50),
    gender varchar(6),
    date_of_birth date
    );
- create table w/constraints ( rules that must be satisfied ): 
create table person (
 	id bigserial not null primary key,
 	first_name varchar(50) not null,
 	last_name varchar(50) not null,
    gender varchar(6) not null, 
    date_of_birth date not null
	);

- insert a row : 
insert into person ( first_name, last_name, gender, date_of_birth) values ('Anne','Smith','Female',date '1988-01-09');

- Using OFFSET and LIMIT : select * from person OFFSET 5 LIMIT 5;
- Using OFFSET and FETCH : select * from person OFFSET 5 FETCH FIRST 5 ROWS ONLY;
- Using BETWEEN : select * from person where date_of_birth BETWEEN DATE '2000-01-01' and '2015-01-01';
- Differences between LIKE and ILIKE : ILIKE is case insensitive (see the example after this list). 

- Using Group By : select country_of_birth, count(*) from person group by country_of_birth; ( will count how many people are from that country )
- Using Having with Group By : select country_of_birth, count(*) from person group by country_of_birth having count(*) > 5; ( only groups with a count above 5 are shown )
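
A short illustration of the LIKE vs ILIKE difference noted above, against the same person table:

select * from person where first_name like 'anne%';   -- case sensitive: does not match 'Anne'
select * from person where first_name ilike 'anne%';  -- case insensitive: matches 'Anne'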





 USE Your_Database;
 GO
 EXECUTE AS USER = N'the_user_name';
 GO
 SELECT 
    s.name,
    o.name,
    p.[permission_name]
 FROM sys.objects AS o 
 INNER JOIN sys.schemas AS s
 ON o.[schema_id] = s.[schema_id]
 CROSS APPLY sys.fn_my_permissions(QUOTENAME(s.name) 
   + N'.' + QUOTENAME(o.name), N'OBJECT') AS p
   WHERE o.[type] IN (N'U', N'V') -- tables and views
   AND p.subentity_name = N''; -- ignore column permissions 
 GO
 REVERT;
SELECT setval(
        pg_get_serial_sequence('TABLE','COLUMN'), 
        max(id)) 
FROM TABLE;
<?php
$results = $wpdb->get_results( "SELECT * FROM $table_name"); // Query to fetch data from database table and storing in $results
if(!empty($results))                        // Checking if $results have some values or not
{    
    echo "<table width='100%' border='0'>"; // Adding <table> and <tbody> tag outside foreach loop so that it wont create again and again
    echo "<tbody>";      
    foreach($results as $row){   
    $userip = $row->user_ip;               //putting the user_ip field value in variable to use it later in update query
    echo "<tr>";                           // Adding rows of table inside foreach loop
    echo "<th>ID</th>" . "<td>" . $row->id . "</td>";
    echo "</tr>";
    echo "<td colspan='2'><hr size='1'></td>";
    echo "<tr>";        
    echo "<th>User IP</th>" . "<td>" . $row->user_ip . "</td>";   //fetching data from user_ip field
    echo "</tr>";
    echo "<td colspan='2'><hr size='1'></td>";
    echo "<tr>";        
    echo "<th>Post ID</th>" . "<td>" . $row->post_id . "</td>";
    echo "</tr>";
    echo "<td colspan='2'><hr size='1'></td>";
    echo "<tr>";        
    echo "<th>Time</th>" . "<td>" . $row->time . "</td>";
    echo "</tr>";
    echo "<td colspan='2'><hr size='1'></td>";
    }
    echo "</tbody>";
    echo "</table>"; 

}
?>
SELECT column-names
  FROM table-name1
 WHERE value IN (SELECT column-name
                   FROM table-name2
                  WHERE condition)
SELECT Fname, Lname
FROM Employee
ORDER BY Salary
OFFSET 2 ROWS;
select 
  schema_name(tab.schema_id) as [schema_name], 
  tab.[name] as table_name 
from 
  sys.tables tab 
  left outer join sys.indexes pk on tab.object_id = pk.object_id 
  and pk.is_primary_key = 1 
where 
  pk.object_id is null 
order by 
  schema_name(tab.schema_id), 
  tab.[name]
DELETE FROM [dbo].[logs] WHERE date < DATEADD(DAY, -30, GETDATE())
DECLARE
  dates STRING;
SET
  dates = (
  SELECT
    CONCAT('("', STRING_AGG(DISTINCT REPLACE(LEFT(started_at,10),"-",""), '", "'), '")'),
  FROM
    `analytics-dev-308300.dtm_engagement.ft_content_consumption` );

EXECUTE IMMEDIATE
  FORMAT("""
SELECT DISTINCT * FROM
    (SELECT
    user_id,
    group_id,
    REPLACE(LEFT(started_at,10),"-","") as started_at,
    FROM
    `analytics-dev-308300.dtm_engagement.ft_content_consumption`)
PIVOT
(COUNT(*) as s
for started_at in %s)""",dates)
SELECT DISTINCT * FROM
    (SELECT
    user_id,
    group_id,
    REPLACE(LEFT(started_at,10),"-","") as started_at,
    FROM
    `analytics-dev-308300.dtm_engagement.ft_content_consumption`)
PIVOT
(COUNT(*) as s
for started_at in (list_of_values))
CREATE OR REPLACE TABLE talentcards.users
AS
SELECT DISTINCT * FROM talentcards.users
-- replace "sale" with the column name and "sales" with the table name
SELECT PERCENTILE_CONT(0.5) WITHIN GROUP(ORDER BY sale) FROM sales;
\d tablename # view table
\dt # show all tables
\q # exit psql console
\du # show all users
\l # show all databases
\conninfo # get connection info
psql -U username -d dbname
psql -h 127.0.0.1 -U postgres
sudo -u username createdb dbname
sudo -u postgres createuser --superuser username
sudo /etc/init.d/postgresql restart
DELETE FROM tbl_sku
WHERE sku <> 'hello';
INSERT INTO tbl_name (col1, col2, col3) VALUES ('03/03/2021', 13.76, 'MAS-465');
ALTER ROLE postgres
WITH PASSWORD 'newpassword';
ALTER TABLE tbl_sku
ALTER COLUMN col_name TYPE character(50);
ALTER TABLE tbl_sku
RENAME COLUMN old_name TO new_name;
ALTER TABLE tbl_sku
ADD COLUMN new_col character(15);
SELECT 
	t.col1,
	s.col3
FROM tbl1 AS t, tbl2 AS s
WHERE t.col2 = s.col1;
CREATE TEMPORARY TABLE __copy AS (SELECT * FROM tbl_sku LIMIT 0);

COPY __copy (col_1, col_2, col_3) FROM 'C:/Users/CB06/Downloads/upload5.csv' WITH (FORMAT csv, DELIMITER ',', FORCE_NULL (sku, title, supplier, ean), HEADER); -- import from csv file

UPDATE tbl_sku SET col1 = __copy.col1, col2 = __copy.col2 FROM __copy WHERE tbl_sku.sku = __copy.sku; -- update rows
           UPDATE table1 
           SET status = (SELECT t2.status FROM table2 t2 WHERE t2.trans_id = id) , 
               name = (SELECT t2.name FROM table2 t2 WHERE t2.trans_id = id)
           WHERE id IN (SELECT trans_id FROM table2 t2 WHERE t2.trans_id= id)
SELECT CompanyName, 
       ProductCount = (SELECT COUNT(P.id)
                         FROM [Product] P
                        WHERE P.SupplierId = S.Id)
  FROM Supplier S
DATE_SUB(CURRENT_DATE('UTC-3:00'), INTERVAL 1 DAY)
CURRENT_DATE('UTC-3:00')
SELECT
     *
     FROM
     EXTERNAL_QUERY("reby-cloud.eu.reby_prod",
     '''
     select 
     concat('cu_',id_to_text(cu.id)) as id,
     id_to_time(cu.id) as created_at,
     concat('acc_',id_to_text(cu.balance_account_id)) as account_id,
     concat('c_',id_to_text(cu.company_id)) as company_id,
     concat('usr_',id_to_text(cu.user_id)) as user_id,
     --id_to_time(cu.user_id) as user_first_created_at,
     firebase_user_token,
     is_deleted,
     updated_at
     from company_user cu 
     --where id >= id_from_time(NOW() - INTERVAL '1 DAY')
     --where date(id_to_time(id)) > now() - interval '3 day'
     where id < id_from_time(date('2020-06-01'))
     ''' )
     ;
# Packages
 
import os
from google.cloud import bigquery
from google.oauth2 import service_account
 
 
# Parameters
os.environ["GOOGLE_APPLICATION_CREDENTIALS"]='/Users/jmbenedetto/Library/Mobile Documents/com~apple~CloudDocs/#Formação/Dissertation/Research/02 Research v2/00 Base/key.json'
 
# project_name and dataset_name below are placeholders: set them to your own project and dataset
client = bigquery.Client(project=project_name)
dataset_ref = client.dataset(dataset_name)
load_config = bigquery.LoadJobConfig()
 
 
# Code
 
query = """
        DELETE 
        FROM dataset_name.table_name
        WHERE criteria;
"""
# "file" / file_source are placeholders; array parameters are only needed if the query references them (e.g. WHERE file IN UNNEST(@file))
query_params = [
    bigquery.ArrayQueryParameter("file", "STRING", file_source)
]
 
job_config = bigquery.QueryJobConfig()
job_config.query_parameters = query_params
query_job = client.query(
    query,
    # Location must match that of the dataset(s) referenced in the query.
    location="EU",
    job_config=job_config,
)  # API request - starts the query
 
query_job.result(timeout=60.0)
assert query_job.state == "DONE"
--users with several cards
SELECT concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
       count(distinct(card__fingerprint))
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null
GROUP BY 1
having count(distinct(card__fingerprint))>5
ORDER BY 2 DESC
LIMIT 100;


--cards used by several accounts
SELECT --concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
card__fingerprint,
card__last_four,
       count(distinct(ppg.user_id))
FROM payment_processor_gateway ppg
left join public.user on public.user.id=ppg.user_id
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint is not null
GROUP BY 1,2
having count(distinct(ppg.user_id))>3
ORDER BY 3 DESC;

--users who use the card with the fingerprint you enter:
SELECT 
distinct(concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id))),
--distinct(ppg.user_id),
card__fingerprint
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint='+7qy4w/5VsCCCtwf'   
ORDER BY 1 DESC
LIMIT 100


--users who have used cards that have been used in more than 5 accounts
with data as (SELECT --concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
card__fingerprint,
       count(distinct(ppg.user_id))
FROM payment_processor_gateway ppg
left join public.user on public.user.id=ppg.user_id
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint is not null and public.user.is_staff is false and public.user.is_deleted=false
GROUP BY 1
having count(distinct(ppg.user_id))>3
ORDER BY 2 DESC)
select
distinct(public.user.phone_number),
public.user.first_name,
public.user.last_name
--data.card__fingerprint
--distinct(ppg.user_id),
FROM data 
left join payment_processor_gateway ppg on ppg.card__fingerprint=data.card__fingerprint 
left join public.user on public.user.id=ppg.user_id
and public.user.is_staff is false and public.user.phone_number is not null
--order by data.card__fingerprint;

--users with devices on which more than 5 accounts have been registered
with data as (SELECT hash_device,
       count(hash_device) as number_accounts
FROM user_device ud
GROUP BY 1
HAVING count(hash_device)>=5
ORDER BY 2 desc)
SELECT distinct(public.user.phone_number),
public.user.first_name,
       public.user.last_name,
       public.user.phone_number,
       --data.hash_device,
       service_area.name,
       user_last_location.last_location_at
FROM DATA
LEFT JOIN user_device ud ON ud.hash_device=data.hash_device
LEFT JOIN public.user ON public.user.id=ud.user_id
AND public.user.is_staff IS FALSE
AND public.user.phone_number IS NOT NULL
and public.user.is_deleted=false
left join user_last_location on public.user.id=user_last_location.user_id 
left join service_area on user_last_location.last_service_area_id = service_area.id
where service_area.name='Terrassa'
ORDER BY user_last_location.last_location_at desc
;

--Analysis of user Alba
SELECT 
distinct(concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id))),
--distinct(ppg.user_id),
card__fingerprint,
card__last_four
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where /* reason is null and*/ card__last_four in('1058','9219')  
ORDER BY 1 DESC
LIMIT 100

--users registered on more than 5 devices with a negative balance
with data as (SELECT id_to_text(user_id) as user_id,
       count(hash_device) as number_devices
FROM user_device ud
GROUP BY 1
HAVING count(hash_device)>=5
ORDER BY 2 desc)
SELECT public.user.first_name,
       public.user.last_name,
       public.user.phone_number,
       data.user_id,
       data.number_devices,
       service_area.name,
       user_last_location.last_location_at,
       account.balance__amount
FROM DATA
LEFT JOIN public.user ON id_to_text(public.user.id)=data.user_id
AND public.user.is_staff IS FALSE
AND public.user.phone_number IS NOT NULL
and public.user.is_deleted=false
left join user_last_location on public.user.id=user_last_location.user_id 
left join service_area on user_last_location.last_service_area_id = service_area.id
left join account on public.user.balance_account_id=account.id
where service_area.name='Terrassa' and account.balance__amount<0
ORDER BY user_last_location.last_location_at desc
;
--usuarios con varias tarjetas
SELECT concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
       count(distinct(card__fingerprint))
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null
GROUP BY 1
having count(distinct(card__fingerprint))>5
ORDER BY 2 DESC
LIMIT 100;


--tarjetas utilizadas por varias cuentas
SELECT --concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
card__fingerprint,
card__last_four,
       count(distinct(ppg.user_id))
FROM payment_processor_gateway ppg
left join public.user on public.user.id=ppg.user_id
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint is not null
GROUP BY 1,2
having count(distinct(ppg.user_id))>3
ORDER BY 3 DESC;

--usuarios que utilizan la tarjeta con código que introduzcáis:
SELECT 
distinct(concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id))),
--distinct(ppg.user_id),
card__fingerprint
FROM payment_processor_gateway ppg
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint='+7qy4w/5VsCCCtwf'   
ORDER BY 1 DESC
LIMIT 100


--usuarios que han utilizado tarjetas que se han utilizado en más de 5 cuentas
with data as (SELECT --concat('https://admin.reby.tech/riders/usr_', id_to_text(ppg.user_id)),
card__fingerprint,
       count(distinct(ppg.user_id))
FROM payment_processor_gateway ppg
left join public.user on public.user.id=ppg.user_id
LEFT JOIN banned_device bd ON ppg.user_id=bd.user_id
where reason is null and card__fingerprint is not null and public.user.is_staff is false and public.user.is_deleted=false
GROUP BY 1
having count(distinct(ppg.user_id))>3
ORDER BY 2 DESC)
select
distinct(public.user.phone_number),
public.user.first_name,
public.user.last_name
--data.card__fingerprint
--distinct(ppg.user_id),
FROM data 
left join payment_processor_gateway ppg on ppg.card__fingerprint=data.card__fingerprint 
left join public.user on public.user.id=ppg.user_id
and public.user.is_staff is false and public.user.phone_number is not null
--order by data.card__fingerprint;

--users with devices on which 5 or more accounts have been registered
with data as (SELECT hash_device,
       count(hash_device) as number_accounts
FROM user_device ud
GROUP BY 1
HAVING count(hash_device)>=5
ORDER BY 2 desc)
SELECT distinct(public.user.phone_number),
public.user.first_name,
       public.user.last_name,
       public.user.phone_number,
       --data.hash_device,
       service_area.name,
       user_last_location.last_location_at
FROM DATA
LEFT JOIN user_device ud ON ud.hash_device=data.hash_device
LEFT JOIN public.user ON public.user.id=ud.user_id
AND public.user.is_staff IS FALSE
AND public.user.phone_number IS NOT NULL
and public.user.is_deleted=false
left join user_last_location on public.user.id=user_last_location.user_id 
left join service_area on user_last_location.last_service_area_id = service_area.id
where service_area.name='Terrassa'
ORDER BY user_last_location.last_location_at desc
;

postgresql://[user[:password]@][netloc][:port][/dbname][?param1=value1&...]
--New user creation
with base_data as (
select 
    date(u.created_at) as date,
    date_trunc(date(u.created_at),MONTH) as month,
    mv.service_area,
    sum(if(mv.first_ride_date is not null,1,0)) as has_ride,
    count(*) users_created
from `reby-cloud.analytics_reby_v1_eu.pg_users_json` u
left join `reby-cloud.reby_marketing_eu.users_servicearea_first_mv_ride` mv
    on u.id = mv.user_id
where mv.service_area = 'Sevilla'
group by 1,2,3
--order by 1 desc
)
select 
    *
    --lag(users_created) over (partition by month order by month asc) as prev_month_users
from base_data
order by date desc
;
--individual ride metrics
select
    date(created_at) as date,
    r.id as ride_id,
    r.minutes,
    extract(hour from datetime(created_at,"Europe/Madrid")) as hour,
    if(plan_usage_id is null,0,1) as plan_usage,
    FORMAT_DATE('%A', date(created_at)) AS weekday_name,
    r.distance/1000 as distance_km,
    r.ride_cost_time as ride_cost,
    geo.Distri_11D as distrito_inicial,
    geo2.Distri_11D as distrito_final,
from analytics_reby_v1_eu.pg_rides r
join `reby-cloud.analytics_reby_v1_eu.geo_sevilla_distritos` geo
    on st_within(st_geogpoint(r.longitude_initial,r.latitude_initial),geo.geometry)
join `reby-cloud.analytics_reby_v1_eu.geo_sevilla_distritos` geo2
    on st_within(st_geogpoint(r.longitude_final,r.latitude_final),geo2.geometry)
where r.service_area_id = 'sa_3qr9213ajv94b6v49h5h'
order by 1 desc
;
--ride linestrings
with rides_data as (
select
	id,
  created_at,
  starting.latitude as lat_init,
  starting.longitude as long_init,
  ending.latitude as lat_fin,
  ending.longitude as long_fin,
  distance/1000 as dist_km,
  minutes,
	--CONCAT(starting.latitude,',',starting.longitude) AS marker_start,
	--CONCAT(ending.latitude,',',ending.longitude) AS marker_end
from (
select
  r.created_at,
  r.minutes,
  r.distance,
  r.id,
  --path,
  path[offset(0)] as starting,
  path[offset(array_length(path)-1)] as ending,
  --array_length(path)
from `analytics_reby_v1_eu.pg_rides_json` r
left join `reby-cloud.analytics_reby_v1_eu.pg_vehicles` v
  on r.vehicle_id = v.id
where array_length(path) > 3
and date(r.created_at) >= '2021-06-01'
and date(r.created_at) <= '2021-09-01'
and r.service_area_id = 'sa_3qr9213ajv94b6v49h5h'
--and v.vehicle_type = 'motorcycle'
--where id = 'r_3qm5ua4jymv1ta3tbmq1'
--group by 1
  )
),

linestrings as (
select 
  ri.id,
  --st_asgeojson(st_geogpoint(rd.long_init,rd.lat_init)) as starting_point,
  concat("Linestring(",string_agg(concat(p.longitude," ",p.latitude), ","),")") as path
from `analytics_reby_v1_eu.pg_rides_json` ri, unnest(path) as p
where id in (select id from rides_data)
and date(created_at) >= '2021-06-01'
and date(created_at) <= '2021-09-01'
group by 1
),

linestrings_geo as (
select 
  rd.id,
  st_asgeojson(st_geogpoint(rd.long_init,rd.lat_init)) as starting_point,
  st_asgeojson(st_geogpoint(rd.long_fin,rd.lat_fin)) as ending_point,
  st_asgeojson(SAFE.st_geogfromtext(path)) as paths
from linestrings ls
join rides_data rd on ls.id = rd.id
)

select * from linestrings_geo
with dte as (
    SELECT dte
    FROM unnest(generate_date_array(date('2021-03-11'), date('2021-07-31'))) dte
),
ids as (
    SELECT clientId,
    PARSE_DATE("%Y%m%d", date) as dte
    FROM `*table*`
)

SELECT dte.dte, count( distinct ids.clientId)
FROM dte, ids
WHERE ids.dte BETWEEN DATE_SUB(dte.dte, INTERVAL 7 DAY) and dte.dte
group by 1
order by 1 asc 
create or replace table `temp_eu.delete_gerardm_rcnp_` as (
select npr_rscb = RCnp as bool,* from (
SELECT 
        --vd.*,
        r.created_at,
        r.user_id,
        r.id as ride_id,
        SUM(rc.theoretical_cost)/100 AS RC, 
        SUM(rc.non_promoted)/100 AS RCnp, 
        SUM(rc.theoretical_cost-rc.non_promoted)/100 AS RCp,
        --ops_managers_dashboard.TRUNC_DATE_ISO('DAY', date(datetime(r.created_at,"Europe/Madrid"))) as week_at 
    FROM`reby-cloud.analytics_reby_v1_eu.pg_rides` AS r
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.vehicle_daily` AS vd
        ON r.vehicle_id = vd.vehicle_id AND date(datetime(r.created_at,"Europe/Madrid")) = vd.date
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.py_ridestatus_combined` AS rc
        ON r.id = rc.ride_id
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.pg_vehicles` AS v
        ON r.vehicle_id = v.id
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.pg_service_area` AS sa
        ON r.service_area_id = sa.service_area_id
    LEFT JOIN `reby-cloud.analytics_reby_v1_eu.pg_tpl` AS tpl
        ON vd.tpl_provider_id = tpl.id
   -- WHERE 
        --date(datetime(r.created_at,"Europe/Madrid")) >= ops_managers_dashboard.DATE_AGO_MANAGERS_ISO('DAY')
        --AND ST_DISTANCE (ST_GEOGPOINT(r.longitude_initial, r.latitude_initial), ST_GEOGPOINT(tpl.longitude, tpl.latitude)) > 200
        --AND ST_WITHIN(ST_GEOGPOINT(r.longitude_initial, r.latitude_initial), ST_GeogFromText(sa.geometry))
        --AND v.printed_code IS NOT NULL AND vd.date >= ops_managers_dashboard.DATE_AGO_MANAGERS_ISO('DAY')
        --AND rc.ride_id IS NOT NULL
        --where r.id = 'r_3rytmr4br7g5qlu4jbqh'
        group by 1,2,3
    --GROUP BY week_at 
    --ORDER BY week_at DESC
) as a--where ride_id = 'r_3t26l79xmu2xa7nyu3n1'
full join (select ride_id ride_id_rscb,service_area, sum(non_promoted)/100 as npr_rscb
            from `reby-cloud.analytics_reby_v1_eu.py_ridestatus_combined` group by 1,2) b
    on a.ride_id = b.ride_id_rscb
)

/*
create or replace table `reby-cloud.analytics_reby_v1_eu.vehicle_daily` 
PARTITION BY date
CLUSTER BY service_area_id,company_id
AS
select * except(r_n) from (
select
    *,
    row_number() over (partition by date,vehicle_id order by created_at asc) as r_n
from `reby-cloud.analytics_reby_v1_eu.vehicle_daily`
) where r_n = 1
*/
WHERE DATE(cat_tbl.date)
    BETWEEN PARSE_DATE('%Y%m%d', @DS_START_DATE) AND
    PARSE_DATE('%Y%m%d', @DS_END_DATE)
select * from table where random() < 0.01 limit 1000;
%sql SELECT name FROM sqlite_master WHERE type ='table' AND name NOT LIKE 'sqlite_%';
INSERT `reby-cloud.analytics_reby_v1_eu.service_area_pricing_daily`
    (date, name, company_id,service_area_id,vehicle_type,price_per_minute,
    price_base_amount,original_price_per_minute,original_price_base_amount)
SELECT date('2021-07-17'), name, company_id,service_area_id,vehicle_type,price_per_minute,
    price_base_amount,original_price_per_minute,original_price_base_amount
FROM `reby-cloud.analytics_reby_v1_eu.service_area_pricing_daily`
where date = '2021-07-18'
--'usr_3t7nh9pkvax1j6412nv1'
select * from (
with promotions as (
SELECT * FROM EXTERNAL_QUERY("reby-cloud.eu.reby_prod", 
'''
SELECT
    id_to_time(id) as created_at,
    concat('acc_',id_to_text(account_id)) as account_id,
    balance__amount as balance_amount,
    balance__currency as balance_currency,
    concat('c_',id_to_text(company_id)) as company_id,
    free_unlocks,
    concat('cpua_',id_to_text(id)) as id,
    concat('tx_',id_to_text(last_promotion_transaction_id)) as transaction_id,
    concat('usr_',id_to_text(user_id)) as user_id
FROM company_user_promotion_account
--where user_id = id_from_text('3t7nh9pkvax1j6412nv1')
;
'''
    )
)
select
    *
from (
    SELECT
    --promotions.balance_amount,
    sa.service_area,
    cp.code,
    cpu.*
    FROM
    `reby-cloud.analytics_reby_v1_eu.pg_company_promotion_usage` cpu
    left join `reby-cloud.analytics_reby_v1_eu.pg_company_promotion` cp on cpu.company_promotion_id = cp.id
    left join `reby-cloud.analytics_reby_v1_eu.reby_users_servicearea` sa on sa.user_id = cpu.user_id
    --left join promotions on cpu.user_id =promotions.user_id
    --where sa.service_area in ('Zaragoza','Gijón')
    --and date(expiration_date) > current_date and date(cpu.created_at) > current_date -15
    --where cpu.user_id = 'usr_3t7nh9pkvax1j6412nv1'
    --order by cpu.created_at desc
)
where
    (service_area = 'Zaragoza' AND code in ('SEVILLAES','REBYBCN','REBYJON','REBYGONA','SEVILLAES','TOTAREBY','HOLAMOTOS')) OR
    (service_area = 'Sevilla' AND code in ('REBYBCN','REBYJON','REBYGONA','TOTAREBY','HOLAMOTOS','REBYGO')) OR
    (service_area = 'Gijón' AND code in ('SEVILLAES','REBYBCN','REBYGONA','SEVILLAES','TOTAREBY','HOLAMOTOS')) OR
    (service_area = 'Terrassa' AND code in ('SEVILLAES','REBYBCN','REBYGONA','SEVILLAES')) OR
    (service_area in ('Napoles','Lecce','Minturno','Bergamo','Grosseto','Caserta') AND code in ('SEVILLAES','REBYBCN','REBYJON','REBYGONA','SEVILLAES','REBYGO','HOLAMOTOS'))
)
--where user_id = 'usr_3t7nh9pkvax1j6412nv1'
where value_remaining_amount > 0
and is_expired is false
with ads_data as (
SELECT
    ad_id,
    adset_id,
    campaign_id,
    status,
    sum(impressions) clicks,
    sum(spend) as investment
FROM `reby-cloud.facebook_ads.insights` ai
left join `reby-cloud.facebook_ads.ads` ads on ai.ad_id = ads.id

--WHERE DATE(_PARTITIONTIME) = "2021-07-06"
group by 1,2,3,4
)
select * from ads_data where ad_id = '23843560033720454' order by clicks desc
--with application_opened as (
select
    original_timestamp,
    anonymous_id,
    context_ip,
    context_network_carrier,
    context_os_name,
    context_os_version,
    user_id
from(
SELECT
    original_timestamp,
    anonymous_id,
    context_ip,
    context_network_carrier,
    context_os_name,
    context_os_version,
    user_id,
    row_number() over (partition by anonymous_id order by original_timestamp asc) as r_n
FROM
  `reby-safir.react_native.install_attributed`
WHERE
  context_app_name = 'Reby'
  --and user_id = 'usr_3t8h6t8c4ppubjp9vjvh'
  and anonymous_id = '1eafe4e3-f4d8-416d-80c1-db0a95191b55'
  /*
  and _PARTITIONTIME BETWEEN TIMESTAMP_TRUNC(TIMESTAMP_MICROS(UNIX_MICROS(CURRENT_TIMESTAMP()) - 60 * 60 * 60 * 24 * 1000000), DAY, 'UTC')
					AND TIMESTAMP_TRUNC(CURRENT_TIMESTAMP(), DAY, 'UTC')
    
			
    */
    )
--where r_n = 1
order by r_n asc
--)
;
select *
from `reby-safir.react_native.identifies`
where user_id = 'usr_3t8cxanx6kehxg38n5m1'
order by original_timestamp asc
;
select *
from `reby-safir.react_native.finish_ride`
where user_id = 'usr_3t8ethqfzs32sp1mcu5h'
order by original_timestamp asc
;

SELECT
    id,
    stats[offset(0)].total_rides,
    mv.service_area
FROM
  `reby-cloud.analytics_reby_v1_eu.pg_users_json` u
left join `reby-cloud.reby_marketing_eu.users_servicearea_first_mv_ride` mv on u.id = mv.user_id
where date(created_at) = current_date -2
order by 2 desc
;
select
    money_in,
    money_out,
    count(*),
    sum(amount)
from (
select
 amount,
 date(date_ts) as date,
 case
    when amount between 0 and 20 then "0-20"
    when amount between 21 and 500 then "20-500"
    when amount between 21 and 500 then "500-1000"
    when amount >1000 then "+1000"
    else "other"
end as money_in,
case
    when amount between -20 and 0 then "0-20"
    when amount between -500 and -21 then "20-500"
    when amount between -1000 and -501 then "500-1000"
    when amount <-1000 then "+1000"
    else "other"
end as money_out,

from `reby-cloud.reby_fin_eu.payments_all_platforms`
where
  date(date_ts) BETWEEN '2021-04-01' and '2021-06-30'
  and payment_provider = 'kernel'
--order by date_ts desc
)
group by 1,2
with data as (
select
    date(created_at) as date,
    sum(if(app_promoting_co = 'Reby' and owner_co != 'Reby',1,0)) as hey,
    count(*) as all_
from `reby-cloud.analytics_reby_v1_eu.transactions_combined_reassigned_final`
where type like 'ride%'
    and date(created_at) > current_date - 50
    and service_area = 'Barcelona'
group by 1
--order by 1 desc
)
select hey / all_ from data order by 1 desc
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets>
	<CodeSnippet>
		<Header>
			<Title>Search column</Title>
		</Header>
		<Snippet>
			<Declarations>
				<Literal>
					<ID>ColumnName</ID>
					<ToolTip>Name of the column</ToolTip>
					<Default>ColumnName</Default>
				</Literal>
			</Declarations>
			<Code Language="SQL">
<![CDATA[SELECT s.name [Schema], tb.name [Table], c.name [Column], tp.name [Type]
FROM sys.schemas s
JOIN sys.tables tb ON tb.schema_id = s.schema_id
JOIN sys.columns c ON c.object_id = tb.object_id
JOIN sys.types tp ON tp.system_type_id = c.system_type_id
WHERE c.name LIKE N'$ColumnName$'
AND NOT tp.name = N'sysname'
ORDER BY [Schema], [Table], [Column];]]>
			</Code>
		</Snippet>
	</CodeSnippet>
</CodeSnippets>
<?xml version="1.0" encoding="utf-8"?>
<CodeSnippets>
	<CodeSnippet>
		<Header>
			<Title>Check nr of fractional digits</Title>
		</Header>
		<Snippet>
			<Declarations>
				<Literal>
					<ID>Exponent</ID>
					<ToolTip>Exponentiation</ToolTip>
					<Default>2</Default>
				</Literal>
				<Literal>
					<ID>ColumnName</ID>
					<ToolTip>Name of the column</ToolTip>
					<Default>ColumnName</Default>
				</Literal>
				<Literal>
					<ID>TableName</ID>
					<ToolTip>Name of the table</ToolTip>
					<Default>TableName</Default>
				</Literal>
			</Declarations>
			<Code Language="SQL">
<![CDATA[DECLARE @power DECIMAL = POWER(10, $Exponent$);
SELECT Value
FROM (SELECT $ColumnName$ AS Value FROM dbo.$TableName$) AS Focus
WHERE ROUND(Value * @power, 0, 1) <> (Value * @power);]]>
			</Code>
		</Snippet>
	</CodeSnippet>
</CodeSnippets>
select pgc.conname as constraint_name,
       ccu.table_schema as table_schema,
       ccu.table_name,
       ccu.column_name,
       pg_get_constraintdef(pgc.oid) as definition
from pg_constraint pgc
join pg_namespace nsp on nsp.oid = pgc.connamespace
join pg_class  cls on pgc.conrelid = cls.oid
left join information_schema.constraint_column_usage ccu
          on pgc.conname = ccu.constraint_name
          and nsp.nspname = ccu.constraint_schema
where contype ='c'
order by pgc.conname;
CREATE TABLE test ( 
  id INT NOT NULL AUTO_INCREMENT,
  name VARCHAR(50) NOT NULL,
  email VARCHAR(100) NOT NULL,
  PRIMARY KEY (id)
);
import wrds
db = wrds.Connection(wrds_username='joe')
db.raw_sql('SELECT date,dji FROM djones.djdaily')
/* Gets reps */
SELECT fieldA, COUNT(*)
FROM tableA
GROUP BY fieldA
HAVING COUNT(*) > 1

/* Use reps to filter results */
SELECT a.*
FROM tableA a
JOIN (
	SELECT fieldA, COUNT(*) as 'count'
	FROM tableA
	GROUP BY fieldA
	HAVING COUNT(*) > 1
) b
ON a.fieldA = b.fieldA
CREATE TABLE Persons (
    ID int NOT NULL,
    LastName varchar(255) NOT NULL,
    FirstName varchar(255),
    Age int,
    CHECK (Age>=18)
);
select studentID, FirstName, LastName, FirstName + ' ' + LastName as FullName
from student;
select users.last_login,us.*,users.* from users left join user_school us on us.user_id = users.id where school_id is not null
and last_login between '2021-01-01'::date and '2021-12-31'::date
UPDATE public.users
SET name=concat('User',id), email=concat('user',id,'@email.com')
WHERE name not like '%Testbruker';
/*
This script is given "As Is" with no warranties and plenty of caveats. Use at your own risk!
For more on data profiling, see Chapter 10 in "SQL Server 2012 Data Integration Recipes", Apress, 2012
*/
-----------------------------------------------------------------------
-- User-defined variables
-----------------------------------------------------------------------
USE CarSales -- Your database here
GO
DECLARE @TABLE_SCHEMA NVARCHAR(128) = 'dbo'  -- Your schema here
DECLARE @TABLE_NAME NVARCHAR(128) = 'client' -- Your table here
DECLARE @ColumnListIN NVARCHAR(4000) = ''    -- Enter a comma-separated list of specific columns
                                                     -- to profile, or leave blank for all
DECLARE @TextCol BIT = 1  -- Analyse all text (char/varchar/nvarchar) data type columns
DECLARE @NumCol BIT = 1   -- Analyse all numeric data type columns
DECLARE @DateCol BIT = 1  -- Analyse all date data type data type columns
DECLARE @LobCol BIT = 1   -- Analyse all VAR(char/nchar/binary) MAX data type columns (potentially time-consuming)
DECLARE @AdvancedAnalysis BIT = 1 -- Perform advanced analysis (threshold counts/domain analysis) 
                                  --(potentially time-consuming)
DECLARE @DistinctValuesMinimum INT = 200 -- Minimum number of distinct values to suggest a reference 
                                         -- table and/or perform domain analysis
DECLARE @BoundaryPercent NUMERIC(3,2) = 0.57 -- Percent of records at upper/lower threshold to suggest
                                             -- a possible anomaly
DECLARE @NullBoundaryPercent NUMERIC(5,2) = 90.00 -- Percent of NULLs to suggest a possible anomaly
DECLARE @DataTypePercentage INT = 2 -- Percentage variance allowed when suggesting another data type 
                                    -- for a column
-----------------------------------------------------------------------
-- Process variables
-----------------------------------------------------------------------
DECLARE @DATA_TYPE VARCHAR(128) = ''
DECLARE @FULLSQL VARCHAR(MAX) = ''
DECLARE @SQLMETADATA VARCHAR(MAX) = ''
DECLARE @NUMSQL VARCHAR(MAX) = ''
DECLARE @DATESQL VARCHAR(MAX) = ''
DECLARE @LOBSQL VARCHAR(MAX) = ''
DECLARE @COLUMN_NAME VARCHAR(128)
DECLARE @CHARACTER_MAXIMUM_LENGTH INT
DECLARE @ROWCOUNT BIGINT = 0
DECLARE @ColumnList VARCHAR(4000) = ' '
DECLARE @TableCheck TINYINT
DECLARE @ColumnCheck SMALLINT
DECLARE @DataTypeVariance INT
-----------------------------------------------------------------------
-- Start the process:
BEGIN TRY
-- Test that the schema and table exist
SELECT
 @TableCheck = COUNT (*) 
   FROM INFORMATION_SCHEMA.TABLES 
   WHERE TABLE_SCHEMA = @TABLE_SCHEMA 
   AND TABLE_NAME = @TABLE_NAME
IF @TableCheck <> 1
 BEGIN
  RAISERROR ('The table does not exist',16,1)
  RETURN
 END
-----------------------------------------------------------------------
-- Parse list of columns to process / get list of columns according to types required
-----------------------------------------------------------------------
IF OBJECT_ID('tempdb..#ColumnList') IS NOT NULL
 DROP TABLE tempdb..#ColumnList;
CREATE TABLE #ColumnList (COLUMN_NAME VARCHAR(128), DATA_TYPE VARCHAR(128), CHARACTER_MAXIMUM_LENGTH INT) -- Used to hold list of columns to process
IF @ColumnListIN <> '' -- See if there is a list of columns to process
BEGIN
 -- Process list
 SET @ColumnList = @ColumnListIN + ','
 DECLARE @CharPosition int
 WHILE CHARINDEX(',', @ColumnList) > 0
  BEGIN
   SET @CharPosition = CHARINDEX(',', @ColumnList)
   INSERT INTO #ColumnList (COLUMN_NAME) VALUES (LTRIM(RTRIM(LEFT(@ColumnList, @CharPosition - 1))))
   SET @ColumnList = STUFF(@ColumnList, 1, @CharPosition, '')
  END -- While loop
-- update with datatype and length
  UPDATE CL
   SET CL.CHARACTER_MAXIMUM_LENGTH = ISNULL(ISC.CHARACTER_MAXIMUM_LENGTH,0)
      ,CL.DATA_TYPE = ISC.DATA_TYPE
   FROM #ColumnList CL
   INNER JOIN INFORMATION_SCHEMA.COLUMNS ISC
     ON CL.COLUMN_NAME = ISC.COLUMN_NAME
  WHERE ISC.TABLE_NAME = @TABLE_NAME
  AND ISC.TABLE_SCHEMA = @TABLE_SCHEMA
 END
-- If test for list of column names
ELSE
 BEGIN
 -- Use all column names, to avoid filtering
  IF @TextCol = 1
   BEGIN
    INSERT INTO #ColumnList (COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH)
     SELECT COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH FROM INFORMATION_SCHEMA.COLUMNS
     WHERE DATA_TYPE IN ('varchar', 'nvarchar', 'char', 'nchar', 'binary')
     AND TABLE_NAME = @TABLE_NAME
     AND TABLE_SCHEMA = @TABLE_SCHEMA
     AND CHARACTER_MAXIMUM_LENGTH > 0
   END
 IF @NumCol = 1
  BEGIN
   INSERT INTO #ColumnList (COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH)
   SELECT COLUMN_NAME, DATA_TYPE, ISNULL(CHARACTER_MAXIMUM_LENGTH,0) FROM INFORMATION_SCHEMA.COLUMNS
   WHERE DATA_TYPE IN ('numeric', 'int', 'bigint', 'tinyint', 'smallint', 'decimal', 'money', 'smallmoney', 'float','real')
   AND TABLE_NAME = @TABLE_NAME
   AND TABLE_SCHEMA = @TABLE_SCHEMA
  END
 IF @DateCol = 1
  BEGIN
   INSERT INTO #ColumnList (COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH)
   SELECT COLUMN_NAME, DATA_TYPE, ISNULL(CHARACTER_MAXIMUM_LENGTH,0) FROM INFORMATION_SCHEMA.COLUMNS
   WHERE DATA_TYPE IN ('Date', 'DateTime', 'SmallDateTime', 'DateTime2', 'time')
   AND TABLE_NAME = @TABLE_NAME
   AND TABLE_SCHEMA = @TABLE_SCHEMA
  END
IF @LOBCol = 1
 BEGIN
  INSERT INTO #ColumnList (COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH)
   SELECT COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH FROM INFORMATION_SCHEMA.COLUMNS
   WHERE DATA_TYPE IN ('varchar', 'nvarchar', 'varbinary', 'xml')
   AND TABLE_NAME = @TABLE_NAME
   AND TABLE_SCHEMA = @TABLE_SCHEMA
   AND CHARACTER_MAXIMUM_LENGTH = -1
 END
END
-- Else test to get all column names
-----------------------------------------------------------------------
-- Test that there are columns to analyse
SELECT @ColumnCheck = COUNT (*) FROM #ColumnList WHERE DATA_TYPE IS NOT NULL
IF @ColumnCheck = 0
 BEGIN
  RAISERROR('The columns do not exist in the selected database or no columns are selected',16,1)
  RETURN
 END
-----------------------------------------------------------------------
-- Create Temp table used to hold profiling data
-----------------------------------------------------------------------
IF OBJECT_ID('tempdb..#ProfileData') IS NOT NULL
 DROP TABLE tempdb..#ProfileData;
 CREATE TABLE #ProfileData
 (
  TABLE_SCHEMA NVARCHAR(128),
  TABLE_NAME NVARCHAR(128),
  COLUMN_NAME NVARCHAR(128),
  ColumnDataLength INT,
  DataType VARCHAR(128),
  MinDataLength BIGINT,
  MaxDataLength BIGINT,
  AvgDataLength BIGINT,
  MinDate SQL_VARIANT,
  MaxDate SQL_VARIANT,
  NoDistinct BIGINT,
  NoNulls NUMERIC(32,4),
  NoZeroLength NUMERIC(32,4),
  PercentageNulls NUMERIC(9,4),
  PercentageZeroLength NUMERIC(9,4),
  NoDateWithHourminuteSecond BIGINT NULL,
  NoDateWithSecond BIGINT NULL,
  NoIsNumeric BIGINT NULL,
  NoIsDate BIGINT NULL,
  NoAtLimit BIGINT NULL,
  IsFK BIT NULL DEFAULT 0,
  DataTypeComments NVARCHAR(1500)
 );
-- Get row count
DECLARE @ROWCOUNTTEXT NVARCHAR(1000) = ''
DECLARE @ROWCOUNTPARAM NVARCHAR(50) = ''
SET @ROWCOUNTTEXT = 'SELECT @ROWCOUNTOUT = COUNT (*) FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WITH (NOLOCK)'
SET @ROWCOUNTPARAM = '@ROWCOUNTOUT INT OUTPUT'
EXECUTE sp_executesql @ROWCOUNTTEXT, @ROWCOUNTPARAM, @ROWCOUNTOUT = @ROWCOUNT OUTPUT
-----------------------------------------------------------------------
-- Test that there are records to analyse
IF @ROWCOUNT = 0
 BEGIN
  RAISERROR('There is no data in the table to analyse',16,1)
  RETURN
 END
-----------------------------------------------------------------------
-- Define the dynamic SQL used for each column to analyse
-----------------------------------------------------------------------
SET @SQLMETADATA = 'INSERT INTO #ProfileData (ColumnDataLength,COLUMN_NAME,TABLE_SCHEMA,TABLE_NAME,DataType,MaxDataLength,MinDataLength,AvgDataLength,MaxDate,MinDate,NoDateWithHourminuteSecond,NoDateWithSecond,NoIsNumeric,NoIsDate,NoNulls,NoZeroLength,NoDistinct)'
DECLARE SQLMETADATA_CUR CURSOR LOCAL FAST_FORWARD FOR 
 SELECT COLUMN_NAME, CHARACTER_MAXIMUM_LENGTH, DATA_TYPE FROM #ColumnList
OPEN SQLMETADATA_CUR 
FETCH NEXT FROM SQLMETADATA_CUR INTO @COLUMN_NAME, @CHARACTER_MAXIMUM_LENGTH, @DATA_TYPE 
WHILE @@FETCH_STATUS = 0 
 BEGIN 
  SET @SQLMETADATA = @SQLMETADATA +'
  SELECT TOP 100 PERCENT ' + CAST(@CHARACTER_MAXIMUM_LENGTH AS VARCHAR(20)) + ' ,''' + QUOTENAME(@COLUMN_NAME) + '''
  ,''' + QUOTENAME(@TABLE_SCHEMA) + '''
  ,''' + QUOTENAME(@TABLE_NAME) + '''
  ,''' + @DATA_TYPE + ''''
   + CASE
      WHEN @DATA_TYPE IN ('varchar', 'nvarchar', 'char', 'nchar') 
	   AND @CHARACTER_MAXIMUM_LENGTH >= 0 
	     THEN + '
  , MAX(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  , MIN(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  , AVG(LEN(' + QUOTENAME(@COLUMN_NAME) + '))
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,(SELECT COUNT (*) from '
   + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE ISNUMERIC(' + QUOTENAME(@COLUMN_NAME) + ') = 1) 
  ,(SELECT COUNT (*) from ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE ISDATE(' + QUOTENAME(@COLUMN_NAME) + ') = 1) '
  WHEN @DATA_TYPE IN ('numeric', 'int', 'bigint', 'tinyint', 'smallint', 'decimal', 'money', 'smallmoney', 'float','real') THEN + '
  ,MAX(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,AVG(CAST(' + QUOTENAME(@COLUMN_NAME) + ' AS NUMERIC(36,2)))
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('DateTime', 'SmallDateTime') THEN + '
  ,NULL 
  ,NULL 
  ,NULL 
  ,MAX(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN(' + QUOTENAME(@COLUMN_NAME) + ')
  ,(SELECT COUNT (*) from ' 
   + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE (CONVERT(NUMERIC(20,12), ' + QUOTENAME(@COLUMN_NAME) + ' ) - FLOOR(CONVERT(NUMERIC(20,12), ' + QUOTENAME(@COLUMN_NAME) + ')) <> 0))
  ,(SELECT COUNT (*) from '
   + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE DATEPART(ss,' + QUOTENAME(@COLUMN_NAME) + ') <> 0 OR DATEPART(mcs,' + QUOTENAME(@COLUMN_NAME) + ') <> 0) 
  ,NULL 
  ,NULL '
    WHEN @DATA_TYPE IN ('DateTime2') THEN + '
  ,NULL 
  ,NULL 
  ,NULL 
  ,MAX(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN(' + QUOTENAME(@COLUMN_NAME) + ')
  ,NULL
  ,NULL
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('Date') THEN + '
  ,NULL 
  ,NULL 
  ,NULL 
  ,MAX('
   + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN('
  + QUOTENAME(@COLUMN_NAME) + ')
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('xml') THEN + '
  ,MAX(LEN(CAST(' + QUOTENAME(@COLUMN_NAME) + ' AS NVARCHAR(MAX)))) 
  ,MIN(LEN(CAST(' + QUOTENAME(@COLUMN_NAME) + ' AS NVARCHAR(MAX)))) 
  ,AVG(LEN(CAST(' + QUOTENAME(@COLUMN_NAME) + ' AS NVARCHAR(MAX)))) 
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
  WHEN @DATA_TYPE IN ('varbinary','varchar','nvarchar') AND  @CHARACTER_MAXIMUM_LENGTH = -1 THEN + '
  ,MAX(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  ,MIN(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  ,AVG(LEN(' + QUOTENAME(@COLUMN_NAME) + '))
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('binary') THEN + '
  ,MAX(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  ,MIN(LEN(' + QUOTENAME(@COLUMN_NAME) + ')) 
  ,AVG(LEN(' + QUOTENAME(@COLUMN_NAME) + '))
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   WHEN @DATA_TYPE IN ('time') THEN + '
  ,NULL 
  ,NULL 
  ,NULL 
  ,MAX(' + QUOTENAME(@COLUMN_NAME) + ') 
  ,MIN(' + QUOTENAME(@COLUMN_NAME) + ')
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
   ELSE + '
  ,NULL 
  ,NULL
  ,NULL
  ,NULL
  ,NULL
  ,NULL 
  ,NULL 
  ,NULL 
  ,NULL '
  END + '
  ,(SELECT COUNT(*) FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE ' + QUOTENAME(@COLUMN_NAME) + ' IS NULL)'
   + CASE
   WHEN @DATA_TYPE IN ('varchar', 'nvarchar', 'char', 'nchar') THEN + '
  ,(SELECT COUNT(*) FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) +  ' WHERE LEN(LTRIM(RTRIM(' + QUOTENAME(@COLUMN_NAME) + '))) = '''')'
   ELSE + '
  ,NULL'
   END + '
  ,(SELECT COUNT(DISTINCT ' + QUOTENAME(@COLUMN_NAME) + ') FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WHERE ' + QUOTENAME(@COLUMN_NAME) + ' IS NOT NULL )
  FROM ' + QUOTENAME(@TABLE_SCHEMA) + '.' + QUOTENAME(@TABLE_NAME) + ' WITH (NOLOCK)
  UNION'
 FETCH NEXT FROM SQLMETADATA_CUR INTO @COLUMN_NAME, @CHARACTER_MAXIMUM_LENGTH, @DATA_TYPE 
END 
CLOSE SQLMETADATA_CUR 
DEALLOCATE SQLMETADATA_CUR 
SET @SQLMETADATA = LEFT(@SQLMETADATA, LEN(@SQLMETADATA) -5)
EXEC (@SQLMETADATA)
-----------------------------------------------------------------------
-- Final Calculations
-----------------------------------------------------------------------
-- Indicate Foreign Keys
; WITH FK_CTE (FKColumnName)
AS
(
 SELECT
   DISTINCT CU.COLUMN_NAME
  FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS TC
   INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE CU
     ON TC.CONSTRAINT_NAME = CU.CONSTRAINT_NAME 
     AND TC.TABLE_SCHEMA = CU.TABLE_SCHEMA 
     AND TC.TABLE_NAME = CU.TABLE_NAME
     AND TC.TABLE_SCHEMA = @TABLE_SCHEMA
     AND TC.TABLE_NAME = @TABLE_NAME
     AND CONSTRAINT_TYPE = 'FOREIGN KEY'
)
UPDATE P
 SET P.IsFK = 1
 FROM #ProfileData P
  INNER JOIN FK_CTE CTE
   ON P.COLUMN_NAME = CTE.FKColumnName
-- Calculate percentages
UPDATE #ProfileData
 SET PercentageNulls = (NoNulls / @ROWCOUNT) * 100
    ,PercentageZeroLength = (NoZeroLength / @ROWCOUNT) * 100
-- Add any comments
-- Datatype suggestions
-- First get number of records where a variation could be an anomaly
SET @DataTypeVariance = ROUND((@ROWCOUNT * @DataTypePercentage) / 100, 0)
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be one of the DATE types. '
 WHERE NoIsDate BETWEEN (@ROWCOUNT -@DataTypeVariance) AND (@ROWCOUNT + @DataTypeVariance)
 AND DataType IN ('varchar', 'nvarchar', 'char', 'nchar')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be one of the NUMERIC types. '
 WHERE NoIsNumeric BETWEEN (@ROWCOUNT -@DataTypeVariance) AND (@ROWCOUNT + @DataTypeVariance)
 AND DataType IN ('varchar', 'nvarchar', 'char', 'nchar')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be INT type. '
 WHERE MinDataLength >= -2147483648
 AND MaxDataLength <= 2147483647
 AND DataType IN ('bigint')
 
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be SMALLINT type. '
 WHERE MinDataLength >= -32768
 AND MaxDataLength <= 32767
 AND DataType IN ('bigint','int')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be TINYINT type. '
 WHERE MinDataLength >= 0
 AND MaxDataLength <= 255
 AND DataType IN ('bigint','int','smallint')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be SMALLDATE type. '
 WHERE NoDateWithSecond = 0
 AND MinDate >= '19000101'
 AND MaxDate <= '20790606'
 AND DataType IN ('datetime','datetime2')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be DATE type (SQL Server 2008 only). '
 WHERE NoDateWithHourminuteSecond = 0
 AND DataType IN ('datetime','datetime2')
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly could be DATETIME type. '
 WHERE MinDate >= '17530101'
 AND MaxDate <= '99991231'
 AND DataType IN ('datetime2')
-- Empty column suggestions
UPDATE #ProfileData
  SET DataTypeComments = ISNULL(DataTypeComments,'') + 'Seems empty - is it required? '
 WHERE (PercentageNulls = 100 OR PercentageZeroLength = 100)
 AND IsFK = 0
-- Null column suggestions
UPDATE #ProfileData
  SET DataTypeComments = ISNULL(DataTypeComments,'') + 'There is a large percentage of NULLs - attention may be required. '
 WHERE PercentageNulls >= @NullBoundaryPercent
-- Distinct value suggestions
UPDATE #ProfileData
  SET DataTypeComments = ISNULL(DataTypeComments,'') + 'Few distinct elements - potential for reference/lookup table (contains NULLs).'
 WHERE NoDistinct < @DistinctValuesMinimum
 AND @ROWCOUNT > @DistinctValuesMinimum
 AND IsFK = 0
 AND PercentageNulls <> 100
 AND NoNulls <> 0
-- FK suggestions
UPDATE #ProfileData
  SET DataTypeComments = ISNULL(DataTypeComments,'') + 'Few distinct elements - potential for Foreign Key.'
 WHERE NoDistinct < @DistinctValuesMinimum
 AND @ROWCOUNT > @DistinctValuesMinimum
 AND IsFK = 0
 AND NoNulls = 0
 AND DataType NOT LIKE '%Date%'
 AND DataType <> 'Time'
-- Filestream suggestions
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly a good candidate for FILESTREAM (SQL Server 2008 only).'
 WHERE AvgDataLength >= 1000000
 AND DataType IN ('varbinary')
 AND ColumnDataLength = -1
UPDATE #ProfileData
  SET DataTypeComments = 'Possibly not a good candidate for FILESTREAM (SQL Server 2008 only).'
 WHERE AvgDataLength < 1000000
 AND DataType IN ('varbinary')
 AND ColumnDataLength = -1
-- Sparse Column Suggestions
IF OBJECT_ID('tempdb..#SparseThresholds') IS NOT NULL
  DROP TABLE tempdb..#SparseThresholds;
  CREATE TABLE #SparseThresholds (DataType VARCHAR(128), Threshold NUMERIC(9,4))
  INSERT INTO #SparseThresholds (DataType, Threshold)
   VALUES 
    ('tinyint',86),
    ('smallint',76),    
    ('int',64),    
    ('bigint',52),    
    ('real',64),    
    ('float',52),    
    ('money',64),    
    ('smallmoney',64),    
    ('smalldatetime',52),    
    ('datetime',52),    
    ('uniqueidentifier',43),    
    ('date',69),    
    ('datetime2',52),    
    ('decimal',42),    
    ('numeric',42),    
    ('char',60),    
    ('varchar',60),    
    ('nchar',60),    
    ('nvarchar',60),    
    ('binary',60),    
    ('varbinary',60),    
    ('xml',60)    
; WITH Sparse_CTE (COLUMN_NAME, SparseComment)
AS
(
SELECT
  P.COLUMN_NAME
 ,CASE
  WHEN P.PercentageNulls >= T.Threshold THEN 'Could benefit from sparse columns. '
  ELSE ''
  END AS SparseComment
FROM #ProfileData P
 INNER JOIN #SparseThresholds T
  ON P.DataType = T.DataType
)
UPDATE PT
  SET PT.DataTypeComments = 
      CASE WHEN PT.DataTypeComments IS NULL THEN CTE.SparseComment
           ELSE ISNULL(PT.DataTypeComments,'') + CTE.SparseComment + '. '
      END
 FROM #ProfileData PT
  INNER JOIN Sparse_CTE CTE
   ON PT.COLUMN_NAME = CTE.COLUMN_NAME
-----------------------------------------------------------------------
-- Optional advanced analysis
-----------------------------------------------------------------------
IF @AdvancedAnalysis = 1
 BEGIN
-----------------------------------------------------------------------
-- Data at data boundaries
-----------------------------------------------------------------------
  IF OBJECT_ID('tempdb..#LimitTest') IS NOT NULL
    DROP TABLE tempdb..#LimitTest;
    CREATE TABLE #LimitTest (COLUMN_NAME VARCHAR(128), NoAtLimit BIGINT);
    DECLARE @advancedtestSQL VARCHAR(MAX) = 'INSERT INTO #LimitTest (COLUMN_NAME, NoAtLimit)' + CHAR(13)
    SELECT @advancedtestSQL = @advancedtestSQL + 'SELECT '''+ COLUMN_NAME + ''', COUNT('+ COLUMN_NAME + ') FROM ' + @TABLE_SCHEMA + '.' + @TABLE_NAME + 
     CASE
       WHEN DataType IN ('numeric', 'int', 'bigint', 'tinyint', 'smallint', 'decimal', 'money', 'smallmoney', 'float','real') THEN ' WHERE '+ COLUMN_NAME + ' = ' + CAST(ISNULL(MaxDataLength,0) AS VARCHAR(40)) + ' OR '+ COLUMN_NAME + ' = ' + CAST(ISNULL(MinDataLength,0) AS VARCHAR(40)) + CHAR(13) + ' UNION' + CHAR(13)
       ELSE ' WHERE LEN('+ COLUMN_NAME + ') = ' + CAST(ISNULL(MaxDataLength,0) AS VARCHAR(40)) + ' OR LEN('+ COLUMN_NAME + ') = ' + CAST(ISNULL(MinDataLength,0) AS VARCHAR(40)) + CHAR(13) + ' UNION' + CHAR(13)
     END
    FROM #ProfileData 
    WHERE DataType IN ('numeric', 'int', 'bigint', 'tinyint', 'smallint', 'decimal', 'money', 'smallmoney', 'float','real','varchar', 'nvarchar', 'char', 'nchar', 'binary')
    SET @advancedtestSQL = LEFT(@advancedtestSQL,LEN(@advancedtestSQL) -6) 
    EXEC (@advancedtestSQL)
    UPDATE M
      SET M.NoAtLimit = T.NoAtLimit
         ,M.DataTypeComments = 
           CASE
             WHEN CAST(T.NoAtLimit AS NUMERIC(36,2)) / CAST(@ROWCOUNT AS NUMERIC(36,2)) >= @BoundaryPercent THEN ISNULL(M.DataTypeComments,'') + 'Large numbers of data elements at the max/minvalues. '
             ELSE M.DataTypeComments
           END
    FROM #ProfileData M
     INNER JOIN #LimitTest T
      ON M.COLUMN_NAME = T.COLUMN_NAME
   -----------------------------------------------------------------------
   -- Domain analysis
   -----------------------------------------------------------------------
   IF OBJECT_ID('tempdb..#DomainAnalysis') IS NOT NULL
     DROP TABLE tempdb..#DomainAnalysis;
   CREATE TABLE #DomainAnalysis
   (
    DomainName NVARCHAR(128)
   ,DomainElement NVARCHAR(4000)
   ,DomainCounter BIGINT
   ,DomainPercent NUMERIC(7,4)
   );
   DECLARE @DOMAINSQL VARCHAR(MAX) = 'INSERT INTO #DomainAnalysis (DomainName, DomainElement, DomainCounter) '
   DECLARE SQLDOMAIN_CUR CURSOR LOCAL FAST_FORWARD FOR 
     SELECT COLUMN_NAME, DataType 
	  FROM #ProfileData 
	   WHERE NoDistinct < @DistinctValuesMinimum
   OPEN SQLDOMAIN_CUR 
   FETCH NEXT FROM SQLDOMAIN_CUR INTO @COLUMN_NAME, @DATA_TYPE 
   WHILE @@FETCH_STATUS = 0 
    BEGIN 
     SET @DOMAINSQL = @DOMAINSQL + 'SELECT ''' + @COLUMN_NAME + ''' AS DomainName, CAST( '+ @COLUMN_NAME + ' AS VARCHAR(4000)) AS DomainElement, COUNT(ISNULL(CAST(' + @COLUMN_NAME + ' AS NVARCHAR(MAX)),'''')) AS DomainCounter FROM ' + @TABLE_SCHEMA + '.' + @TABLE_NAME + ' GROUP BY ' + @COLUMN_NAME + ''
     + ' UNION '
     FETCH NEXT FROM SQLDOMAIN_CUR INTO @COLUMN_NAME, @DATA_TYPE 
   END 
  CLOSE SQLDOMAIN_CUR 
  DEALLOCATE SQLDOMAIN_CUR 
  SET @DOMAINSQL = LEFT(@DOMAINSQL, LEN(@DOMAINSQL) -5) + ' ORDER BY DomainName ASC, DomainCounter DESC '
   EXEC (@DOMAINSQL)
   -- Now calculate percentages (this approach is faster than doing it when performing the domain analysis)
   ; WITH DomainCounter_CTE (DomainName, DomainCounterTotal)
   AS
  (
   SELECT DomainName, SUM(ISNULL(DomainCounter,0)) AS DomainCounterTotal
    FROM #DomainAnalysis 
    GROUP BY DomainName
  )
  UPDATE D
    SET D.DomainPercent = (CAST(D.DomainCounter AS NUMERIC(36,4)) / CAST(CTE.DomainCounterTotal AS NUMERIC(36,4))) * 100
   FROM #DomainAnalysis D
    INNER JOIN DomainCounter_CTE CTE
     ON D.DomainName = CTE.DomainName
   WHERE D.DomainCounter <> 0
 END
-- Advanced analysis
-----------------------------------------------------------------------
-- Output results from the profile and domain data tables
-----------------------------------------------------------------------
select
   *
 from #ProfileData
IF @AdvancedAnalysis = 1
 BEGIN
  select
    *
   from #DomainAnalysis
 END
END TRY
BEGIN CATCH
 SELECT
  ERROR_NUMBER() AS ErrorNumber
 ,ERROR_SEVERITY() AS ErrorSeverity
 ,ERROR_STATE() AS ErrorState
 ,ERROR_PROCEDURE() AS ErrorProcedure
 ,ERROR_LINE() AS ErrorLine
 ,ERROR_MESSAGE() AS ErrorMessage;
 
END CATCH
--this will update only one row that matches id = 1
update sessions
set start_date = '2020-04-20 10:12:15.653',
    end_date = '2020-04-22 15:40:30.123'
where id = 1;

--this will update multiple rows that match category = 1
update sessions
set end_date = null
where category = 1;
SELECT u.id, u.name as user_name, u.email as user_email, st.name as street_name, scr.id as scr_id, scr.name as scr_name, scr.orientation, scr.url_token, c.id AS clip_id, c.title AS clip_title FROM users AS u
JOIN city_user AS cu ON cu.user_id = u.id
JOIN streets AS st ON st.city_id = cu.city_id
JOIN screens AS scr ON scr.street_id = st.id
JOIN clip_screen as cs ON cs.screen_id = scr.id
JOIN clips AS c ON c.id = cs.clip_id
JOIN templates AS t ON t.id = c.template_id
WHERE c.id = 217130
GROUP BY u.id, scr.id, c.id
with tc as (
    select user_id, count(*) as cnt
    from `reby-cloud.analytics_reby_v1_eu.transactions_combined`
    where date(created_at) < current_date
    group by 1
),

trf as (
    select user_id, count(*) as cnt
    from `reby-cloud.analytics_reby_v1_eu.transactions_combined_reassigned_final`
    where date(created_at) < current_date
    group by 1
)
select * from (
select  
    tc.user_id,
    tc.cnt as tc_cnt,
    trf.cnt as trf_cnt
from tc left join trf on tc.user_id = trf.user_id
--where tc.user_id = 'usr_3sqh76qtht1s97xa1qu1'
)
where tc_cnt > trf_cnt
-- these scripts will delete the tables if they already exist

DROP TABLE IF EXISTS StudentEnrollments;
DROP TABLE IF EXISTS Students;
DROP TABLE IF EXISTS Classrooms;


-- create and populate the students table
CREATE TABLE Students
(
	StudentId INTEGER PRIMARY KEY,
	FirstName VARCHAR(200) NOT NULL,
	LastName VARCHAR(200) NOT NULL,
	Nationality VARCHAR(100) NOT NULL,
	DateOfBirth DATETIME NULL
);


INSERT INTO Students
	(StudentId, FirstName, LastName, Nationality, DateOfBirth)
VALUES
	('1','Mickey', 'Mouse', 'American', '1991-05-02'),
	('2','Donald', 'Duck', 'Japanese', '1992-11-12'),
	('3','Goofy', 'Goof', 'American', '1980-04-15'),
	('4','Daisy', 'Duck', 'French', '1985-02-16'),
	('5','Huey', 'Duck', 'French', '1986-05-19'),
	('6','Scrooge', 'McDuck', 'Japanese', '1983-11-11'),
	('7','Minnie', 'Mouse', 'Canadian', '1983-11-30'),
	('8','Louie', 'Duck', 'French', '1985-09-09');

-- create and populate the classroom table
CREATE TABLE Classrooms
(
	ClassroomId INTEGER PRIMARY KEY,
	ClassName VARCHAR(200) NOT NULL,
	Weight DECIMAL NOT NULL
);

INSERT INTO Classrooms
	(ClassroomId, ClassName, Weight)
VALUES
	(1, 'Public Interaction', 0.10),
	(2, 'Pranks', 0.15),
	(3, 'Running', 0.15),
	(4, 'Acting', 0.30),
	(5, 'Making Jokes', 0.30);

-- create and populate the student enrollment table
CREATE TABLE StudentEnrollments
(
	StudentEnrollmentId INTEGER PRIMARY KEY,
	StudentId INTEGER NOT NULL,
	ClassroomId INTEGER NOT NULL,
	Grade DECIMAL NOT NULL,
	FOREIGN KEY(StudentId) REFERENCES Students(StudentId),
	FOREIGN KEY(ClassroomId) REFERENCES Classrooms(ClassroomId)
);

INSERT INTO StudentEnrollments
	(StudentEnrollmentId, StudentId, ClassroomId, Grade)
VALUES
	(1, 1, 1, 91),
	(2, 1, 2, 68),
	(3, 1, 3, 89),
	(4, 1, 4, 60),
	(5, 1, 5, 65),
	(6, 2, 1, 79),
	(7, 2, 2, 85),
	(8, 2, 3, 68),
	(9, 2, 4, 89),
	(10, 2, 5, 80),
	(11, 3, 1, 96),
	(12, 3, 2, 62),
	(13, 3, 3, 78),
	(14, 3, 4, 100),
	(15, 3, 5, 64),
	(16, 4, 1, 81),
	(17, 4, 2, 90),
	(18, 4, 3, 85),
	(19, 4, 4, 95),
	(20, 4, 5, 64),
	(21, 5, 1, 81),
	(22, 5, 2, 73),
	(23, 5, 3, 60),
	(24, 5, 4, 99),
	(25, 5, 5, 70),
	(26, 6, 1, 75),
	(27, 6, 2, 74),
	(28, 6, 3, 69),
	(29, 6, 4, 79),
	(30, 6, 5, 88),
	(31, 7, 1, 60),
	(32, 7, 2, 75),
	(33, 7, 3, 82),
	(34, 7, 4, 66),
	(35, 7, 5, 65),
	(36, 8, 1, 69),
	(37, 8, 2, 81),
	(38, 8, 3, 100),
	(39, 8, 4, 63),
	(40, 8, 5, 62);
CREATE TABLE tbl_price_history (
	pk_price_history SERIAL PRIMARY KEY,
	fk_sku character(10) REFERENCES tbl_sku (sku),
	date_of_change date,
	retail_price numeric
);
SELECT (DATE '2011-01-28', DATE '2011-02-01' + 1) OVERLAPS
       (DATE '2011-02-01', DATE '2011-02-01'    )
select
    v.printed_code,
    id_to_text(vd.id) as id_,
    id_to_time(vd.id) as created_at,
    vd.*
from vehicle_diagnostic vd 
left join vehicle v on vd.vehicle_id = v.id
where v.printed_code = '999007'
order by 3 desc
----
select
    v.printed_code,
    id_to_text(vr.id) as id_,
    id_to_time(vr.id) as created_at,
    vr.*
from vehicle_repair vr
left join vehicle v on vr.vehicle_id = v.id
where v.printed_code = '999007'
order by 3 desc
----
----
select
    *
from vehicle_distribution v
order by id desc
Better: COPY {table_name}({table_columns}) FROM STDIN WITH (DELIMITER ',', NULL '?', FORMAT CSV, HEADER TRUE);


Also Possible, but worse: copy r from 'file.csv' WITH NULL AS 'NULL' DELIMITER ';' CSV HEADER;
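
A minimal usage sketch of the preferred client-side form, with hypothetical names (prices, sku, price, prices.csv) standing in for the placeholders above:

-- hypothetical table/file names, shown only to illustrate the STDIN form
-- from the shell: psql -d mydb -c "\copy prices(sku, price) FROM 'prices.csv' WITH (DELIMITER ',', NULL '?', FORMAT CSV, HEADER TRUE)"
COPY prices(sku, price) FROM STDIN WITH (DELIMITER ',', NULL '?', FORMAT CSV, HEADER TRUE);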
-- updating a table during runtime of the script with a join

ALTER TABLE shops_per_city ADD merchant_type DOUBLE;
ALTER TABLE shops_per_city ADD city_latitude DOUBLE;

UPDATE shops_per_city AS shop
INNER JOIN city_list AS city
ON shop.city_id = city.city_id
SET shop.city_latitude = city.city_latitude;


-- selecting the list for a where statement with a subquery

SELECT * 
FROM shopstation
WHERE Dealer_ID_Long IN 
	(select Dealer_ID_Long
	FROM shop_list);
	
-- also possible to join on a subquery (notice that there is no ; in the subquery)
SELECT * -- this select needs to have all columns of the result table
FROM shopcounter_category_daily_v2 AS shop
INNER JOIN 
	(SELECT *
	FROM weather_owm_daily_2014
	WHERE cityId = 2761369) AS weather
ON shop.RECORD_DATE = weather.DATE;

-- update a column using CASE
UPDATE creditcard_merchtype 
SET online =
	(CASE
    	WHEN Merchantname LIKE "%.at%" THEN 1
    	WHEN Merchantname LIKE "%.com%" THEN 1
    	ELSE 0
	END);
	
-- alter column name	
ALTER TABLE creditcard_at_v1 CHANGE cityId city_id INT;


-- creating temporary table for later use:

CREATE TEMPORARY TABLE id_list AS
SELECT COUNT(*) AS days, dealer_name, DEALER_ID, Dealer_ID_Long
FROM shopcounter
WHERE YEAR(RECORD_DATE) > 2017
GROUP BY dealer_name, DEALER_ID, Dealer_ID_Long
HAVING days > 752;

CREATE TABLE shopcounter_stable AS
SELECT *
FROM shopcounter
WHERE Dealer_ID_Long IN 
	(SELECT Dealer_ID_Long
	FROM id_list) && YEAR(RECORD_DATE) > 2017;
#!/bin/bash -v

hadoop fs -put ./AllstarFull.csv /user/bigdataproject/AllstarFull.csv
hive -f ./AllstarFull.hive






hadoop fs -put ./Appearances.csv /user/bigdataproject/Appearances.csv
hive -f ./Appearances.hive






hadoop fs -put ./AwardsManagers.csv /user/bigdataproject/AwardsManagers.csv
hive -f ./AwardsManagers.hive
insert into table db_name.table_name
select 'ALL','Done';
-- create the databases
CREATE DATABASE IF NOT EXISTS projectone;

-- create the users for each database
CREATE USER 'projectoneuser'@'%' IDENTIFIED BY 'somepassword';
GRANT CREATE, ALTER, INDEX, LOCK TABLES, REFERENCES, UPDATE, DELETE, DROP, SELECT, INSERT ON `projectone`.* TO 'projectoneuser'@'%';

FLUSH PRIVILEGES;
RUN sed -Ei 's/^(bind-address|log)/#&/' /etc/mysql/my.cnf
select * 
from folder f
  join uploads u ON u.id = f.folderId 
where '8' = ANY (string_to_array(some_column,','))
SELECT string_to_array('xx~^~yy~^~zz', '~^~', 'yy');


ALTER TABLE test1 ADD COLUMN id SERIAL PRIMARY KEY;

This is all you need to:

Add the id column
Populate it with a sequence from 1 to count(*).
Set it as primary key / not null.
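
A small self-contained sketch of those three steps on a hypothetical demo_items table (PostgreSQL; names are illustrative only):

-- hypothetical table: the single ALTER below adds id, backfills it from a sequence, and makes it the primary key
CREATE TABLE demo_items (name TEXT);
INSERT INTO demo_items (name) VALUES ('a'), ('b'), ('c');

ALTER TABLE demo_items ADD COLUMN id SERIAL PRIMARY KEY;

-- id is now populated 1..count(*) and NOT NULL
SELECT id, name FROM demo_items ORDER BY id;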
SELECT POSITION('our' in 'w3resource');


SELECT id, account_id, amount, created_at, null as type
 	FROM subscription_invoice
UNION ALL
SELECT id, account_id, amount, created_at, 
	`type` as type
FROM subscription_payment
ORDER BY created_at DESC
// Assumes an async `batch` task list and a node-postgres `client` with copyFrom() are defined elsewhere
var fs = require('fs');

function processSQLFile(fileName) {

  // Extract SQL queries from files. Assumes no ';' in the fileNames
  var queries = fs.readFileSync(fileName).toString()
    .replace(/(\r\n|\n|\r)/gm," ") // remove newlines
    .replace(/\s+/g, ' ') // excess white space
    .split(";") // split into all statements
    .map(Function.prototype.call, String.prototype.trim)
    .filter(function(el) {return el.length != 0}); // remove any empty ones

  // Execute each SQL query sequentially
  queries.forEach(function(query) {
    batch.push(function(done) {
      if (query.indexOf("COPY") === 0) { // COPY - needs special treatment
        var regexp = /COPY\ (.*)\ FROM\ (.*)\ DELIMITERS/gmi;
        var matches = regexp.exec(query);
        var table = matches[1];
        var fileName = matches[2];
        var copyString = "COPY " + table + " FROM STDIN DELIMITERS ',' CSV HEADER";
        var stream = client.copyFrom(copyString);
        stream.on('close', function () {
          done();
        });
        var csvFile = __dirname + '/' + fileName;
        var str = fs.readFileSync(csvFile);
        stream.write(str);
        stream.end();
      } else { // Other queries don't need special treatment
        client.query(query, function(result) {
          done();
        });
      }
    });
  });
}
SELECT 
    orderNumber,
    orderDate,
    customerName,
    orderLineNumber,
    productName,
    quantityOrdered,
    priceEach
FROM
    orders
INNER JOIN orderdetails 
    USING (orderNumber)
INNER JOIN products 
    USING (productCode)
INNER JOIN customers 
    USING (customerNumber)
ORDER BY 
    orderNumber, 
    orderLineNumber;
ALTER PARTITION SCHEME [sh_trans_date]
 NEXT USED [PRIMARY]
 ALTER PARTITION FUNCTION [pf_trans_date_byhour]() SPLIT RANGE('2019/03/29')
ALTER DATABASE database CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;
CREATE USER 'user'@'localhost' IDENTIFIED BY 'pass';
GRANT ALL PRIVILEGES ON database.* TO 'user'@'localhost';
SELECT 
   SS.SEC_NAME,
   STUFF((SELECT '; ' + US.USR_NAME 
          FROM USRS US
          WHERE US.SEC_ID = SS.SEC_ID
          ORDER BY USR_NAME
          FOR XML PATH('')), 1, 1, '') [SECTORS/USERS]
FROM SALES_SECTORS SS
GROUP BY SS.SEC_ID, SS.SEC_NAME
ORDER BY 1
SELECT
    TableName = tbl.table_schema + '.' + tbl.table_name, 
    TableDescription = tableProp.value,
    ColumnName = col.column_name, 
    ColumnDataType = col.data_type,
    ColumnDescription = colDesc.ColumnDescription
FROM information_schema.tables tbl
INNER JOIN information_schema.columns col 
    ON col.table_name = tbl.table_name
LEFT JOIN sys.extended_properties tableProp 
    ON tableProp.major_id = object_id(tbl.table_schema + '.' + tbl.table_name) 
        AND tableProp.minor_id = 0
        AND tableProp.name = 'MS_Description' 
LEFT JOIN (
    SELECT sc.object_id, sc.column_id, sc.name, colProp.[value] AS ColumnDescription
    FROM sys.columns sc
    INNER JOIN sys.extended_properties colProp
        ON colProp.major_id = sc.object_id
            AND colProp.minor_id = sc.column_id
            AND colProp.name = 'MS_Description' 
) colDesc
    ON colDesc.object_id = object_id(tbl.table_schema + '.' + tbl.table_name)
        AND colDesc.name = col.COLUMN_NAME
WHERE tbl.table_type = 'base table'
--AND tableProp.[value] IS NOT NULL OR colDesc.ColumnDescription IS NOT null
mysql > SET GLOBAL sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY',''));
SELECT column_name as 'Column Name', data_type as 'Data Type'
FROM information_schema.columns
WHERE table_name = 'hrStaff' 
-- From https://github.com/bertwagner/SQLServer/blob/master/Non-SARGable%20Execution%20Plans.sql
-- This script will check the execution plan cache for any queries that are non-SARGable.
-- It does this by finding table and index scans that contain a scalar operators

SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED
 
DECLARE @dbname SYSNAME
SET @dbname = QUOTENAME(DB_NAME());
 
WITH XMLNAMESPACES (DEFAULT 'http://schemas.microsoft.com/sqlserver/2004/07/showplan')

SELECT
   stmt.value('(@StatementText)[1]', 'varchar(max)') AS [Query],
   query_plan AS [QueryPlan],
   sc.value('(.//Identifier/ColumnReference/@Schema)[1]', 'varchar(128)') AS [Schema], 
   sc.value('(.//Identifier/ColumnReference/@Table)[1]', 'varchar(128)') AS [Table], 
   sc.value('(.//Identifier/ColumnReference/@Column)[1]', 'varchar(128)') AS [Column] ,
   CASE WHEN s.exist('.//TableScan') = 1 THEN 'TableScan' ELSE 'IndexScan' END AS [ScanType],
   sc.value('(@ScalarString)[1]', 'varchar(128)') AS [ScalarString]
FROM 
	sys.dm_exec_cached_plans AS cp
	CROSS APPLY sys.dm_exec_query_plan(cp.plan_handle) AS qp
	CROSS APPLY query_plan.nodes('/ShowPlanXML/BatchSequence/Batch/Statements/StmtSimple') AS batch(stmt)
	CROSS APPLY stmt.nodes('.//RelOp[TableScan or IndexScan]') AS scan(s)
	CROSS APPLY s.nodes('.//ScalarOperator') AS scalar(sc)
WHERE
    s.exist('.//ScalarOperator[@ScalarString]!=""') = 1 
    AND sc.exist('.//Identifier/ColumnReference[@Database=sql:variable("@dbname")][@Schema!="[sys]"]') = 1
	AND sc.value('(@ScalarString)[1]', 'varchar(128)') IS NOT NULL
select 
				   a.id_transaksi_suretyship,
                   b.jenis_suretyship,
                   c.suretyship,
                   d.nomor_sk,
                   d.tgl_sk,
				   d.tgl_sk_cetak,
                   e.nama_principal,
                   f.nama_obligee,
				   a.flag_deletion,
				   a.id_reference,
                   a.nilai_proyek,
                   a.nilai_bond,
                   a.nilai_ijp,
				   a.fee_base persen_fee_base,
				   a.fee_base * a.nilai_ijp/100 fee_base,
				   a.komisi_agen persen_komisi_agen,
				   a.komisi_agen*a.nilai_ijp/100 komisi_agen,
				   a.persen_reasuransi ,
				   --a.persen_reasuransi * a.nilai_ijp/100 reasuransi,
				   a.reasuransi,
				   --coalesce(a.fee_base/100,0)*a.persen_reasuransi * a.nilai_ijp/100 reasuransi_fee_base,
				   a.reasuransi_fee_base,
				   a.persen_coguarantee,
				   a.persen_coguarantee * a.nilai_ijp/100 coguarantee,
				   a.potongan_co persen_potongan_co,
				   a.potongan_co * a.nilai_ijp/100 potongan_co,
				   a.potongan_re persen_potongan_re,
				   a.potongan_re * a.nilai_ijp/100 potongan_re,
				   to_char(a.periode_berlaku + (a.jangka_waktu)* '1 day'::interval, 'DD-MM-YYYY')  jatuh_tempo,
				   to_char(a.periode_berlaku, 'DD-MM-YYYY') periode_berlaku,
				   coalesce(e.jumlah_karyawan,0) tenaga_kerja,
				   --a.nilai_bond * (100-coalesce(a.persen_reasuransi,0))/100 as plafond_or
				   a.plafond_or,
				   a.penjaminan nilai_penjaminan,
				   c.is_konvensional,
				   0 nilai_asuransi_1,
					0 nilai_asuransi_2,
				  (a.fee_base*(a.persen_coguarantee*a.nilai_ijp/100)/100)  coguarantee_fee_base,
				 CASE
				        when cast(usia AS varchar) IS NULL then cast(tanggal_usia AS varchar)
					    else cast(usia AS varchar)
				 END umur,
				  '--' note,
				  case
				  	when d.tgl_sk >= '2016-12-01' and d.tgl_sk <= '2017-11-30' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*5/100
				  	when d.tgl_sk >= '2017-12-01' and d.tgl_sk <= '2019-08-31' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*7.5/100
				  	when d.tgl_sk > '2019-09-01' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*10/100
				  	else 0
				  	end ri_com,
						CASE
							when  d.tgl_sk >= '2019-09-30' and d.tgl_sk <= '2020-04-30' then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*5/100
							when  d.tgl_sk >= '2020-05-01'  then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*7.5/100
							ELSE 0
							end komisi_co,
							0 komisi_asuransi
							--(ijp cogar - feebase cogar) x persen komisi

						

                from 
                  v_tr_transaksi_suretyship a
                  inner join mt_jenis_suretyship b on a.id_jenis_suretyship = b.id_jenis_suretyship
                  inner join mt_suretyship c on b.id_mt_suretyship = c.id_mt_suretyship
                  inner join tr_sertifikat_penjaminan d on a.id_sertifikat = d.id_sertifikat
                  inner join mt_principal e on a.id_principal = e.id_principal
                  inner join mt_obligee f on a.id_obligee = f.id_obligee
				  Left join mt_agen g on a.id_mt_agen=g.id_mt_agen
				  left join mt_broker h on d.reas_id_broker = h.id_broker
    	 
	            where 
				--( a.flag_deletion is null OR (a.flag_deletion is not null and a.change_date > '2020-07-31') ) AND
				--d.id_reference is null and
				--a.flag_deletion is null and
	              -- d.tgl_sk >='2015-01-01'  and d.tgl_sk<='2020-07-31'
				  a.flag_deletion is null and 		
					d.flag_delete is null and 
					( 
						( tgl_sk >='2015-01-01' and  tgl_sk <='2020-07-31'  and 
							( tgl_sk_cetak is null or ( tgl_sk_cetak is not null and d.id_reference is null) ) 
						) OR  
						
						( tgl_sk_cetak >='2015-01-01' and  tgl_sk_cetak <='2020-07-31' and d.id_reference is not null)
					)  
					
	          
				 union all
				 select 
					a.id_transaksi_suretyship,
                   b.jenis_suretyship,
                   c.suretyship,
                   d.nomor_sk,
                   d.tgl_sk,
				   d.tgl_sk_cetak,
                   e.nama_principal,
                   f.nama_bank_cabang as nama_obligee,
				   a.flag_deletion,
				   a.id_reference,
                   a.nilai_proyek,
                   a.nilai_bond,
                   a.nilai_ijp,
				   a.fee_base persen_fee_base,
				   a.fee_base * a.nilai_ijp/100 fee_base,
				   a.komisi_agen persen_komisi_agen,
				   a.komisi_agen*a.nilai_ijp/100 komisi_agen,
				   a.persen_reasuransi ,
				   --a.persen_reasuransi * a.nilai_ijp/100 reasuransi,
				   a.reasuransi,
				   --coalesce(a.fee_base/100,0)*a.persen_reasuransi * a.nilai_ijp/100 reasuransi_fee_base,
				   a.reasuransi_fee_base,
				   a.persen_coguarantee,
				   a.persen_coguarantee * a.nilai_ijp/100 coguarantee,
				   a.potongan_co persen_potongan_co,
				   a.potongan_co * a.nilai_ijp/100 potongan_co,
				   a.potongan_re persen_potongan_re,
				   a.potongan_re * a.nilai_ijp/100 potongan_re,
				   to_char(a.periode_berlaku + (a.jangka_waktu)* '1 MONTH'::interval, 'DD-MM-YYYY') jatuh_tempo,
				   to_char(a.periode_berlaku, 'DD-MM-YYYY') periode_berlaku,
				   coalesce(e.jumlah_karyawan,0) tenaga_kerja,
				   --a.nilai_bond * (100-coalesce(a.persen_reasuransi,0))/100 as plafond_or
					a.plafond_or,
					a.penjaminan nilai_penjaminan,
					c.is_konvensional, 
					0 nilai_asuransi_1,
					0 nilai_asuransi_2,
					(a.fee_base*(a.persen_coguarantee*a.nilai_ijp/100)/100)  coguarantee_fee_base,
					case
					    when cast(usia AS varchar) IS NULL then cast(tanggal_usia AS varchar)
					    else cast(usia AS varchar)
					end    
					 umur,
				    '--' note,
				  case
				  	when d.tgl_sk >= '2016-12-01' and d.tgl_sk <= '2017-11-30' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*5/100
				  	when d.tgl_sk >= '2017-12-01' and d.tgl_sk <= '2019-08-31' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*7.5/100
				  	when d.tgl_sk > '2019-09-01' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*10/100
				  	else 0
				  	end ri_com,
						CASE
							when  d.tgl_sk >= '2019-09-30' and d.tgl_sk <= '2020-04-30' then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*5/100
							when  d.tgl_sk >= '2020-05-01'  then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*7.5/100
							ELSE 0
							end komisi_co,
							0 komisi_asuransi
                from 
                  v_tr_transaksi_suretyship a
                  inner join mt_jenis_suretyship b on a.id_jenis_suretyship = b.id_jenis_suretyship
                  inner join mt_suretyship c on b.id_mt_suretyship = c.id_mt_suretyship
                  inner join tr_sertifikat_penjaminan d on a.id_sertifikat = d.id_sertifikat
                  inner join mt_principal e on a.id_principal = e.id_principal
                  inner join mt_bank_cabang f on d.id_mt_bank_cabang = f.id_mt_bank_cabang
				  Left join mt_agen g on a.id_mt_agen=g.id_mt_agen
				  left join mt_broker h on d.reas_id_broker = h.id_broker
    	 
	            where
				--	( a.flag_deletion is null OR (a.flag_deletion is not null and a.change_date > '2020-07-31') ) AND				
				--d.id_reference is null and
				--	a.flag_deletion is null and
	             --  d.tgl_sk >='2015-01-01'  and d.tgl_sk<='2020-07-31'
				 a.flag_deletion is null and 		
				d.flag_delete is null and 
				( 
					( tgl_sk >='2015-01-01' and  tgl_sk <='2020-07-31'  and 
						( tgl_sk_cetak is null or ( tgl_sk_cetak is not null and d.id_reference is null) ) 
					) OR  
					
					( tgl_sk_cetak >='2015-01-01' and  tgl_sk_cetak <='2020-07-31' and d.id_reference is not null)
				) and 
				c.id_mt_suretyship != 9
	              
	                union all
				 select 
					a.id_transaksi_suretyship,
                   b.jenis_suretyship,
                   c.suretyship,
                   d.nomor_sk,
                   d.tgl_sk,
				   d.tgl_sk_cetak,
                   e.nama_principal,
                   f.nama_bank_cabang as nama_obligee,
				   a.flag_deletion,
				   a.id_reference,
                   a.nilai_proyek,
                   a.nilai_bond,
                   a.nilai_ijp,
				   a.fee_base persen_fee_base,
				   a.fee_base * a.nilai_ijp/100 fee_base,
				   a.komisi_agen persen_komisi_agen,
				   a.komisi_agen*a.nilai_ijp/100 komisi_agen,
				   a.persen_reasuransi ,
				   --a.persen_reasuransi * a.nilai_ijp/100 reasuransi,
				   a.reasuransi,
				   --coalesce(a.fee_base/100,0)*a.persen_reasuransi * a.nilai_ijp/100 reasuransi_fee_base,
				   a.reasuransi_fee_base,
				   a.persen_coguarantee,
				   a.persen_coguarantee * a.nilai_ijp/100 coguarantee,
				   a.potongan_co persen_potongan_co,
				   a.potongan_co * a.nilai_ijp/100 potongan_co,
				   a.potongan_re persen_potongan_re,
				   a.potongan_re * a.nilai_ijp/100 potongan_re,
				   to_char(a.periode_berlaku + (a.jangka_waktu)* '1 MONTH'::interval, 'DD-MM-YYYY') jatuh_tempo,
				   to_char(a.periode_berlaku, 'DD-MM-YYYY') periode_berlaku,
				   coalesce(e.jumlah_karyawan,0) tenaga_kerja,
				   --a.nilai_bond * (100-coalesce(a.persen_reasuransi,0))/100 as plafond_or
					a.plafond_or,
					a.penjaminan nilai_penjaminan,
					c.is_konvensional, 
					nilai_asuransi_1,
					nilai_asuransi_2,
					(a.fee_base*(a.persen_coguarantee*a.nilai_ijp/100)/100)  coguarantee_fee_base,
					case
					    when cast(usia AS varchar) IS NULL then cast(tanggal_usia AS varchar)
					    else cast(usia AS varchar)
					end    
					 umur,
				    a.catatan note,
				  case
				  	when d.tgl_sk >= '2016-12-01' and d.tgl_sk <= '2017-11-30' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*5/100
				  	when d.tgl_sk >= '2017-12-01' and d.tgl_sk <= '2019-08-31' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*7.5/100
				  	when d.tgl_sk > '2019-09-01' then 
				  	(coalesce(a.reasuransi,0)-(coalesce(a.reasuransi_fee_base,0)))*10/100
				  	else 0
				  	end ri_com,
						CASE
							when  d.tgl_sk >= '2019-09-30' and d.tgl_sk <= '2020-04-30' then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*5/100
							when  d.tgl_sk >= '2020-05-01'  then
							((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)-((COALESCE(a.persen_coguarantee,0)* a.nilai_ijp/100)*(COALESCE(a.fee_base,0)/100)))*7.5/100
							ELSE 0
							end komisi_co,
							nilai_asuransi_2 * 5/100 komisi_asuransi
                from 
                  v_tr_transaksi_suretyship_asuransi a
                  inner join mt_jenis_suretyship b on a.id_jenis_suretyship = b.id_jenis_suretyship
                  inner join mt_suretyship c on b.id_mt_suretyship = c.id_mt_suretyship
                  inner join tr_sertifikat_penjaminan d on a.id_sertifikat = d.id_sertifikat
                  inner join mt_principal e on a.id_principal = e.id_principal
                  inner join mt_bank_cabang f on d.id_mt_bank_cabang = f.id_mt_bank_cabang
				  Left join mt_agen g on a.id_mt_agen=g.id_mt_agen
				  left join mt_broker h on d.reas_id_broker = h.id_broker
    	          left join (
                    SELECT id_transaksi_suretyship, 
                            sum(nilai_asuransi_1)  nilai_asuransi_1, 
                            sum(nilai_asuransi_2)  nilai_asuransi_2 FROM (
                                SELECT
                                    id_transaksi_suretyship,
                                        CASE 
                                                    WHEN  
                                                        id_asuransi = 1
                                                    THEN
                                                        nilai_asuransi
                                                    ELSE
                                                        0
                                                END AS nilai_asuransi_1,
                                        CASE 
                                                    WHEN  
                                                        id_asuransi = 2
                                                    THEN
                                                        nilai_asuransi
                                                    ELSE
                                                        0
                                                END AS nilai_asuransi_2 
                                FROM
                                tr_transaksi_suretyship_ext	 
                            ) ss GROUP BY id_transaksi_suretyship
                ) i ON a.id_transaksi_suretyship = i.id_transaksi_suretyship
	            where
				--	( a.flag_deletion is null OR (a.flag_deletion is not null and a.change_date > '2020-07-31') ) AND				
				--d.id_reference is null and
				--	a.flag_deletion is null and
	             --  d.tgl_sk >='2015-01-01'  and d.tgl_sk<='2020-07-31'
				 a.flag_deletion is null and 		
				d.flag_delete is null and 
				( 
					( tgl_sk >='2015-01-01' and  tgl_sk <='2020-07-31'  and 
						( tgl_sk_cetak is null or ( tgl_sk_cetak is not null and d.id_reference is null) ) 
					) OR  
					
					( tgl_sk_cetak >='2015-01-01' and  tgl_sk_cetak <='2020-07-31' and d.id_reference is not null)
				) and 
				c.id_mt_suretyship = 9
	               
ALTER TABLE provider DROP PRIMARY KEY, ADD PRIMARY KEY(person, place, thing);
Use the SUM aggregate function with a CASE statement, as shown below. If you want the sums of t.value broken out per t.clock, add a GROUP BY on that column so the clock is returned alongside the summed values.

select sum(case when i.hostid = '223344' and t.itemid = '0223344' then t.value end) as FirstValue,
       sum(case when i.hostid = '112233' and t.itemid = '0112233' then t.value end) as SecondValue
from hosts h, items i, history_uint t
where i.hostid = h.hostid and t.itemid = i.itemid
If the result should be grouped by t.clock, then:

select t.clock,
       sum(case when i.hostid = '223344' and t.itemid = '0223344' then t.value end) as FirstValue,
       sum(case when i.hostid = '112233' and t.itemid = '0112233' then t.value end) as SecondValue
from hosts h, items i, history_uint t
where i.hostid = h.hostid and t.itemid = i.itemid
group by t.clock

You can also try the logic below:

SELECT i.hostid,t.itemid,t.value as OneValues, t.clock as time
FROM hosts h 
INNER JOIN items i 
    ON i.hostid=h.hostid
    AND i.hostid IN ('223344','112233')
    AND i.itemid IN ('0223344','0112233')
INNER JOIN  history_uint t 
    ON t.itemid=i.itemid
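
If you also want aggregated totals with this explicit-JOIN style, the two approaches can be combined. This is only a sketch, assuming the same hosts/items/history_uint schema and the example hostid/itemid values used above:

-- Conditional aggregation layered on the explicit JOINs, one row per clock value
SELECT t.clock,
       SUM(CASE WHEN i.hostid = '223344' AND t.itemid = '0223344' THEN t.value END) AS FirstValue,
       SUM(CASE WHEN i.hostid = '112233' AND t.itemid = '0112233' THEN t.value END) AS SecondValue
FROM hosts h
INNER JOIN items i
    ON i.hostid = h.hostid
    AND i.hostid IN ('223344', '112233')
    AND i.itemid IN ('0223344', '0112233')
INNER JOIN history_uint t
    ON t.itemid = i.itemid
GROUP BY t.clock;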
' Collect the selected ListBox items into a single comma-separated string
Dim tempo = ListBox1.SelectedItems
Dim students As String = ""
For Each selectedItem In tempo
    students = selectedItem.ToString() & "," & students
Next

#sql
star

Thu Mar 11 2021 06:54:31 GMT+0000 (Coordinated Universal Time)

#sql
star

Tue Mar 09 2021 19:08:44 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/11053567/inserting-data-into-hive-table

#sql
star

Tue Mar 09 2021 19:07:44 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/11053567/inserting-data-into-hive-table

#sql
star

Wed Feb 24 2021 20:06:09 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/43322033/create-database-on-docker-compose-startup

#sql #docker
star

Wed Feb 24 2021 19:37:28 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/34633961/mysql-bind-address-in-a-docker-container

#sql
star

Tue Feb 02 2021 18:00:05 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/35169412/mysql-find-in-set-equivalent-to-postgresql

#sql
star

Tue Feb 02 2021 17:58:47 GMT+0000 (Coordinated Universal Time) https://w3resource.com/PostgreSQL/postgresql_string_to_array-function.php

#sql
star

Tue Feb 02 2021 17:48:42 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/2944499/how-to-add-an-auto-incrementing-primary-key-to-an-existing-table-in-postgresql

#sql
star

Tue Feb 02 2021 17:32:55 GMT+0000 (Coordinated Universal Time) https://w3resource.com/PostgreSQL/position-function.php

#sql
star

Fri Dec 11 2020 11:39:30 GMT+0000 (Coordinated Universal Time)

#sql #mysql
star

Fri Nov 27 2020 18:42:18 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/22636388/import-sql-file-in-node-js-and-execute-against-postgresql

#sql #nodejs
star

Fri Nov 13 2020 21:36:50 GMT+0000 (Coordinated Universal Time) https://www.mysqltutorial.org/mysql-inner-join.aspx/

#sql
star

Wed Nov 11 2020 15:40:19 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/55377661/drop-partition-by-name-sql-server

#sql
star

Tue Oct 27 2020 17:03:36 GMT+0000 (Coordinated Universal Time)

#sql #mysql
star

Tue Oct 27 2020 17:02:54 GMT+0000 (Coordinated Universal Time)

#sql
star

Tue Oct 13 2020 21:08:05 GMT+0000 (Coordinated Universal Time) https://www.mssqltips.com/sqlservertip/2914/rolling-up-multiple-rows-into-a-single-row-and-column-for-sql-server-data/

#sql
star

Sun Oct 11 2020 21:55:16 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/27435839/how-to-list-active-connections-on-postgresql

#sql
star

Sun Oct 11 2020 00:39:53 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/887370/sql-server-extract-table-meta-data-description-fields-and-their-data-types

#sql
star

Mon Sep 21 2020 07:46:18 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/23921117/disable-only-full-group-by

#sql
star

Thu Sep 17 2020 20:48:35 GMT+0000 (Coordinated Universal Time)

#sql
star

Sun Aug 16 2020 11:59:20 GMT+0000 (Coordinated Universal Time) https://bertwagner.com/2017/08/22/how-to-search-and-destroy-non-sargable-queries-on-your-server/

#sql #query
star

Mon Aug 10 2020 10:41:37 GMT+0000 (Coordinated Universal Time)

#sql
star

Thu Aug 06 2020 13:55:46 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/a/39816161/6942743

#sql #database #querying-data
star

Sun Jul 05 2020 14:40:15 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/8859353/alter-table-to-add-a-composite-primary-key

#sql
star

Wed Apr 01 2020 08:18:27 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/60963447/sql-how-to-sum-select

#sql #sum
star

Wed Apr 01 2020 07:15:27 GMT+0000 (Coordinated Universal Time) https://stackoverflow.com/questions/60963585/storing-selected-items-from-listbox-for-sql-where-statement

#sql
