Can I trigger a job based on another job? [duplicate] - firebase

I have some queries that run hourly and export the data from BigQuery to GCS. The daily tables seem to be updated over the next 2 days. Has anyone found a way to get the delta rows between what was exported to GCS and what was inserted in BigQuery?
This is what I am currently running (which ignores the missing data) as a scheduled script
DECLARE tables ARRAY <STRING>;
CREATE TABLE IF NOT EXISTS `project.analytics_xxx.daily_export_log`
(
table_name STRING,
insert_date TIMESTAMP
);
SET tables = (SELECT
ARRAY_AGG(TABLE_NAME) TABLES
FROM
`project.analytics_xxx.INFORMATION_SCHEMA.TABLES`
WHERE
REGEXP_CONTAINS(TABLE_NAME, 'events_\\d{8}') AND
TABLE_NAME NOT IN (SELECT TABLE_NAME FROM `project.analytics_xxx.daily_export_log`)
);
FOR tab IN
(SELECT * FROM UNNEST(tables))
DO
EXECUTE IMMEDIATE '''
EXPORT DATA
OPTIONS ( uri = CONCAT('gs://bucket/live/', format_timestamp('%Y/%m/%d/', current_timestamp()), ''' || "'" || tab.f0_ || "'" || ''', '/*_', format_timestamp('%Y%m%d%H%M%S', current_timestamp()), '.json.gz'),
format='JSON',
compression='GZIP',
overwrite=FALSE ) AS
SELECT * FROM `project.analytics_xxx.''' || tab.f0_ || '''` ''';
EXECUTE IMMEDIATE '''
INSERT INTO `project.analytics_xxx.daily_export_log` SELECT ''' || "'" || tab.f0_ || "'" || ''' table_name, current_timestamp() insert_date
''';
END FOR
I tried using event_timestamp + event_server_timestamp_offset as a cutoff measure but that failed as event_timestamp is device synced so if users have altered their system time then their timestamps would be way off.

I've managed to fix the issue by appending a gcs_export_timestamp column to every export table.
DECLARE tables ARRAY <STRING>;
/************************************************************************************************************
* DAILY EVENTS EXPORT *
************************************************************************************************************/
CREATE TABLE IF NOT EXISTS `project.analytics_xxx.daily_export_log`
(
table_name STRING,
insert_date TIMESTAMP
)
OPTIONS
(
expiration_timestamp=TIMESTAMP "3000-01-01"
);
SET tables = (SELECT
ARRAY_AGG(TABLE_NAME) TABLES
FROM
`project.analytics_xxx.INFORMATION_SCHEMA.TABLES`
WHERE
REGEXP_CONTAINS(TABLE_NAME, 'events_\\d{8}') AND
TABLE_NAME NOT IN (SELECT TABLE_NAME FROM `project.analytics_xxx.daily_export_log`)
);
FOR tab IN
(SELECT * FROM UNNEST(tables))
DO
EXECUTE IMMEDIATE '''
EXPORT DATA
OPTIONS (
uri = CONCAT('gs://bucket/live/', format_timestamp('%Y/%m/%d/', current_timestamp()), ''' || "'" || tab.f0_ || "'" || ''', '/*_', format_timestamp('%Y%m%d%H%M%S', current_timestamp()), '.json.gz'),
format='JSON',
compression='GZIP',
overwrite=FALSE
) AS
SELECT * FROM `project.analytics_xxx.''' || tab.f0_ || '''`
''';
EXECUTE IMMEDIATE '''
INSERT INTO `project.analytics_xxx.daily_export_log` SELECT ''' || "'" || tab.f0_ || "'" || ''' table_name, current_timestamp() insert_date
''';
EXECUTE IMMEDIATE '''
ALTER TABLE `project.analytics_xxx.''' || tab.f0_ || '''`
ADD COLUMN IF NOT EXISTS gcs_export_timestamp TIMESTAMP
''';
EXECUTE IMMEDIATE '''
UPDATE `project.analytics_xxx.''' || tab.f0_ || '''` SET
gcs_export_timestamp = current_timestamp()
WHERE gcs_export_timestamp IS NULL
''';
END FOR;
/************************************************************************************************************
* DELAYED EVENTS EXPORT *
************************************************************************************************************/
SET tables = (SELECT
ARRAY_AGG(TABLE_NAME) TABLES
FROM
`project.analytics_xxx.INFORMATION_SCHEMA.TABLES`
WHERE
REGEXP_CONTAINS(TABLE_NAME, 'events_\\d{8}')
);
FOR tab IN
(SELECT * FROM UNNEST(tables))
DO
/************ CHECK IF DAILY TABLE WAS OVERWRITTEN AND RE-ADD THE gcs_export_timestamp COLUMN ************/
BEGIN
IF (SELECT 1 FROM `region-us`.INFORMATION_SCHEMA.COLUMNS WHERE table_name = tab.f0_ AND column_name = 'gcs_export_timestamp') IS NULL THEN
IF (SELECT 1 FROM `project.analytics_xxx.daily_export_log` WHERE table_name = tab.f0_) IS NOT NULL THEN
EXECUTE IMMEDIATE '''
ALTER TABLE `project.analytics_xxx.''' || tab.f0_ || '''`
ADD COLUMN IF NOT EXISTS gcs_export_timestamp TIMESTAMP
''';
EXECUTE IMMEDIATE '''
UPDATE `project.analytics_xxx.''' || tab.f0_ || '''` SET
gcs_export_timestamp = (SELECT insert_date FROM `project.analytics_xxx.daily_export_log` WHERE table_name = \'''' || tab.f0_ || '''\')
WHERE gcs_export_timestamp IS NULL
''';
END IF;
END IF;
END;
/***************************** EXPORT DATA WHERE gcs_export_timestamp IS NULL ****************************/
BEGIN
EXECUTE IMMEDIATE '''
CREATE OR REPLACE TEMP TABLE _SESSION.tmp AS
SELECT * FROM `project.analytics_xxx.''' || tab.f0_ || '''` WHERE gcs_export_timestamp IS NULL
''';
IF (SELECT COUNT(*) cnt FROM _SESSION.tmp) > 0 THEN
EXECUTE IMMEDIATE '''
EXPORT DATA
OPTIONS (
uri = CONCAT('gs://bucket/delayed/', format_timestamp('%Y/%m/%d/', current_timestamp()), ''' || "'" || tab.f0_ || "'" || ''', '/*_', format_timestamp('%Y%m%d%H%M%S', current_timestamp()), '.json.gz'),
format='JSON',
compression='GZIP',
overwrite=FALSE
) AS
SELECT * FROM _SESSION.tmp
''';
EXECUTE IMMEDIATE '''
UPDATE `project.analytics_xxx.''' || tab.f0_ || '''` SET
gcs_export_timestamp = current_timestamp()
WHERE gcs_export_timestamp IS NULL
''';
END IF;
EXCEPTION WHEN ERROR THEN END;
END FOR;

Related

How to capture delta rows in BigQuery Export

I have some queries that run hourly and export the data from BigQuery to GCS. The daily tables seem to be updated over the next 2 days. Has anyone found a way to get the delta rows between what was exported to GCS and what was inserted in BigQuery?
This is what I am currently running (which ignores the missing data) as a scheduled script
DECLARE tables ARRAY <STRING>;
CREATE TABLE IF NOT EXISTS `project.analytics_xxx.daily_export_log`
(
table_name STRING,
insert_date TIMESTAMP
);
SET tables = (SELECT
ARRAY_AGG(TABLE_NAME) TABLES
FROM
`project.analytics_xxx.INFORMATION_SCHEMA.TABLES`
WHERE
REGEXP_CONTAINS(TABLE_NAME, 'events_\\d{8}') AND
TABLE_NAME NOT IN (SELECT TABLE_NAME FROM `project.analytics_xxx.daily_export_log`)
);
FOR tab IN
(SELECT * FROM UNNEST(tables))
DO
EXECUTE IMMEDIATE '''
EXPORT DATA
OPTIONS ( uri = CONCAT('gs://bucket/live/', format_timestamp('%Y/%m/%d/', current_timestamp()), ''' || "'" || tab.f0_ || "'" || ''', '/*_', format_timestamp('%Y%m%d%H%M%S', current_timestamp()), '.json.gz'),
format='JSON',
compression='GZIP',
overwrite=FALSE ) AS
SELECT * FROM `project.analytics_xxx.''' || tab.f0_ || '''` ''';
EXECUTE IMMEDIATE '''
INSERT INTO `project.analytics_xxx.daily_export_log` SELECT ''' || "'" || tab.f0_ || "'" || ''' table_name, current_timestamp() insert_date
''';
END FOR
I tried using event_timestamp + event_server_timestamp_offset as a cutoff measure but that failed as event_timestamp is device synced so if users have altered their system time then their timestamps would be way off.
I've managed to fix the issue by appending a gcs_export_timestamp column to every export table.
DECLARE tables ARRAY <STRING>;
/************************************************************************************************************
* DAILY EVENTS EXPORT *
************************************************************************************************************/
CREATE TABLE IF NOT EXISTS `project.analytics_xxx.daily_export_log`
(
table_name STRING,
insert_date TIMESTAMP
)
OPTIONS
(
expiration_timestamp=TIMESTAMP "3000-01-01"
);
SET tables = (SELECT
ARRAY_AGG(TABLE_NAME) TABLES
FROM
`project.analytics_xxx.INFORMATION_SCHEMA.TABLES`
WHERE
REGEXP_CONTAINS(TABLE_NAME, 'events_\\d{8}') AND
TABLE_NAME NOT IN (SELECT TABLE_NAME FROM `project.analytics_xxx.daily_export_log`)
);
FOR tab IN
(SELECT * FROM UNNEST(tables))
DO
EXECUTE IMMEDIATE '''
EXPORT DATA
OPTIONS (
uri = CONCAT('gs://bucket/live/', format_timestamp('%Y/%m/%d/', current_timestamp()), ''' || "'" || tab.f0_ || "'" || ''', '/*_', format_timestamp('%Y%m%d%H%M%S', current_timestamp()), '.json.gz'),
format='JSON',
compression='GZIP',
overwrite=FALSE
) AS
SELECT * FROM `project.analytics_xxx.''' || tab.f0_ || '''`
''';
EXECUTE IMMEDIATE '''
INSERT INTO `project.analytics_xxx.daily_export_log` SELECT ''' || "'" || tab.f0_ || "'" || ''' table_name, current_timestamp() insert_date
''';
EXECUTE IMMEDIATE '''
ALTER TABLE `project.analytics_xxx.''' || tab.f0_ || '''`
ADD COLUMN IF NOT EXISTS gcs_export_timestamp TIMESTAMP
''';
EXECUTE IMMEDIATE '''
UPDATE `project.analytics_xxx.''' || tab.f0_ || '''` SET
gcs_export_timestamp = current_timestamp()
WHERE gcs_export_timestamp IS NULL
''';
END FOR;
/************************************************************************************************************
* DELAYED EVENTS EXPORT *
************************************************************************************************************/
SET tables = (SELECT
ARRAY_AGG(TABLE_NAME) TABLES
FROM
`project.analytics_xxx.INFORMATION_SCHEMA.TABLES`
WHERE
REGEXP_CONTAINS(TABLE_NAME, 'events_\\d{8}')
);
FOR tab IN
(SELECT * FROM UNNEST(tables))
DO
/************ CHECK IF DAILY TABLE WAS OVERWRITTEN AND RE-ADD THE gcs_export_timestamp COLUMN ************/
BEGIN
IF (SELECT 1 FROM `region-us`.INFORMATION_SCHEMA.COLUMNS WHERE table_name = tab.f0_ AND column_name = 'gcs_export_timestamp') IS NULL THEN
IF (SELECT 1 FROM `project.analytics_xxx.daily_export_log` WHERE table_name = tab.f0_) IS NOT NULL THEN
EXECUTE IMMEDIATE '''
ALTER TABLE `project.analytics_xxx.''' || tab.f0_ || '''`
ADD COLUMN IF NOT EXISTS gcs_export_timestamp TIMESTAMP
''';
EXECUTE IMMEDIATE '''
UPDATE `project.analytics_xxx.''' || tab.f0_ || '''` SET
gcs_export_timestamp = (SELECT insert_date FROM `project.analytics_xxx.daily_export_log` WHERE table_name = \'''' || tab.f0_ || '''\')
WHERE gcs_export_timestamp IS NULL
''';
END IF;
END IF;
END;
/***************************** EXPORT DATA WHERE gcs_export_timestamp IS NULL ****************************/
BEGIN
EXECUTE IMMEDIATE '''
CREATE OR REPLACE TEMP TABLE _SESSION.tmp AS
SELECT * FROM `project.analytics_xxx.''' || tab.f0_ || '''` WHERE gcs_export_timestamp IS NULL
''';
IF (SELECT COUNT(*) cnt FROM _SESSION.tmp) > 0 THEN
EXECUTE IMMEDIATE '''
EXPORT DATA
OPTIONS (
uri = CONCAT('gs://bucket/delayed/', format_timestamp('%Y/%m/%d/', current_timestamp()), ''' || "'" || tab.f0_ || "'" || ''', '/*_', format_timestamp('%Y%m%d%H%M%S', current_timestamp()), '.json.gz'),
format='JSON',
compression='GZIP',
overwrite=FALSE
) AS
SELECT * FROM _SESSION.tmp
''';
EXECUTE IMMEDIATE '''
UPDATE `project.analytics_xxx.''' || tab.f0_ || '''` SET
gcs_export_timestamp = current_timestamp()
WHERE gcs_export_timestamp IS NULL
''';
END IF;
EXCEPTION WHEN ERROR THEN END;
END FOR;

How to get single output from PL/SQL procedure having select statement

I am trying to execute one PL/SQL procedure. I am getting nullpointerexception every time. Might be I am returning the procedure in wrong way.
Can you please help me in this procedure.
PROCEDURE p_regidexport(countryid IN varchar2, cropid IN varchar2, productid IN VARCHAR2, pregid out varchar)
IS
fnc VARCHAR2(30) := 'P_REGIDEXPORT';
query VARCHAR2(10000);
regid varchar(20);
BEGIN
select REG_ID into regid from GRS_Registration where LOC_ID =(select loc_id from GRS_location where Country = ' || countryid || ') AND CROP_ID = (select crop_id from GRS_crop where CROP_NM = ' || cropid || ')AND REG_NAME =' || '''' || productid || ''';
pregid := regid;
sub_log('P_REGIDEXPORT:'||pregid);
dbms_output.put_line(pregid);
EXCEPTION
WHEN no_data_found THEN
dbms_output.put_line('No record present');
END P_REGIDEXPORT;
you need not to concatenate in parameter value. because its not dynamic query. so, you can directly pass the parameter variable into ur query.
make sure that your qry will return one single value.
its just idea based upon ur code, u can try based upon ur requirement.
Hope it will help you!!
create or replace PROCEDURE p_regidexport(countryid IN varchar2, cropid IN varchar2, productid IN VARCHAR2, pregid out varchar)
IS
fnc VARCHAR2(30) := 'P_REGIDEXPORT';
query VARCHAR2(10000);
regid varchar(20);
BEGIN
begin
select nvl(REG_ID,'0') into regid from GRS_Registration
where
LOC_ID =(select loc_id from GRS_location where Country = countryid ) AND
CROP_ID = (select crop_id from GRS_crop where CROP_NM = cropid)AND
REG_NAME = productid ;
EXCEPTION
WHEN no_data_found THEN
dbms_output.put_line('No record '); --- or regid ='0';
end;
pregid := regid;
--sub_log('P_REGIDEXPORT:'||pregid);
dbms_output.put_line(pregid);
EXCEPTION
WHEN others THEN
dbms_output.put_line('No record present' || ' - ' || sqlerrm);
END P_REGIDEXPORT;
All the best!! if it is useful click up button which is in left side of this answer

how to fetch records from parameter file

Hi I would like to know how I can fetch the records when I'm using parameter file.
My scripts is like that. when I execute it, I got error message. Please help me to fix this script.
Thank you in advance
CREATE OR REPLACE PROCEDURE testappt
(
XFILE IN VARCHAR2
) is
xfpt varchar2(1):='F';
TYPE curtype IS REF CURSOR;
appt_cur curtype;
appt_rec appt_cur%ROWTYPE; -- error
BEGIN
open appt_cur for 'SELECT * FROM ' || xfile || ' where fpt!= :xfpt ' using xfpt;
loop
fetch appt_cur into appt_rec -- error
exit when appt_cur%not found; -- error
execute immediate 'update ' || xfile || ' set apptgrp=46' || 'where reptrc=6269' ||
'and og=trim(0||6)' || 'and trim(a_jc)=2876';
commit;
end loop;
end testappt;
/
It looks like you need some spaces in your literals. Try:
CREATE OR REPLACE PROCEDURE testappt
(
XFILE IN VARCHAR2
) is
xfpt varchar2(1):='F';
TYPE curtype IS REF CURSOR;
appt_cur curtype;
appt_rec appt_cur%ROWTYPE; -- error
BEGIN
open appt_cur for 'SELECT * FROM ' || xfile || ' where fpt!= :xfpt ' using xfpt;
loop
fetch appt_cur into appt_rec -- error
exit when appt_cur%not found; -- error
execute immediate 'update ' || xfile ||
' set apptgrp=46' ||
' where reptrc=6269' ||
' and og=trim(0||6)' ||
' and trim(a_jc)=2876';
commit;
end loop;
end testappt;
/
Share and enjoy.

PL/SQL DDL Execute Immediate

ACCEPT p_username PROMPT 'Enter Username : '
ACCEPT p_password PROMPT 'Enter New Password for Username : '
VARIABLE g_output VARCHAR2(4000)
DECLARE
CURSOR NAME IS SELECT TABLE_NAME FROM DBA_TABLES
WHERE OWNER LIKE '%&p_username%';
DDL_DROP VARCHAR2(200);
BEGIN
FOR TNAME IN NAME
LOOP
BEGIN
EXECUTE IMMEDIATE 'DROP TABLE' || ' ' || TNAME.TABLE_NAME;
:g_output := :g_output || ' ' || TNAME.TABLE_NAME;
END;
END LOOP;
END;
/
PRINT g_output
Hello, I'm new to PL/SQL and trying to make a script to drop the user's table and ultimately change their password later after dropping their tables. I am having difficulty with the EXECUTE IMMEDIATE command. The script works if I remove the EXECUTE IMMEDIATE line. I tested it by printing the table names inside the loop and I get the right # of tables and their corresponding names.
Any help is appreciated, thanks.
Edited the code to reflect the suggestion but still didn't work. Getting the same error.
ACCEPT p_username PROMPT 'Enter Username : '
ACCEPT p_password PROMPT 'Enter New Password for Username : '
VARIABLE g_output VARCHAR2(4000)
DECLARE
NAME SYS_REFCURSOR;
DDL_WORD VARCHAR2(200);
BEGIN
OPEN NAME FOR SELECT TABLE_NAME FROM DBA_TABLES
WHERE OWNER LIKE '%&p_username%';
LOOP
FETCH NAME INTO DDL_WORD;
EXIT WHEN NAME%NOTFOUND;
EXECUTE IMMEDIATE 'DROP TABLE "' || DDL_WORD || '" CASCADE CONSTRAINTS';
:g_output := :g_output || ' ' || DDL_WORD;
END LOOP;
CLOSE NAME;
END;
/
PRINT g_output
You probably need to specify the owner for the table in the DROP statement:
ACCEPT p_username PROMPT 'Enter Username : '
ACCEPT p_password PROMPT 'Enter New Password for Username : '
VARIABLE g_output VARCHAR2(4000)
DECLARE
CURSOR NAME IS SELECT OWNER, TABLE_NAME FROM DBA_TABLES
WHERE OWNER LIKE '%&p_username%';
DDL_DROP VARCHAR2(200);
BEGIN
FOR TNAME IN NAME
LOOP
BEGIN
EXECUTE IMMEDIATE 'DROP TABLE' || ' ' || TNAME.OWNER || '.' || TNAME.TABLE_NAME;
:g_output := :g_output || ' ' || TNAME.OWNER || '.' || TNAME.TABLE_NAME;
END;
END LOOP;
END;
/
PRINT g_output
The code looks fine.
You could try with () like this
BEGIN
EXECUTE IMMEDIATE (code_text);
END;
You could try
c SYS_REFCURSOR;
BEGIN
OPEN c FOR 'SELECT * FROM table';
CLOSE c;
END;

PLSQL error - ORA-00984: column not allowed here

I have written a PL-SQL block
DECLARE
SchemaName VARCHAR2(50) :='REQ_SUNIL_5750';
userpassword VARCHAR2(50) :='XYZ';
stmt VARCHAR2(5000);
BEGIN
stmt :='INSERT INTO ' || SchemaName || '.USER_CREDS VALUES ('|| SchemaName ||', '|| userpassword ||' )';
DBMS_OUTPUT.PUT_LINE(stmt) ;
EXECUTE IMMEDIATE stmt;
commit;
END;
When I execute above block I am getting below,
ORA-00984: column not allowed here
I have created table with name 'REQ_SUNIL_5750.USER_CREDS and it has username and password columns
Please help
You have to quote your string values properly:
stmt :='INSERT INTO ' || SchemaName ||
'.USER_CREDS VALUES ('''|| SchemaName ||''', '''|| userpassword ||''' )';
Frank's answer is great, I would add one point though.
From the perspective of performance and reuseability, your execute immediate statement should use bind variables and the insert syntax should specify the columns that correspond to the values being entered.

Resources