Load JSON file into Snowflake table using Snowpipe
Prerequisite:
--------------------
Working with JSON in Snowflake | Schema on Read
Dataset:
-------------
Code:
--------
drop database if exists ramu;
create or replace database ramu;
use ramu;
CREATE or replace TABLE jsonpipe_testing(
guid VARCHAR(16777216)
,isActive VARCHAR(16777216)
,balance VARCHAR(16777216)
,age INTEGER
,first_name VARCHAR(16777216)
,last_name VARCHAR(16777216)
,company VARCHAR(16777216)
,filename VARCHAR(16777216)
,file_row_number VARCHAR(16777216)
,load_timestamp timestamp default TO_TIMESTAMP_NTZ(current_timestamp));
select * from jsonpipe_testing;
--create a JSON file format
create or replace file format json_format
type = 'json';
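If the source file wraps all records in a single outer JSON array instead of one object per line (an assumption about the dataset; the raw file is not shown here), the format can strip that outer array so each element becomes a row. The name json_array_format below is only illustrative:
--Optional variant: strip the outer array so each array element is parsed as one row
create or replace file format json_array_format
type = 'json'
strip_outer_array = true;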
create or replace stage Snow_stage url='s3://{}'
credentials=(aws_key_id='{}'
aws_secret_key='{}')
file_format = json_format;
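Embedding an AWS key and secret directly in the stage works for a demo, but a storage integration is the usual way to avoid long-lived credentials in SQL. The sketch below is only illustrative: the integration name s3_int, the role ARN and the bucket placeholders are assumptions, not part of this walkthrough:
--Optional: replace hard-coded credentials with a storage integration
create or replace storage integration s3_int
type = external_stage
storage_provider = 'S3'
enabled = true
storage_aws_role_arn = 'arn:aws:iam::<account-id>:role/<role-name>'
storage_allowed_locations = ('s3://<bucket>/');
create or replace stage Snow_stage url='s3://<bucket>/'
storage_integration = s3_int
file_format = json_format;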
list @Snow_stage;
--Directly query the Snowflake external stage
select $1 from @Snow_stage;
--Specific Element Extraction
select $1:age,$1:balance from @Snow_stage;
--Specific Element Extraction with explicit casting to varchar
select $1:age,$1:balance::varchar from @Snow_stage;
--Some metadata extraction
select $1:age,$1:balance::varchar,METADATA$FILENAME,METADATA$FILE_ROW_NUMBER,TO_TIMESTAMP_NTZ(current_timestamp) from @Snow_stage;
select $1:guid::varchar,$1:isActive,$1:balance::varchar,$1:age,$1:first_name::varchar,$1:last_name::varchar,$1:company::varchar,METADATA$FILENAME,METADATA$FILE_ROW_NUMBER,TO_TIMESTAMP_NTZ(current_timestamp) from @Snow_stage;
select * from jsonpipe_testing;
copy into ramu.PUBLIC.jsonpipe_testing from
(select $1:guid::varchar,$1:isActive,$1:balance::varchar,$1:age,$1:first_name::varchar,$1:last_name::varchar,$1:company::varchar,
METADATA$FILENAME,METADATA$FILE_ROW_NUMBER,TO_TIMESTAMP_NTZ(current_timestamp) from @Snow_stage);
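To verify what the COPY statement actually loaded, the load history for the target table can be inspected; the one-hour window below is just an example:
--Check recent COPY activity against the target table
select file_name, row_count, status
from table(information_schema.copy_history(
table_name => 'JSONPIPE_TESTING',
start_time => dateadd(hours, -1, current_timestamp())));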
select * from jsonpipe_testing;
CREATE or replace TABLE jsonpipe_testing12(
guid VARCHAR(16777216)
,isActive VARCHAR(16777216)
,balance VARCHAR(16777216)
,age INTEGER
,first_name VARCHAR(16777216)
,last_name VARCHAR(16777216)
,company VARCHAR(16777216)
,filename VARCHAR(16777216)
,file_row_number VARCHAR(16777216)
,load_timestamp timestamp default TO_TIMESTAMP_NTZ(current_timestamp));
create or replace pipe spa
auto_ingest=true as copy into ramu.PUBLIC.jsonpipe_testing12 from
(select $1:guid::varchar,$1:isActive,$1:balance::varchar,$1:age,$1:first_name::varchar,$1:last_name::varchar,$1:company::varchar,
METADATA$FILENAME,METADATA$FILE_ROW_NUMBER,TO_TIMESTAMP_NTZ(current_timestamp) from @Snow_stage);
show pipes;
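Note that auto_ingest = true only starts working once the S3 bucket is configured (on the AWS side) to send event notifications to the SQS queue shown in the notification_channel column of SHOW PIPES. Files that were already staged before the pipe existed are not picked up automatically; the statements below are a common way to backfill them and check the pipe state:
--Queue files already sitting in the stage for ingestion (Snowpipe only auto-ingests new arrivals)
alter pipe spa refresh;
--Inspect the pipe's execution state and pending file count
select system$pipe_status('spa');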
select * from jsonpipe_testing12; --42 rows
select distinct filename from jsonpipe_testing12;
Check this playlist for more AWS projects in the Big Data domain: