Prerequisite:
--------------------
AWS Lambda and Snowflake integration + automation using Snowpipe | ETL | Lambda Deployment Package
• AWS Lambda and Snowflake integration + automation using Snowpipe
SQL Queries used:
-------------------------------
--Role first: run as ACCOUNTADMIN so the database and every child object
--created below are owned by a role that is also allowed to create
--storage integrations (required later in this script).
use role ACCOUNTADMIN;
--Database Creation: CREATE OR REPLACE atomically drops any existing copy
--and recreates it, replacing the original DROP IF EXISTS followed by
--CREATE IF NOT EXISTS (the IF NOT EXISTS was redundant right after an
--unconditional drop).
create or replace database s3_to_snowflake;
--Specify the active/current database for the session.
use s3_to_snowflake;
--Table Creation: landing table for the Iris CSV files ingested from S3.
--The four measurement columns are harmonized to a single NUMBER(10,5)
--type; the original mixed (10,5)/(10,4)/(10,3) scales with no apparent
--reason. Iris measurements are single-digit values, so the uniform
--scale is safe for all incoming data.
create or replace table s3_to_snowflake.PUBLIC.Iris_dataset (
    Id           number(10,0),  --row identifier from the source file
    sepal_length number(10,5),  --measurements in centimeters
    sepal_width  number(10,5),
    petal_length number(10,5),
    petal_width  number(10,5),
    CLASS_NAME   varchar(20)    --species label, e.g. 'Iris-setosa'
);
--File Format Creation: reusable CSV parsing rules for the Iris files.
create or replace file format my_csv_format
    type                         = csv
    field_delimiter              = ','
    skip_header                  = 1                 --first line is the column header row
    field_optionally_enclosed_by = '"'               --fields may be wrapped in double quotes
    null_if                      = ('NULL', 'null')  --treat these literals as SQL NULL
    empty_field_as_null          = true;
--Storage Creation: integration object that delegates S3 access to an
--AWS IAM role. Replace each '{}' placeholder (role ARN, allowed bucket
--location(s), free-text comment) before running.
create or replace storage integration s3_int
    type                      = EXTERNAL_STAGE
    storage_provider          = S3
    enabled                   = TRUE
    storage_aws_role_arn      = '{}'
    storage_allowed_locations = ('{}')
    comment                   = '{}';
--Describe the Integration Object: exposes STORAGE_AWS_IAM_USER_ARN and
--STORAGE_AWS_EXTERNAL_ID, which must be copied into the IAM role's
--trust relationship on the AWS side.
DESC INTEGRATION s3_int;
--Stage Creation: external stage over the S3 bucket, authenticated via
--the storage integration. Replace '{}' with the bucket/path.
--NOTE: the URL is a string literal, so it takes single quotes; the
--original used double quotes, which is SQL's identifier-quoting syntax
--and is reserved for object names.
create or replace stage s3_to_snowflake.PUBLIC.Snow_stage
    url = 's3://{}'
    storage_integration = s3_int;
--Files present in the external stage (sanity check of bucket access)
list @Snow_stage;
--Returns the IAM policy JSON that authorizes the given SNS topic
--('{}' = topic ARN) to notify Snowpipe; attach it to the topic in AWS.
select system$get_aws_sns_iam_policy('{}');
--Create the Snowpipe: auto-ingests each new file landing in the stage
--into the Iris table, triggered by S3 event notifications published to
--the SNS topic ('{}' = topic ARN).
create or replace pipe s3_to_snowflake.PUBLIC.spa
    auto_ingest   = true
    aws_sns_topic = '{}'
    as
    copy into s3_to_snowflake.PUBLIC.Iris_dataset
    from @s3_to_snowflake.PUBLIC.Snow_stage
    FILE_FORMAT = (FORMAT_NAME = my_csv_format);
--Confirm the pipe exists and inspect its notification channel.
show pipes;
--Verify the load: inspect the rows, then the total row count.
select * from s3_to_snowflake.PUBLIC.Iris_dataset;
select count(*) from s3_to_snowflake.PUBLIC.Iris_dataset;
Check this playlist for more AWS Projects or POC in Big Data domain:
• Demystifying Data Engineering
12 Sep 2024