https://docs.snowflake.com/en/user-guide/data-load-snowpipe-auto-s3.html
Commands
$list @stageName -- lists the contents of stageName
$create or replace file format csv type='csv';
$show file formats in database TestDB;
$create or replace WAREHOUSE "COMPUTE_WH"
COMMENT = ''
WAREHOUSE_SIZE = 'MEDIUM' AUTO_RESUME = true AUTO_SUSPEND = 60
SCALING_POLICY = 'STANDARD';
$truncate table tableName;
$alter warehouse compute_wh set warehouse_size='large';
$show warehouses;
$copy into tableName from @stageName file_format=csv PATTERN = '.*csv.*';
$create table nameTable clone nameClonedTable;
$create table json_weather_data (v variant);
$create stage stageName
url = 's3://bucket-name/path/';
$list @stageName;
$drop table nameTable;
$undrop table nameTable;
Snowflake configuration
July 28, 2022 7:18 AM
1. Create S3 integration
-- Storage integration that lets Snowflake read the S3 bucket by assuming
-- the IAM role below; only the listed location prefix is accessible.
CREATE STORAGE INTEGRATION aws_s3_integration
    TYPE = EXTERNAL_STAGE
    STORAGE_PROVIDER = S3
    ENABLED = TRUE
    STORAGE_AWS_ROLE_ARN = 'arn:aws:iam::AWSaccountID:role/snowflake-role'
    STORAGE_ALLOWED_LOCATIONS = ('s3://snowflake-rds-livedata/dbname/');
2. Get ARN's
-- Inspect the integration to copy STORAGE_AWS_IAM_USER_ARN and
-- STORAGE_AWS_EXTERNAL_ID into the AWS IAM role's trust policy.
desc integration aws_s3_integration;
-- Returns the IAM policy JSON to attach to the SNS topic used for auto-ingest.
-- Fix: removed the stray space inside the topic ARN ('snowflake-s3- notifications'),
-- which made the ARN invalid.
select system$get_aws_sns_iam_policy('arn:aws:sns:us-east-2:AWSaccountID:snowflake-s3-notifications');
3. Create Snowflake stage
-- Select the target schema, then create an external stage that reads
-- the bucket through the storage integration (no credentials in the DDL).
USE SCHEMA test_db.dbname;

CREATE STAGE mystage
    URL = 's3://snowflake-rds-livedata/dbname/'
    STORAGE_INTEGRATION = aws_s3_integration;
4. Create Snowflake pipe
-- Auto-ingest pipe: every new file landing under @MYSTAGE/transactions
-- is loaded into test_db.dbname.transactions as CSV (via S3 event notifications).
CREATE OR REPLACE PIPE test_db.dbname.mypipe
    AUTO_INGEST = TRUE
AS
    COPY INTO test_db.dbname.transactions
    FROM @MYSTAGE/transactions
    FILE_FORMAT = (TYPE = 'CSV');
Snowflake create tables
July 28, 2022 7:17 AM
-- CTAS: build TRANSACTIONS directly from a staged CSV file.
-- Fixes vs. the notes version:
--   * removed the stray ';' after the column list, which terminated the
--     CREATE TABLE before the AS (...) body and left the SELECT orphaned
--   * removed the DEFAULT clauses — defaults are not allowed in a
--     CREATE TABLE ... AS SELECT column definition
--   * the first three positions are aliased per the original column list;
--     NOTE(review): give the remaining 32 positions real names — confirm
--     against the source file layout.
CREATE OR REPLACE TABLE TRANSACTIONS AS (
    SELECT
        $1 AS dummy_col,
        $2 AS id,
        $3 AS column1,
        $4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18,$19,$20,
        $21,$22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35
    FROM @MYSTAGE/transactions/20220720-172954873.csv (file_format => 'MY_CSV_FORMAT')
);
Snowflake various queries
July 28, 2022 7:18 AM
-- Quick inspection queries.
SHOW PIPES;
SHOW STAGES;

-- Files currently staged under the table's prefix.
LIST @MYSTAGE/tableName;

-- Table structure, then a sample of rows.
SHOW COLUMNS IN TABLE tableName;
SELECT * FROM databaseName.tableName LIMIT 100;
-- Preview the first five columns of a staged file.
-- Fix: restored missing whitespace (SELECT$1 / $5FROM / limit100 do not parse).
SELECT $1, $2, $3, $4, $5
FROM @MYSTAGE/tableName/20220720-17112954873.csv
LIMIT 100;
--create or replace pipe db_name.databaseName.mypipe auto_ingest=false as
-- Fix: the original fragment placed "copy into" after the FROM clause; a
-- transform-on-load COPY must be COPY INTO <table> FROM (SELECT ... FROM @stage).
-- NOTE(review): the target should be a fully qualified table
-- (db.schema.table), not db.schema — confirm the intended table name.
copy into db_name.databaseName
from (
    select $1,$2,$3,$4,$6,$7,$8,$9,$10,$11,$14,$15,$16,$17,$18,$19,$20,$21,
           $22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35
    from @MYSTAGE/tableName/20220720-172954873.csv
);
-- Transform-on-load COPY: selects a subset/reordering of the staged columns.
-- Fixes: format-type options inside FILE_FORMAT = (...) are space-separated
-- in Snowflake, not comma-separated; removed a stray line-wrap space in the
-- select list ("$25, $26").
-- NOTE(review): the target should be a fully qualified table
-- (db.schema.table), not db.schema — confirm the intended table name.
copy into db_name.databaseName
from (
    select $1,$2,$3,$4,$6,$7,$8,$9,$10,$11,$14,$15,$16,$17,$18,$19,$20,$21,
           $22,$23,$24,$25,$26,$27,$28,$29,$30,$31,$32,$33,$34,$35
    from @MYSTAGE/tableName/20220720-172954873.csv
)
file_format = (type = 'CSV' error_on_column_count_mismatch = false);