Snowflake Protocol Examples
Comprehensive code examples for using HeliosDB with Snowflake protocol compatibility.
Connection Examples
Python
import snowflake.connector
# Basic connection
conn = snowflake.connector.connect(
    host="localhost",
    port=443,
    user="admin",
    password="password",
    account="heliosdb",
    warehouse="COMPUTE_WH",
    database="MYDB",
    schema="PUBLIC"
)

# Execute query
cursor = conn.cursor()
cursor.execute("SELECT * FROM users LIMIT 10")
rows = cursor.fetchall()
for row in rows:
    print(row)

# Using context manager
with snowflake.connector.connect(...) as conn:
    with conn.cursor() as cursor:
        cursor.execute("SELECT COUNT(*) FROM orders")
        print(cursor.fetchone()[0])

JavaScript/Node.js
const snowflake = require('snowflake-sdk');
const connection = snowflake.createConnection({
  host: 'localhost',
  port: 443,
  account: 'heliosdb',
  username: 'admin',
  password: 'password',
  warehouse: 'COMPUTE_WH',
  database: 'MYDB',
  schema: 'PUBLIC'
});

// Connect and execute
connection.connect((err, conn) => {
  if (err) {
    console.error('Connection failed:', err);
    return;
  }

  connection.execute({
    sqlText: 'SELECT * FROM users LIMIT 10',
    complete: (err, stmt, rows) => {
      if (err) {
        console.error('Query failed:', err);
        return;
      }
      console.log('Results:', rows);
      connection.destroy();
    }
  });
});

// Promise-based (with wrapper)
async function query(sql) {
  return new Promise((resolve, reject) => {
    connection.execute({
      sqlText: sql,
      complete: (err, stmt, rows) => {
        if (err) reject(err);
        else resolve(rows);
      }
    });
  });
}

Time Travel Examples
Query Historical Data
-- Query data from a specific timestamp
SELECT * FROM orders
AT(TIMESTAMP => '2024-01-01 12:00:00')
WHERE status = 'completed';

-- Query data from 1 hour ago
SELECT * FROM inventory
AT(OFFSET => -3600);

-- Query data before a specific statement
SELECT * FROM products
BEFORE(STATEMENT => 'abc123-statement-id');

-- Track changes over time
SELECT *
FROM orders
CHANGES(INFORMATION => DEFAULT)
AT(TIMESTAMP => '2024-01-01')
END(TIMESTAMP => '2024-01-02');

Restore Dropped Data
-- Undrop a table
UNDROP TABLE deleted_orders;

-- Clone at historical point
CREATE TABLE orders_backup CLONE orders
AT(TIMESTAMP => '2024-01-01 00:00:00');

VARIANT Data Type
Working with Semi-Structured Data
-- Create table with VARIANT column
CREATE TABLE events (
    id INT,
    event_time TIMESTAMP,
    data VARIANT
);

-- Insert JSON data
INSERT INTO events (id, event_time, data)
VALUES (
    1,
    CURRENT_TIMESTAMP(),
    PARSE_JSON('{"user": "alice", "action": "login", "metadata": {"ip": "192.168.1.1"}}')
);

-- Query VARIANT data
SELECT
    id,
    data:user::STRING as user_name,
    data:action::STRING as action,
    data:metadata.ip::STRING as ip_address
FROM events;

-- Filter on VARIANT fields
SELECT *
FROM events
WHERE data:action::STRING = 'login'
  AND data:metadata.ip::STRING LIKE '192.168.%';

Array Operations
-- Work with arrays in VARIANT
INSERT INTO events (id, data)
VALUES (
    2,
    PARSE_JSON('{"user": "bob", "tags": ["admin", "developer"]}')
);

-- Access array elements
SELECT
    data:user::STRING as user,
    data:tags[0]::STRING as first_tag,
    ARRAY_SIZE(data:tags) as tag_count
FROM events;

-- Check array contains
SELECT *
FROM events
WHERE ARRAY_CONTAINS('admin'::VARIANT, data:tags);

Building Objects
-- Construct objects dynamically
SELECT
    OBJECT_CONSTRUCT(
        'id', id,
        'name', name,
        'email', email,
        'metadata', OBJECT_CONSTRUCT(
            'created', created_at,
            'status', status
        )
    ) as user_json
FROM users;

-- Construct arrays
SELECT
    user_id,
    ARRAY_AGG(OBJECT_CONSTRUCT('product', product_name, 'qty', quantity)) as items
FROM order_items
GROUP BY user_id;

FLATTEN Examples
Basic FLATTEN
-- Table with array data
CREATE TABLE orders (
    id INT,
    customer_id INT,
    items VARIANT
);

INSERT INTO orders VALUES (
    1,
    100,
    PARSE_JSON('[
        {"product_id": 1, "name": "Widget", "price": 9.99, "qty": 2},
        {"product_id": 2, "name": "Gadget", "price": 19.99, "qty": 1}
    ]')
);

-- Flatten array
SELECT
    o.id as order_id,
    o.customer_id,
    f.value:product_id::INT as product_id,
    f.value:name::STRING as product_name,
    f.value:price::FLOAT as price,
    f.value:qty::INT as quantity
FROM orders o,
LATERAL FLATTEN(input => o.items) f;

Recursive FLATTEN
-- Nested structure
CREATE TABLE configs (
    id INT,
    config VARIANT
);

INSERT INTO configs VALUES (
    1,
    PARSE_JSON('{
        "app": {
            "settings": {
                "feature_a": true,
                "feature_b": false
            },
            "limits": {
                "max_users": 100
            }
        }
    }')
);

-- Recursively flatten
SELECT
    f.path as config_path,
    f.key as config_key,
    f.value as config_value
FROM configs c,
LATERAL FLATTEN(input => c.config, recursive => true) f
WHERE f.value IS NOT NULL
  AND TYPEOF(f.value) != 'OBJECT';

Virtual Warehouse Management
-- Create a warehouse
CREATE WAREHOUSE analytics_wh WITH
    WAREHOUSE_SIZE = 'LARGE'
    AUTO_SUSPEND = 300
    AUTO_RESUME = TRUE
    INITIALLY_SUSPENDED = TRUE;

-- Resize warehouse
ALTER WAREHOUSE analytics_wh SET WAREHOUSE_SIZE = 'XLARGE';

-- Suspend/resume
ALTER WAREHOUSE analytics_wh SUSPEND;
ALTER WAREHOUSE analytics_wh RESUME;

-- Multi-cluster warehouse
CREATE WAREHOUSE scaling_wh WITH
    WAREHOUSE_SIZE = 'MEDIUM'
    MIN_CLUSTER_COUNT = 1
    MAX_CLUSTER_COUNT = 4
    SCALING_POLICY = 'STANDARD';

-- Switch warehouse for session
USE WAREHOUSE analytics_wh;

-- Check warehouse status
SHOW WAREHOUSES;

COPY INTO Examples
From Cloud Storage
-- From S3
COPY INTO my_table
FROM 's3://my-bucket/data/'
CREDENTIALS = (AWS_KEY_ID = 'xxx' AWS_SECRET_KEY = 'yyy')
FILE_FORMAT = (TYPE = 'PARQUET')
PATTERN = '.*\.parquet';

-- From Azure Blob
COPY INTO my_table
FROM 'azure://myaccount.blob.core.windows.net/container/path/'
CREDENTIALS = (AZURE_SAS_TOKEN = 'xxx')
FILE_FORMAT = (TYPE = 'JSON');

-- From GCS
COPY INTO my_table
FROM 'gcs://bucket/path/'
CREDENTIALS = (GCS_CREDENTIALS = 'xxx')
FILE_FORMAT = (TYPE = 'CSV' FIELD_DELIMITER = ',' SKIP_HEADER = 1);

With Error Handling
-- Continue on error
COPY INTO my_table
FROM @my_stage/data/
FILE_FORMAT = (TYPE = 'JSON')
ON_ERROR = 'CONTINUE';

-- Skip file on error
COPY INTO my_table
FROM @my_stage/data/
FILE_FORMAT = (TYPE = 'CSV')
ON_ERROR = 'SKIP_FILE';

-- Abort on error (default)
COPY INTO my_table
FROM @my_stage/data/
FILE_FORMAT = (TYPE = 'PARQUET')
ON_ERROR = 'ABORT_STATEMENT';

Export Data
-- Export to stage
COPY INTO @my_stage/export/
FROM my_table
FILE_FORMAT = (TYPE = 'PARQUET')
HEADER = TRUE
OVERWRITE = TRUE;

-- Export with partitioning
COPY INTO @my_stage/export/
FROM (SELECT * FROM my_table WHERE date >= '2024-01-01')
PARTITION BY (date)
FILE_FORMAT = (TYPE = 'PARQUET');

Session Management
-- Set session parameters
ALTER SESSION SET TIMEZONE = 'UTC';
ALTER SESSION SET QUERY_TAG = 'analytics_pipeline';
ALTER SESSION SET DATE_OUTPUT_FORMAT = 'YYYY-MM-DD';

-- Use different contexts
USE DATABASE analytics;
USE SCHEMA reporting;
USE WAREHOUSE compute_wh;

-- Show current context
SELECT CURRENT_DATABASE(), CURRENT_SCHEMA(), CURRENT_WAREHOUSE();

Python Full Example
import snowflake.connector
from datetime import datetime, timedelta

# Connect
conn = snowflake.connector.connect(
    host="localhost",
    port=443,
    user="admin",
    password="password",
    account="heliosdb"
)
cursor = conn.cursor()

# Set context
cursor.execute("USE WAREHOUSE compute_wh")
cursor.execute("USE DATABASE analytics")
cursor.execute("USE SCHEMA public")

# Time travel query
yesterday = (datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d %H:%M:%S')
cursor.execute(f"""
    SELECT COUNT(*) FROM orders
    AT(TIMESTAMP => '{yesterday}')
""")
print(f"Orders yesterday: {cursor.fetchone()[0]}")

# VARIANT query
cursor.execute("""
    SELECT
        data:user::STRING as user,
        data:events::ARRAY as events
    FROM user_activity
    WHERE data:timestamp::TIMESTAMP > DATEADD(hour, -1, CURRENT_TIMESTAMP())
""")
for row in cursor.fetchall():
    print(f"User: {row[0]}, Events: {row[1]}")

# FLATTEN
cursor.execute("""
    SELECT
        o.id,
        f.value:name::STRING as product,
        f.value:price::FLOAT as price
    FROM orders o,
    LATERAL FLATTEN(input => o.items) f
    WHERE o.created_at > DATEADD(day, -7, CURRENT_TIMESTAMP())
""")
for row in cursor.fetchall():
    print(f"Order {row[0]}: {row[1]} - ${row[2]}")

cursor.close()
conn.close()

Last Updated: January 2026