Operations (sample payloads)
Main operations
Create database
Create a database. Sample Input
{
"auth": {},
"database_type": {
"standard": \{
"database": "NEW_ANALYTICS_DB",
"replace": false,
"state": "TRANSIENT",
"if_not_exists": true,
"data_retention_time_in_days": 7,
"comment": "New database for analytics data"
}
}
}
Sample Output
{
"status": "success",
"message": "Database 'NEW_ANALYTICS_DB' created successfully"
}
Create schema
Create a schema. Sample Input
{
"auth": {},
"database": "SALES_DB",
"schema": "CUSTOMER_DATA",
"replace": false,
"state": "TRANSIENT",
"if_not_exists": true,
"clone": {
"source_database": "ARCHIVE_DB",
"source_schema": "OLD_CUSTOMER_DATA",
"time_travel": {
"at_before": "AT",
"statement": {
"timestamp": "2023-06-15T10:30:00Z"
}
}
},
"data_retention_time_in_days": 14,
"comment": "Schema for storing customer-related data"
}
Sample Output
{
"status": "success",
"message": "Schema 'CUSTOMER_DATA' created successfully in database 'SALES_DB'"
}
Create stage
Create a new named internal or external stage to use for loading data from files into Snowflake tables and unloading data from tables into files. Sample Input
{
"auth": \{
"database": "MY_DATABASE",
"schema": "MY_SCHEMA"
},
"stage_name": "MY_STAGE",
"stage_type": {
"aws_s3_bucket": {
"bucket": "my-s3-bucket",
"path": "data/",
"credentials": \{
"aws_key_id": "AKIAIOSFODNN7EXAMPLE",
"aws_secret_key": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"
},
"encryption": {
"type": "AWS_SSE_S3"
}
}
},
"replace": true,
"temporary": false,
"stage_file_format": {
"type": {
"csv": \{
"compression": "GZIP",
"record_delimiter": "\\n",
"field_delimiter": ",",
"skip_header": 1,
"date_format": "AUTO",
"time_format": "AUTO",
"timestamp_format": "AUTO",
"binary_format": "HEX",
"escape": "\\",
"trim_space": true,
"field_optionally_enclosed_by": "\"",
"null_if": [
"NULL",
""
],
"error_on_column_count_mismatch": true
}
}
},
"stage_copy_options": \{
"on_error": "CONTINUE",
"size_limit": 1000000000,
"purge": false,
"enforce_length": true
},
"comment": "External stage for CSV files in S3"
}
Sample Output
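No sample output is documented for this operation; based on the status/message shape returned by the other create operations in this section, a successful response might look like the following (the message text is an assumption).
{
"status": "success",
"message": "Stage 'MY_STAGE' created successfully"
}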
Create table
Create a table. Sample Input
{
"auth": \{
"database": "MY_DATABASE",
"schema": "MY_SCHEMA"
},
"table": "EMPLOYEES",
"state": "PERMANENT",
"columns": [
{
"name": "EMPLOYEE_ID",
"type": {
"simple_type": "INTEGER"
},
"not_null": true,
"key_type": "primary_key"
},
{
"name": "FIRST_NAME",
"type": {
"varchar": {
"length": 50
}
},
"not_null": true
},
{
"name": "LAST_NAME",
"type": {
"varchar": {
"length": 50
}
},
"not_null": true
},
{
"name": "EMAIL",
"type": {
"varchar": {
"length": 100
}
},
"not_null": true
},
{
"name": "HIRE_DATE",
"type": {
"simple_type": "DATE"
},
"not_null": true
},
{
"name": "SALARY",
"type": {
"number": \{
"precision": 10,
"scale": 2
}
},
"not_null": true
}
],
"comment": "Table to store employee information",
"data_retention_time_in_days": 30
}
Sample Output
{
"status": "success",
"message": "Table EMPLOYEES created successfully in MY_DATABASE.MY_SCHEMA"
}
Create warehouse
Creates a new virtual warehouse in the system. Creating a warehouse includes specifying its size. Initial creation of a warehouse may take some time to provision the servers, unless the warehouse is created in an initially SUSPENDED state. Sample Input
{
"auth": {},
"warehouse": "MY_NEW_WAREHOUSE",
"replace": false,
"if_not_exists": true,
"warehouse_properties": \{
"warehouse_size": "MEDIUM",
"max_cluster_count": 3,
"min_cluster_count": 1,
"scaling_policy": "STANDARD",
"auto_suspend": 300,
"auto_resume": true,
"resource_monitor": "MY_RESOURCE_MONITOR",
"initially_suspended": false
},
"warehouse_parameters": \{
"max_concurrency_level": 8,
"statement_queued_timeout_in_seconds": 60,
"statement_timeout_in_seconds": 3600
},
"comment": "New warehouse for data analytics team"
}
Sample Output
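No sample output is documented for this operation; an illustrative response in the status/message shape used by the other create operations (message text assumed) might be:
{
"status": "success",
"message": "Warehouse 'MY_NEW_WAREHOUSE' created successfully"
}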
Custom query
Send a custom SQL query to Snowflake. Sample Input
{
"auth": {
"warehouse": "COMPUTE_WH"
},
"query": "SELECT * FROM sales_data WHERE total_amount > 1000 ORDER BY sale_date DESC LIMIT 5;"
}
Sample Output
{
"status": "success",
"rows": [
{
"sale_id": 1234,
"customer_name": "John Doe",
"product_name": "Laptop",
"total_amount": 1599.99,
"sale_date": "2023-05-15"
},
{
"sale_id": 1235,
"customer_name": "Jane Smith",
"product_name": "Smartphone",
"total_amount": 1299.99,
"sale_date": "2023-05-14"
},
{
"sale_id": 1236,
"customer_name": "Bob Johnson",
"product_name": "4K TV",
"total_amount": 1799.99,
"sale_date": "2023-05-13"
},
{
"sale_id": 1237,
"customer_name": "Alice Brown",
"product_name": "Gaming Console",
"total_amount": 1099.99,
"sale_date": "2023-05-12"
},
{
"sale_id": 1238,
"customer_name": "Charlie Wilson",
"product_name": "Desktop Computer",
"total_amount": 1499.99,
"sale_date": "2023-05-11"
}
]
}
Delete database
Removes a database from the system. Sample Input
{
"auth": {
"database": "SALES_DB"
},
"database": "SALES_DB",
"if_exists": true,
"cascade_restrict": "CASCADE"
}
Sample Output
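No sample output is documented here; following the status/message shape of the other delete operations (message text assumed), a successful response might look like:
{
"status": "success",
"message": "Database 'SALES_DB' has been successfully deleted."
}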
Delete files from stage
Removes files that have been staged (i.e. uploaded from a local file system or unloaded from a table). Sample Input
{
"auth": \{
"database": "SALES_DB",
"schema": "PUBLIC"
},
"stage": "MY_STAGE",
"path": "customer_data/",
"pattern": ".*\\.csv"
}
Sample Output
{
"status": "SUCCESS",
"rows": [
{
"name": "customer_data/customers_2023-01-01.csv",
"result": "DELETED"
},
{
"name": "customer_data/customers_2023-01-02.csv",
"result": "DELETED"
},
{
"name": "customer_data/customers_2023-01-03.csv",
"result": "DELETED"
}
]
}
Delete rows from table
Delete rows from the target table using an optional query and additional (non-target) tables. Sample Input
{
"auth": \{
"warehouse": "COMPUTE_WH",
"database": "SALES_DB",
"schema": "PUBLIC"
},
"table": "CUSTOMERS",
"using": "ORDERS",
"where": "CUSTOMERS.CUSTOMER_ID NOT IN (SELECT DISTINCT CUSTOMER_ID FROM ORDERS WHERE ORDER_DATE > '2023-01-01')"
}
Sample Output
{
"status": "success",
"number of rows deleted": 152
}
Delete schema
Delete a schema from the current or specified database. Sample Input
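No sample input is documented for this operation; an illustrative payload modeled on the other delete operations in this section (field names are assumptions) might look like:
{
"auth": {
"database": "SALES_DB"
},
"schema": "CUSTOMER_SCHEMA",
"if_exists": true,
"cascade_restrict": "CASCADE"
}
Sample Output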
{
"status": "success",
"message": "Schema CUSTOMER_SCHEMA has been successfully deleted from database SALES_DB."
}
Delete stage
Removes the specified named internal or external stage from the current/specified schema. The status of the files in the stage depends on the stage type. Sample Input
{
"auth": \{
"database": "SALES_DB",
"schema": "PUBLIC"
},
"stage": "CUSTOMER_DATA_STAGE",
"if_exists": true
}
Sample Output
{
"status": "success",
"message": "Stage 'CUSTOMER_DATA_STAGE' successfully deleted."
}
Delete table
Delete a table from the current or specified schema. Sample Input
{
"auth": \{
"database": "SALES_DB",
"schema": "PUBLIC"
},
"table": "CUSTOMERS",
"if_exists": true,
"cascade_restrict": "CASCADE"
}
Sample Output
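No sample output is documented here; a successful response likely follows the status/message shape of the other delete operations (message text assumed):
{
"status": "success",
"message": "Table 'CUSTOMERS' successfully deleted."
}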
Delete warehouse
Removes the specified virtual warehouse from the system. Sample Input
{
"auth": {},
"warehouse": "MY_WAREHOUSE",
"if_exists": true
}
Sample Output
{
"status": "success",
"message": "Warehouse 'MY_WAREHOUSE' has been successfully deleted."
}
Insert rows in table
Updates a table by inserting one or more rows into the table. Sample Input
{
"auth": \{
"warehouse": "COMPUTE_WH",
"database": "SALES_DB",
"schema": "PUBLIC"
},
"warehouse": "COMPUTE_WH",
"table_database": "SALES_DB",
"table_schema": "PUBLIC",
"table": "CUSTOMERS",
"rows": [
{
"data": [
{
"column": "CUSTOMER_ID",
"value": "'C001'"
},
{
"column": "FIRST_NAME",
"value": "'John'"
},
{
"column": "LAST_NAME",
"value": "'Doe'"
},
{
"column": "EMAIL",
"value": "'john.doe@example.com'"
}
]
},
{
"data": [
{
"column": "CUSTOMER_ID",
"value": "'C002'"
},
{
"column": "FIRST_NAME",
"value": "'Jane'"
},
{
"column": "LAST_NAME",
"value": "'Smith'"
},
{
"column": "EMAIL",
"value": "'jane.smith@example.com'"
}
]
}
],
"overwrite": false
}
Sample Output
{
"status": "success",
"rows": [
{
"number of rows inserted": 2
}
]
}
List copy history
This operation can be used to query Snowflake data loading history along various dimensions within the last 7 days. Sample Input
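No sample input is documented for this operation; an illustrative payload is shown below (the table and start_time fields are assumptions based on the arguments of Snowflake's COPY_HISTORY function).
{
"auth": {
"warehouse": "COMPUTE_WH",
"database": "MYDB",
"schema": "PUBLIC"
},
"table": "SALES",
"start_time": "2023-05-01T00:00:00Z"
}
Sample Output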
{
"status": "success",
"rows": [
{
"FILE_NAME": "sales_data_20230502.csv",
"STAGE_LOCATION": "@my_stage/sales/",
"LAST_LOAD_TIME": "2023-05-02T10:15:30Z",
"ROW_COUNT": 15000,
"ROW_PARSED": 15000,
"FILE_SIZE": 2500000,
"FIRST_ERROR_MESSAGE": null,
"FIRST_ERROR_LINE_NUMBER": 0,
"FIRST_ERROR_CHARACTER_POS": 0,
"FIRST_ERROR_COLUMN_NAME": null,
"ERROR_COUNT": 0,
"ERROR_LIMIT": 100,
"STATUS": "LOADED",
"TABLE_CATALOG_NAME": "MYDB",
"TABLE_SCHEMA_NAME": "PUBLIC",
"TABLE_NAME": "SALES",
"PIPE_CATALOG_NAME": null,
"PIPE_SCHEMA_NAME": null,
"PIPE_NAME": null,
"PIPE_RECEIVED_TIME": null
},
{
"FILE_NAME": "sales_data_20230503.csv",
"STAGE_LOCATION": "@my_stage/sales/",
"LAST_LOAD_TIME": "2023-05-03T09:45:12Z",
"ROW_COUNT": 14500,
"ROW_PARSED": 14500,
"FILE_SIZE": 2400000,
"FIRST_ERROR_MESSAGE": null,
"FIRST_ERROR_LINE_NUMBER": 0,
"FIRST_ERROR_CHARACTER_POS": 0,
"FIRST_ERROR_COLUMN_NAME": null,
"ERROR_COUNT": 0,
"ERROR_LIMIT": 100,
"STATUS": "LOADED",
"TABLE_CATALOG_NAME": "MYDB",
"TABLE_SCHEMA_NAME": "PUBLIC",
"TABLE_NAME": "SALES",
"PIPE_CATALOG_NAME": null,
"PIPE_SCHEMA_NAME": null,
"PIPE_NAME": null,
"PIPE_RECEIVED_TIME": null
}
]
}
List databases
Lists the databases for which you have access privileges, including the dropped databases that are still within the Time Travel retention period. Sample Input
{
"auth": {},
"warehouse": "MY_WAREHOUSE",
"terse": false,
"history": true,
"like": "PROD%",
"starts_with": "PROD",
"limit": \{
"number": 5,
"from": "PROD_DB1"
}
}
Sample Output
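No sample output is documented here; an illustrative response, assuming rows shaped like Snowflake's SHOW DATABASES output, might be:
{
"status": "SUCCESS",
"rows": [
{
"created_on": "2023-05-01T10:00:00.000Z",
"name": "PROD_DB1",
"is_default": "N",
"is_current": "N",
"origin": "",
"owner": "SYSADMIN",
"comment": "Production database",
"options": "",
"retention_time": "1"
}
]
}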
List files in stage
List the files currently in a stage. Sample Input
{
"auth": \{
"database": "SALES_DB",
"schema": "PUBLIC"
},
"stage_name": "MY_STAGE",
"path": "customer_data/",
"pattern": ".*\\.csv$"
}
Sample Output
{
"rows": [
{
"name": "customer_data/customers_2023_01.csv",
"size": 1048576,
"md5": "a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6",
"last_modified": "2023-01-15T10:30:00Z"
},
{
"name": "customer_data/customers_2023_02.csv",
"size": 2097152,
"md5": "q7r8s9t0u1v2w3x4y5z6a7b8c9d0e1f2",
"last_modified": "2023-02-15T11:45:00Z"
},
{
"name": "customer_data/customers_2023_03.csv",
"size": 3145728,
"md5": "g3h4i5j6k7l8m9n0o1p2q3r4s5t6u7v8",
"last_modified": "2023-03-15T09:15:00Z"
}
],
"status": "success"
}
List schemas
Lists the schemas for which you have access privileges, including the dropped schemas that are still within the Time Travel retention period. Sample Input
{
"auth": {
"warehouse": "COMPUTE_WH"
},
"warehouse": "COMPUTE_WH",
"terse": false,
"history": true,
"like": "SALES%",
"in": {
"database": "SNOWFLAKE_SAMPLE_DATA"
},
"starts_with": "SALES",
"limit": \{
"number": 5,
"from": "SALES_2022"
}
}
Sample Output
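No sample output is documented here; an illustrative response, assuming rows shaped like Snowflake's SHOW SCHEMAS output, might be:
{
"status": "SUCCESS",
"rows": [
{
"created_on": "2023-03-01T09:00:00.000Z",
"name": "SALES_2023",
"is_default": "N",
"is_current": "N",
"database_name": "SNOWFLAKE_SAMPLE_DATA",
"owner": "SYSADMIN",
"comment": "",
"options": "",
"retention_time": "1"
}
]
}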
List stages
Lists the stages for which you have access privileges. Sample Input
{
"auth": {},
"warehouse": "MY_WAREHOUSE",
"in": \{
"database": "MY_DATABASE",
"schema": "MY_SCHEMA"
},
"like": "STAGE_%"
}
Sample Output
{
"status": "SUCCESS",
"rows": [
{
"has_encryption_key": "false",
"schema_name": "MY_SCHEMA",
"name": "STAGE_1",
"url": "s3://my-bucket/stage1/",
"database_name": "MY_DATABASE",
"owner": "ACCOUNTADMIN",
"region": null,
"cloud": null,
"created_on": "2023-05-15T10:30:00.000Z",
"has_credentials": "true",
"type": "EXTERNAL",
"comment": "External stage for data loading"
},
{
"has_encryption_key": "true",
"schema_name": "MY_SCHEMA",
"name": "STAGE_2",
"url": "azure://my-container/stage2/",
"database_name": "MY_DATABASE",
"owner": "SYSADMIN",
"region": null,
"cloud": null,
"created_on": "2023-05-16T14:45:00.000Z",
"has_credentials": "true",
"type": "EXTERNAL",
"comment": "Encrypted external stage for sensitive data"
}
]
}
List table rows
Lists rows in one or more tables. Sample Input
{
"auth": \{
"warehouse": "COMPUTE_WH",
"database": "SALES_DB",
"schema": "PUBLIC"
},
"warehouse": "COMPUTE_WH",
"table_database": "SALES_DB",
"table_schema": "PUBLIC",
"table": "CUSTOMERS",
"distinct": false,
"objects": [
{
"column_name": "CUSTOMER_ID",
"column_alias": "ID"
},
{
"column_name": "FIRST_NAME"
},
{
"column_name": "LAST_NAME"
},
{
"column_name": "EMAIL"
}
],
"query": "WHERE CUSTOMER_ID > 1000",
"limit": 5
}
Sample Output
{
"status": "success",
"rows": [
{
"ID": 1001,
"FIRST_NAME": "John",
"LAST_NAME": "Doe",
"EMAIL": "john.doe@example.com"
},
{
"ID": 1002,
"FIRST_NAME": "Jane",
"LAST_NAME": "Smith",
"EMAIL": "jane.smith@example.com"
},
{
"ID": 1003,
"FIRST_NAME": "Michael",
"LAST_NAME": "Johnson",
"EMAIL": "michael.johnson@example.com"
},
{
"ID": 1004,
"FIRST_NAME": "Emily",
"LAST_NAME": "Brown",
"EMAIL": "emily.brown@example.com"
},
{
"ID": 1005,
"FIRST_NAME": "David",
"LAST_NAME": "Wilson",
"EMAIL": "david.wilson@example.com"
}
]
}
List tables
Lists the tables for which you have access privileges, including the dropped tables that are still within the Time Travel retention period. Sample Input
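No sample input is documented for this operation; an illustrative payload modeled on the List schemas input (field names are assumptions) might look like:
{
"auth": {},
"warehouse": "MY_WAREHOUSE",
"terse": false,
"history": true,
"like": "SALES%",
"in": {
"database": "MY_DATABASE",
"schema": "PUBLIC"
},
"limit": {
"number": 5
}
}
Sample Output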
{
"status": "SUCCESS",
"rows": [
{
"schema_name": "PUBLIC",
"bytes": 1048576,
"name": "SALES_2023_Q1",
"retention_time": "1",
"database_name": "MY_DATABASE",
"owner": "SALESADMIN",
"kind": "TABLE",
"created_on": "2023-01-01 00:00:00.000 -0800",
"rows": 100000,
"cluster_by": "ORDER_DATE",
"comment": "Q1 sales data"
},
{
"schema_name": "PUBLIC",
"bytes": 2097152,
"name": "SALES_2023_Q2",
"retention_time": "1",
"database_name": "MY_DATABASE",
"owner": "SALESADMIN",
"kind": "TABLE",
"created_on": "2023-04-01 00:00:00.000 -0700",
"rows": 150000,
"cluster_by": "ORDER_DATE",
"comment": "Q2 sales data"
},
{
"schema_name": "PUBLIC",
"bytes": 3145728,
"name": "SALES_2023_Q3",
"retention_time": "1",
"database_name": "MY_DATABASE",
"owner": "SALESADMIN",
"kind": "TABLE",
"created_on": "2023-07-01 00:00:00.000 -0700",
"rows": 200000,
"cluster_by": "ORDER_DATE",
"comment": "Q3 sales data"
}
]
}
List warehouses
Lists the warehouses for which you have access privileges. Sample Input
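No sample input is documented for this operation; an illustrative payload modeled on the other list operations (field names are assumptions) might look like:
{
"auth": {},
"like": "PROD%"
}
Sample Output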
{
"status": "SUCCESS",
"rows": [
{
"name": "PROD_WAREHOUSE",
"state": "STARTED",
"type": "STANDARD",
"size": "X-LARGE",
"running": 5,
"queued": 2,
"is_default": "N",
"is_current": "Y",
"auto_suspend": 600,
"auto_resume": "true",
"available": "YES",
"provisioning": "N",
"quiescing": "N",
"other": null,
"created_on": "2023-05-01T10:00:00.000Z",
"resumed_on": "2023-05-15T08:30:00.000Z",
"updated_on": "2023-05-15T09:15:00.000Z",
"owner": "SYSADMIN",
"comment": "Production warehouse for data processing",
"resource_monitor": "PROD_MONITOR",
"actives": 3,
"pendings": 1,
"failed": 0,
"suspended": 0,
"uuid": "01a2b3c4-5d6e-7f8g-9h0i-jklmnopqrstu"
},
{
"name": "PROD_REPORTING",
"state": "SUSPENDED",
"type": "STANDARD",
"size": "LARGE",
"running": 0,
"queued": 0,
"is_default": "N",
"is_current": "N",
"auto_suspend": 300,
"auto_resume": "true",
"available": "YES",
"provisioning": "N",
"quiescing": "N",
"other": null,
"created_on": "2023-04-15T14:30:00.000Z",
"resumed_on": null,
"updated_on": "2023-05-10T16:45:00.000Z",
"owner": "REPORTADMIN",
"comment": "Warehouse for generating reports",
"resource_monitor": "REPORTING_MONITOR",
"actives": 0,
"pendings": 0,
"failed": 0,
"suspended": 1,
"uuid": "98v7u6t5-4s3r-2q1p-0o9i-nmlkjihgfedcb"
\}
]
}
Load data to table
Load data from staged files to an existing table. The files must already be staged. Sample Input
{
"auth": \{
"database": "MYDB",
"schema": "PUBLIC",
"warehouse": "COMPUTE_WH"
},
"table": "CUSTOMERS",
"warehouse": "COMPUTE_WH",
"stage_type": {
"internal": \{
"value": "my_stage",
"path": "customer_data/"
}
},
"file_format": {
"type": {
"csv": \{
"compression": "GZIP",
"field_delimiter": ",",
"skip_header": 1
}
}
},
"copy_options": \{
"on_error": "CONTINUE",
"size_limit": 1000000000,
"purge": false
}
}
Sample Output
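No sample output is documented here; an illustrative response, assuming rows shaped like the result of Snowflake's COPY INTO <table> command, might be:
{
"status": "success",
"rows": [
{
"file": "my_stage/customer_data/customers_2023_01.csv.gz",
"status": "LOADED",
"rows_parsed": 15000,
"rows_loaded": 15000,
"error_limit": 15000,
"errors_seen": 0,
"first_error": null,
"first_error_line": null,
"first_error_character": null,
"first_error_column_name": null
}
]
}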
Unload data from table
Unload data from a table (or query) into one or more files in a named internal stage, a named external stage, or an external location. Sample Input
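Neither a sample input nor a sample output is documented for this operation; illustrative payloads are shown below. The input is modeled on the Load data to table operation and the output on the result of Snowflake's COPY INTO <location> command; field names beyond those sources are assumptions.
{
"auth": {
"database": "MYDB",
"schema": "PUBLIC",
"warehouse": "COMPUTE_WH"
},
"table": "CUSTOMERS",
"stage_type": {
"internal": {
"value": "my_stage",
"path": "customer_export/"
}
},
"file_format": {
"type": {
"csv": {
"compression": "GZIP",
"field_delimiter": ","
}
}
}
}
Sample Output
{
"status": "success",
"rows": [
{
"rows_unloaded": 15000,
"input_bytes": 2500000,
"output_bytes": 800000
}
]
}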
Update database
Modifies the properties for an existing database, including changing the name of the database and changing the Time Travel data retention period (if you are using Snowflake Enterprise Edition or higher). Sample Input
{
"auth": {},
"database": "SALES_DB",
"if_exists": true,
"operation": {
"set": \{
"data_retention_time_in_days": 90,
"comment": "Updated sales database with 90-day data retention"
}
}
}
Sample Output
{
"status": "success",
"message": "Database SALES_DB updated successfully"
}
Update row in table
Updates specified rows in the target table with new values. Sample Input
{
"auth": \{
"warehouse": "COMPUTE_WH",
"database": "SALES_DB",
"schema": "PUBLIC"
},
"warehouse": "COMPUTE_WH",
"table_database": "SALES_DB",
"table_schema": "PUBLIC",
"table": "CUSTOMERS",
"set": [
{
"column": "EMAIL",
"value": "'new.email@example.com'"
},
{
"column": "LAST_UPDATED",
"value": "CURRENT_TIMESTAMP()"
}
],
"where": "CUSTOMER_ID = 1001"
}
Sample Output
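No sample output is documented here; an illustrative response, assuming the row shape Snowflake returns for an UPDATE statement, might be:
{
"status": "success",
"rows": [
{
"number of rows updated": 1,
"number of multi-joined rows updated": 0
}
]
}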
Update schema
Modifies the properties for an existing schema, including renaming the schema or swapping it with another schema, and changing the Time Travel data retention period (if you are using Snowflake Enterprise Edition or higher). Sample Input
{
"auth": {
"database": "SALES_DB"
},
"schema": "CUSTOMER_DATA",
"if_exists": true,
"operation": {
"set": \{
"data_retention_time_in_days": 30,
"comment": "Updated schema for customer data analysis"
}
}
}
Sample Output
{
"status": "success",
"message": "Schema CUSTOMER_DATA updated successfully"
}
Update stage
Modifies the properties for an existing named internal or external stage. Sample Input
{
"auth": \{
"database": "MY_DATABASE",
"schema": "MY_SCHEMA"
},
"stage": "MY_STAGE",
"if_exists": true,
"operation": {
"set": {
"type": {
"aws_s3_bucket": {
"bucket": "my-s3-bucket",
"path": "data/",
"credentials": \{
"aws_key_id": "AKIAIOSFODNN7EXAMPLE",
"aws_secret_key": "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"
},
"encryption": {
"type": "AWS_SSE_S3"
}
}
},
"stage_file_format": {
"type": {
"csv": \{
"compression": "GZIP",
"field_delimiter": ",",
"skip_header": 1
}
}
},
"stage_copy_options": \{
"on_error": "CONTINUE",
"size_limit": 1000000000,
"purge": true
},
"comment": "Updated S3 stage for CSV files"
}
}
}
Sample Output
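No sample output is documented here; a successful response likely follows the status/message shape of the other update operations (message text assumed):
{
"status": "success",
"message": "Stage MY_STAGE updated successfully"
}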
Update table
Modifies the properties, columns, or constraints for an existing table. Sample Input
{
"auth": \{
"database": "MY_DATABASE",
"schema": "MY_SCHEMA"
},
"table": "CUSTOMERS",
"operation": {
"add_column": {
"name": "LOYALTY_POINTS",
"type": {
"simple_type": "INTEGER"
},
"not_null": false,
"default": {
"default": "0"
}
}
}
}
Sample Output
{
"status": "success",
"message": "Column 'LOYALTY_POINTS' added to table 'CUSTOMERS' successfully."
}
Update warehouse
Suspends or resumes a virtual warehouse, or aborts all queries (and other SQL statements) for a warehouse. Can also be used to rename or set/unset the properties for a warehouse. Sample Input
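No sample input is documented for this operation; an illustrative payload modeled on the Update database and Update schema operations (the warehouse_properties field is an assumption based on the Create warehouse input) might look like:
{
"auth": {},
"warehouse": "MY_WAREHOUSE",
"if_exists": true,
"operation": {
"set": {
"warehouse_properties": {
"warehouse_size": "LARGE",
"auto_suspend": 600
},
"comment": "Resized warehouse for heavier workloads"
}
}
}
Sample Output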
{
"status": "success",
"message": "Warehouse MY_WAREHOUSE updated successfully"
}
DDL operations
List table columns (DDL)
Note that DDL operations can only be called directly via the Connectors API, or when using CustomJS in the Embedded solution editor (e.g. for DDL-dependent data mapping).
Sample Input
{
"auth": {}
}
Sample Output
[
{
"column_name": "id",
"data_type": "NUMBER(38,0)",
"is_nullable": "NO",
"column_default": "NULL",
"is_identity": "YES",
"comment": "Unique identifier for the record"
},
{
"column_name": "first_name",
"data_type": "VARCHAR(50)",
"is_nullable": "NO",
"column_default": "NULL",
"is_identity": "NO",
"comment": "First name of the user"
},
{
"column_name": "last_name",
"data_type": "VARCHAR(50)",
"is_nullable": "NO",
"column_default": "NULL",
"is_identity": "NO",
"comment": "Last name of the user"
},
{
"column_name": "email",
"data_type": "VARCHAR(100)",
"is_nullable": "NO",
"column_default": "NULL",
"is_identity": "NO",
"comment": "Email address of the user"
},
{
"column_name": "created_at",
"data_type": "TIMESTAMP_NTZ(9)",
"is_nullable": "NO",
"column_default": "CURRENT_TIMESTAMP()",
"is_identity": "NO",
"comment": "Timestamp when the record was created"
}
]