snowflake.Task
Import
$ pulumi import snowflake:index/task:Task example '"<database_name>"."<schema_name>"."<task_name>"'
Create Task Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Task(name: string, args: TaskArgs, opts?: CustomResourceOptions);
@overload
def Task(resource_name: str,
args: TaskArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Task(resource_name: str,
opts: Optional[ResourceOptions] = None,
database: Optional[str] = None,
schema: Optional[str] = None,
sql_statement: Optional[str] = None,
started: Optional[bool] = None,
abort_detached_query: Optional[bool] = None,
afters: Optional[Sequence[str]] = None,
allow_overlapping_execution: Optional[str] = None,
autocommit: Optional[bool] = None,
binary_input_format: Optional[str] = None,
binary_output_format: Optional[str] = None,
client_memory_limit: Optional[int] = None,
client_metadata_request_use_connection_ctx: Optional[bool] = None,
client_prefetch_threads: Optional[int] = None,
client_result_chunk_size: Optional[int] = None,
client_result_column_case_insensitive: Optional[bool] = None,
client_session_keep_alive: Optional[bool] = None,
client_session_keep_alive_heartbeat_frequency: Optional[int] = None,
client_timestamp_type_mapping: Optional[str] = None,
comment: Optional[str] = None,
config: Optional[str] = None,
date_input_format: Optional[str] = None,
date_output_format: Optional[str] = None,
enable_unload_physical_type_optimization: Optional[bool] = None,
error_integration: Optional[str] = None,
error_on_nondeterministic_merge: Optional[bool] = None,
error_on_nondeterministic_update: Optional[bool] = None,
finalize: Optional[str] = None,
geography_output_format: Optional[str] = None,
geometry_output_format: Optional[str] = None,
jdbc_treat_timestamp_ntz_as_utc: Optional[bool] = None,
jdbc_use_session_timezone: Optional[bool] = None,
json_indent: Optional[int] = None,
lock_timeout: Optional[int] = None,
log_level: Optional[str] = None,
multi_statement_count: Optional[int] = None,
name: Optional[str] = None,
noorder_sequence_as_default: Optional[bool] = None,
odbc_treat_decimal_as_int: Optional[bool] = None,
query_tag: Optional[str] = None,
quoted_identifiers_ignore_case: Optional[bool] = None,
rows_per_resultset: Optional[int] = None,
s3_stage_vpce_dns_name: Optional[str] = None,
schedule: Optional[TaskScheduleArgs] = None,
search_path: Optional[str] = None,
statement_queued_timeout_in_seconds: Optional[int] = None,
statement_timeout_in_seconds: Optional[int] = None,
strict_json_output: Optional[bool] = None,
suspend_task_after_num_failures: Optional[int] = None,
task_auto_retry_attempts: Optional[int] = None,
time_input_format: Optional[str] = None,
time_output_format: Optional[str] = None,
timestamp_day_is_always24h: Optional[bool] = None,
timestamp_input_format: Optional[str] = None,
timestamp_ltz_output_format: Optional[str] = None,
timestamp_ntz_output_format: Optional[str] = None,
timestamp_output_format: Optional[str] = None,
timestamp_type_mapping: Optional[str] = None,
timestamp_tz_output_format: Optional[str] = None,
timezone: Optional[str] = None,
trace_level: Optional[str] = None,
transaction_abort_on_error: Optional[bool] = None,
transaction_default_isolation_level: Optional[str] = None,
two_digit_century_start: Optional[int] = None,
unsupported_ddl_action: Optional[str] = None,
use_cached_result: Optional[bool] = None,
user_task_managed_initial_warehouse_size: Optional[str] = None,
user_task_minimum_trigger_interval_in_seconds: Optional[int] = None,
user_task_timeout_ms: Optional[int] = None,
warehouse: Optional[str] = None,
week_of_year_policy: Optional[int] = None,
week_start: Optional[int] = None,
when: Optional[str] = None)
func NewTask(ctx *Context, name string, args TaskArgs, opts ...ResourceOption) (*Task, error)
public Task(string name, TaskArgs args, CustomResourceOptions? opts = null)
type: snowflake:Task
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args TaskArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args TaskArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args TaskArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args TaskArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args TaskArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var taskResource = new Snowflake.Task("taskResource", new()
{
Database = "string",
Schema = "string",
SqlStatement = "string",
Started = false,
AbortDetachedQuery = false,
Afters = new[]
{
"string",
},
AllowOverlappingExecution = "string",
Autocommit = false,
BinaryInputFormat = "string",
BinaryOutputFormat = "string",
ClientMemoryLimit = 0,
ClientMetadataRequestUseConnectionCtx = false,
ClientPrefetchThreads = 0,
ClientResultChunkSize = 0,
ClientResultColumnCaseInsensitive = false,
ClientSessionKeepAlive = false,
ClientSessionKeepAliveHeartbeatFrequency = 0,
ClientTimestampTypeMapping = "string",
Comment = "string",
Config = "string",
DateInputFormat = "string",
DateOutputFormat = "string",
EnableUnloadPhysicalTypeOptimization = false,
ErrorIntegration = "string",
ErrorOnNondeterministicMerge = false,
ErrorOnNondeterministicUpdate = false,
Finalize = "string",
GeographyOutputFormat = "string",
GeometryOutputFormat = "string",
JdbcTreatTimestampNtzAsUtc = false,
JdbcUseSessionTimezone = false,
JsonIndent = 0,
LockTimeout = 0,
LogLevel = "string",
MultiStatementCount = 0,
Name = "string",
NoorderSequenceAsDefault = false,
OdbcTreatDecimalAsInt = false,
QueryTag = "string",
QuotedIdentifiersIgnoreCase = false,
RowsPerResultset = 0,
S3StageVpceDnsName = "string",
Schedule = new Snowflake.Inputs.TaskScheduleArgs
{
Minutes = 0,
UsingCron = "string",
},
SearchPath = "string",
StatementQueuedTimeoutInSeconds = 0,
StatementTimeoutInSeconds = 0,
StrictJsonOutput = false,
SuspendTaskAfterNumFailures = 0,
TaskAutoRetryAttempts = 0,
TimeInputFormat = "string",
TimeOutputFormat = "string",
TimestampDayIsAlways24h = false,
TimestampInputFormat = "string",
TimestampLtzOutputFormat = "string",
TimestampNtzOutputFormat = "string",
TimestampOutputFormat = "string",
TimestampTypeMapping = "string",
TimestampTzOutputFormat = "string",
Timezone = "string",
TraceLevel = "string",
TransactionAbortOnError = false,
TransactionDefaultIsolationLevel = "string",
TwoDigitCenturyStart = 0,
UnsupportedDdlAction = "string",
UseCachedResult = false,
UserTaskManagedInitialWarehouseSize = "string",
UserTaskMinimumTriggerIntervalInSeconds = 0,
UserTaskTimeoutMs = 0,
Warehouse = "string",
WeekOfYearPolicy = 0,
WeekStart = 0,
When = "string",
});
example, err := snowflake.NewTask(ctx, "taskResource", &snowflake.TaskArgs{
Database: pulumi.String("string"),
Schema: pulumi.String("string"),
SqlStatement: pulumi.String("string"),
Started: pulumi.Bool(false),
AbortDetachedQuery: pulumi.Bool(false),
Afters: pulumi.StringArray{
pulumi.String("string"),
},
AllowOverlappingExecution: pulumi.String("string"),
Autocommit: pulumi.Bool(false),
BinaryInputFormat: pulumi.String("string"),
BinaryOutputFormat: pulumi.String("string"),
ClientMemoryLimit: pulumi.Int(0),
ClientMetadataRequestUseConnectionCtx: pulumi.Bool(false),
ClientPrefetchThreads: pulumi.Int(0),
ClientResultChunkSize: pulumi.Int(0),
ClientResultColumnCaseInsensitive: pulumi.Bool(false),
ClientSessionKeepAlive: pulumi.Bool(false),
ClientSessionKeepAliveHeartbeatFrequency: pulumi.Int(0),
ClientTimestampTypeMapping: pulumi.String("string"),
Comment: pulumi.String("string"),
Config: pulumi.String("string"),
DateInputFormat: pulumi.String("string"),
DateOutputFormat: pulumi.String("string"),
EnableUnloadPhysicalTypeOptimization: pulumi.Bool(false),
ErrorIntegration: pulumi.String("string"),
ErrorOnNondeterministicMerge: pulumi.Bool(false),
ErrorOnNondeterministicUpdate: pulumi.Bool(false),
Finalize: pulumi.String("string"),
GeographyOutputFormat: pulumi.String("string"),
GeometryOutputFormat: pulumi.String("string"),
JdbcTreatTimestampNtzAsUtc: pulumi.Bool(false),
JdbcUseSessionTimezone: pulumi.Bool(false),
JsonIndent: pulumi.Int(0),
LockTimeout: pulumi.Int(0),
LogLevel: pulumi.String("string"),
MultiStatementCount: pulumi.Int(0),
Name: pulumi.String("string"),
NoorderSequenceAsDefault: pulumi.Bool(false),
OdbcTreatDecimalAsInt: pulumi.Bool(false),
QueryTag: pulumi.String("string"),
QuotedIdentifiersIgnoreCase: pulumi.Bool(false),
RowsPerResultset: pulumi.Int(0),
S3StageVpceDnsName: pulumi.String("string"),
Schedule: &snowflake.TaskScheduleArgs{
Minutes: pulumi.Int(0),
UsingCron: pulumi.String("string"),
},
SearchPath: pulumi.String("string"),
StatementQueuedTimeoutInSeconds: pulumi.Int(0),
StatementTimeoutInSeconds: pulumi.Int(0),
StrictJsonOutput: pulumi.Bool(false),
SuspendTaskAfterNumFailures: pulumi.Int(0),
TaskAutoRetryAttempts: pulumi.Int(0),
TimeInputFormat: pulumi.String("string"),
TimeOutputFormat: pulumi.String("string"),
TimestampDayIsAlways24h: pulumi.Bool(false),
TimestampInputFormat: pulumi.String("string"),
TimestampLtzOutputFormat: pulumi.String("string"),
TimestampNtzOutputFormat: pulumi.String("string"),
TimestampOutputFormat: pulumi.String("string"),
TimestampTypeMapping: pulumi.String("string"),
TimestampTzOutputFormat: pulumi.String("string"),
Timezone: pulumi.String("string"),
TraceLevel: pulumi.String("string"),
TransactionAbortOnError: pulumi.Bool(false),
TransactionDefaultIsolationLevel: pulumi.String("string"),
TwoDigitCenturyStart: pulumi.Int(0),
UnsupportedDdlAction: pulumi.String("string"),
UseCachedResult: pulumi.Bool(false),
UserTaskManagedInitialWarehouseSize: pulumi.String("string"),
UserTaskMinimumTriggerIntervalInSeconds: pulumi.Int(0),
UserTaskTimeoutMs: pulumi.Int(0),
Warehouse: pulumi.String("string"),
WeekOfYearPolicy: pulumi.Int(0),
WeekStart: pulumi.Int(0),
When: pulumi.String("string"),
})
var taskResource = new Task("taskResource", TaskArgs.builder()
.database("string")
.schema("string")
.sqlStatement("string")
.started(false)
.abortDetachedQuery(false)
.afters("string")
.allowOverlappingExecution("string")
.autocommit(false)
.binaryInputFormat("string")
.binaryOutputFormat("string")
.clientMemoryLimit(0)
.clientMetadataRequestUseConnectionCtx(false)
.clientPrefetchThreads(0)
.clientResultChunkSize(0)
.clientResultColumnCaseInsensitive(false)
.clientSessionKeepAlive(false)
.clientSessionKeepAliveHeartbeatFrequency(0)
.clientTimestampTypeMapping("string")
.comment("string")
.config("string")
.dateInputFormat("string")
.dateOutputFormat("string")
.enableUnloadPhysicalTypeOptimization(false)
.errorIntegration("string")
.errorOnNondeterministicMerge(false)
.errorOnNondeterministicUpdate(false)
.finalize("string")
.geographyOutputFormat("string")
.geometryOutputFormat("string")
.jdbcTreatTimestampNtzAsUtc(false)
.jdbcUseSessionTimezone(false)
.jsonIndent(0)
.lockTimeout(0)
.logLevel("string")
.multiStatementCount(0)
.name("string")
.noorderSequenceAsDefault(false)
.odbcTreatDecimalAsInt(false)
.queryTag("string")
.quotedIdentifiersIgnoreCase(false)
.rowsPerResultset(0)
.s3StageVpceDnsName("string")
.schedule(TaskScheduleArgs.builder()
.minutes(0)
.usingCron("string")
.build())
.searchPath("string")
.statementQueuedTimeoutInSeconds(0)
.statementTimeoutInSeconds(0)
.strictJsonOutput(false)
.suspendTaskAfterNumFailures(0)
.taskAutoRetryAttempts(0)
.timeInputFormat("string")
.timeOutputFormat("string")
.timestampDayIsAlways24h(false)
.timestampInputFormat("string")
.timestampLtzOutputFormat("string")
.timestampNtzOutputFormat("string")
.timestampOutputFormat("string")
.timestampTypeMapping("string")
.timestampTzOutputFormat("string")
.timezone("string")
.traceLevel("string")
.transactionAbortOnError(false)
.transactionDefaultIsolationLevel("string")
.twoDigitCenturyStart(0)
.unsupportedDdlAction("string")
.useCachedResult(false)
.userTaskManagedInitialWarehouseSize("string")
.userTaskMinimumTriggerIntervalInSeconds(0)
.userTaskTimeoutMs(0)
.warehouse("string")
.weekOfYearPolicy(0)
.weekStart(0)
.when("string")
.build());
task_resource = snowflake.Task("taskResource",
database="string",
schema="string",
sql_statement="string",
started=False,
abort_detached_query=False,
afters=["string"],
allow_overlapping_execution="string",
autocommit=False,
binary_input_format="string",
binary_output_format="string",
client_memory_limit=0,
client_metadata_request_use_connection_ctx=False,
client_prefetch_threads=0,
client_result_chunk_size=0,
client_result_column_case_insensitive=False,
client_session_keep_alive=False,
client_session_keep_alive_heartbeat_frequency=0,
client_timestamp_type_mapping="string",
comment="string",
config="string",
date_input_format="string",
date_output_format="string",
enable_unload_physical_type_optimization=False,
error_integration="string",
error_on_nondeterministic_merge=False,
error_on_nondeterministic_update=False,
finalize="string",
geography_output_format="string",
geometry_output_format="string",
jdbc_treat_timestamp_ntz_as_utc=False,
jdbc_use_session_timezone=False,
json_indent=0,
lock_timeout=0,
log_level="string",
multi_statement_count=0,
name="string",
noorder_sequence_as_default=False,
odbc_treat_decimal_as_int=False,
query_tag="string",
quoted_identifiers_ignore_case=False,
rows_per_resultset=0,
s3_stage_vpce_dns_name="string",
schedule={
"minutes": 0,
"using_cron": "string",
},
search_path="string",
statement_queued_timeout_in_seconds=0,
statement_timeout_in_seconds=0,
strict_json_output=False,
suspend_task_after_num_failures=0,
task_auto_retry_attempts=0,
time_input_format="string",
time_output_format="string",
timestamp_day_is_always24h=False,
timestamp_input_format="string",
timestamp_ltz_output_format="string",
timestamp_ntz_output_format="string",
timestamp_output_format="string",
timestamp_type_mapping="string",
timestamp_tz_output_format="string",
timezone="string",
trace_level="string",
transaction_abort_on_error=False,
transaction_default_isolation_level="string",
two_digit_century_start=0,
unsupported_ddl_action="string",
use_cached_result=False,
user_task_managed_initial_warehouse_size="string",
user_task_minimum_trigger_interval_in_seconds=0,
user_task_timeout_ms=0,
warehouse="string",
week_of_year_policy=0,
week_start=0,
when="string")
const taskResource = new snowflake.Task("taskResource", {
database: "string",
schema: "string",
sqlStatement: "string",
started: false,
abortDetachedQuery: false,
afters: ["string"],
allowOverlappingExecution: "string",
autocommit: false,
binaryInputFormat: "string",
binaryOutputFormat: "string",
clientMemoryLimit: 0,
clientMetadataRequestUseConnectionCtx: false,
clientPrefetchThreads: 0,
clientResultChunkSize: 0,
clientResultColumnCaseInsensitive: false,
clientSessionKeepAlive: false,
clientSessionKeepAliveHeartbeatFrequency: 0,
clientTimestampTypeMapping: "string",
comment: "string",
config: "string",
dateInputFormat: "string",
dateOutputFormat: "string",
enableUnloadPhysicalTypeOptimization: false,
errorIntegration: "string",
errorOnNondeterministicMerge: false,
errorOnNondeterministicUpdate: false,
finalize: "string",
geographyOutputFormat: "string",
geometryOutputFormat: "string",
jdbcTreatTimestampNtzAsUtc: false,
jdbcUseSessionTimezone: false,
jsonIndent: 0,
lockTimeout: 0,
logLevel: "string",
multiStatementCount: 0,
name: "string",
noorderSequenceAsDefault: false,
odbcTreatDecimalAsInt: false,
queryTag: "string",
quotedIdentifiersIgnoreCase: false,
rowsPerResultset: 0,
s3StageVpceDnsName: "string",
schedule: {
minutes: 0,
usingCron: "string",
},
searchPath: "string",
statementQueuedTimeoutInSeconds: 0,
statementTimeoutInSeconds: 0,
strictJsonOutput: false,
suspendTaskAfterNumFailures: 0,
taskAutoRetryAttempts: 0,
timeInputFormat: "string",
timeOutputFormat: "string",
timestampDayIsAlways24h: false,
timestampInputFormat: "string",
timestampLtzOutputFormat: "string",
timestampNtzOutputFormat: "string",
timestampOutputFormat: "string",
timestampTypeMapping: "string",
timestampTzOutputFormat: "string",
timezone: "string",
traceLevel: "string",
transactionAbortOnError: false,
transactionDefaultIsolationLevel: "string",
twoDigitCenturyStart: 0,
unsupportedDdlAction: "string",
useCachedResult: false,
userTaskManagedInitialWarehouseSize: "string",
userTaskMinimumTriggerIntervalInSeconds: 0,
userTaskTimeoutMs: 0,
warehouse: "string",
weekOfYearPolicy: 0,
weekStart: 0,
when: "string",
});
type: snowflake:Task
properties:
abortDetachedQuery: false
afters:
- string
allowOverlappingExecution: string
autocommit: false
binaryInputFormat: string
binaryOutputFormat: string
clientMemoryLimit: 0
clientMetadataRequestUseConnectionCtx: false
clientPrefetchThreads: 0
clientResultChunkSize: 0
clientResultColumnCaseInsensitive: false
clientSessionKeepAlive: false
clientSessionKeepAliveHeartbeatFrequency: 0
clientTimestampTypeMapping: string
comment: string
config: string
database: string
dateInputFormat: string
dateOutputFormat: string
enableUnloadPhysicalTypeOptimization: false
errorIntegration: string
errorOnNondeterministicMerge: false
errorOnNondeterministicUpdate: false
finalize: string
geographyOutputFormat: string
geometryOutputFormat: string
jdbcTreatTimestampNtzAsUtc: false
jdbcUseSessionTimezone: false
jsonIndent: 0
lockTimeout: 0
logLevel: string
multiStatementCount: 0
name: string
noorderSequenceAsDefault: false
odbcTreatDecimalAsInt: false
queryTag: string
quotedIdentifiersIgnoreCase: false
rowsPerResultset: 0
s3StageVpceDnsName: string
schedule:
minutes: 0
usingCron: string
schema: string
searchPath: string
sqlStatement: string
started: false
statementQueuedTimeoutInSeconds: 0
statementTimeoutInSeconds: 0
strictJsonOutput: false
suspendTaskAfterNumFailures: 0
taskAutoRetryAttempts: 0
timeInputFormat: string
timeOutputFormat: string
timestampDayIsAlways24h: false
timestampInputFormat: string
timestampLtzOutputFormat: string
timestampNtzOutputFormat: string
timestampOutputFormat: string
timestampTypeMapping: string
timestampTzOutputFormat: string
timezone: string
traceLevel: string
transactionAbortOnError: false
transactionDefaultIsolationLevel: string
twoDigitCenturyStart: 0
unsupportedDdlAction: string
useCachedResult: false
userTaskManagedInitialWarehouseSize: string
userTaskMinimumTriggerIntervalInSeconds: 0
userTaskTimeoutMs: 0
warehouse: string
weekOfYearPolicy: 0
weekStart: 0
when: string
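The reference example above exercises every input with placeholder values; in practice most tasks set only a handful of them. The following Python sketch shows a minimal serverless task that runs on a cron schedule. The database, schema, table, and task names are hypothetical, and "XSMALL" is assumed to be an accepted value for the initial managed warehouse size.
import pulumi_snowflake as snowflake

# A serverless task: no warehouse is set, so runs use Snowflake-managed
# compute sized initially by user_task_managed_initial_warehouse_size.
nightly_cleanup = snowflake.Task(
    "nightlyCleanup",
    database="MY_DB",        # hypothetical database
    schema="MY_SCHEMA",      # hypothetical schema
    name="NIGHTLY_CLEANUP",
    started=True,
    sql_statement=(
        "DELETE FROM MY_SCHEMA.EVENTS "
        "WHERE EVENT_TS < DATEADD(day, -30, CURRENT_TIMESTAMP())"
    ),
    schedule=snowflake.TaskScheduleArgs(
        using_cron="0 3 * * * UTC",  # daily at 03:00 UTC, in Snowflake's USING CRON format
    ),
    user_task_managed_initial_warehouse_size="XSMALL",
    comment="Removes events older than 30 days",
)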
Task Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
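For example, the nested schedule input can be written either way; the two resources below are equivalent sketches (the database, schema, and warehouse names are hypothetical).
import pulumi_snowflake as snowflake

# Nested input passed as an argument class.
with_args_class = snowflake.Task(
    "scheduledTaskArgs",
    database="MY_DB",
    schema="MY_SCHEMA",
    warehouse="MY_WH",
    started=True,
    sql_statement="SELECT 1",
    schedule=snowflake.TaskScheduleArgs(minutes=15),
)

# The same nested input passed as a dictionary literal.
with_dict = snowflake.Task(
    "scheduledTaskDict",
    database="MY_DB",
    schema="MY_SCHEMA",
    warehouse="MY_WH",
    started=True,
    sql_statement="SELECT 1",
    schedule={"minutes": 15},
)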
The Task resource accepts the following input properties:
- Database string
- The database in which to create the task. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- Schema string
- The schema in which to create the task. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- SqlStatement string
- Any single SQL statement, or a call to a stored procedure, executed when the task runs.
- Started bool
- Specifies if the task should be started or suspended.
- AbortDetachedQuery bool
- Specifies the action that Snowflake performs for in-progress queries if connectivity is lost due to abrupt termination of a session (e.g. network outage, browser termination, service interruption). For more information, check ABORT_DETACHED_QUERY docs.
- Afters List<string>
- Specifies one or more predecessor tasks for the current task. Use this option to create a DAG of tasks or add this task to an existing DAG. A DAG is a series of tasks that starts with a scheduled root task and is linked together by dependencies (see the example that follows this property list). Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- AllowOverlappingExecution string
- By default, Snowflake ensures that only one instance of a particular DAG is allowed to run at a time; setting the parameter value to TRUE permits DAG runs to overlap. Available options are: "true" or "false". When the value is not set in the configuration, the provider will put "default" there, which means to use the Snowflake default for this value.
- Autocommit bool
- Specifies whether autocommit is enabled for the session. Autocommit determines whether a DML statement, when executed without an active transaction, is automatically committed after the statement successfully completes. For more information, see Transactions. For more information, check AUTOCOMMIT docs.
- BinaryInputFormat string
- The format of VARCHAR values passed as input to VARCHAR-to-BINARY conversion functions. For more information, see Binary input and output. For more information, check BINARY_INPUT_FORMAT docs.
- BinaryOutputFormat string
- The format for VARCHAR values returned as output by BINARY-to-VARCHAR conversion functions. For more information, see Binary input and output. For more information, check BINARY_OUTPUT_FORMAT docs.
- ClientMemoryLimit int
- Parameter that specifies the maximum amount of memory the JDBC driver or ODBC driver should use for the result set from queries (in MB). For more information, check CLIENT_MEMORY_LIMIT docs.
- ClientMetadataRequestUseConnectionCtx bool
- For specific ODBC functions and JDBC methods, this parameter can change the default search scope from all databases/schemas to the current database/schema. The narrower search typically returns fewer rows and executes more quickly. For more information, check CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX docs.
- ClientPrefetchThreads int
- Parameter that specifies the number of threads used by the client to pre-fetch large result sets. The driver will attempt to honor the parameter value, but defines the minimum and maximum values (depending on your system's resources) to improve performance. For more information, check CLIENT_PREFETCH_THREADS docs.
- ClientResultChunkSize int
- Parameter that specifies the maximum size of each set (or chunk) of query results to download (in MB). The JDBC driver downloads query results in chunks. For more information, check CLIENT_RESULT_CHUNK_SIZE docs.
- ClientResultColumnCaseInsensitive bool
- Parameter that indicates whether to match column name case-insensitively in ResultSet.get* methods in JDBC. For more information, check CLIENT_RESULT_COLUMN_CASE_INSENSITIVE docs.
- ClientSessionKeepAlive bool
- Parameter that indicates whether to force a user to log in again after a period of inactivity in the session. For more information, check CLIENT_SESSION_KEEP_ALIVE docs.
- ClientSessionKeepAliveHeartbeatFrequency int
- Number of seconds in-between client attempts to update the token for the session. For more information, check CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY docs.
- ClientTimestampTypeMapping string
- Specifies the TIMESTAMP_* variation to use when binding timestamp variables for JDBC or ODBC applications that use the bind API to load data. For more information, check CLIENT_TIMESTAMP_TYPE_MAPPING docs.
- Comment string
- Specifies a comment for the task.
- Config string
- Specifies a string representation of key-value pairs that can be accessed by all tasks in the task graph. Must be in JSON format.
- DateInputFormat string
- Specifies the input format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATE_INPUT_FORMAT docs.
- DateOutputFormat string
- Specifies the display format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATE_OUTPUT_FORMAT docs.
- EnableUnloadPhysicalTypeOptimization bool
- Specifies whether to set the schema for unloaded Parquet files based on the logical column data types (i.e. the types in the unload SQL query or source table) or on the unloaded column values (i.e. the smallest data types and precision that support the values in the output columns of the unload SQL statement or source table). For more information, check ENABLE_UNLOAD_PHYSICAL_TYPE_OPTIMIZATION docs.
- ErrorIntegration string
- Specifies the name of the notification integration used for error notifications. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`. For more information about this resource, see docs.
- ErrorOnNondeterministicMerge bool
- Specifies whether to return an error when the MERGE command is used to update or delete a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERROR_ON_NONDETERMINISTIC_MERGE docs.
- ErrorOnNondeterministicUpdate bool
- Specifies whether to return an error when the UPDATE command is used to update a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERROR_ON_NONDETERMINISTIC_UPDATE docs.
- Finalize string
- Specifies the name of a root task that the finalizer task is associated with. Finalizer tasks run after all other tasks in the task graph run to completion. You can define the SQL of a finalizer task to handle notifications and the release and cleanup of resources that a task graph uses. For more information, see Release and cleanup of task graphs. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- GeographyOutputFormat string
- Display format for GEOGRAPHY values. For more information, check GEOGRAPHY_OUTPUT_FORMAT docs.
- GeometryOutputFormat string
- Display format for GEOMETRY values. For more information, check GEOMETRY_OUTPUT_FORMAT docs.
- JdbcTreatTimestampNtzAsUtc bool
- Specifies how JDBC processes TIMESTAMP_NTZ values. For more information, check JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC docs.
- JdbcUseSessionTimezone bool
- Specifies whether the JDBC Driver uses the time zone of the JVM or the time zone of the session (specified by the TIMEZONE parameter) for the getDate(), getTime(), and getTimestamp() methods of the ResultSet class. For more information, check JDBC_USE_SESSION_TIMEZONE docs.
- JsonIndent int
- Specifies the number of blank spaces to indent each new element in JSON output in the session. Also specifies whether to insert newline characters after each element. For more information, check JSON_INDENT docs.
- LockTimeout int
- Number of seconds to wait while trying to lock a resource, before timing out and aborting the statement. For more information, check LOCK_TIMEOUT docs.
- LogLevel string
- Specifies the severity level of messages that should be ingested and made available in the active event table. Messages at the specified level (and at more severe levels) are ingested. For more information about log levels, see Setting log level. For more information, check LOG_LEVEL docs.
- MultiStatementCount int
- Number of statements to execute when using the multi-statement capability. For more information, check MULTI_STATEMENT_COUNT docs.
- Name string
- Specifies the identifier for the task; must be unique for the database and schema in which the task is created. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- NoorderSequenceAsDefault bool
- Specifies whether the ORDER or NOORDER property is set by default when you create a new sequence or add a new table column. The ORDER and NOORDER properties determine whether or not the values are generated for the sequence or auto-incremented column in increasing or decreasing order. For more information, check NOORDER_SEQUENCE_AS_DEFAULT docs.
- OdbcTreatDecimalAsInt bool
- Specifies how ODBC processes columns that have a scale of zero (0). For more information, check ODBC_TREAT_DECIMAL_AS_INT docs.
- QueryTag string
- Optional string that can be used to tag queries and other SQL statements executed within a session. The tags are displayed in the output of the QUERY_HISTORY and QUERY_HISTORY_BY_* functions. For more information, check QUERY_TAG docs.
- QuotedIdentifiersIgnoreCase bool
- Specifies whether letters in double-quoted object identifiers are stored and resolved as uppercase letters. By default, Snowflake preserves the case of alphabetic characters when storing and resolving double-quoted identifiers (see Identifier resolution). You can use this parameter in situations in which third-party applications always use double quotes around identifiers. For more information, check QUOTED_IDENTIFIERS_IGNORE_CASE docs.
- RowsPerResultset int
- Specifies the maximum number of rows returned in a result set. A value of 0 specifies no maximum. For more information, check ROWS_PER_RESULTSET docs.
- S3StageVpceDnsName string
- Specifies the DNS name of an Amazon S3 interface endpoint. Requests sent to the internal stage of an account via AWS PrivateLink for Amazon S3 use this endpoint to connect. For more information, see Accessing Internal stages with dedicated interface endpoints. For more information, check S3_STAGE_VPCE_DNS_NAME docs.
- Schedule TaskSchedule
- The schedule for periodically running the task. This can be a cron or interval in minutes. (Conflicts with finalize and after; when set, one of the sub-fields minutes or using_cron should be set.)
- SearchPath string
- Specifies the path to search to resolve unqualified object names in queries. For more information, see Name resolution in queries. Comma-separated list of identifiers. An identifier can be a fully or partially qualified schema name. For more information, check SEARCH_PATH docs.
- StatementQueuedTimeoutInSeconds int
- Amount of time, in seconds, a SQL statement (query, DDL, DML, etc.) remains queued for a warehouse before it is canceled by the system. This parameter can be used in conjunction with the MAX_CONCURRENCY_LEVEL parameter to ensure a warehouse is never backlogged. For more information, check STATEMENT_QUEUED_TIMEOUT_IN_SECONDS docs.
- StatementTimeoutInSeconds int
- Amount of time, in seconds, after which a running SQL statement (query, DDL, DML, etc.) is canceled by the system. For more information, check STATEMENT_TIMEOUT_IN_SECONDS docs.
- StrictJsonOutput bool
- This parameter specifies whether JSON output in a session is compatible with the general standard (as described by http://json.org). By design, Snowflake allows JSON input that contains non-standard values; however, these non-standard values might result in Snowflake outputting JSON that is incompatible with other platforms and languages. This parameter, when enabled, ensures that Snowflake outputs valid/compatible JSON. For more information, check STRICT_JSON_OUTPUT docs.
- SuspendTaskAfterNumFailures int
- Specifies the number of consecutive failed task runs after which the current task is suspended automatically. The default is 0 (no automatic suspension). For more information, check SUSPEND_TASK_AFTER_NUM_FAILURES docs.
- TaskAutoRetryAttempts int
- Specifies the number of automatic task graph retry attempts. If any task graphs complete in a FAILED state, Snowflake can automatically retry the task graphs from the last task in the graph that failed. For more information, check TASK_AUTO_RETRY_ATTEMPTS docs.
- TimeInputFormat string
- Specifies the input format for the TIME data type. For more information, see Date and time input and output formats. Any valid, supported time format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of times stored in the system during the session). For more information, check TIME_INPUT_FORMAT docs.
- TimeOutputFormat string
- Specifies the display format for the TIME data type. For more information, see Date and time input and output formats. For more information, check TIME_OUTPUT_FORMAT docs.
- TimestampDayIsAlways24h bool
- Specifies whether the DATEADD function (and its aliases) always consider a day to be exactly 24 hours for expressions that span multiple days. For more information, check TIMESTAMP_DAY_IS_ALWAYS_24H docs.
- TimestampInputFormat string
- Specifies the input format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. Any valid, supported timestamp format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of timestamps stored in the system during the session). For more information, check TIMESTAMP_INPUT_FORMAT docs.
- TimestampLtzOutputFormat string
- Specifies the display format for the TIMESTAMP_LTZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_LTZ_OUTPUT_FORMAT docs.
- TimestampNtzOutputFormat string
- Specifies the display format for the TIMESTAMP_NTZ data type. For more information, check TIMESTAMP_NTZ_OUTPUT_FORMAT docs.
- TimestampOutputFormat string
- Specifies the display format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_OUTPUT_FORMAT docs.
- TimestampTypeMapping string
- Specifies the TIMESTAMP_* variation that the TIMESTAMP data type alias maps to. For more information, check TIMESTAMP_TYPE_MAPPING docs.
- TimestampTzOutputFormat string
- Specifies the display format for the TIMESTAMP_TZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_TZ_OUTPUT_FORMAT docs.
- Timezone string
- Specifies the time zone for the session. You can specify a time zone name or a link name from release 2021a of the IANA Time Zone Database (e.g. America/Los_Angeles, Europe/London, UTC, Etc/GMT, etc.). For more information, check TIMEZONE docs.
- TraceLevel string
- Controls how trace events are ingested into the event table. For more information about trace levels, see Setting trace level. For more information, check TRACE_LEVEL docs.
- TransactionAbortOnError bool
- Specifies the action to perform when a statement issued within a non-autocommit transaction returns with an error. For more information, check TRANSACTION_ABORT_ON_ERROR docs.
- TransactionDefaultIsolationLevel string
- Specifies the isolation level for transactions in the user session. For more information, check TRANSACTION_DEFAULT_ISOLATION_LEVEL docs.
- TwoDigitCenturyStart int
- Specifies the “century start” year for 2-digit years (i.e. the earliest year such dates can represent). This parameter prevents ambiguous dates when importing or converting data with the YY date format component (i.e. years represented as 2 digits). For more information, check TWO_DIGIT_CENTURY_START docs.
- UnsupportedDdlAction string
- Determines if an unsupported (i.e. non-default) value specified for a constraint property returns an error. For more information, check UNSUPPORTED_DDL_ACTION docs.
- UseCachedResult bool
- Specifies whether to reuse persisted query results, if available, when a matching query is submitted. For more information, check USE_CACHED_RESULT docs.
- UserTaskManagedInitialWarehouseSize string
- Specifies the size of the compute resources to provision for the first run of the task, before a task history is available for Snowflake to determine an ideal size. Once a task has successfully completed a few runs, Snowflake ignores this parameter setting. Valid values are (case-insensitive): %s. (Conflicts with warehouse.) For more information about warehouses, see docs. For more information, check USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE docs.
- UserTaskMinimumTriggerIntervalInSeconds int
- Minimum amount of time between Triggered Task executions, in seconds. For more information, check USER_TASK_MINIMUM_TRIGGER_INTERVAL_IN_SECONDS docs.
- UserTaskTimeoutMs int
- Specifies the time limit on a single run of the task before it times out (in milliseconds). For more information, check USER_TASK_TIMEOUT_MS docs.
- Warehouse string
- The warehouse the task will use. Omit this parameter to use Snowflake-managed compute resources for runs of this task. Due to Snowflake limitations, the warehouse identifier can consist only of upper-cased letters. (Conflicts with user_task_managed_initial_warehouse_size.) For more information about this resource, see docs.
- WeekOfYearPolicy int
- Specifies how the weeks in a given year are computed. 0: the semantics used are equivalent to the ISO semantics, in which a week belongs to a given year if at least 4 days of that week are in that year. 1: January 1 is included in the first week of the year and December 31 is included in the last week of the year. For more information, check WEEK_OF_YEAR_POLICY docs.
- WeekStart int
- Specifies the first day of the week (used by week-related date functions). 0: legacy Snowflake behavior is used (i.e. ISO-like semantics). 1 (Monday) to 7 (Sunday): all the week-related functions use weeks that start on the specified day of the week. For more information, check WEEK_START docs.
- When string
- Specifies a Boolean SQL expression; multiple conditions joined with AND/OR are supported. When a task is triggered (based on its SCHEDULE or AFTER setting), it validates the conditions of the expression to determine whether to execute. If the conditions of the expression are not met, then the task skips the current run. Any tasks that identify this task as a predecessor also don’t run.
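To make the schedule, afters, when, config, and finalize inputs above concrete, here is an illustrative Python sketch of a small task graph: a scheduled root task, a child task chained with afters and gated by a when condition, and a finalizer attached with finalize. All database, schema, warehouse, procedure, and stream names are hypothetical, and depending on the provider version the afters and finalize references may need to be fully qualified task names.
import pulumi_snowflake as snowflake

db, sch, wh = "MY_DB", "MY_SCHEMA", "MY_WH"  # hypothetical identifiers

# Root task: the only task in the graph with a schedule; its config is a
# JSON string that every task in the graph can read.
root = snowflake.Task(
    "rootTask",
    database=db,
    schema=sch,
    name="ROOT_TASK",
    warehouse=wh,
    started=True,
    schedule=snowflake.TaskScheduleArgs(minutes=60),
    config='{"environment": "test"}',
    sql_statement="CALL MY_SCHEMA.STAGE_RAW_DATA()",  # hypothetical procedure
)

# Child task: runs after the root task; the WHEN expression skips the run
# when the (hypothetical) stream has no new data.
child = snowflake.Task(
    "childTask",
    database=db,
    schema=sch,
    name="CHILD_TASK",
    warehouse=wh,
    started=True,
    afters=[root.name],  # predecessor reference; may need to be fully qualified
    when="SYSTEM$STREAM_HAS_DATA('MY_SCHEMA.RAW_STREAM')",
    sql_statement="CALL MY_SCHEMA.TRANSFORM_RAW_DATA()",
)

# Finalizer: associated with the root task and run after the rest of the
# graph completes, e.g. to release resources (conflicts with schedule/afters).
finalizer = snowflake.Task(
    "finalizerTask",
    database=db,
    schema=sch,
    name="CLEANUP_TASK",
    warehouse=wh,
    started=True,
    finalize=root.name,  # may need to be fully qualified, depending on version
    sql_statement="CALL MY_SCHEMA.RELEASE_GRAPH_RESOURCES()",
)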
The Java API exposes the same inputs with camelCase names and Java types (String, Boolean, Integer, List<String>, and TaskSchedule for schedule; finalize is surfaced as finalize_). The property descriptions are identical to the list above.
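To make the afters, config, and finalize inputs less abstract, the following hedged TypeScript sketch wires up a small task graph. All object names are hypothetical, and the fully qualified task names passed to afters/finalize are written out by hand here; depending on your provider version you may prefer to build them from the root task's outputs.

```typescript
import * as snowflake from "@pulumi/snowflake";

// Root task: carries a JSON config that every task in the graph can read,
// and defines the schedule for the whole DAG.
const root = new snowflake.Task("root", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    name: "ROOT_TASK",
    warehouse: "ETL_WH",                        // upper-case only, per the note above
    schedule: { minutes: 60 },
    config: JSON.stringify({ environment: "prod", batchSize: 1000 }),
    sqlStatement: "CALL MY_DB.MY_SCHEMA.LOAD_RAW()",
    started: false,                             // resume the root last, once the DAG is assembled
});

// Child task: runs after the root task via `afters` (no schedule of its own).
const transform = new snowflake.Task("transform", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    name: "TRANSFORM_TASK",
    warehouse: "ETL_WH",
    afters: ["MY_DB.MY_SCHEMA.ROOT_TASK"],      // fully qualified predecessor name
    sqlStatement: "CALL MY_DB.MY_SCHEMA.TRANSFORM()",
    started: false,
}, { dependsOn: [root] });

// Finalizer task: runs once all other tasks in the graph have completed;
// useful for notifications and cleanup.
const cleanup = new snowflake.Task("cleanup", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    name: "CLEANUP_TASK",
    warehouse: "ETL_WH",
    finalize: "MY_DB.MY_SCHEMA.ROOT_TASK",      // associates this finalizer with the root task
    sqlStatement: "CALL MY_DB.MY_SCHEMA.CLEANUP()",
    started: false,
}, { dependsOn: [root] });
```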
The Node.js (TypeScript) API exposes the same inputs with camelCase names and TypeScript types (string, boolean, number, string[], and TaskSchedule for schedule). The property descriptions are identical to the list above.
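Most of the inputs above are Snowflake session parameters scoped to the task's runs. Below is a hedged TypeScript sketch of a few common overrides; all names are hypothetical, and the cron string is assumed to follow Snowflake's task cron syntax (a cron expression plus a time zone), which is worth verifying against the TaskSchedule docs for your provider version.

```typescript
import * as snowflake from "@pulumi/snowflake";

// A nightly reporting task with per-task session parameter overrides.
// Parameters left unset fall back to the schema/database/account defaults.
const nightlyReport = new snowflake.Task("nightly-report", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    name: "NIGHTLY_REPORT",
    warehouse: "REPORTING_WH",
    schedule: { usingCron: "0 2 * * * UTC" },   // 02:00 UTC every day (assumed format)
    sqlStatement: "CALL MY_DB.MY_SCHEMA.BUILD_REPORT()",
    started: true,

    // Session parameters applied only while this task runs:
    timezone: "America/Los_Angeles",
    queryTag: "nightly-report",
    statementTimeoutInSeconds: 3600,            // cancel statements running longer than 1 hour

    // Task-level resilience settings:
    suspendTaskAfterNumFailures: 3,             // auto-suspend after 3 consecutive failures
    taskAutoRetryAttempts: 1,                   // retry the graph once from the failed task
    logLevel: "INFO",
});
```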
The Python API exposes the same inputs with snake_case names and Python types (str, bool, int, Sequence[str], and TaskScheduleArgs for schedule). The property descriptions are identical to the list above.
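One detail worth calling out regardless of SDK: allowOverlappingExecution is modeled as a string so the provider can distinguish an explicit "true"/"false" from "use the Snowflake default". A minimal TypeScript sketch, with hypothetical names:

```typescript
import * as snowflake from "@pulumi/snowflake";

const dagRoot = new snowflake.Task("dag-root", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    name: "DAG_ROOT",
    warehouse: "ETL_WH",
    schedule: { minutes: 5 },
    sqlStatement: "CALL MY_DB.MY_SCHEMA.SHORT_BATCH()",
    started: true,
    // String-typed on purpose: "true" or "false" to force a value,
    // or leave unset to keep the provider's "default" (Snowflake decides).
    allowOverlappingExecution: "true",
});
```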
- database String
- The database in which to create the task. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- schema String
- The schema in which to create the task. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- sqlStatement String
- Any single SQL statement, or a call to a stored procedure, executed when the task runs.
- started Boolean
- Specifies if the task should be started or suspended.
- abortDetachedQuery Boolean
- Specifies the action that Snowflake performs for in-progress queries if connectivity is lost due to abrupt termination of a session (e.g. network outage, browser termination, service interruption). For more information, check ABORT_DETACHED_QUERY docs.
- afters List<String>
- Specifies one or more predecessor tasks for the current task. Use this option to create a DAG of tasks or add this task to an existing DAG. A DAG is a series of tasks that starts with a scheduled root task and is linked together by dependencies. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`. (See the DAG sketch after this list.)
- allowOverlappingExecution String
- By default, Snowflake ensures that only one instance of a particular DAG is allowed to run at a time; setting the parameter value to TRUE permits DAG runs to overlap. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value.
- autocommit Boolean
- Specifies whether autocommit is enabled for the session. Autocommit determines whether a DML statement, when executed without an active transaction, is automatically committed after the statement successfully completes. For more information, see Transactions. For more information, check AUTOCOMMIT docs.
- binaryInputFormat String
- The format of VARCHAR values passed as input to VARCHAR-to-BINARY conversion functions. For more information, see Binary input and output. For more information, check BINARY_INPUT_FORMAT docs.
- binaryOutputFormat String
- The format for VARCHAR values returned as output by BINARY-to-VARCHAR conversion functions. For more information, see Binary input and output. For more information, check BINARY_OUTPUT_FORMAT docs.
- clientMemoryLimit Number
- Parameter that specifies the maximum amount of memory the JDBC driver or ODBC driver should use for the result set from queries (in MB). For more information, check CLIENT_MEMORY_LIMIT docs.
- clientMetadataRequestUseConnectionCtx Boolean
- For specific ODBC functions and JDBC methods, this parameter can change the default search scope from all databases/schemas to the current database/schema. The narrower search typically returns fewer rows and executes more quickly. For more information, check CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX docs.
- clientPrefetchThreads Number
- Parameter that specifies the number of threads used by the client to pre-fetch large result sets. The driver will attempt to honor the parameter value, but defines the minimum and maximum values (depending on your system’s resources) to improve performance. For more information, check CLIENT_PREFETCH_THREADS docs.
- clientResultChunkSize Number
- Parameter that specifies the maximum size of each set (or chunk) of query results to download (in MB). The JDBC driver downloads query results in chunks. For more information, check CLIENT_RESULT_CHUNK_SIZE docs.
- clientResultColumnCaseInsensitive Boolean
- Parameter that indicates whether to match column name case-insensitively in ResultSet.get* methods in JDBC. For more information, check CLIENT_RESULT_COLUMN_CASE_INSENSITIVE docs.
- clientSessionKeepAlive Boolean
- Parameter that indicates whether to force a user to log in again after a period of inactivity in the session. For more information, check CLIENT_SESSION_KEEP_ALIVE docs.
- clientSessionKeepAliveHeartbeatFrequency Number
- Number of seconds in-between client attempts to update the token for the session. For more information, check CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY docs.
- clientTimestampTypeMapping String
- Specifies the TIMESTAMP_* variation to use when binding timestamp variables for JDBC or ODBC applications that use the bind API to load data. For more information, check CLIENT_TIMESTAMP_TYPE_MAPPING docs.
- comment String
- Specifies a comment for the task.
- config String
- Specifies a string representation of key value pairs that can be accessed by all tasks in the task graph. Must be in JSON format.
- dateInputFormat String
- Specifies the input format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATE_INPUT_FORMAT docs.
- dateOutputFormat String
- Specifies the display format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATE_OUTPUT_FORMAT docs.
- enableUnloadPhysicalTypeOptimization Boolean
- Specifies whether to set the schema for unloaded Parquet files based on the logical column data types (i.e. the types in the unload SQL query or source table) or on the unloaded column values (i.e. the smallest data types and precision that support the values in the output columns of the unload SQL statement or source table). For more information, check ENABLE_UNLOAD_PHYSICAL_TYPE_OPTIMIZATION docs.
- errorIntegration String
- Specifies the name of the notification integration used for error notifications. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`. For more information about this resource, see docs.
- errorOnNondeterministicMerge Boolean
- Specifies whether to return an error when the MERGE command is used to update or delete a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERROR_ON_NONDETERMINISTIC_MERGE docs.
- errorOnNondeterministicUpdate Boolean
- Specifies whether to return an error when the UPDATE command is used to update a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERROR_ON_NONDETERMINISTIC_UPDATE docs.
- finalize String
- Specifies the name of a root task that the finalizer task is associated with. Finalizer tasks run after all other tasks in the task graph run to completion. You can define the SQL of a finalizer task to handle notifications and the release and cleanup of resources that a task graph uses. For more information, see Release and cleanup of task graphs. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- geographyOutputFormat String
- Display format for GEOGRAPHY values. For more information, check GEOGRAPHY_OUTPUT_FORMAT docs.
- geometryOutputFormat String
- Display format for GEOMETRY values. For more information, check GEOMETRY_OUTPUT_FORMAT docs.
- jdbcTreatTimestampNtzAsUtc Boolean
- Specifies how JDBC processes TIMESTAMP_NTZ values. For more information, check JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC docs.
- jdbcUseSessionTimezone Boolean
- Specifies whether the JDBC Driver uses the time zone of the JVM or the time zone of the session (specified by the TIMEZONE parameter) for the getDate(), getTime(), and getTimestamp() methods of the ResultSet class. For more information, check JDBC_USE_SESSION_TIMEZONE docs.
- jsonIndent Number
- Specifies the number of blank spaces to indent each new element in JSON output in the session. Also specifies whether to insert newline characters after each element. For more information, check JSON_INDENT docs.
- lockTimeout Number
- Number of seconds to wait while trying to lock a resource, before timing out and aborting the statement. For more information, check LOCK_TIMEOUT docs.
- logLevel String
- Specifies the severity level of messages that should be ingested and made available in the active event table. Messages at the specified level (and at more severe levels) are ingested. For more information about log levels, see Setting log level. For more information, check LOG_LEVEL docs.
- multiStatementCount Number
- Number of statements to execute when using the multi-statement capability. For more information, check MULTI_STATEMENT_COUNT docs.
- name String
- Specifies the identifier for the task; must be unique for the database and schema in which the task is created. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- noorderSequenceAsDefault Boolean
- Specifies whether the ORDER or NOORDER property is set by default when you create a new sequence or add a new table column. The ORDER and NOORDER properties determine whether or not the values are generated for the sequence or auto-incremented column in increasing or decreasing order. For more information, check NOORDER_SEQUENCE_AS_DEFAULT docs.
- odbcTreatDecimalAsInt Boolean
- Specifies how ODBC processes columns that have a scale of zero (0). For more information, check ODBC_TREAT_DECIMAL_AS_INT docs.
- queryTag String
- Optional string that can be used to tag queries and other SQL statements executed within a session. The tags are displayed in the output of the QUERY_HISTORY, QUERY_HISTORY_BY_* functions. For more information, check QUERY_TAG docs.
- quotedIdentifiersIgnoreCase Boolean
- Specifies whether letters in double-quoted object identifiers are stored and resolved as uppercase letters. By default, Snowflake preserves the case of alphabetic characters when storing and resolving double-quoted identifiers (see Identifier resolution). You can use this parameter in situations in which third-party applications always use double quotes around identifiers. For more information, check QUOTED_IDENTIFIERS_IGNORE_CASE docs.
- rowsPerResultset Number
- Specifies the maximum number of rows returned in a result set. A value of 0 specifies no maximum. For more information, check ROWS_PER_RESULTSET docs.
- s3StageVpceDnsName String
- Specifies the DNS name of an Amazon S3 interface endpoint. Requests sent to the internal stage of an account via AWS PrivateLink for Amazon S3 use this endpoint to connect. For more information, see Accessing Internal stages with dedicated interface endpoints. For more information, check S3_STAGE_VPCE_DNS_NAME docs.
- schedule Property Map
- The schedule for periodically running the task. This can be a cron or interval in minutes. (Conflicts with finalize and after; when set, one of the sub-fields minutes or using_cron should be set.)
- searchPath String
- Specifies the path to search to resolve unqualified object names in queries. For more information, see Name resolution in queries. Comma-separated list of identifiers. An identifier can be a fully or partially qualified schema name. For more information, check SEARCH_PATH docs.
- statementQueuedTimeoutInSeconds Number
- Amount of time, in seconds, a SQL statement (query, DDL, DML, etc.) remains queued for a warehouse before it is canceled by the system. This parameter can be used in conjunction with the MAX_CONCURRENCY_LEVEL parameter to ensure a warehouse is never backlogged. For more information, check STATEMENT_QUEUED_TIMEOUT_IN_SECONDS docs.
- statementTimeoutInSeconds Number
- Amount of time, in seconds, after which a running SQL statement (query, DDL, DML, etc.) is canceled by the system. For more information, check STATEMENT_TIMEOUT_IN_SECONDS docs.
- strictJsonOutput Boolean
- This parameter specifies whether JSON output in a session is compatible with the general standard (as described by http://json.org). By design, Snowflake allows JSON input that contains non-standard values; however, these non-standard values might result in Snowflake outputting JSON that is incompatible with other platforms and languages. This parameter, when enabled, ensures that Snowflake outputs valid/compatible JSON. For more information, check STRICT_JSON_OUTPUT docs.
- suspendTaskAfterNumFailures Number
- Specifies the number of consecutive failed task runs after which the current task is suspended automatically. The default is 0 (no automatic suspension). For more information, check SUSPEND_TASK_AFTER_NUM_FAILURES docs.
- taskAutoRetryAttempts Number
- Specifies the number of automatic task graph retry attempts. If any task graphs complete in a FAILED state, Snowflake can automatically retry the task graphs from the last task in the graph that failed. For more information, check TASK_AUTO_RETRY_ATTEMPTS docs.
- timeInputFormat String
- Specifies the input format for the TIME data type. For more information, see Date and time input and output formats. Any valid, supported time format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of times stored in the system during the session). For more information, check TIME_INPUT_FORMAT docs.
- timeOutputFormat String
- Specifies the display format for the TIME data type. For more information, see Date and time input and output formats. For more information, check TIME_OUTPUT_FORMAT docs.
- timestampDayIsAlways24h Boolean
- Specifies whether the DATEADD function (and its aliases) always consider a day to be exactly 24 hours for expressions that span multiple days. For more information, check TIMESTAMP_DAY_IS_ALWAYS_24H docs.
- timestampInputFormat String
- Specifies the input format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. Any valid, supported timestamp format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of timestamps stored in the system during the session). For more information, check TIMESTAMP_INPUT_FORMAT docs.
- timestampLtzOutputFormat String
- Specifies the display format for the TIMESTAMP_LTZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_LTZ_OUTPUT_FORMAT docs.
- timestampNtzOutputFormat String
- Specifies the display format for the TIMESTAMP_NTZ data type. For more information, check TIMESTAMP_NTZ_OUTPUT_FORMAT docs.
- timestampOutputFormat String
- Specifies the display format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_OUTPUT_FORMAT docs.
- timestampTypeMapping String
- Specifies the TIMESTAMP_* variation that the TIMESTAMP data type alias maps to. For more information, check TIMESTAMP_TYPE_MAPPING docs.
- timestampTzOutputFormat String
- Specifies the display format for the TIMESTAMP_TZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_TZ_OUTPUT_FORMAT docs.
- timezone String
- Specifies the time zone for the session. You can specify a time zone name or a link name from release 2021a of the IANA Time Zone Database (e.g. America/Los_Angeles, Europe/London, UTC, Etc/GMT, etc.). For more information, check TIMEZONE docs.
- traceLevel String
- Controls how trace events are ingested into the event table. For more information about trace levels, see Setting trace level. For more information, check TRACE_LEVEL docs.
- transactionAbortOnError Boolean
- Specifies the action to perform when a statement issued within a non-autocommit transaction returns with an error. For more information, check TRANSACTION_ABORT_ON_ERROR docs.
- transactionDefaultIsolationLevel String
- Specifies the isolation level for transactions in the user session. For more information, check TRANSACTION_DEFAULT_ISOLATION_LEVEL docs.
- twoDigitCenturyStart Number
- Specifies the “century start” year for 2-digit years (i.e. the earliest year such dates can represent). This parameter prevents ambiguous dates when importing or converting data with the YY date format component (i.e. years represented as 2 digits). For more information, check TWO_DIGIT_CENTURY_START docs.
- unsupportedDdlAction String
- Determines if an unsupported (i.e. non-default) value specified for a constraint property returns an error. For more information, check UNSUPPORTED_DDL_ACTION docs.
- useCachedResult Boolean
- Specifies whether to reuse persisted query results, if available, when a matching query is submitted. For more information, check USE_CACHED_RESULT docs.
- userTaskManagedInitialWarehouseSize String
- Specifies the size of the compute resources to provision for the first run of the task, before a task history is available for Snowflake to determine an ideal size. Once a task has successfully completed a few runs, Snowflake ignores this parameter setting. Valid values are (case-insensitive): %s. (Conflicts with warehouse.) For more information about warehouses, see docs. For more information, check USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE docs.
- userTaskMinimumTriggerIntervalInSeconds Number
- Minimum amount of time between Triggered Task executions, in seconds. For more information, check USER_TASK_MINIMUM_TRIGGER_INTERVAL_IN_SECONDS docs.
- userTaskTimeoutMs Number
- Specifies the time limit on a single run of the task before it times out (in milliseconds). For more information, check USER_TASK_TIMEOUT_MS docs.
- warehouse String
- The warehouse the task will use. Omit this parameter to use Snowflake-managed compute resources for runs of this task. Due to Snowflake limitations, the warehouse identifier can consist of only upper-cased letters. (Conflicts with userTaskManagedInitialWarehouseSize.) For more information about this resource, see docs.
- weekOfYearPolicy Number
- Specifies how the weeks in a given year are computed. 0: The semantics used are equivalent to the ISO semantics, in which a week belongs to a given year if at least 4 days of that week are in that year. 1: January 1 is included in the first week of the year and December 31 is included in the last week of the year. For more information, check WEEK_OF_YEAR_POLICY docs.
- weekStart Number
- Specifies the first day of the week (used by week-related date functions). 0: Legacy Snowflake behavior is used (i.e. ISO-like semantics). 1 (Monday) to 7 (Sunday): All the week-related functions use weeks that start on the specified day of the week. For more information, check WEEK_START docs.
- when String
- Specifies a Boolean SQL expression; multiple conditions joined with AND/OR are supported. When a task is triggered (based on its SCHEDULE or AFTER setting), it validates the conditions of the expression to determine whether to execute. If the conditions of the expression are not met, then the task skips the current run. Any tasks that identify this task as a predecessor also don’t run.
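The afters and finalize inputs above wire a task graph together. The following Python sketch, again assuming the `pulumi_snowflake` package and placeholder identifiers (`MY_DB`, `MY_SCHEMA`, `LOAD_RAW`, `TRANSFORM`, `CLEANUP`), shows one possible shape of a DAG: a scheduled root task, a child that runs after it, and a finalizer attached to the root. Whether afters and finalize accept the fully qualified name output directly is an assumption noted in the comments.

```python
import pulumi
import pulumi_snowflake as snowflake

# Root task: owns the schedule for the whole graph (interval, in minutes).
root = snowflake.Task(
    "root",
    database="MY_DB",
    schema="MY_SCHEMA",
    name="ROOT_TASK",
    schedule=snowflake.TaskScheduleArgs(minutes=60),
    sql_statement="CALL MY_SCHEMA.LOAD_RAW()",
    started=True,
)

# Child task: no schedule of its own; runs after the root task completes.
child = snowflake.Task(
    "child",
    database="MY_DB",
    schema="MY_SCHEMA",
    name="CHILD_TASK",
    afters=[root.fully_qualified_name],  # assumption: predecessors are passed as fully qualified names
    sql_statement="CALL MY_SCHEMA.TRANSFORM()",
    started=True,
)

# Finalizer: runs after every other task in the graph, e.g. for cleanup or notifications.
finalizer = snowflake.Task(
    "finalizer",
    database="MY_DB",
    schema="MY_SCHEMA",
    name="FINALIZE_TASK",
    finalize=root.fully_qualified_name,  # assumption: the root task is referenced by its fully qualified name
    sql_statement="CALL MY_SCHEMA.CLEANUP()",
    started=True,
)
```

Referencing the root task's output also gives Pulumi the dependency ordering, so the root is created before the child and finalizer.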
Outputs
All input properties are implicitly available as output properties. Additionally, the Task resource produces the following output properties:
- FullyQualifiedName string
- Fully qualified name of the resource. For more information, see object name resolution.
- Id string
- The provider-assigned unique ID for this managed resource.
- Parameters List<TaskParameter>
- Outputs the result of SHOW PARAMETERS IN TASK for the given task.
- ShowOutputs List<TaskShowOutput>
- Outputs the result of SHOW TASKS for the given task.
- FullyQualifiedName string
- Fully qualified name of the resource. For more information, see object name resolution.
- Id string
- The provider-assigned unique ID for this managed resource.
- Parameters []TaskParameter
- Outputs the result of SHOW PARAMETERS IN TASK for the given task.
- ShowOutputs []TaskShowOutput
- Outputs the result of SHOW TASKS for the given task.
- fullyQualifiedName String
- Fully qualified name of the resource. For more information, see object name resolution.
- id String
- The provider-assigned unique ID for this managed resource.
- parameters List<TaskParameter>
- Outputs the result of SHOW PARAMETERS IN TASK for the given task.
- showOutputs List<TaskShowOutput>
- Outputs the result of SHOW TASKS for the given task.
- fullyQualifiedName string
- Fully qualified name of the resource. For more information, see object name resolution.
- id string
- The provider-assigned unique ID for this managed resource.
- parameters TaskParameter[]
- Outputs the result of SHOW PARAMETERS IN TASK for the given task.
- showOutputs TaskShowOutput[]
- Outputs the result of SHOW TASKS for the given task.
- fully_qualified_name str
- Fully qualified name of the resource. For more information, see object name resolution.
- id str
- The provider-assigned unique ID for this managed resource.
- parameters Sequence[TaskParameter]
- Outputs the result of SHOW PARAMETERS IN TASK for the given task.
- show_outputs Sequence[TaskShowOutput]
- Outputs the result of SHOW TASKS for the given task.
- fullyQualifiedName String
- Fully qualified name of the resource. For more information, see object name resolution.
- id String
- The provider-assigned unique ID for this managed resource.
- parameters List<Property Map>
- Outputs the result of SHOW PARAMETERS IN TASK for the given task.
- showOutputs List<Property Map>
- Outputs the result of SHOW TASKS for the given task.
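Because every input is also available as an output, and the resource additionally exposes fullyQualifiedName, parameters, and showOutputs, these values can be exported as stack outputs. A short Python sketch, reusing the hypothetical daily_rollup task from the earlier example:

```python
import pulumi

# Assuming `daily_rollup` is the snowflake.Task resource defined earlier.
pulumi.export("task_fqn", daily_rollup.fully_qualified_name)
pulumi.export("task_parameters", daily_rollup.parameters)     # result of SHOW PARAMETERS IN TASK
pulumi.export("task_show_output", daily_rollup.show_outputs)  # result of SHOW TASKS
```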
Look up Existing Task Resource
Get an existing Task resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: TaskState, opts?: CustomResourceOptions): Task
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
abort_detached_query: Optional[bool] = None,
afters: Optional[Sequence[str]] = None,
allow_overlapping_execution: Optional[str] = None,
autocommit: Optional[bool] = None,
binary_input_format: Optional[str] = None,
binary_output_format: Optional[str] = None,
client_memory_limit: Optional[int] = None,
client_metadata_request_use_connection_ctx: Optional[bool] = None,
client_prefetch_threads: Optional[int] = None,
client_result_chunk_size: Optional[int] = None,
client_result_column_case_insensitive: Optional[bool] = None,
client_session_keep_alive: Optional[bool] = None,
client_session_keep_alive_heartbeat_frequency: Optional[int] = None,
client_timestamp_type_mapping: Optional[str] = None,
comment: Optional[str] = None,
config: Optional[str] = None,
database: Optional[str] = None,
date_input_format: Optional[str] = None,
date_output_format: Optional[str] = None,
enable_unload_physical_type_optimization: Optional[bool] = None,
error_integration: Optional[str] = None,
error_on_nondeterministic_merge: Optional[bool] = None,
error_on_nondeterministic_update: Optional[bool] = None,
finalize: Optional[str] = None,
fully_qualified_name: Optional[str] = None,
geography_output_format: Optional[str] = None,
geometry_output_format: Optional[str] = None,
jdbc_treat_timestamp_ntz_as_utc: Optional[bool] = None,
jdbc_use_session_timezone: Optional[bool] = None,
json_indent: Optional[int] = None,
lock_timeout: Optional[int] = None,
log_level: Optional[str] = None,
multi_statement_count: Optional[int] = None,
name: Optional[str] = None,
noorder_sequence_as_default: Optional[bool] = None,
odbc_treat_decimal_as_int: Optional[bool] = None,
parameters: Optional[Sequence[TaskParameterArgs]] = None,
query_tag: Optional[str] = None,
quoted_identifiers_ignore_case: Optional[bool] = None,
rows_per_resultset: Optional[int] = None,
s3_stage_vpce_dns_name: Optional[str] = None,
schedule: Optional[TaskScheduleArgs] = None,
schema: Optional[str] = None,
search_path: Optional[str] = None,
show_outputs: Optional[Sequence[TaskShowOutputArgs]] = None,
sql_statement: Optional[str] = None,
started: Optional[bool] = None,
statement_queued_timeout_in_seconds: Optional[int] = None,
statement_timeout_in_seconds: Optional[int] = None,
strict_json_output: Optional[bool] = None,
suspend_task_after_num_failures: Optional[int] = None,
task_auto_retry_attempts: Optional[int] = None,
time_input_format: Optional[str] = None,
time_output_format: Optional[str] = None,
timestamp_day_is_always24h: Optional[bool] = None,
timestamp_input_format: Optional[str] = None,
timestamp_ltz_output_format: Optional[str] = None,
timestamp_ntz_output_format: Optional[str] = None,
timestamp_output_format: Optional[str] = None,
timestamp_type_mapping: Optional[str] = None,
timestamp_tz_output_format: Optional[str] = None,
timezone: Optional[str] = None,
trace_level: Optional[str] = None,
transaction_abort_on_error: Optional[bool] = None,
transaction_default_isolation_level: Optional[str] = None,
two_digit_century_start: Optional[int] = None,
unsupported_ddl_action: Optional[str] = None,
use_cached_result: Optional[bool] = None,
user_task_managed_initial_warehouse_size: Optional[str] = None,
user_task_minimum_trigger_interval_in_seconds: Optional[int] = None,
user_task_timeout_ms: Optional[int] = None,
warehouse: Optional[str] = None,
week_of_year_policy: Optional[int] = None,
week_start: Optional[int] = None,
when: Optional[str] = None) -> Task
func GetTask(ctx *Context, name string, id IDInput, state *TaskState, opts ...ResourceOption) (*Task, error)
public static Task Get(string name, Input<string> id, TaskState? state, CustomResourceOptions? opts = null)
public static Task get(String name, Output<String> id, TaskState state, CustomResourceOptions options)
resources:
  _:
    type: snowflake:Task
    get:
      id: ${id}
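As a concrete example of the Python signature above, the sketch below looks up an existing task by id; the id is assumed to be the task's fully qualified identifier in the same quoted form used for import, and the database, schema, and task names are placeholders.

```python
import pulumi
import pulumi_snowflake as snowflake

# Look up a task that already exists in Snowflake (or is managed by another stack).
existing = snowflake.Task.get(
    "existing-task",
    '"MY_DB"."MY_SCHEMA"."DAILY_ROLLUP"',
)

# The looked-up resource exposes the same outputs as a managed one.
pulumi.export("existing_task_fqn", existing.fully_qualified_name)
```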
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- AbortDetachedQuery bool
- Specifies the action that Snowflake performs for in-progress queries if connectivity is lost due to abrupt termination of a session (e.g. network outage, browser termination, service interruption). For more information, check ABORT_DETACHED_QUERY docs.
- Afters List<string>
- Specifies one or more predecessor tasks for the current task. Use this option to create a DAG of tasks or add this task to an existing DAG. A DAG is a series of tasks that starts with a scheduled root task and is linked together by dependencies. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- AllowOverlappingExecution string
- By default, Snowflake ensures that only one instance of a particular DAG is allowed to run at a time; setting the parameter value to TRUE permits DAG runs to overlap. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value.
- Autocommit bool
- Specifies whether autocommit is enabled for the session. Autocommit determines whether a DML statement, when executed without an active transaction, is automatically committed after the statement successfully completes. For more information, see Transactions. For more information, check AUTOCOMMIT docs.
- BinaryInputFormat string
- The format of VARCHAR values passed as input to VARCHAR-to-BINARY conversion functions. For more information, see Binary input and output. For more information, check BINARY_INPUT_FORMAT docs.
- BinaryOutputFormat string
- The format for VARCHAR values returned as output by BINARY-to-VARCHAR conversion functions. For more information, see Binary input and output. For more information, check BINARY_OUTPUT_FORMAT docs.
- ClientMemoryLimit int
- Parameter that specifies the maximum amount of memory the JDBC driver or ODBC driver should use for the result set from queries (in MB). For more information, check CLIENT_MEMORY_LIMIT docs.
- ClientMetadataRequestUseConnectionCtx bool
- For specific ODBC functions and JDBC methods, this parameter can change the default search scope from all databases/schemas to the current database/schema. The narrower search typically returns fewer rows and executes more quickly. For more information, check CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX docs.
- ClientPrefetchThreads int
- Parameter that specifies the number of threads used by the client to pre-fetch large result sets. The driver will attempt to honor the parameter value, but defines the minimum and maximum values (depending on your system’s resources) to improve performance. For more information, check CLIENT_PREFETCH_THREADS docs.
- ClientResultChunkSize int
- Parameter that specifies the maximum size of each set (or chunk) of query results to download (in MB). The JDBC driver downloads query results in chunks. For more information, check CLIENT_RESULT_CHUNK_SIZE docs.
- ClientResultColumnCaseInsensitive bool
- Parameter that indicates whether to match column name case-insensitively in ResultSet.get* methods in JDBC. For more information, check CLIENT_RESULT_COLUMN_CASE_INSENSITIVE docs.
- ClientSessionKeepAlive bool
- Parameter that indicates whether to force a user to log in again after a period of inactivity in the session. For more information, check CLIENT_SESSION_KEEP_ALIVE docs.
- ClientSessionKeepAliveHeartbeatFrequency int
- Number of seconds in-between client attempts to update the token for the session. For more information, check CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY docs.
- ClientTimestampTypeMapping string
- Specifies the TIMESTAMP_* variation to use when binding timestamp variables for JDBC or ODBC applications that use the bind API to load data. For more information, check CLIENT_TIMESTAMP_TYPE_MAPPING docs.
- Comment string
- Specifies a comment for the task.
- Config string
- Specifies a string representation of key value pairs that can be accessed by all tasks in the task graph. Must be in JSON format.
- Database string
- The database in which to create the task. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- DateInputFormat string
- Specifies the input format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATE_INPUT_FORMAT docs.
- DateOutputFormat string
- Specifies the display format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATE_OUTPUT_FORMAT docs.
- EnableUnloadPhysicalTypeOptimization bool
- Specifies whether to set the schema for unloaded Parquet files based on the logical column data types (i.e. the types in the unload SQL query or source table) or on the unloaded column values (i.e. the smallest data types and precision that support the values in the output columns of the unload SQL statement or source table). For more information, check ENABLE_UNLOAD_PHYSICAL_TYPE_OPTIMIZATION docs.
- ErrorIntegration string
- Specifies the name of the notification integration used for error notifications. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`. For more information about this resource, see docs.
- ErrorOnNondeterministicMerge bool
- Specifies whether to return an error when the MERGE command is used to update or delete a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERROR_ON_NONDETERMINISTIC_MERGE docs.
- ErrorOnNondeterministicUpdate bool
- Specifies whether to return an error when the UPDATE command is used to update a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERROR_ON_NONDETERMINISTIC_UPDATE docs.
- Finalize string
- Specifies the name of a root task that the finalizer task is associated with. Finalizer tasks run after all other tasks in the task graph run to completion. You can define the SQL of a finalizer task to handle notifications and the release and cleanup of resources that a task graph uses. For more information, see Release and cleanup of task graphs. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- FullyQualifiedName string
- Fully qualified name of the resource. For more information, see object name resolution.
- GeographyOutputFormat string
- Display format for GEOGRAPHY values. For more information, check GEOGRAPHY_OUTPUT_FORMAT docs.
- GeometryOutputFormat string
- Display format for GEOMETRY values. For more information, check GEOMETRY_OUTPUT_FORMAT docs.
- JdbcTreatTimestampNtzAsUtc bool
- Specifies how JDBC processes TIMESTAMP_NTZ values. For more information, check JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC docs.
- JdbcUseSessionTimezone bool
- Specifies whether the JDBC Driver uses the time zone of the JVM or the time zone of the session (specified by the TIMEZONE parameter) for the getDate(), getTime(), and getTimestamp() methods of the ResultSet class. For more information, check JDBC_USE_SESSION_TIMEZONE docs.
- JsonIndent int
- Specifies the number of blank spaces to indent each new element in JSON output in the session. Also specifies whether to insert newline characters after each element. For more information, check JSON_INDENT docs.
- LockTimeout int
- Number of seconds to wait while trying to lock a resource, before timing out and aborting the statement. For more information, check LOCK_TIMEOUT docs.
- LogLevel string
- Specifies the severity level of messages that should be ingested and made available in the active event table. Messages at the specified level (and at more severe levels) are ingested. For more information about log levels, see Setting log level. For more information, check LOG_LEVEL docs.
- MultiStatementCount int
- Number of statements to execute when using the multi-statement capability. For more information, check MULTI_STATEMENT_COUNT docs.
- Name string
- Specifies the identifier for the task; must be unique for the database and schema in which the task is created. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- NoorderSequenceAsDefault bool
- Specifies whether the ORDER or NOORDER property is set by default when you create a new sequence or add a new table column. The ORDER and NOORDER properties determine whether or not the values are generated for the sequence or auto-incremented column in increasing or decreasing order. For more information, check NOORDER_SEQUENCE_AS_DEFAULT docs.
- OdbcTreatDecimalAsInt bool
- Specifies how ODBC processes columns that have a scale of zero (0). For more information, check ODBC_TREAT_DECIMAL_AS_INT docs.
- Parameters List<TaskParameter>
- Outputs the result of SHOW PARAMETERS IN TASK for the given task.
- QueryTag string
- Optional string that can be used to tag queries and other SQL statements executed within a session. The tags are displayed in the output of the QUERY_HISTORY, QUERY_HISTORY_BY_* functions. For more information, check QUERY_TAG docs.
- QuotedIdentifiersIgnoreCase bool
- Specifies whether letters in double-quoted object identifiers are stored and resolved as uppercase letters. By default, Snowflake preserves the case of alphabetic characters when storing and resolving double-quoted identifiers (see Identifier resolution). You can use this parameter in situations in which third-party applications always use double quotes around identifiers. For more information, check QUOTED_IDENTIFIERS_IGNORE_CASE docs.
- RowsPerResultset int
- Specifies the maximum number of rows returned in a result set. A value of 0 specifies no maximum. For more information, check ROWS_PER_RESULTSET docs.
- S3StageVpceDnsName string
- Specifies the DNS name of an Amazon S3 interface endpoint. Requests sent to the internal stage of an account via AWS PrivateLink for Amazon S3 use this endpoint to connect. For more information, see Accessing Internal stages with dedicated interface endpoints. For more information, check S3_STAGE_VPCE_DNS_NAME docs.
- Schedule TaskSchedule
- The schedule for periodically running the task. This can be a cron or interval in minutes. (Conflicts with finalize and after; when set, one of the sub-fields minutes or using_cron should be set.)
- Schema string
- The schema in which to create the task. Due to technical limitations (read more here), avoid using the following characters: `|`, `.`, `"`.
- SearchPath string
- Specifies the path to search to resolve unqualified object names in queries. For more information, see Name resolution in queries. Comma-separated list of identifiers. An identifier can be a fully or partially qualified schema name. For more information, check SEARCH_PATH docs.
- ShowOutputs List<TaskShowOutput>
- Outputs the result of SHOW TASKS for the given task.
- SqlStatement string
- Any single SQL statement, or a call to a stored procedure, executed when the task runs.
- Started bool
- Specifies if the task should be started or suspended.
- StatementQueuedTimeoutInSeconds int
- Amount of time, in seconds, a SQL statement (query, DDL, DML, etc.) remains queued for a warehouse before it is canceled by the system. This parameter can be used in conjunction with the MAX_CONCURRENCY_LEVEL parameter to ensure a warehouse is never backlogged. For more information, check STATEMENT_QUEUED_TIMEOUT_IN_SECONDS docs.
- StatementTimeoutInSeconds int
- Amount of time, in seconds, after which a running SQL statement (query, DDL, DML, etc.) is canceled by the system. For more information, check STATEMENT_TIMEOUT_IN_SECONDS docs.
- StrictJsonOutput bool
- This parameter specifies whether JSON output in a session is compatible with the general standard (as described by http://json.org). By design, Snowflake allows JSON input that contains non-standard values; however, these non-standard values might result in Snowflake outputting JSON that is incompatible with other platforms and languages. This parameter, when enabled, ensures that Snowflake outputs valid/compatible JSON. For more information, check STRICT_JSON_OUTPUT docs.
- SuspendTaskAfterNumFailures int
- Specifies the number of consecutive failed task runs after which the current task is suspended automatically. The default is 0 (no automatic suspension). For more information, check SUSPEND_TASK_AFTER_NUM_FAILURES docs.
- TaskAutoRetryAttempts int
- Specifies the number of automatic task graph retry attempts. If any task graphs complete in a FAILED state, Snowflake can automatically retry the task graphs from the last task in the graph that failed. For more information, check TASK_AUTO_RETRY_ATTEMPTS docs.
- TimeInputFormat string
- Specifies the input format for the TIME data type. For more information, see Date and time input and output formats. Any valid, supported time format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of times stored in the system during the session). For more information, check TIME_INPUT_FORMAT docs.
- TimeOutputFormat string
- Specifies the display format for the TIME data type. For more information, see Date and time input and output formats. For more information, check TIME_OUTPUT_FORMAT docs.
- TimestampDayIsAlways24h bool
- Specifies whether the DATEADD function (and its aliases) always consider a day to be exactly 24 hours for expressions that span multiple days. For more information, check TIMESTAMP_DAY_IS_ALWAYS_24H docs.
- TimestampInputFormat string
- Specifies the input format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. Any valid, supported timestamp format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of timestamps stored in the system during the session). For more information, check TIMESTAMP_INPUT_FORMAT docs.
- TimestampLtzOutputFormat string
- Specifies the display format for the TIMESTAMP_LTZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_LTZ_OUTPUT_FORMAT docs.
- TimestampNtzOutputFormat string
- Specifies the display format for the TIMESTAMP_NTZ data type. For more information, check TIMESTAMP_NTZ_OUTPUT_FORMAT docs.
- TimestampOutputFormat string
- Specifies the display format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_OUTPUT_FORMAT docs.
- TimestampTypeMapping string
- Specifies the TIMESTAMP_* variation that the TIMESTAMP data type alias maps to. For more information, check TIMESTAMP_TYPE_MAPPING docs.
- TimestampTzOutputFormat string
- Specifies the display format for the TIMESTAMP_TZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_TZ_OUTPUT_FORMAT docs.
- Timezone string
- Specifies the time zone for the session. You can specify a time zone name or a link name from release 2021a of the IANA Time Zone Database (e.g. America/Los_Angeles, Europe/London, UTC, Etc/GMT, etc.). For more information, check TIMEZONE docs.
- TraceLevel string
- Controls how trace events are ingested into the event table. For more information about trace levels, see Setting trace level. For more information, check TRACE_LEVEL docs.
- TransactionAbortOnError bool
- Specifies the action to perform when a statement issued within a non-autocommit transaction returns with an error. For more information, check TRANSACTION_ABORT_ON_ERROR docs.
- TransactionDefaultIsolationLevel string
- Specifies the isolation level for transactions in the user session. For more information, check TRANSACTION_DEFAULT_ISOLATION_LEVEL docs.
- TwoDigitCenturyStart int
- Specifies the “century start” year for 2-digit years (i.e. the earliest year such dates can represent). This parameter prevents ambiguous dates when importing or converting data with the YY date format component (i.e. years represented as 2 digits). For more information, check TWO_DIGIT_CENTURY_START docs.
- UnsupportedDdlAction string
- Determines if an unsupported (i.e. non-default) value specified for a constraint property returns an error. For more information, check UNSUPPORTED_DDL_ACTION docs.
- UseCachedResult bool
- Specifies whether to reuse persisted query results, if available, when a matching query is submitted. For more information, check USE_CACHED_RESULT docs.
- UserTaskManagedInitialWarehouseSize string
- Specifies the size of the compute resources to provision for the first run of the task, before a task history is available for Snowflake to determine an ideal size. Once a task has successfully completed a few runs, Snowflake ignores this parameter setting. Valid values are (case-insensitive): %s. (Conflicts with warehouse.) For more information about warehouses, see docs. For more information, check USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE docs.
- UserTaskMinimumTriggerIntervalInSeconds int
- Minimum amount of time between Triggered Task executions, in seconds. For more information, check USER_TASK_MINIMUM_TRIGGER_INTERVAL_IN_SECONDS docs.
- UserTaskTimeoutMs int
- Specifies the time limit on a single run of the task before it times out (in milliseconds). For more information, check USER_TASK_TIMEOUT_MS docs.
- Warehouse string
- The warehouse the task will use. Omit this parameter to use Snowflake-managed compute resources for runs of this task. Due to Snowflake limitations, the warehouse identifier can consist of only upper-cased letters. (Conflicts with UserTaskManagedInitialWarehouseSize.) For more information about this resource, see docs.
- WeekOfYearPolicy int
- Specifies how the weeks in a given year are computed. 0: The semantics used are equivalent to the ISO semantics, in which a week belongs to a given year if at least 4 days of that week are in that year. 1: January 1 is included in the first week of the year and December 31 is included in the last week of the year. For more information, check WEEK_OF_YEAR_POLICY docs.
- WeekStart int
- Specifies the first day of the week (used by week-related date functions). 0: Legacy Snowflake behavior is used (i.e. ISO-like semantics). 1 (Monday) to 7 (Sunday): All the week-related functions use weeks that start on the specified day of the week. For more information, check WEEK_START docs.
- When string
- Specifies a Boolean SQL expression; multiple conditions joined with AND/OR are supported. When a task is triggered (based on its SCHEDULE or AFTER setting), it validates the conditions of the expression to determine whether to execute. If the conditions of the expression are not met, then the task skips the current run. Any tasks that identify this task as a predecessor also don’t run.
- Abort
Detached boolQuery - Specifies the action that Snowflake performs for in-progress queries if connectivity is lost due to abrupt termination of a session (e.g. network outage, browser termination, service interruption). For more information, check ABORTDETACHEDQUERY docs.
- Afters []string
- Specifies one or more predecessor tasks for the current task. Use this option to create a DAG of tasks or add this task to an existing DAG. A DAG is a series of tasks that starts with a scheduled root task and is linked together by dependencies. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - Allow
Overlapping stringExecution - By default, Snowflake ensures that only one instance of a particular DAG is allowed to run at a time, setting the parameter value to TRUE permits DAG runs to overlap. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value.
- Autocommit bool
- Specifies whether autocommit is enabled for the session. Autocommit determines whether a DML statement, when executed without an active transaction, is automatically committed after the statement successfully completes. For more information, see Transactions. For more information, check AUTOCOMMIT docs.
- Binary
Input stringFormat - The format of VARCHAR values passed as input to VARCHAR-to-BINARY conversion functions. For more information, see Binary input and output. For more information, check BINARYINPUTFORMAT docs.
- Binary
Output stringFormat - The format for VARCHAR values returned as output by BINARY-to-VARCHAR conversion functions. For more information, see Binary input and output. For more information, check BINARYOUTPUTFORMAT docs.
- Client
Memory intLimit - Parameter that specifies the maximum amount of memory the JDBC driver or ODBC driver should use for the result set from queries (in MB). For more information, check CLIENTMEMORYLIMIT docs.
- Client
Metadata boolRequest Use Connection Ctx - For specific ODBC functions and JDBC methods, this parameter can change the default search scope from all databases/schemas to the current database/schema. The narrower search typically returns fewer rows and executes more quickly. For more information, check CLIENTMETADATAREQUESTUSECONNECTION_CTX docs.
- Client
Prefetch intThreads - Parameter that specifies the number of threads used by the client to pre-fetch large result sets. The driver will attempt to honor the parameter value, but defines the minimum and maximum values (depending on your system’s resources) to improve performance. For more information, check CLIENTPREFETCHTHREADS docs.
- Client
Result intChunk Size - Parameter that specifies the maximum size of each set (or chunk) of query results to download (in MB). The JDBC driver downloads query results in chunks. For more information, check CLIENTRESULTCHUNK_SIZE docs.
- Client
Result boolColumn Case Insensitive - Parameter that indicates whether to match column name case-insensitively in ResultSet.get* methods in JDBC. For more information, check CLIENTRESULTCOLUMNCASEINSENSITIVE docs.
- Client
Session boolKeep Alive - Parameter that indicates whether to force a user to log in again after a period of inactivity in the session. For more information, check CLIENTSESSIONKEEP_ALIVE docs.
- Client
Session intKeep Alive Heartbeat Frequency - Number of seconds in-between client attempts to update the token for the session. For more information, check CLIENTSESSIONKEEPALIVEHEARTBEAT_FREQUENCY docs.
- Client
Timestamp stringType Mapping - Specifies the TIMESTAMP_* variation to use when binding timestamp variables for JDBC or ODBC applications that use the bind API to load data. For more information, check CLIENTTIMESTAMPTYPE_MAPPING docs.
- Comment string
- Specifies a comment for the task.
- Config string
- Specifies a string representation of key value pairs that can be accessed by all tasks in the task graph. Must be in JSON format.
- Database string
- The database in which to create the task. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - Date
Input stringFormat - Specifies the input format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATEINPUTFORMAT docs.
- Date
Output stringFormat - Specifies the display format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATEOUTPUTFORMAT docs.
- Enable
Unload boolPhysical Type Optimization - Specifies whether to set the schema for unloaded Parquet files based on the logical column data types (i.e. the types in the unload SQL query or source table) or on the unloaded column values (i.e. the smallest data types and precision that support the values in the output columns of the unload SQL statement or source table). For more information, check ENABLEUNLOADPHYSICALTYPEOPTIMIZATION docs.
- Error
Integration string - Specifies the name of the notification integration used for error notifications. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. For more information about this resource, see docs. - Error
On boolNondeterministic Merge - Specifies whether to return an error when the MERGE command is used to update or delete a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERRORONNONDETERMINISTIC_MERGE docs.
- Error
On boolNondeterministic Update - Specifies whether to return an error when the UPDATE command is used to update a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERRORONNONDETERMINISTIC_UPDATE docs.
- Finalize string
- Specifies the name of a root task that the finalizer task is associated with. Finalizer tasks run after all other tasks in the task graph run to completion. You can define the SQL of a finalizer task to handle notifications and the release and cleanup of resources that a task graph uses. For more information, see Release and cleanup of task graphs. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - Fully
Qualified stringName - Fully qualified name of the resource. For more information, see object name resolution.
- Geography
Output stringFormat - Display format for GEOGRAPHY values. For more information, check GEOGRAPHYOUTPUTFORMAT docs.
- Geometry
Output stringFormat - Display format for GEOMETRY values. For more information, check GEOMETRYOUTPUTFORMAT docs.
- Jdbc
Treat boolTimestamp Ntz As Utc - Specifies how JDBC processes TIMESTAMPNTZ values. For more information, check TREATTIMESTAMPNTZASUTC docsJDBC.
- Jdbc
Use boolSession Timezone - Specifies whether the JDBC Driver uses the time zone of the JVM or the time zone of the session (specified by the TIMEZONE parameter) for the getDate(), getTime(), and getTimestamp() methods of the ResultSet class. For more information, check JDBCUSESESSION_TIMEZONE docs.
- Json
Indent int - Specifies the number of blank spaces to indent each new element in JSON output in the session. Also specifies whether to insert newline characters after each element. For more information, check JSON_INDENT docs.
- Lock
Timeout int - Number of seconds to wait while trying to lock a resource, before timing out and aborting the statement. For more information, check LOCK_TIMEOUT docs.
- Log
Level string - Specifies the severity level of messages that should be ingested and made available in the active event table. Messages at the specified level (and at more severe levels) are ingested. For more information about log levels, see Setting log level. For more information, check LOG_LEVEL docs.
- Multi
Statement intCount - Number of statements to execute when using the multi-statement capability. For more information, check MULTISTATEMENTCOUNT docs.
- Name string
- Specifies the identifier for the task; must be unique for the database and schema in which the task is created. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - Noorder
Sequence boolAs Default - Specifies whether the ORDER or NOORDER property is set by default when you create a new sequence or add a new table column. The ORDER and NOORDER properties determine whether or not the values are generated for the sequence or auto-incremented column in increasing or decreasing order. For more information, check NOORDERSEQUENCEAS_DEFAULT docs.
- Odbc
Treat boolDecimal As Int - Specifies how ODBC processes columns that have a scale of zero (0). For more information, check ODBCTREATDECIMALASINT docs.
- Parameters
[]Task
Parameter Args - Outputs the result of
SHOW PARAMETERS IN TASK
for the given task. - Query
Tag string - Optional string that can be used to tag queries and other SQL statements executed within a session. The tags are displayed in the output of the QUERYHISTORY, QUERYHISTORY*BY** functions. For more information, check QUERY_TAG docs.
- Quoted
Identifiers boolIgnore Case - Specifies whether letters in double-quoted object identifiers are stored and resolved as uppercase letters. By default, Snowflake preserves the case of alphabetic characters when storing and resolving double-quoted identifiers (see Identifier resolution). You can use this parameter in situations in which third-party applications always use double quotes around identifiers. For more information, check QUOTEDIDENTIFIERSIGNORE_CASE docs.
- Rows
Per intResultset - Specifies the maximum number of rows returned in a result set. A value of 0 specifies no maximum. For more information, check ROWSPERRESULTSET docs.
- S3Stage
Vpce stringDns Name - Specifies the DNS name of an Amazon S3 interface endpoint. Requests sent to the internal stage of an account via AWS PrivateLink for Amazon S3 use this endpoint to connect. For more information, see Accessing Internal stages with dedicated interface endpoints. For more information, check S3STAGEVPCEDNSNAME docs.
- Schedule
Task
Schedule Args - The schedule for periodically running the task. This can be a cron or interval in minutes. (Conflicts with finalize and after; when set, one of the sub-fields
minutes
orusing_cron
should be set) - Schema string
- The schema in which to create the task. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - Search
Path string - Specifies the path to search to resolve unqualified object names in queries. For more information, see Name resolution in queries. Comma-separated list of identifiers. An identifier can be a fully or partially qualified schema name. For more information, check SEARCH_PATH docs.
- Show
Outputs []TaskShow Output Args - Outputs the result of
SHOW TASKS
for the given task. - Sql
Statement string - Any single SQL statement, or a call to a stored procedure, executed when the task runs.
- Started bool
- Specifies if the task should be started or suspended.
- StatementQueuedTimeoutInSeconds int
- Amount of time, in seconds, a SQL statement (query, DDL, DML, etc.) remains queued for a warehouse before it is canceled by the system. This parameter can be used in conjunction with the MAX_CONCURRENCY_LEVEL parameter to ensure a warehouse is never backlogged. For more information, check STATEMENT_QUEUED_TIMEOUT_IN_SECONDS docs.
- StatementTimeoutInSeconds int
- Amount of time, in seconds, after which a running SQL statement (query, DDL, DML, etc.) is canceled by the system. For more information, check STATEMENT_TIMEOUT_IN_SECONDS docs.
- StrictJsonOutput bool
- This parameter specifies whether JSON output in a session is compatible with the general standard (as described by http://json.org). By design, Snowflake allows JSON input that contains non-standard values; however, these non-standard values might result in Snowflake outputting JSON that is incompatible with other platforms and languages. This parameter, when enabled, ensures that Snowflake outputs valid/compatible JSON. For more information, check STRICT_JSON_OUTPUT docs.
- SuspendTaskAfterNumFailures int
- Specifies the number of consecutive failed task runs after which the current task is suspended automatically. The default is 0 (no automatic suspension). For more information, check SUSPEND_TASK_AFTER_NUM_FAILURES docs.
- TaskAutoRetryAttempts int
- Specifies the number of automatic task graph retry attempts. If any task graphs complete in a FAILED state, Snowflake can automatically retry the task graphs from the last task in the graph that failed. For more information, check TASK_AUTO_RETRY_ATTEMPTS docs.
- TimeInputFormat string
- Specifies the input format for the TIME data type. For more information, see Date and time input and output formats. Any valid, supported time format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of times stored in the system during the session). For more information, check TIME_INPUT_FORMAT docs.
- TimeOutputFormat string
- Specifies the display format for the TIME data type. For more information, see Date and time input and output formats. For more information, check TIME_OUTPUT_FORMAT docs.
- TimestampDayIsAlways24h bool
- Specifies whether the DATEADD function (and its aliases) always consider a day to be exactly 24 hours for expressions that span multiple days. For more information, check TIMESTAMP_DAY_IS_ALWAYS_24H docs.
- TimestampInputFormat string
- Specifies the input format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. Any valid, supported timestamp format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of timestamps stored in the system during the session). For more information, check TIMESTAMP_INPUT_FORMAT docs.
- TimestampLtzOutputFormat string
- Specifies the display format for the TIMESTAMP_LTZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_LTZ_OUTPUT_FORMAT docs.
- TimestampNtzOutputFormat string
- Specifies the display format for the TIMESTAMP_NTZ data type. For more information, check TIMESTAMP_NTZ_OUTPUT_FORMAT docs.
- TimestampOutputFormat string
- Specifies the display format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_OUTPUT_FORMAT docs.
- TimestampTypeMapping string
- Specifies the TIMESTAMP_* variation that the TIMESTAMP data type alias maps to. For more information, check TIMESTAMP_TYPE_MAPPING docs.
- TimestampTzOutputFormat string
- Specifies the display format for the TIMESTAMP_TZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_TZ_OUTPUT_FORMAT docs.
- Timezone string
- Specifies the time zone for the session. You can specify a time zone name or a link name from release 2021a of the IANA Time Zone Database (e.g. America/Los_Angeles, Europe/London, UTC, Etc/GMT, etc.). For more information, check TIMEZONE docs.
- TraceLevel string
- Controls how trace events are ingested into the event table. For more information about trace levels, see Setting trace level. For more information, check TRACE_LEVEL docs.
- TransactionAbortOnError bool
- Specifies the action to perform when a statement issued within a non-autocommit transaction returns with an error. For more information, check TRANSACTION_ABORT_ON_ERROR docs.
- TransactionDefaultIsolationLevel string
- Specifies the isolation level for transactions in the user session. For more information, check TRANSACTION_DEFAULT_ISOLATION_LEVEL docs.
- TwoDigitCenturyStart int
- Specifies the “century start” year for 2-digit years (i.e. the earliest year such dates can represent). This parameter prevents ambiguous dates when importing or converting data with the YY date format component (i.e. years represented as 2 digits). For more information, check TWO_DIGIT_CENTURY_START docs.
- UnsupportedDdlAction string
- Determines if an unsupported (i.e. non-default) value specified for a constraint property returns an error. For more information, check UNSUPPORTED_DDL_ACTION docs.
- UseCachedResult bool
- Specifies whether to reuse persisted query results, if available, when a matching query is submitted. For more information, check USE_CACHED_RESULT docs.
- UserTaskManagedInitialWarehouseSize string
- Specifies the size of the compute resources to provision for the first run of the task, before a task history is available for Snowflake to determine an ideal size. Once a task has successfully completed a few runs, Snowflake ignores this parameter setting. Valid values are (case-insensitive): %s. (Conflicts with warehouse.) For more information about warehouses, see docs. For more information, check USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE docs.
- UserTaskMinimumTriggerIntervalInSeconds int
- Minimum amount of time between Triggered Task executions, in seconds. For more information, check USER_TASK_MINIMUM_TRIGGER_INTERVAL_IN_SECONDS docs.
- UserTaskTimeoutMs int
- Specifies the time limit on a single run of the task before it times out (in milliseconds). For more information, check USER_TASK_TIMEOUT_MS docs.
- Warehouse string
- The warehouse the task will use. Omit this parameter to use Snowflake-managed compute resources for runs of this task. Due to Snowflake limitations, the warehouse identifier can consist only of upper-cased letters. (Conflicts with user_task_managed_initial_warehouse_size.) For more information about this resource, see docs.
- WeekOfYearPolicy int
- Specifies how the weeks in a given year are computed. 0: The semantics used are equivalent to the ISO semantics, in which a week belongs to a given year if at least 4 days of that week are in that year. 1: January 1 is included in the first week of the year and December 31 is included in the last week of the year. For more information, check WEEK_OF_YEAR_POLICY docs.
- WeekStart int
- Specifies the first day of the week (used by week-related date functions). 0: Legacy Snowflake behavior is used (i.e. ISO-like semantics). 1 (Monday) to 7 (Sunday): All the week-related functions use weeks that start on the specified day of the week. For more information, check WEEK_START docs.
- When string
- Specifies a Boolean SQL expression; multiple conditions joined with AND/OR are supported. When a task is triggered (based on its SCHEDULE or AFTER setting), it validates the conditions of the expression to determine whether to execute. If the conditions of the expression are not met, then the task skips the current run. Any tasks that identify this task as a predecessor also don’t run.
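Example usage: scheduling a task (TypeScript)
The schedule argument described above accepts either a cron expression (using_cron) or an interval in minutes. The following is a minimal, illustrative TypeScript sketch only; the database, schema, and warehouse identifiers are placeholders, and the camelCase property names assume the TypeScript SDK's rendering of the arguments shown in the language listings.

import * as snowflake from "@pulumi/snowflake";

// A task that runs a cleanup statement every night at 02:00 UTC.
// "MY_DB", "MY_SCHEMA", and "MY_WH" are placeholder identifiers.
const nightlyCleanup = new snowflake.Task("nightly_cleanup", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    warehouse: "MY_WH",
    sqlStatement: "DELETE FROM MY_DB.MY_SCHEMA.EVENTS WHERE TS < DATEADD(DAY, -30, CURRENT_TIMESTAMP())",
    started: true,
    // Exactly one of usingCron or minutes should be set on the schedule.
    schedule: {
        usingCron: "0 2 * * * UTC", // minute hour day-of-month month day-of-week timezone
    },
});

Because schedule conflicts with finalize and after, only the root task of a task graph normally carries a schedule.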
- abort
Detached BooleanQuery - Specifies the action that Snowflake performs for in-progress queries if connectivity is lost due to abrupt termination of a session (e.g. network outage, browser termination, service interruption). For more information, check ABORTDETACHEDQUERY docs.
- afters List<String>
- Specifies one or more predecessor tasks for the current task. Use this option to create a DAG of tasks or add this task to an existing DAG. A DAG is a series of tasks that starts with a scheduled root task and is linked together by dependencies. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - allow
Overlapping StringExecution - By default, Snowflake ensures that only one instance of a particular DAG is allowed to run at a time, setting the parameter value to TRUE permits DAG runs to overlap. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value.
- autocommit Boolean
- Specifies whether autocommit is enabled for the session. Autocommit determines whether a DML statement, when executed without an active transaction, is automatically committed after the statement successfully completes. For more information, see Transactions. For more information, check AUTOCOMMIT docs.
- binary
Input StringFormat - The format of VARCHAR values passed as input to VARCHAR-to-BINARY conversion functions. For more information, see Binary input and output. For more information, check BINARYINPUTFORMAT docs.
- binary
Output StringFormat - The format for VARCHAR values returned as output by BINARY-to-VARCHAR conversion functions. For more information, see Binary input and output. For more information, check BINARYOUTPUTFORMAT docs.
- client
Memory IntegerLimit - Parameter that specifies the maximum amount of memory the JDBC driver or ODBC driver should use for the result set from queries (in MB). For more information, check CLIENTMEMORYLIMIT docs.
- client
Metadata BooleanRequest Use Connection Ctx - For specific ODBC functions and JDBC methods, this parameter can change the default search scope from all databases/schemas to the current database/schema. The narrower search typically returns fewer rows and executes more quickly. For more information, check CLIENTMETADATAREQUESTUSECONNECTION_CTX docs.
- client
Prefetch IntegerThreads - Parameter that specifies the number of threads used by the client to pre-fetch large result sets. The driver will attempt to honor the parameter value, but defines the minimum and maximum values (depending on your system’s resources) to improve performance. For more information, check CLIENTPREFETCHTHREADS docs.
- client
Result IntegerChunk Size - Parameter that specifies the maximum size of each set (or chunk) of query results to download (in MB). The JDBC driver downloads query results in chunks. For more information, check CLIENTRESULTCHUNK_SIZE docs.
- client
Result BooleanColumn Case Insensitive - Parameter that indicates whether to match column name case-insensitively in ResultSet.get* methods in JDBC. For more information, check CLIENTRESULTCOLUMNCASEINSENSITIVE docs.
- client
Session BooleanKeep Alive - Parameter that indicates whether to force a user to log in again after a period of inactivity in the session. For more information, check CLIENTSESSIONKEEP_ALIVE docs.
- client
Session IntegerKeep Alive Heartbeat Frequency - Number of seconds in-between client attempts to update the token for the session. For more information, check CLIENTSESSIONKEEPALIVEHEARTBEAT_FREQUENCY docs.
- client
Timestamp StringType Mapping - Specifies the TIMESTAMP_* variation to use when binding timestamp variables for JDBC or ODBC applications that use the bind API to load data. For more information, check CLIENTTIMESTAMPTYPE_MAPPING docs.
- comment String
- Specifies a comment for the task.
- config String
- Specifies a string representation of key value pairs that can be accessed by all tasks in the task graph. Must be in JSON format.
- database String
- The database in which to create the task. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - date
Input StringFormat - Specifies the input format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATEINPUTFORMAT docs.
- date
Output StringFormat - Specifies the display format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATEOUTPUTFORMAT docs.
- enable
Unload BooleanPhysical Type Optimization - Specifies whether to set the schema for unloaded Parquet files based on the logical column data types (i.e. the types in the unload SQL query or source table) or on the unloaded column values (i.e. the smallest data types and precision that support the values in the output columns of the unload SQL statement or source table). For more information, check ENABLEUNLOADPHYSICALTYPEOPTIMIZATION docs.
- error
Integration String - Specifies the name of the notification integration used for error notifications. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. For more information about this resource, see docs. - error
On BooleanNondeterministic Merge - Specifies whether to return an error when the MERGE command is used to update or delete a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERRORONNONDETERMINISTIC_MERGE docs.
- error
On BooleanNondeterministic Update - Specifies whether to return an error when the UPDATE command is used to update a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERRORONNONDETERMINISTIC_UPDATE docs.
- finalize_ String
- Specifies the name of a root task that the finalizer task is associated with. Finalizer tasks run after all other tasks in the task graph run to completion. You can define the SQL of a finalizer task to handle notifications and the release and cleanup of resources that a task graph uses. For more information, see Release and cleanup of task graphs. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - fully
Qualified StringName - Fully qualified name of the resource. For more information, see object name resolution.
- geography
Output StringFormat - Display format for GEOGRAPHY values. For more information, check GEOGRAPHYOUTPUTFORMAT docs.
- geometry
Output StringFormat - Display format for GEOMETRY values. For more information, check GEOMETRYOUTPUTFORMAT docs.
- jdbcTreatTimestampNtzAsUtc Boolean
- Specifies how JDBC processes TIMESTAMP_NTZ values. For more information, check JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC docs.
- jdbc
Use BooleanSession Timezone - Specifies whether the JDBC Driver uses the time zone of the JVM or the time zone of the session (specified by the TIMEZONE parameter) for the getDate(), getTime(), and getTimestamp() methods of the ResultSet class. For more information, check JDBCUSESESSION_TIMEZONE docs.
- json
Indent Integer - Specifies the number of blank spaces to indent each new element in JSON output in the session. Also specifies whether to insert newline characters after each element. For more information, check JSON_INDENT docs.
- lock
Timeout Integer - Number of seconds to wait while trying to lock a resource, before timing out and aborting the statement. For more information, check LOCK_TIMEOUT docs.
- log
Level String - Specifies the severity level of messages that should be ingested and made available in the active event table. Messages at the specified level (and at more severe levels) are ingested. For more information about log levels, see Setting log level. For more information, check LOG_LEVEL docs.
- multi
Statement IntegerCount - Number of statements to execute when using the multi-statement capability. For more information, check MULTISTATEMENTCOUNT docs.
- name String
- Specifies the identifier for the task; must be unique for the database and schema in which the task is created. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - noorder
Sequence BooleanAs Default - Specifies whether the ORDER or NOORDER property is set by default when you create a new sequence or add a new table column. The ORDER and NOORDER properties determine whether or not the values are generated for the sequence or auto-incremented column in increasing or decreasing order. For more information, check NOORDERSEQUENCEAS_DEFAULT docs.
- odbc
Treat BooleanDecimal As Int - Specifies how ODBC processes columns that have a scale of zero (0). For more information, check ODBCTREATDECIMALASINT docs.
- parameters
List<Task
Parameter> - Outputs the result of
SHOW PARAMETERS IN TASK
for the given task.
- queryTag String
- Optional string that can be used to tag queries and other SQL statements executed within a session. The tags are displayed in the output of the QUERY_HISTORY, QUERY_HISTORY_BY_* functions. For more information, check QUERY_TAG docs.
- quoted
Identifiers BooleanIgnore Case - Specifies whether letters in double-quoted object identifiers are stored and resolved as uppercase letters. By default, Snowflake preserves the case of alphabetic characters when storing and resolving double-quoted identifiers (see Identifier resolution). You can use this parameter in situations in which third-party applications always use double quotes around identifiers. For more information, check QUOTEDIDENTIFIERSIGNORE_CASE docs.
- rows
Per IntegerResultset - Specifies the maximum number of rows returned in a result set. A value of 0 specifies no maximum. For more information, check ROWSPERRESULTSET docs.
- s3Stage
Vpce StringDns Name - Specifies the DNS name of an Amazon S3 interface endpoint. Requests sent to the internal stage of an account via AWS PrivateLink for Amazon S3 use this endpoint to connect. For more information, see Accessing Internal stages with dedicated interface endpoints. For more information, check S3STAGEVPCEDNSNAME docs.
- schedule TaskSchedule
- The schedule for periodically running the task. This can be a cron expression or an interval in minutes. (Conflicts with finalize and after; when set, one of the sub-fields minutes or using_cron should be set.)
- schema String
- The schema in which to create the task. Due to technical limitations (read more here), avoid using the following characters: |, ., ".
- searchPath String
- Specifies the path to search to resolve unqualified object names in queries. For more information, see Name resolution in queries. Comma-separated list of identifiers. An identifier can be a fully or partially qualified schema name. For more information, check SEARCH_PATH docs.
- show
Outputs List<TaskShow Output> - Outputs the result of
SHOW TASKS
for the given task. - sql
Statement String - Any single SQL statement, or a call to a stored procedure, executed when the task runs.
- started Boolean
- Specifies if the task should be started or suspended.
- statement
Queued IntegerTimeout In Seconds - Amount of time, in seconds, a SQL statement (query, DDL, DML, etc.) remains queued for a warehouse before it is canceled by the system. This parameter can be used in conjunction with the MAXCONCURRENCYLEVEL parameter to ensure a warehouse is never backlogged. For more information, check STATEMENTQUEUEDTIMEOUTINSECONDS docs.
- statement
Timeout IntegerIn Seconds - Amount of time, in seconds, after which a running SQL statement (query, DDL, DML, etc.) is canceled by the system. For more information, check STATEMENTTIMEOUTIN_SECONDS docs.
- strict
Json BooleanOutput - This parameter specifies whether JSON output in a session is compatible with the general standard (as described by http://json.org). By design, Snowflake allows JSON input that contains non-standard values; however, these non-standard values might result in Snowflake outputting JSON that is incompatible with other platforms and languages. This parameter, when enabled, ensures that Snowflake outputs valid/compatible JSON. For more information, check STRICTJSONOUTPUT docs.
- suspend
Task IntegerAfter Num Failures - Specifies the number of consecutive failed task runs after which the current task is suspended automatically. The default is 0 (no automatic suspension). For more information, check SUSPENDTASKAFTERNUMFAILURES docs.
- task
Auto IntegerRetry Attempts - Specifies the number of automatic task graph retry attempts. If any task graphs complete in a FAILED state, Snowflake can automatically retry the task graphs from the last task in the graph that failed. For more information, check TASKAUTORETRY_ATTEMPTS docs.
- time
Input StringFormat - Specifies the input format for the TIME data type. For more information, see Date and time input and output formats. Any valid, supported time format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of times stored in the system during the session). For more information, check TIMEINPUTFORMAT docs.
- time
Output StringFormat - Specifies the display format for the TIME data type. For more information, see Date and time input and output formats. For more information, check TIMEOUTPUTFORMAT docs.
- timestamp
Day BooleanIs Always24h - Specifies whether the DATEADD function (and its aliases) always consider a day to be exactly 24 hours for expressions that span multiple days. For more information, check TIMESTAMPDAYISALWAYS24H docs.
- timestamp
Input StringFormat - Specifies the input format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. Any valid, supported timestamp format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of timestamps stored in the system during the session). For more information, check TIMESTAMPINPUTFORMAT docs.
- timestampLtzOutputFormat String
- Specifies the display format for the TIMESTAMP_LTZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_LTZ_OUTPUT_FORMAT docs.
- timestampNtzOutputFormat String
- Specifies the display format for the TIMESTAMP_NTZ data type. For more information, check TIMESTAMP_NTZ_OUTPUT_FORMAT docs.
- timestampOutputFormat String
- Specifies the display format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_OUTPUT_FORMAT docs.
- timestampTypeMapping String
- Specifies the TIMESTAMP_* variation that the TIMESTAMP data type alias maps to. For more information, check TIMESTAMP_TYPE_MAPPING docs.
- timestampTzOutputFormat String
- Specifies the display format for the TIMESTAMP_TZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_TZ_OUTPUT_FORMAT docs.
- timezone String
- Specifies the time zone for the session. You can specify a time zone name or a link name from release 2021a of the IANA Time Zone Database (e.g. America/Los_Angeles, Europe/London, UTC, Etc/GMT, etc.). For more information, check TIMEZONE docs.
- trace
Level String - Controls how trace events are ingested into the event table. For more information about trace levels, see Setting trace level. For more information, check TRACE_LEVEL docs.
- transaction
Abort BooleanOn Error - Specifies the action to perform when a statement issued within a non-autocommit transaction returns with an error. For more information, check TRANSACTIONABORTON_ERROR docs.
- transaction
Default StringIsolation Level - Specifies the isolation level for transactions in the user session. For more information, check TRANSACTIONDEFAULTISOLATION_LEVEL docs.
- twoDigitCenturyStart Integer
- Specifies the “century start” year for 2-digit years (i.e. the earliest year such dates can represent). This parameter prevents ambiguous dates when importing or converting data with the YY date format component (i.e. years represented as 2 digits). For more information, check TWO_DIGIT_CENTURY_START docs.
- unsupportedDdlAction String
- Determines if an unsupported (i.e. non-default) value specified for a constraint property returns an error. For more information, check UNSUPPORTED_DDL_ACTION docs.
- useCachedResult Boolean
- Specifies whether to reuse persisted query results, if available, when a matching query is submitted. For more information, check USE_CACHED_RESULT docs.
- userTaskManagedInitialWarehouseSize String
- Specifies the size of the compute resources to provision for the first run of the task, before a task history is available for Snowflake to determine an ideal size. Once a task has successfully completed a few runs, Snowflake ignores this parameter setting. Valid values are (case-insensitive): %s. (Conflicts with warehouse.) For more information about warehouses, see docs. For more information, check USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE docs.
- userTaskMinimumTriggerIntervalInSeconds Integer
- Minimum amount of time between Triggered Task executions, in seconds. For more information, check USER_TASK_MINIMUM_TRIGGER_INTERVAL_IN_SECONDS docs.
- userTaskTimeoutMs Integer
- Specifies the time limit on a single run of the task before it times out (in milliseconds). For more information, check USER_TASK_TIMEOUT_MS docs.
- warehouse String
- The warehouse the task will use. Omit this parameter to use Snowflake-managed compute resources for runs of this task. Due to Snowflake limitations, the warehouse identifier can consist only of upper-cased letters. (Conflicts with user_task_managed_initial_warehouse_size.) For more information about this resource, see docs.
- weekOfYearPolicy Integer
- Specifies how the weeks in a given year are computed. 0: The semantics used are equivalent to the ISO semantics, in which a week belongs to a given year if at least 4 days of that week are in that year. 1: January 1 is included in the first week of the year and December 31 is included in the last week of the year. For more information, check WEEK_OF_YEAR_POLICY docs.
- weekStart Integer
- Specifies the first day of the week (used by week-related date functions). 0: Legacy Snowflake behavior is used (i.e. ISO-like semantics). 1 (Monday) to 7 (Sunday): All the week-related functions use weeks that start on the specified day of the week. For more information, check WEEK_START docs.
- when String
- Specifies a Boolean SQL expression; multiple conditions joined with AND/OR are supported. When a task is triggered (based on its SCHEDULE or AFTER setting), it validates the conditions of the expression to determine whether to execute. If the conditions of the expression are not met, then the task skips the current run. Any tasks that identify this task as a predecessor also don’t run. See the usage sketch after this list for when combined with afters.
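Example usage: task graph with afters and when (TypeScript)
A sketch of how the afters and when arguments described in the listing above combine to build a small DAG: a scheduled root task and a child task that runs after it, only when a stream has data. All identifiers are placeholders, and the camelCase names assume the TypeScript SDK; this is an illustration rather than a canonical example.

import * as snowflake from "@pulumi/snowflake";

// Root task: the only task in the graph that carries a schedule.
const rootTask = new snowflake.Task("root", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    warehouse: "MY_WH",
    sqlStatement: "CALL MY_DB.MY_SCHEMA.LOAD_STAGING()",
    started: true,
    schedule: { minutes: 60 }, // interval-based schedule instead of cron
});

// Child task: runs after the root task, and skips the run if the
// WHEN condition evaluates to false.
const childTask = new snowflake.Task("child", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    warehouse: "MY_WH",
    sqlStatement: "CALL MY_DB.MY_SCHEMA.TRANSFORM()",
    started: true,
    afters: [rootTask.fullyQualifiedName], // predecessor task in the DAG
    when: "SYSTEM$STREAM_HAS_DATA('MY_DB.MY_SCHEMA.MY_STREAM')",
});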
- abort
Detached booleanQuery - Specifies the action that Snowflake performs for in-progress queries if connectivity is lost due to abrupt termination of a session (e.g. network outage, browser termination, service interruption). For more information, check ABORTDETACHEDQUERY docs.
- afters string[]
- Specifies one or more predecessor tasks for the current task. Use this option to create a DAG of tasks or add this task to an existing DAG. A DAG is a series of tasks that starts with a scheduled root task and is linked together by dependencies. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - allow
Overlapping stringExecution - By default, Snowflake ensures that only one instance of a particular DAG is allowed to run at a time, setting the parameter value to TRUE permits DAG runs to overlap. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value.
- autocommit boolean
- Specifies whether autocommit is enabled for the session. Autocommit determines whether a DML statement, when executed without an active transaction, is automatically committed after the statement successfully completes. For more information, see Transactions. For more information, check AUTOCOMMIT docs.
- binary
Input stringFormat - The format of VARCHAR values passed as input to VARCHAR-to-BINARY conversion functions. For more information, see Binary input and output. For more information, check BINARYINPUTFORMAT docs.
- binary
Output stringFormat - The format for VARCHAR values returned as output by BINARY-to-VARCHAR conversion functions. For more information, see Binary input and output. For more information, check BINARYOUTPUTFORMAT docs.
- client
Memory numberLimit - Parameter that specifies the maximum amount of memory the JDBC driver or ODBC driver should use for the result set from queries (in MB). For more information, check CLIENTMEMORYLIMIT docs.
- client
Metadata booleanRequest Use Connection Ctx - For specific ODBC functions and JDBC methods, this parameter can change the default search scope from all databases/schemas to the current database/schema. The narrower search typically returns fewer rows and executes more quickly. For more information, check CLIENTMETADATAREQUESTUSECONNECTION_CTX docs.
- client
Prefetch numberThreads - Parameter that specifies the number of threads used by the client to pre-fetch large result sets. The driver will attempt to honor the parameter value, but defines the minimum and maximum values (depending on your system’s resources) to improve performance. For more information, check CLIENTPREFETCHTHREADS docs.
- client
Result numberChunk Size - Parameter that specifies the maximum size of each set (or chunk) of query results to download (in MB). The JDBC driver downloads query results in chunks. For more information, check CLIENTRESULTCHUNK_SIZE docs.
- client
Result booleanColumn Case Insensitive - Parameter that indicates whether to match column name case-insensitively in ResultSet.get* methods in JDBC. For more information, check CLIENTRESULTCOLUMNCASEINSENSITIVE docs.
- client
Session booleanKeep Alive - Parameter that indicates whether to force a user to log in again after a period of inactivity in the session. For more information, check CLIENTSESSIONKEEP_ALIVE docs.
- client
Session numberKeep Alive Heartbeat Frequency - Number of seconds in-between client attempts to update the token for the session. For more information, check CLIENTSESSIONKEEPALIVEHEARTBEAT_FREQUENCY docs.
- client
Timestamp stringType Mapping - Specifies the TIMESTAMP_* variation to use when binding timestamp variables for JDBC or ODBC applications that use the bind API to load data. For more information, check CLIENTTIMESTAMPTYPE_MAPPING docs.
- comment string
- Specifies a comment for the task.
- config string
- Specifies a string representation of key value pairs that can be accessed by all tasks in the task graph. Must be in JSON format.
- database string
- The database in which to create the task. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - date
Input stringFormat - Specifies the input format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATEINPUTFORMAT docs.
- date
Output stringFormat - Specifies the display format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATEOUTPUTFORMAT docs.
- enable
Unload booleanPhysical Type Optimization - Specifies whether to set the schema for unloaded Parquet files based on the logical column data types (i.e. the types in the unload SQL query or source table) or on the unloaded column values (i.e. the smallest data types and precision that support the values in the output columns of the unload SQL statement or source table). For more information, check ENABLEUNLOADPHYSICALTYPEOPTIMIZATION docs.
- error
Integration string - Specifies the name of the notification integration used for error notifications. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. For more information about this resource, see docs. - error
On booleanNondeterministic Merge - Specifies whether to return an error when the MERGE command is used to update or delete a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERRORONNONDETERMINISTIC_MERGE docs.
- error
On booleanNondeterministic Update - Specifies whether to return an error when the UPDATE command is used to update a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERRORONNONDETERMINISTIC_UPDATE docs.
- finalize string
- Specifies the name of a root task that the finalizer task is associated with. Finalizer tasks run after all other tasks in the task graph run to completion. You can define the SQL of a finalizer task to handle notifications and the release and cleanup of resources that a task graph uses. For more information, see Release and cleanup of task graphs. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - fully
Qualified stringName - Fully qualified name of the resource. For more information, see object name resolution.
- geography
Output stringFormat - Display format for GEOGRAPHY values. For more information, check GEOGRAPHYOUTPUTFORMAT docs.
- geometry
Output stringFormat - Display format for GEOMETRY values. For more information, check GEOMETRYOUTPUTFORMAT docs.
- jdbcTreatTimestampNtzAsUtc boolean
- Specifies how JDBC processes TIMESTAMP_NTZ values. For more information, check JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC docs.
- jdbc
Use booleanSession Timezone - Specifies whether the JDBC Driver uses the time zone of the JVM or the time zone of the session (specified by the TIMEZONE parameter) for the getDate(), getTime(), and getTimestamp() methods of the ResultSet class. For more information, check JDBCUSESESSION_TIMEZONE docs.
- json
Indent number - Specifies the number of blank spaces to indent each new element in JSON output in the session. Also specifies whether to insert newline characters after each element. For more information, check JSON_INDENT docs.
- lock
Timeout number - Number of seconds to wait while trying to lock a resource, before timing out and aborting the statement. For more information, check LOCK_TIMEOUT docs.
- log
Level string - Specifies the severity level of messages that should be ingested and made available in the active event table. Messages at the specified level (and at more severe levels) are ingested. For more information about log levels, see Setting log level. For more information, check LOG_LEVEL docs.
- multi
Statement numberCount - Number of statements to execute when using the multi-statement capability. For more information, check MULTISTATEMENTCOUNT docs.
- name string
- Specifies the identifier for the task; must be unique for the database and schema in which the task is created. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - noorder
Sequence booleanAs Default - Specifies whether the ORDER or NOORDER property is set by default when you create a new sequence or add a new table column. The ORDER and NOORDER properties determine whether or not the values are generated for the sequence or auto-incremented column in increasing or decreasing order. For more information, check NOORDERSEQUENCEAS_DEFAULT docs.
- odbc
Treat booleanDecimal As Int - Specifies how ODBC processes columns that have a scale of zero (0). For more information, check ODBCTREATDECIMALASINT docs.
- parameters
Task
Parameter[] - Outputs the result of
SHOW PARAMETERS IN TASK
for the given task.
- queryTag string
- Optional string that can be used to tag queries and other SQL statements executed within a session. The tags are displayed in the output of the QUERY_HISTORY, QUERY_HISTORY_BY_* functions. For more information, check QUERY_TAG docs.
- quoted
Identifiers booleanIgnore Case - Specifies whether letters in double-quoted object identifiers are stored and resolved as uppercase letters. By default, Snowflake preserves the case of alphabetic characters when storing and resolving double-quoted identifiers (see Identifier resolution). You can use this parameter in situations in which third-party applications always use double quotes around identifiers. For more information, check QUOTEDIDENTIFIERSIGNORE_CASE docs.
- rows
Per numberResultset - Specifies the maximum number of rows returned in a result set. A value of 0 specifies no maximum. For more information, check ROWSPERRESULTSET docs.
- s3Stage
Vpce stringDns Name - Specifies the DNS name of an Amazon S3 interface endpoint. Requests sent to the internal stage of an account via AWS PrivateLink for Amazon S3 use this endpoint to connect. For more information, see Accessing Internal stages with dedicated interface endpoints. For more information, check S3STAGEVPCEDNSNAME docs.
- schedule TaskSchedule
- The schedule for periodically running the task. This can be a cron expression or an interval in minutes. (Conflicts with finalize and after; when set, one of the sub-fields minutes or using_cron should be set.)
- schema string
- The schema in which to create the task. Due to technical limitations (read more here), avoid using the following characters: |, ., ".
- searchPath string
- Specifies the path to search to resolve unqualified object names in queries. For more information, see Name resolution in queries. Comma-separated list of identifiers. An identifier can be a fully or partially qualified schema name. For more information, check SEARCH_PATH docs.
- show
Outputs TaskShow Output[] - Outputs the result of
SHOW TASKS
for the given task. - sql
Statement string - Any single SQL statement, or a call to a stored procedure, executed when the task runs.
- started boolean
- Specifies if the task should be started or suspended.
- statement
Queued numberTimeout In Seconds - Amount of time, in seconds, a SQL statement (query, DDL, DML, etc.) remains queued for a warehouse before it is canceled by the system. This parameter can be used in conjunction with the MAXCONCURRENCYLEVEL parameter to ensure a warehouse is never backlogged. For more information, check STATEMENTQUEUEDTIMEOUTINSECONDS docs.
- statement
Timeout numberIn Seconds - Amount of time, in seconds, after which a running SQL statement (query, DDL, DML, etc.) is canceled by the system. For more information, check STATEMENTTIMEOUTIN_SECONDS docs.
- strict
Json booleanOutput - This parameter specifies whether JSON output in a session is compatible with the general standard (as described by http://json.org). By design, Snowflake allows JSON input that contains non-standard values; however, these non-standard values might result in Snowflake outputting JSON that is incompatible with other platforms and languages. This parameter, when enabled, ensures that Snowflake outputs valid/compatible JSON. For more information, check STRICTJSONOUTPUT docs.
- suspend
Task numberAfter Num Failures - Specifies the number of consecutive failed task runs after which the current task is suspended automatically. The default is 0 (no automatic suspension). For more information, check SUSPENDTASKAFTERNUMFAILURES docs.
- task
Auto numberRetry Attempts - Specifies the number of automatic task graph retry attempts. If any task graphs complete in a FAILED state, Snowflake can automatically retry the task graphs from the last task in the graph that failed. For more information, check TASKAUTORETRY_ATTEMPTS docs.
- time
Input stringFormat - Specifies the input format for the TIME data type. For more information, see Date and time input and output formats. Any valid, supported time format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of times stored in the system during the session). For more information, check TIMEINPUTFORMAT docs.
- time
Output stringFormat - Specifies the display format for the TIME data type. For more information, see Date and time input and output formats. For more information, check TIMEOUTPUTFORMAT docs.
- timestamp
Day booleanIs Always24h - Specifies whether the DATEADD function (and its aliases) always consider a day to be exactly 24 hours for expressions that span multiple days. For more information, check TIMESTAMPDAYISALWAYS24H docs.
- timestamp
Input stringFormat - Specifies the input format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. Any valid, supported timestamp format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of timestamps stored in the system during the session). For more information, check TIMESTAMPINPUTFORMAT docs.
- timestampLtzOutputFormat string
- Specifies the display format for the TIMESTAMP_LTZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_LTZ_OUTPUT_FORMAT docs.
- timestampNtzOutputFormat string
- Specifies the display format for the TIMESTAMP_NTZ data type. For more information, check TIMESTAMP_NTZ_OUTPUT_FORMAT docs.
- timestampOutputFormat string
- Specifies the display format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_OUTPUT_FORMAT docs.
- timestampTypeMapping string
- Specifies the TIMESTAMP_* variation that the TIMESTAMP data type alias maps to. For more information, check TIMESTAMP_TYPE_MAPPING docs.
- timestampTzOutputFormat string
- Specifies the display format for the TIMESTAMP_TZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_TZ_OUTPUT_FORMAT docs.
- timezone string
- Specifies the time zone for the session. You can specify a time zone name or a link name from release 2021a of the IANA Time Zone Database (e.g. America/Los_Angeles, Europe/London, UTC, Etc/GMT, etc.). For more information, check TIMEZONE docs.
- trace
Level string - Controls how trace events are ingested into the event table. For more information about trace levels, see Setting trace level. For more information, check TRACE_LEVEL docs.
- transaction
Abort booleanOn Error - Specifies the action to perform when a statement issued within a non-autocommit transaction returns with an error. For more information, check TRANSACTIONABORTON_ERROR docs.
- transaction
Default stringIsolation Level - Specifies the isolation level for transactions in the user session. For more information, check TRANSACTIONDEFAULTISOLATION_LEVEL docs.
- twoDigitCenturyStart number
- Specifies the “century start” year for 2-digit years (i.e. the earliest year such dates can represent). This parameter prevents ambiguous dates when importing or converting data with the YY date format component (i.e. years represented as 2 digits). For more information, check TWO_DIGIT_CENTURY_START docs.
- unsupportedDdlAction string
- Determines if an unsupported (i.e. non-default) value specified for a constraint property returns an error. For more information, check UNSUPPORTED_DDL_ACTION docs.
- useCachedResult boolean
- Specifies whether to reuse persisted query results, if available, when a matching query is submitted. For more information, check USE_CACHED_RESULT docs.
- userTaskManagedInitialWarehouseSize string
- Specifies the size of the compute resources to provision for the first run of the task, before a task history is available for Snowflake to determine an ideal size. Once a task has successfully completed a few runs, Snowflake ignores this parameter setting. Valid values are (case-insensitive): %s. (Conflicts with warehouse.) For more information about warehouses, see docs. For more information, check USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE docs.
- userTaskMinimumTriggerIntervalInSeconds number
- Minimum amount of time between Triggered Task executions, in seconds. For more information, check USER_TASK_MINIMUM_TRIGGER_INTERVAL_IN_SECONDS docs.
- userTaskTimeoutMs number
- Specifies the time limit on a single run of the task before it times out (in milliseconds). For more information, check USER_TASK_TIMEOUT_MS docs.
- warehouse string
- The warehouse the task will use. Omit this parameter to use Snowflake-managed compute resources for runs of this task (see the serverless sketch after this list). Due to Snowflake limitations, the warehouse identifier can consist only of upper-cased letters. (Conflicts with user_task_managed_initial_warehouse_size.) For more information about this resource, see docs.
- weekOfYearPolicy number
- Specifies how the weeks in a given year are computed. 0: The semantics used are equivalent to the ISO semantics, in which a week belongs to a given year if at least 4 days of that week are in that year. 1: January 1 is included in the first week of the year and December 31 is included in the last week of the year. For more information, check WEEK_OF_YEAR_POLICY docs.
- weekStart number
- Specifies the first day of the week (used by week-related date functions). 0: Legacy Snowflake behavior is used (i.e. ISO-like semantics). 1 (Monday) to 7 (Sunday): All the week-related functions use weeks that start on the specified day of the week. For more information, check WEEK_START docs.
- when string
- Specifies a Boolean SQL expression; multiple conditions joined with AND/OR are supported. When a task is triggered (based on its SCHEDULE or AFTER setting), it validates the conditions of the expression to determine whether to execute. If the conditions of the expression are not met, then the task skips the current run. Any tasks that identify this task as a predecessor also don’t run.
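Example usage: serverless task with graph config (TypeScript)
A sketch of the warehouse-less variant mentioned in the warehouse entry above: with no warehouse set, runs use Snowflake-managed compute sized initially by userTaskManagedInitialWarehouseSize, config carries a JSON string shared by all tasks in the task graph, and suspendTaskAfterNumFailures enables automatic suspension. All identifiers and values here are illustrative placeholders, not prescribed settings.

import * as snowflake from "@pulumi/snowflake";

// No warehouse is set, so this task runs on Snowflake-managed compute.
const serverlessRefresh = new snowflake.Task("serverless_refresh", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    sqlStatement: "CALL MY_DB.MY_SCHEMA.REFRESH_AGGREGATES()",
    started: true,
    schedule: { usingCron: "*/30 * * * * UTC" },
    userTaskManagedInitialWarehouseSize: "XSMALL",   // ignored once task history exists
    suspendTaskAfterNumFailures: 3,                  // auto-suspend after 3 consecutive failures
    config: JSON.stringify({ environment: "prod" }), // must be a valid JSON string
    comment: "Refreshes aggregate tables every 30 minutes",
});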
- abort_
detached_ boolquery - Specifies the action that Snowflake performs for in-progress queries if connectivity is lost due to abrupt termination of a session (e.g. network outage, browser termination, service interruption). For more information, check ABORTDETACHEDQUERY docs.
- afters Sequence[str]
- Specifies one or more predecessor tasks for the current task. Use this option to create a DAG of tasks or add this task to an existing DAG. A DAG is a series of tasks that starts with a scheduled root task and is linked together by dependencies. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - allow_
overlapping_ strexecution - By default, Snowflake ensures that only one instance of a particular DAG is allowed to run at a time, setting the parameter value to TRUE permits DAG runs to overlap. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value.
- autocommit bool
- Specifies whether autocommit is enabled for the session. Autocommit determines whether a DML statement, when executed without an active transaction, is automatically committed after the statement successfully completes. For more information, see Transactions. For more information, check AUTOCOMMIT docs.
- binary_
input_ strformat - The format of VARCHAR values passed as input to VARCHAR-to-BINARY conversion functions. For more information, see Binary input and output. For more information, check BINARYINPUTFORMAT docs.
- binary_
output_ strformat - The format for VARCHAR values returned as output by BINARY-to-VARCHAR conversion functions. For more information, see Binary input and output. For more information, check BINARYOUTPUTFORMAT docs.
- client_
memory_ intlimit - Parameter that specifies the maximum amount of memory the JDBC driver or ODBC driver should use for the result set from queries (in MB). For more information, check CLIENTMEMORYLIMIT docs.
- client_
metadata_ boolrequest_ use_ connection_ ctx - For specific ODBC functions and JDBC methods, this parameter can change the default search scope from all databases/schemas to the current database/schema. The narrower search typically returns fewer rows and executes more quickly. For more information, check CLIENTMETADATAREQUESTUSECONNECTION_CTX docs.
- client_
prefetch_ intthreads - Parameter that specifies the number of threads used by the client to pre-fetch large result sets. The driver will attempt to honor the parameter value, but defines the minimum and maximum values (depending on your system’s resources) to improve performance. For more information, check CLIENTPREFETCHTHREADS docs.
- client_
result_ intchunk_ size - Parameter that specifies the maximum size of each set (or chunk) of query results to download (in MB). The JDBC driver downloads query results in chunks. For more information, check CLIENTRESULTCHUNK_SIZE docs.
- client_
result_ boolcolumn_ case_ insensitive - Parameter that indicates whether to match column name case-insensitively in ResultSet.get* methods in JDBC. For more information, check CLIENTRESULTCOLUMNCASEINSENSITIVE docs.
- client_
session_ boolkeep_ alive - Parameter that indicates whether to force a user to log in again after a period of inactivity in the session. For more information, check CLIENTSESSIONKEEP_ALIVE docs.
- client_
session_ intkeep_ alive_ heartbeat_ frequency - Number of seconds in-between client attempts to update the token for the session. For more information, check CLIENTSESSIONKEEPALIVEHEARTBEAT_FREQUENCY docs.
- client_
timestamp_ strtype_ mapping - Specifies the TIMESTAMP_* variation to use when binding timestamp variables for JDBC or ODBC applications that use the bind API to load data. For more information, check CLIENTTIMESTAMPTYPE_MAPPING docs.
- comment str
- Specifies a comment for the task.
- config str
- Specifies a string representation of key value pairs that can be accessed by all tasks in the task graph. Must be in JSON format.
- database str
- The database in which to create the task. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - date_
input_ strformat - Specifies the input format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATEINPUTFORMAT docs.
- date_
output_ strformat - Specifies the display format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATEOUTPUTFORMAT docs.
- enable_
unload_ boolphysical_ type_ optimization - Specifies whether to set the schema for unloaded Parquet files based on the logical column data types (i.e. the types in the unload SQL query or source table) or on the unloaded column values (i.e. the smallest data types and precision that support the values in the output columns of the unload SQL statement or source table). For more information, check ENABLEUNLOADPHYSICALTYPEOPTIMIZATION docs.
- error_
integration str - Specifies the name of the notification integration used for error notifications. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. For more information about this resource, see docs. - error_
on_ boolnondeterministic_ merge - Specifies whether to return an error when the MERGE command is used to update or delete a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERRORONNONDETERMINISTIC_MERGE docs.
- error_
on_ boolnondeterministic_ update - Specifies whether to return an error when the UPDATE command is used to update a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERRORONNONDETERMINISTIC_UPDATE docs.
- finalize str
- Specifies the name of a root task that the finalizer task is associated with. Finalizer tasks run after all other tasks in the task graph run to completion. You can define the SQL of a finalizer task to handle notifications and the release and cleanup of resources that a task graph uses. For more information, see Release and cleanup of task graphs. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - fully_
qualified_ strname - Fully qualified name of the resource. For more information, see object name resolution.
- geography_
output_ strformat - Display format for GEOGRAPHY values. For more information, check GEOGRAPHYOUTPUTFORMAT docs.
- geometry_
output_ strformat - Display format for GEOMETRY values. For more information, check GEOMETRYOUTPUTFORMAT docs.
- jdbc_treat_timestamp_ntz_as_utc bool
- Specifies how JDBC processes TIMESTAMP_NTZ values. For more information, check JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC docs.
- jdbc_
use_ boolsession_ timezone - Specifies whether the JDBC Driver uses the time zone of the JVM or the time zone of the session (specified by the TIMEZONE parameter) for the getDate(), getTime(), and getTimestamp() methods of the ResultSet class. For more information, check JDBCUSESESSION_TIMEZONE docs.
- json_
indent int - Specifies the number of blank spaces to indent each new element in JSON output in the session. Also specifies whether to insert newline characters after each element. For more information, check JSON_INDENT docs.
- lock_
timeout int - Number of seconds to wait while trying to lock a resource, before timing out and aborting the statement. For more information, check LOCK_TIMEOUT docs.
- log_
level str - Specifies the severity level of messages that should be ingested and made available in the active event table. Messages at the specified level (and at more severe levels) are ingested. For more information about log levels, see Setting log level. For more information, check LOG_LEVEL docs.
- multi_
statement_ intcount - Number of statements to execute when using the multi-statement capability. For more information, check MULTISTATEMENTCOUNT docs.
- name str
- Specifies the identifier for the task; must be unique for the database and schema in which the task is created. Due to technical limitations (read more here), avoid using the following characters:
|
,.
,"
. - noorder_
sequence_ boolas_ default - Specifies whether the ORDER or NOORDER property is set by default when you create a new sequence or add a new table column. The ORDER and NOORDER properties determine whether or not the values are generated for the sequence or auto-incremented column in increasing or decreasing order. For more information, check NOORDERSEQUENCEAS_DEFAULT docs.
- odbc_
treat_ booldecimal_ as_ int - Specifies how ODBC processes columns that have a scale of zero (0). For more information, check ODBCTREATDECIMALASINT docs.
- parameters
Sequence[Task
Parameter Args] - Outputs the result of
SHOW PARAMETERS IN TASK
for the given task.
- query_tag str
- Optional string that can be used to tag queries and other SQL statements executed within a session. The tags are displayed in the output of the QUERY_HISTORY, QUERY_HISTORY_BY_* functions. For more information, check QUERY_TAG docs.
- quoted_
identifiers_ boolignore_ case - Specifies whether letters in double-quoted object identifiers are stored and resolved as uppercase letters. By default, Snowflake preserves the case of alphabetic characters when storing and resolving double-quoted identifiers (see Identifier resolution). You can use this parameter in situations in which third-party applications always use double quotes around identifiers. For more information, check QUOTEDIDENTIFIERSIGNORE_CASE docs.
- rows_
per_ intresultset - Specifies the maximum number of rows returned in a result set. A value of 0 specifies no maximum. For more information, check ROWSPERRESULTSET docs.
- s3_
stage_ strvpce_ dns_ name - Specifies the DNS name of an Amazon S3 interface endpoint. Requests sent to the internal stage of an account via AWS PrivateLink for Amazon S3 use this endpoint to connect. For more information, see Accessing Internal stages with dedicated interface endpoints. For more information, check S3STAGEVPCEDNSNAME docs.
- schedule TaskScheduleArgs
- The schedule for periodically running the task. This can be a cron expression or an interval in minutes. (Conflicts with finalize and after; when set, one of the sub-fields minutes or using_cron should be set.)
- schema str
- The schema in which to create the task. Due to technical limitations (read more here), avoid using the following characters: |, ., ".
- search_path str
- Specifies the path to search to resolve unqualified object names in queries. For more information, see Name resolution in queries. Comma-separated list of identifiers. An identifier can be a fully or partially qualified schema name. For more information, check SEARCH_PATH docs.
- show_
outputs Sequence[TaskShow Output Args] - Outputs the result of
SHOW TASKS
for the given task. - sql_
statement str - Any single SQL statement, or a call to a stored procedure, executed when the task runs.
- started bool
- Specifies if the task should be started or suspended.
- statement_
queued_ inttimeout_ in_ seconds - Amount of time, in seconds, a SQL statement (query, DDL, DML, etc.) remains queued for a warehouse before it is canceled by the system. This parameter can be used in conjunction with the MAXCONCURRENCYLEVEL parameter to ensure a warehouse is never backlogged. For more information, check STATEMENTQUEUEDTIMEOUTINSECONDS docs.
- statement_
timeout_ intin_ seconds - Amount of time, in seconds, after which a running SQL statement (query, DDL, DML, etc.) is canceled by the system. For more information, check STATEMENTTIMEOUTIN_SECONDS docs.
- strict_
json_ booloutput - This parameter specifies whether JSON output in a session is compatible with the general standard (as described by http://json.org). By design, Snowflake allows JSON input that contains non-standard values; however, these non-standard values might result in Snowflake outputting JSON that is incompatible with other platforms and languages. This parameter, when enabled, ensures that Snowflake outputs valid/compatible JSON. For more information, check STRICTJSONOUTPUT docs.
- suspend_
task_ intafter_ num_ failures - Specifies the number of consecutive failed task runs after which the current task is suspended automatically. The default is 0 (no automatic suspension). For more information, check SUSPENDTASKAFTERNUMFAILURES docs.
- task_
auto_ intretry_ attempts - Specifies the number of automatic task graph retry attempts. If any task graphs complete in a FAILED state, Snowflake can automatically retry the task graphs from the last task in the graph that failed. For more information, check TASKAUTORETRY_ATTEMPTS docs.
- time_
input_ strformat - Specifies the input format for the TIME data type. For more information, see Date and time input and output formats. Any valid, supported time format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of times stored in the system during the session). For more information, check TIMEINPUTFORMAT docs.
- time_
output_ strformat - Specifies the display format for the TIME data type. For more information, see Date and time input and output formats. For more information, check TIMEOUTPUTFORMAT docs.
- timestamp_
day_ boolis_ always24h - Specifies whether the DATEADD function (and its aliases) always consider a day to be exactly 24 hours for expressions that span multiple days. For more information, check TIMESTAMPDAYISALWAYS24H docs.
- timestamp_
input_ strformat - Specifies the input format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. Any valid, supported timestamp format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of timestamps stored in the system during the session). For more information, check TIMESTAMPINPUTFORMAT docs.
- timestamp_
ltz_ stroutput_ format - Specifies the display format for the TIMESTAMPLTZ data type. If no format is specified, defaults to OUTPUT*FORMATTIMESTAMP. For more information, see Date and time input and output formats. For more information, check TIMESTAMPLTZOUTPUT*FORMAT docs.
- timestamp_
ntz_ stroutput_ format - Specifies the display format for the TIMESTAMPNTZ data type. For more information, check NTZOUTPUTFORMAT docsTIMESTAMP.
- timestamp_
output_ strformat - Specifies the display format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. For more information, check TIMESTAMPOUTPUTFORMAT docs.
- timestamp_
type_ strmapping - Specifies the TIMESTAMP** variation that the TIMESTAMP data type alias maps to. For more information, check TIMESTAMP*TYPE_MAPPING docs.
- timestamp_
tz_ stroutput_ format - Specifies the display format for the TIMESTAMPTZ data type. If no format is specified, defaults to OUTPUT*FORMATTIMESTAMP. For more information, see Date and time input and output formats. For more information, check TIMESTAMPTZOUTPUT*FORMAT docs.
- timezone str
- Specifies the time zone for the session. You can specify a time zone name or a link name from release 2021a of the IANA Time Zone Database (e.g. America/Los_Angeles, Europe/London, UTC, Etc/GMT, etc.). For more information, check TIMEZONE docs.
- trace_
level str - Controls how trace events are ingested into the event table. For more information about trace levels, see Setting trace level. For more information, check TRACE_LEVEL docs.
- transaction_
abort_ boolon_ error - Specifies the action to perform when a statement issued within a non-autocommit transaction returns with an error. For more information, check TRANSACTIONABORTON_ERROR docs.
- transaction_
default_ strisolation_ level - Specifies the isolation level for transactions in the user session. For more information, check TRANSACTIONDEFAULTISOLATION_LEVEL docs.
- two_
digit_ intcentury_ start - Specifies the “century start” year for 2-digit years (i.e. the earliest year such dates can represent). This parameter prevents ambiguous dates when importing or converting data with the
YY
date format component (i.e. years represented as 2 digits). For more information, check TWODIGITCENTURY_START docs. - unsupported_
ddl_ straction - Determines if an unsupported (i.e. non-default) value specified for a constraint property returns an error. For more information, check UNSUPPORTEDDDLACTION docs.
- use_
cached_ boolresult - Specifies whether to reuse persisted query results, if available, when a matching query is submitted. For more information, check USECACHEDRESULT docs.
- user_
task_ strmanaged_ initial_ warehouse_ size - Specifies the size of the compute resources to provision for the first run of the task, before a task history is available for Snowflake to determine an ideal size. Once a task has successfully completed a few runs, Snowflake ignores this parameter setting. Valid values are (case-insensitive): %s. (Conflicts with warehouse). For more information about warehouses, see docs. For more information, check USERTASKMANAGEDINITIALWAREHOUSE_SIZE docs.
- user_
task_ intminimum_ trigger_ interval_ in_ seconds - Minimum amount of time between Triggered Task executions in seconds For more information, check USERTASKMINIMUMTRIGGERINTERVALINSECONDS docs.
- user_
task_ inttimeout_ ms - Specifies the time limit on a single run of the task before it times out (in milliseconds). For more information, check USERTASKTIMEOUT_MS docs.
- warehouse str
- The warehouse the task will use. Omit this parameter to use Snowflake-managed compute resources for runs of this task. Due to Snowflake limitations warehouse identifier can consist of only upper-cased letters. (Conflicts with usertaskmanagedinitialwarehouse_size) For more information about this resource, see docs.
- week_
of_ intyear_ policy - Specifies how the weeks in a given year are computed.
0
: The semantics used are equivalent to the ISO semantics, in which a week belongs to a given year if at least 4 days of that week are in that year.1
: January 1 is included in the first week of the year and December 31 is included in the last week of the year. For more information, check WEEKOFYEAR_POLICY docs. - week_
start int - Specifies the first day of the week (used by week-related date functions).
0
: Legacy Snowflake behavior is used (i.e. ISO-like semantics).1
(Monday) to7
(Sunday): All the week-related functions use weeks that start on the specified day of the week. For more information, check WEEK_START docs. - when str
- Specifies a Boolean SQL expression; multiple conditions joined with AND/OR are supported. When a task is triggered (based on its SCHEDULE or AFTER setting), it validates the conditions of the expression to determine whether to execute. If the conditions of the expression are not met, then the task skips the current run. Any tasks that identify this task as a predecessor also don’t run.
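The schedule, started, user_task_managed_initial_warehouse_size and suspend_task_after_num_failures arguments above combine as in the following minimal Python sketch. It is illustrative only: the database, schema, procedure name, warehouse size value and the exact cron string accepted by using_cron are assumptions, not values taken from this page.

import pulumi
import pulumi_snowflake as snowflake

# Minimal sketch of a serverless, scheduled task. Identifiers below are hypothetical.
nightly_rollup = snowflake.Task(
    "nightly-rollup",
    database="MY_DB",
    schema="MY_SCHEMA",
    sql_statement="CALL MY_SCHEMA.ROLLUP_SALES()",
    started=True,
    # One of the schedule sub-fields (minutes or using_cron) must be set.
    schedule=snowflake.TaskScheduleArgs(using_cron="0 2 * * * UTC"),  # assumed cron-with-timezone format
    # Omitting `warehouse` selects Snowflake-managed (serverless) compute;
    # the initial size value is assumed to be valid for the account.
    user_task_managed_initial_warehouse_size="XSMALL",
    suspend_task_after_num_failures=3,
    comment="Nightly rollup task managed by Pulumi",
)

pulumi.export("task_fqn", nightly_rollup.fully_qualified_name)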
- abortDetachedQuery Boolean - Specifies the action that Snowflake performs for in-progress queries if connectivity is lost due to abrupt termination of a session (e.g. network outage, browser termination, service interruption). For more information, check ABORT_DETACHED_QUERY docs.
- afters List<String> - Specifies one or more predecessor tasks for the current task. Use this option to create a DAG of tasks or add this task to an existing DAG. A DAG is a series of tasks that starts with a scheduled root task and is linked together by dependencies. Due to technical limitations (read more here), avoid using the following characters: |, ., ". A task-graph sketch follows this list.
- allowOverlappingExecution String - By default, Snowflake ensures that only one instance of a particular DAG is allowed to run at a time; setting the parameter value to TRUE permits DAG runs to overlap. Available options are: "true" or "false". When the value is not set in the configuration, the provider will put "default" there, which means to use the Snowflake default for this value.
- autocommit Boolean - Specifies whether autocommit is enabled for the session. Autocommit determines whether a DML statement, when executed without an active transaction, is automatically committed after the statement successfully completes. For more information, see Transactions. For more information, check AUTOCOMMIT docs.
- binaryInputFormat String - The format of VARCHAR values passed as input to VARCHAR-to-BINARY conversion functions. For more information, see Binary input and output. For more information, check BINARY_INPUT_FORMAT docs.
- binaryOutputFormat String - The format for VARCHAR values returned as output by BINARY-to-VARCHAR conversion functions. For more information, see Binary input and output. For more information, check BINARY_OUTPUT_FORMAT docs.
- clientMemoryLimit Number - Parameter that specifies the maximum amount of memory the JDBC driver or ODBC driver should use for the result set from queries (in MB). For more information, check CLIENT_MEMORY_LIMIT docs.
- clientMetadataRequestUseConnectionCtx Boolean - For specific ODBC functions and JDBC methods, this parameter can change the default search scope from all databases/schemas to the current database/schema. The narrower search typically returns fewer rows and executes more quickly. For more information, check CLIENT_METADATA_REQUEST_USE_CONNECTION_CTX docs.
- clientPrefetchThreads Number - Parameter that specifies the number of threads used by the client to pre-fetch large result sets. The driver will attempt to honor the parameter value, but defines the minimum and maximum values (depending on your system’s resources) to improve performance. For more information, check CLIENT_PREFETCH_THREADS docs.
- clientResultChunkSize Number - Parameter that specifies the maximum size of each set (or chunk) of query results to download (in MB). The JDBC driver downloads query results in chunks. For more information, check CLIENT_RESULT_CHUNK_SIZE docs.
- clientResultColumnCaseInsensitive Boolean - Parameter that indicates whether to match column name case-insensitively in ResultSet.get* methods in JDBC. For more information, check CLIENT_RESULT_COLUMN_CASE_INSENSITIVE docs.
- clientSessionKeepAlive Boolean - Parameter that indicates whether to force a user to log in again after a period of inactivity in the session. For more information, check CLIENT_SESSION_KEEP_ALIVE docs.
- clientSessionKeepAliveHeartbeatFrequency Number - Number of seconds in-between client attempts to update the token for the session. For more information, check CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY docs.
- clientTimestampTypeMapping String - Specifies the TIMESTAMP_* variation to use when binding timestamp variables for JDBC or ODBC applications that use the bind API to load data. For more information, check CLIENT_TIMESTAMP_TYPE_MAPPING docs.
- comment String - Specifies a comment for the task.
- config String - Specifies a string representation of key value pairs that can be accessed by all tasks in the task graph. Must be in JSON format.
- database String - The database in which to create the task. Due to technical limitations (read more here), avoid using the following characters: |, ., ".
- dateInputFormat String - Specifies the input format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATE_INPUT_FORMAT docs.
- dateOutputFormat String - Specifies the display format for the DATE data type. For more information, see Date and time input and output formats. For more information, check DATE_OUTPUT_FORMAT docs.
- enableUnloadPhysicalTypeOptimization Boolean - Specifies whether to set the schema for unloaded Parquet files based on the logical column data types (i.e. the types in the unload SQL query or source table) or on the unloaded column values (i.e. the smallest data types and precision that support the values in the output columns of the unload SQL statement or source table). For more information, check ENABLE_UNLOAD_PHYSICAL_TYPE_OPTIMIZATION docs.
- errorIntegration String - Specifies the name of the notification integration used for error notifications. Due to technical limitations (read more here), avoid using the following characters: |, ., ". For more information about this resource, see docs.
- errorOnNondeterministicMerge Boolean - Specifies whether to return an error when the MERGE command is used to update or delete a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERROR_ON_NONDETERMINISTIC_MERGE docs.
- errorOnNondeterministicUpdate Boolean - Specifies whether to return an error when the UPDATE command is used to update a target row that joins multiple source rows and the system cannot determine the action to perform on the target row. For more information, check ERROR_ON_NONDETERMINISTIC_UPDATE docs.
- finalize String - Specifies the name of a root task that the finalizer task is associated with. Finalizer tasks run after all other tasks in the task graph run to completion. You can define the SQL of a finalizer task to handle notifications and the release and cleanup of resources that a task graph uses. For more information, see Release and cleanup of task graphs. Due to technical limitations (read more here), avoid using the following characters: |, ., ".
- fullyQualifiedName String - Fully qualified name of the resource. For more information, see object name resolution.
- geographyOutputFormat String - Display format for GEOGRAPHY values. For more information, check GEOGRAPHY_OUTPUT_FORMAT docs.
- geometryOutputFormat String - Display format for GEOMETRY values. For more information, check GEOMETRY_OUTPUT_FORMAT docs.
- jdbcTreatTimestampNtzAsUtc Boolean - Specifies how JDBC processes TIMESTAMP_NTZ values. For more information, check JDBC_TREAT_TIMESTAMP_NTZ_AS_UTC docs.
- jdbcUseSessionTimezone Boolean - Specifies whether the JDBC Driver uses the time zone of the JVM or the time zone of the session (specified by the TIMEZONE parameter) for the getDate(), getTime(), and getTimestamp() methods of the ResultSet class. For more information, check JDBC_USE_SESSION_TIMEZONE docs.
- jsonIndent Number - Specifies the number of blank spaces to indent each new element in JSON output in the session. Also specifies whether to insert newline characters after each element. For more information, check JSON_INDENT docs.
- lockTimeout Number - Number of seconds to wait while trying to lock a resource, before timing out and aborting the statement. For more information, check LOCK_TIMEOUT docs.
- logLevel String - Specifies the severity level of messages that should be ingested and made available in the active event table. Messages at the specified level (and at more severe levels) are ingested. For more information about log levels, see Setting log level. For more information, check LOG_LEVEL docs.
- multiStatementCount Number - Number of statements to execute when using the multi-statement capability. For more information, check MULTI_STATEMENT_COUNT docs.
- name String - Specifies the identifier for the task; must be unique for the database and schema in which the task is created. Due to technical limitations (read more here), avoid using the following characters: |, ., ".
- noorderSequenceAsDefault Boolean - Specifies whether the ORDER or NOORDER property is set by default when you create a new sequence or add a new table column. The ORDER and NOORDER properties determine whether or not the values are generated for the sequence or auto-incremented column in increasing or decreasing order. For more information, check NOORDER_SEQUENCE_AS_DEFAULT docs.
- odbcTreatDecimalAsInt Boolean - Specifies how ODBC processes columns that have a scale of zero (0). For more information, check ODBC_TREAT_DECIMAL_AS_INT docs.
- parameters List<Property Map> - Outputs the result of SHOW PARAMETERS IN TASK for the given task.
- queryTag String - Optional string that can be used to tag queries and other SQL statements executed within a session. The tags are displayed in the output of the QUERY_HISTORY, QUERY_HISTORY_BY_* functions. For more information, check QUERY_TAG docs.
- quotedIdentifiersIgnoreCase Boolean - Specifies whether letters in double-quoted object identifiers are stored and resolved as uppercase letters. By default, Snowflake preserves the case of alphabetic characters when storing and resolving double-quoted identifiers (see Identifier resolution). You can use this parameter in situations in which third-party applications always use double quotes around identifiers. For more information, check QUOTED_IDENTIFIERS_IGNORE_CASE docs.
- rowsPerResultset Number - Specifies the maximum number of rows returned in a result set. A value of 0 specifies no maximum. For more information, check ROWS_PER_RESULTSET docs.
- s3StageVpceDnsName String - Specifies the DNS name of an Amazon S3 interface endpoint. Requests sent to the internal stage of an account via AWS PrivateLink for Amazon S3 use this endpoint to connect. For more information, see Accessing Internal stages with dedicated interface endpoints. For more information, check S3_STAGE_VPCE_DNS_NAME docs.
- schedule Property Map - The schedule for periodically running the task. This can be a cron expression or an interval in minutes. (Conflicts with finalize and after; when set, one of the sub-fields minutes or using_cron should be set.)
- schema String - The schema in which to create the task. Due to technical limitations (read more here), avoid using the following characters: |, ., ".
- searchPath String - Specifies the path to search to resolve unqualified object names in queries. For more information, see Name resolution in queries. Comma-separated list of identifiers. An identifier can be a fully or partially qualified schema name. For more information, check SEARCH_PATH docs.
- showOutputs List<Property Map> - Outputs the result of SHOW TASKS for the given task.
- sqlStatement String - Any single SQL statement, or a call to a stored procedure, executed when the task runs.
- started Boolean - Specifies if the task should be started or suspended.
- statementQueuedTimeoutInSeconds Number - Amount of time, in seconds, a SQL statement (query, DDL, DML, etc.) remains queued for a warehouse before it is canceled by the system. This parameter can be used in conjunction with the MAX_CONCURRENCY_LEVEL parameter to ensure a warehouse is never backlogged. For more information, check STATEMENT_QUEUED_TIMEOUT_IN_SECONDS docs.
- statementTimeoutInSeconds Number - Amount of time, in seconds, after which a running SQL statement (query, DDL, DML, etc.) is canceled by the system. For more information, check STATEMENT_TIMEOUT_IN_SECONDS docs.
- strictJsonOutput Boolean - This parameter specifies whether JSON output in a session is compatible with the general standard (as described by http://json.org). By design, Snowflake allows JSON input that contains non-standard values; however, these non-standard values might result in Snowflake outputting JSON that is incompatible with other platforms and languages. This parameter, when enabled, ensures that Snowflake outputs valid/compatible JSON. For more information, check STRICT_JSON_OUTPUT docs.
- suspendTaskAfterNumFailures Number - Specifies the number of consecutive failed task runs after which the current task is suspended automatically. The default is 0 (no automatic suspension). For more information, check SUSPEND_TASK_AFTER_NUM_FAILURES docs.
- taskAutoRetryAttempts Number - Specifies the number of automatic task graph retry attempts. If any task graphs complete in a FAILED state, Snowflake can automatically retry the task graphs from the last task in the graph that failed. For more information, check TASK_AUTO_RETRY_ATTEMPTS docs.
- timeInputFormat String - Specifies the input format for the TIME data type. For more information, see Date and time input and output formats. Any valid, supported time format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of times stored in the system during the session). For more information, check TIME_INPUT_FORMAT docs.
- timeOutputFormat String - Specifies the display format for the TIME data type. For more information, see Date and time input and output formats. For more information, check TIME_OUTPUT_FORMAT docs.
- timestampDayIsAlways24h Boolean - Specifies whether the DATEADD function (and its aliases) always consider a day to be exactly 24 hours for expressions that span multiple days. For more information, check TIMESTAMP_DAY_IS_ALWAYS_24H docs.
- timestampInputFormat String - Specifies the input format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. Any valid, supported timestamp format or AUTO (AUTO specifies that Snowflake attempts to automatically detect the format of timestamps stored in the system during the session). For more information, check TIMESTAMP_INPUT_FORMAT docs.
- timestampLtzOutputFormat String - Specifies the display format for the TIMESTAMP_LTZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_LTZ_OUTPUT_FORMAT docs.
- timestampNtzOutputFormat String - Specifies the display format for the TIMESTAMP_NTZ data type. For more information, check TIMESTAMP_NTZ_OUTPUT_FORMAT docs.
- timestampOutputFormat String - Specifies the display format for the TIMESTAMP data type alias. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_OUTPUT_FORMAT docs.
- timestampTypeMapping String - Specifies the TIMESTAMP_* variation that the TIMESTAMP data type alias maps to. For more information, check TIMESTAMP_TYPE_MAPPING docs.
- timestampTzOutputFormat String - Specifies the display format for the TIMESTAMP_TZ data type. If no format is specified, defaults to TIMESTAMP_OUTPUT_FORMAT. For more information, see Date and time input and output formats. For more information, check TIMESTAMP_TZ_OUTPUT_FORMAT docs.
- timezone String - Specifies the time zone for the session. You can specify a time zone name or a link name from release 2021a of the IANA Time Zone Database (e.g. America/Los_Angeles, Europe/London, UTC, Etc/GMT, etc.). For more information, check TIMEZONE docs.
- traceLevel String - Controls how trace events are ingested into the event table. For more information about trace levels, see Setting trace level. For more information, check TRACE_LEVEL docs.
- transactionAbortOnError Boolean - Specifies the action to perform when a statement issued within a non-autocommit transaction returns with an error. For more information, check TRANSACTION_ABORT_ON_ERROR docs.
- transactionDefaultIsolationLevel String - Specifies the isolation level for transactions in the user session. For more information, check TRANSACTION_DEFAULT_ISOLATION_LEVEL docs.
- twoDigitCenturyStart Number - Specifies the “century start” year for 2-digit years (i.e. the earliest year such dates can represent). This parameter prevents ambiguous dates when importing or converting data with the YY date format component (i.e. years represented as 2 digits). For more information, check TWO_DIGIT_CENTURY_START docs.
- unsupportedDdlAction String - Determines if an unsupported (i.e. non-default) value specified for a constraint property returns an error. For more information, check UNSUPPORTED_DDL_ACTION docs.
- useCachedResult Boolean - Specifies whether to reuse persisted query results, if available, when a matching query is submitted. For more information, check USE_CACHED_RESULT docs.
- userTaskManagedInitialWarehouseSize String - Specifies the size of the compute resources to provision for the first run of the task, before a task history is available for Snowflake to determine an ideal size. Once a task has successfully completed a few runs, Snowflake ignores this parameter setting. Valid values are (case-insensitive): %s. (Conflicts with warehouse.) For more information about warehouses, see docs. For more information, check USER_TASK_MANAGED_INITIAL_WAREHOUSE_SIZE docs.
- userTaskMinimumTriggerIntervalInSeconds Number - Minimum amount of time between Triggered Task executions, in seconds. For more information, check USER_TASK_MINIMUM_TRIGGER_INTERVAL_IN_SECONDS docs.
- userTaskTimeoutMs Number - Specifies the time limit on a single run of the task before it times out (in milliseconds). For more information, check USER_TASK_TIMEOUT_MS docs.
- warehouse String - The warehouse the task will use. Omit this parameter to use Snowflake-managed compute resources for runs of this task. Due to Snowflake limitations, the warehouse identifier can consist of only upper-cased letters. (Conflicts with userTaskManagedInitialWarehouseSize.) For more information about this resource, see docs.
- weekOfYearPolicy Number - Specifies how the weeks in a given year are computed. 0: The semantics used are equivalent to the ISO semantics, in which a week belongs to a given year if at least 4 days of that week are in that year. 1: January 1 is included in the first week of the year and December 31 is included in the last week of the year. For more information, check WEEK_OF_YEAR_POLICY docs.
- weekStart Number - Specifies the first day of the week (used by week-related date functions). 0: Legacy Snowflake behavior is used (i.e. ISO-like semantics). 1 (Monday) to 7 (Sunday): All the week-related functions use weeks that start on the specified day of the week. For more information, check WEEK_START docs.
- when String - Specifies a Boolean SQL expression; multiple conditions joined with AND/OR are supported. When a task is triggered (based on its SCHEDULE or AFTER setting), it validates the conditions of the expression to determine whether to execute. If the conditions of the expression are not met, then the task skips the current run. Any tasks that identify this task as a predecessor also don’t run.
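The graph-related arguments above (afters, finalize, config, when) fit together as in the following Python sketch. It is illustrative only: every identifier is invented, the schedule interval sub-field is assumed to take an integer number of minutes, and the assumption that afters and finalize accept fully qualified task names should be verified against the provider's own examples.

import json
import pulumi_snowflake as snowflake

# Root task: owns the schedule and a config blob shared by the whole graph.
root = snowflake.Task(
    "root",
    database="MY_DB",
    schema="MY_SCHEMA",
    name="ROOT_TASK",
    warehouse="MY_WH",  # hypothetical warehouse; identifier must be upper-cased
    sql_statement="CALL MY_SCHEMA.EXTRACT()",
    schedule=snowflake.TaskScheduleArgs(minutes=60),  # assumed integer interval in minutes
    config=json.dumps({"env": "prod"}),
    started=True,
)

# Child task: runs after the root, and only when the stream has data.
child = snowflake.Task(
    "child",
    database="MY_DB",
    schema="MY_SCHEMA",
    name="TRANSFORM_TASK",
    warehouse="MY_WH",
    sql_statement="CALL MY_SCHEMA.TRANSFORM()",
    afters=[root.fully_qualified_name],  # assumption: predecessors referenced by fully qualified name
    when="SYSTEM$STREAM_HAS_DATA('MY_SCHEMA.RAW_STREAM')",
    started=True,
)

# Finalizer: attached to the root task, runs after the rest of the graph completes.
cleanup = snowflake.Task(
    "cleanup",
    database="MY_DB",
    schema="MY_SCHEMA",
    name="CLEANUP_TASK",
    warehouse="MY_WH",
    sql_statement="CALL MY_SCHEMA.CLEANUP()",
    finalize=root.fully_qualified_name,  # assumption: root referenced by fully qualified name
    started=True,
)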
Supporting Types
TaskParameter, TaskParameterArgs
- AbortDetachedQueries List<TaskParameterAbortDetachedQuery>
- Autocommits List<TaskParameterAutocommit>
- BinaryInputFormats List<TaskParameterBinaryInputFormat>
- BinaryOutputFormats List<TaskParameterBinaryOutputFormat>
- ClientMemoryLimits List<TaskParameterClientMemoryLimit>
- ClientMetadataRequestUseConnectionCtxes List<TaskParameterClientMetadataRequestUseConnectionCtx>
- ClientPrefetchThreads List<TaskParameterClientPrefetchThread>
- ClientResultChunkSizes List<TaskParameterClientResultChunkSize>
- ClientResultColumnCaseInsensitives List<TaskParameterClientResultColumnCaseInsensitive>
- ClientSessionKeepAliveHeartbeatFrequencies List<TaskParameterClientSessionKeepAliveHeartbeatFrequency>
- ClientSessionKeepAlives List<TaskParameterClientSessionKeepAlife>
- ClientTimestampTypeMappings List<TaskParameterClientTimestampTypeMapping>
- DateInputFormats List<TaskParameterDateInputFormat>
- DateOutputFormats List<TaskParameterDateOutputFormat>
- EnableUnloadPhysicalTypeOptimizations List<TaskParameterEnableUnloadPhysicalTypeOptimization>
- ErrorOnNondeterministicMerges List<TaskParameterErrorOnNondeterministicMerge>
- ErrorOnNondeterministicUpdates List<TaskParameterErrorOnNondeterministicUpdate>
- GeographyOutputFormats List<TaskParameterGeographyOutputFormat>
- GeometryOutputFormats List<TaskParameterGeometryOutputFormat>
- JdbcTreatTimestampNtzAsUtcs List<TaskParameterJdbcTreatTimestampNtzAsUtc>
- JdbcUseSessionTimezones List<TaskParameterJdbcUseSessionTimezone>
- JsonIndents List<TaskParameterJsonIndent>
- LockTimeouts List<TaskParameterLockTimeout>
- LogLevels List<TaskParameterLogLevel>
- MultiStatementCounts List<TaskParameterMultiStatementCount>
- NoorderSequenceAsDefaults List<TaskParameterNoorderSequenceAsDefault>
- OdbcTreatDecimalAsInts List<TaskParameterOdbcTreatDecimalAsInt>
- QueryTags List<TaskParameterQueryTag>
- QuotedIdentifiersIgnoreCases List<TaskParameterQuotedIdentifiersIgnoreCase>
- RowsPerResultsets List<TaskParameterRowsPerResultset>
- S3StageVpceDnsNames List<TaskParameterS3StageVpceDnsName>
- SearchPaths List<TaskParameterSearchPath>
- StatementQueuedTimeoutInSeconds List<TaskParameterStatementQueuedTimeoutInSecond>
- StatementTimeoutInSeconds List<TaskParameterStatementTimeoutInSecond>
- StrictJsonOutputs List<TaskParameterStrictJsonOutput>
- SuspendTaskAfterNumFailures List<TaskParameterSuspendTaskAfterNumFailure>
- TaskAutoRetryAttempts List<TaskParameterTaskAutoRetryAttempt>
- TimeInputFormats List<TaskParameterTimeInputFormat>
- TimeOutputFormats List<TaskParameterTimeOutputFormat>
- TimestampDayIsAlways24hs List<TaskParameterTimestampDayIsAlways24h>
- TimestampInputFormats List<TaskParameterTimestampInputFormat>
- TimestampLtzOutputFormats List<TaskParameterTimestampLtzOutputFormat>
- TimestampNtzOutputFormats List<TaskParameterTimestampNtzOutputFormat>
- TimestampOutputFormats List<TaskParameterTimestampOutputFormat>
- TimestampTypeMappings List<TaskParameterTimestampTypeMapping>
- TimestampTzOutputFormats List<TaskParameterTimestampTzOutputFormat>
- Timezones List<TaskParameterTimezone>
- TraceLevels List<TaskParameterTraceLevel>
- TransactionAbortOnErrors List<TaskParameterTransactionAbortOnError>
- TransactionDefaultIsolationLevels List<TaskParameterTransactionDefaultIsolationLevel>
- TwoDigitCenturyStarts List<TaskParameterTwoDigitCenturyStart>
- UnsupportedDdlActions List<TaskParameterUnsupportedDdlAction>
- UseCachedResults List<TaskParameterUseCachedResult>
- UserTaskManagedInitialWarehouseSizes List<TaskParameterUserTaskManagedInitialWarehouseSize>
- UserTaskMinimumTriggerIntervalInSeconds List<TaskParameterUserTaskMinimumTriggerIntervalInSecond>
- UserTaskTimeoutMs List<TaskParameterUserTaskTimeoutM>
- WeekOfYearPolicies List<TaskParameterWeekOfYearPolicy>
- WeekStarts List<TaskParameterWeekStart>
- AbortDetachedQueries []TaskParameterAbortDetachedQuery
- Autocommits []TaskParameterAutocommit
- BinaryInputFormats []TaskParameterBinaryInputFormat
- BinaryOutputFormats []TaskParameterBinaryOutputFormat
- ClientMemoryLimits []TaskParameterClientMemoryLimit
- ClientMetadataRequestUseConnectionCtxes []TaskParameterClientMetadataRequestUseConnectionCtx
- ClientPrefetchThreads []TaskParameterClientPrefetchThread
- ClientResultChunkSizes []TaskParameterClientResultChunkSize
- ClientResultColumnCaseInsensitives []TaskParameterClientResultColumnCaseInsensitive
- ClientSessionKeepAliveHeartbeatFrequencies []TaskParameterClientSessionKeepAliveHeartbeatFrequency
- ClientSessionKeepAlives []TaskParameterClientSessionKeepAlife
- ClientTimestampTypeMappings []TaskParameterClientTimestampTypeMapping
- DateInputFormats []TaskParameterDateInputFormat
- DateOutputFormats []TaskParameterDateOutputFormat
- EnableUnloadPhysicalTypeOptimizations []TaskParameterEnableUnloadPhysicalTypeOptimization
- ErrorOnNondeterministicMerges []TaskParameterErrorOnNondeterministicMerge
- ErrorOnNondeterministicUpdates []TaskParameterErrorOnNondeterministicUpdate
- GeographyOutputFormats []TaskParameterGeographyOutputFormat
- GeometryOutputFormats []TaskParameterGeometryOutputFormat
- JdbcTreatTimestampNtzAsUtcs []TaskParameterJdbcTreatTimestampNtzAsUtc
- JdbcUseSessionTimezones []TaskParameterJdbcUseSessionTimezone
- JsonIndents []TaskParameterJsonIndent
- LockTimeouts []TaskParameterLockTimeout
- LogLevels []TaskParameterLogLevel
- MultiStatementCounts []TaskParameterMultiStatementCount
- NoorderSequenceAsDefaults []TaskParameterNoorderSequenceAsDefault
- OdbcTreatDecimalAsInts []TaskParameterOdbcTreatDecimalAsInt
- QueryTags []TaskParameterQueryTag
- QuotedIdentifiersIgnoreCases []TaskParameterQuotedIdentifiersIgnoreCase
- RowsPerResultsets []TaskParameterRowsPerResultset
- S3StageVpceDnsNames []TaskParameterS3StageVpceDnsName
- SearchPaths []TaskParameterSearchPath
- StatementQueuedTimeoutInSeconds []TaskParameterStatementQueuedTimeoutInSecond
- StatementTimeoutInSeconds []TaskParameterStatementTimeoutInSecond
- StrictJsonOutputs []TaskParameterStrictJsonOutput
- SuspendTaskAfterNumFailures []TaskParameterSuspendTaskAfterNumFailure
- TaskAutoRetryAttempts []TaskParameterTaskAutoRetryAttempt
- TimeInputFormats []TaskParameterTimeInputFormat
- TimeOutputFormats []TaskParameterTimeOutputFormat
- TimestampDayIsAlways24hs []TaskParameterTimestampDayIsAlways24h
- TimestampInputFormats []TaskParameterTimestampInputFormat
- TimestampLtzOutputFormats []TaskParameterTimestampLtzOutputFormat
- TimestampNtzOutputFormats []TaskParameterTimestampNtzOutputFormat
- TimestampOutputFormats []TaskParameterTimestampOutputFormat
- TimestampTypeMappings []TaskParameterTimestampTypeMapping
- TimestampTzOutputFormats []TaskParameterTimestampTzOutputFormat
- Timezones []TaskParameterTimezone
- TraceLevels []TaskParameterTraceLevel
- TransactionAbortOnErrors []TaskParameterTransactionAbortOnError
- TransactionDefaultIsolationLevels []TaskParameterTransactionDefaultIsolationLevel
- TwoDigitCenturyStarts []TaskParameterTwoDigitCenturyStart
- UnsupportedDdlActions []TaskParameterUnsupportedDdlAction
- UseCachedResults []TaskParameterUseCachedResult
- UserTaskManagedInitialWarehouseSizes []TaskParameterUserTaskManagedInitialWarehouseSize
- UserTaskMinimumTriggerIntervalInSeconds []TaskParameterUserTaskMinimumTriggerIntervalInSecond
- UserTaskTimeoutMs []TaskParameterUserTaskTimeoutM
- WeekOfYearPolicies []TaskParameterWeekOfYearPolicy
- WeekStarts []TaskParameterWeekStart
- abortDetachedQueries List<TaskParameterAbortDetachedQuery>
- autocommits List<TaskParameterAutocommit>
- binaryInputFormats List<TaskParameterBinaryInputFormat>
- binaryOutputFormats List<TaskParameterBinaryOutputFormat>
- clientMemoryLimits List<TaskParameterClientMemoryLimit>
- clientMetadataRequestUseConnectionCtxes List<TaskParameterClientMetadataRequestUseConnectionCtx>
- clientPrefetchThreads List<TaskParameterClientPrefetchThread>
- clientResultChunkSizes List<TaskParameterClientResultChunkSize>
- clientResultColumnCaseInsensitives List<TaskParameterClientResultColumnCaseInsensitive>
- clientSessionKeepAliveHeartbeatFrequencies List<TaskParameterClientSessionKeepAliveHeartbeatFrequency>
- clientSessionKeepAlives List<TaskParameterClientSessionKeepAlife>
- clientTimestampTypeMappings List<TaskParameterClientTimestampTypeMapping>
- dateInputFormats List<TaskParameterDateInputFormat>
- dateOutputFormats List<TaskParameterDateOutputFormat>
- enableUnloadPhysicalTypeOptimizations List<TaskParameterEnableUnloadPhysicalTypeOptimization>
- errorOnNondeterministicMerges List<TaskParameterErrorOnNondeterministicMerge>
- errorOnNondeterministicUpdates List<TaskParameterErrorOnNondeterministicUpdate>
- geographyOutputFormats List<TaskParameterGeographyOutputFormat>
- geometryOutputFormats List<TaskParameterGeometryOutputFormat>
- jdbcTreatTimestampNtzAsUtcs List<TaskParameterJdbcTreatTimestampNtzAsUtc>
- jdbcUseSessionTimezones List<TaskParameterJdbcUseSessionTimezone>
- jsonIndents List<TaskParameterJsonIndent>
- lockTimeouts List<TaskParameterLockTimeout>
- logLevels List<TaskParameterLogLevel>
- multiStatementCounts List<TaskParameterMultiStatementCount>
- noorderSequenceAsDefaults List<TaskParameterNoorderSequenceAsDefault>
- odbcTreatDecimalAsInts List<TaskParameterOdbcTreatDecimalAsInt>
- queryTags List<TaskParameterQueryTag>
- quotedIdentifiersIgnoreCases List<TaskParameterQuotedIdentifiersIgnoreCase>
- rowsPerResultsets List<TaskParameterRowsPerResultset>
- s3StageVpceDnsNames List<TaskParameterS3StageVpceDnsName>
- searchPaths List<TaskParameterSearchPath>
- statementQueuedTimeoutInSeconds List<TaskParameterStatementQueuedTimeoutInSecond>
- statementTimeoutInSeconds List<TaskParameterStatementTimeoutInSecond>
- strictJsonOutputs List<TaskParameterStrictJsonOutput>
- suspendTaskAfterNumFailures List<TaskParameterSuspendTaskAfterNumFailure>
- taskAutoRetryAttempts List<TaskParameterTaskAutoRetryAttempt>
- timeInputFormats List<TaskParameterTimeInputFormat>
- timeOutputFormats List<TaskParameterTimeOutputFormat>
- timestampDayIsAlways24hs List<TaskParameterTimestampDayIsAlways24h>
- timestampInputFormats List<TaskParameterTimestampInputFormat>
- timestampLtzOutputFormats List<TaskParameterTimestampLtzOutputFormat>
- timestampNtzOutputFormats List<TaskParameterTimestampNtzOutputFormat>
- timestampOutputFormats List<TaskParameterTimestampOutputFormat>
- timestampTypeMappings List<TaskParameterTimestampTypeMapping>
- timestampTzOutputFormats List<TaskParameterTimestampTzOutputFormat>
- timezones List<TaskParameterTimezone>
- traceLevels List<TaskParameterTraceLevel>
- transactionAbortOnErrors List<TaskParameterTransactionAbortOnError>
- transactionDefaultIsolationLevels List<TaskParameterTransactionDefaultIsolationLevel>
- twoDigitCenturyStarts List<TaskParameterTwoDigitCenturyStart>
- unsupportedDdlActions List<TaskParameterUnsupportedDdlAction>
- useCachedResults List<TaskParameterUseCachedResult>
- userTaskManagedInitialWarehouseSizes List<TaskParameterUserTaskManagedInitialWarehouseSize>
- userTaskMinimumTriggerIntervalInSeconds List<TaskParameterUserTaskMinimumTriggerIntervalInSecond>
- userTaskTimeoutMs List<TaskParameterUserTaskTimeoutM>
- weekOfYearPolicies List<TaskParameterWeekOfYearPolicy>
- weekStarts List<TaskParameterWeekStart>
- abortDetachedQueries TaskParameterAbortDetachedQuery[]
- autocommits TaskParameterAutocommit[]
- binaryInputFormats TaskParameterBinaryInputFormat[]
- binaryOutputFormats TaskParameterBinaryOutputFormat[]
- clientMemoryLimits TaskParameterClientMemoryLimit[]
- clientMetadataRequestUseConnectionCtxes TaskParameterClientMetadataRequestUseConnectionCtx[]
- clientPrefetchThreads TaskParameterClientPrefetchThread[]
- clientResultChunkSizes TaskParameterClientResultChunkSize[]
- clientResultColumnCaseInsensitives TaskParameterClientResultColumnCaseInsensitive[]
- clientSessionKeepAliveHeartbeatFrequencies TaskParameterClientSessionKeepAliveHeartbeatFrequency[]
- clientSessionKeepAlives TaskParameterClientSessionKeepAlife[]
- clientTimestampTypeMappings TaskParameterClientTimestampTypeMapping[]
- dateInputFormats TaskParameterDateInputFormat[]
- dateOutputFormats TaskParameterDateOutputFormat[]
- enableUnloadPhysicalTypeOptimizations TaskParameterEnableUnloadPhysicalTypeOptimization[]
- errorOnNondeterministicMerges TaskParameterErrorOnNondeterministicMerge[]
- errorOnNondeterministicUpdates TaskParameterErrorOnNondeterministicUpdate[]
- geographyOutputFormats TaskParameterGeographyOutputFormat[]
- geometryOutputFormats TaskParameterGeometryOutputFormat[]
- jdbcTreatTimestampNtzAsUtcs TaskParameterJdbcTreatTimestampNtzAsUtc[]
- jdbcUseSessionTimezones TaskParameterJdbcUseSessionTimezone[]
- jsonIndents TaskParameterJsonIndent[]
- lockTimeouts TaskParameterLockTimeout[]
- logLevels TaskParameterLogLevel[]
- multiStatementCounts TaskParameterMultiStatementCount[]
- noorderSequenceAsDefaults TaskParameterNoorderSequenceAsDefault[]
- odbcTreatDecimalAsInts TaskParameterOdbcTreatDecimalAsInt[]
- queryTags TaskParameterQueryTag[]
- quotedIdentifiersIgnoreCases TaskParameterQuotedIdentifiersIgnoreCase[]
- rowsPerResultsets TaskParameterRowsPerResultset[]
- s3StageVpceDnsNames TaskParameterS3StageVpceDnsName[]
- searchPaths TaskParameterSearchPath[]
- statementQueuedTimeoutInSeconds TaskParameterStatementQueuedTimeoutInSecond[]
- statementTimeoutInSeconds TaskParameterStatementTimeoutInSecond[]
- strictJsonOutputs TaskParameterStrictJsonOutput[]
- suspendTaskAfterNumFailures TaskParameterSuspendTaskAfterNumFailure[]
- taskAutoRetryAttempts TaskParameterTaskAutoRetryAttempt[]
- timeInputFormats TaskParameterTimeInputFormat[]
- timeOutputFormats TaskParameterTimeOutputFormat[]
- timestampDayIsAlways24hs TaskParameterTimestampDayIsAlways24h[]
- timestampInputFormats TaskParameterTimestampInputFormat[]
- timestampLtzOutputFormats TaskParameterTimestampLtzOutputFormat[]
- timestampNtzOutputFormats TaskParameterTimestampNtzOutputFormat[]
- timestampOutputFormats TaskParameterTimestampOutputFormat[]
- timestampTypeMappings TaskParameterTimestampTypeMapping[]
- timestampTzOutputFormats TaskParameterTimestampTzOutputFormat[]
- timezones TaskParameterTimezone[]
- traceLevels TaskParameterTraceLevel[]
- transactionAbortOnErrors TaskParameterTransactionAbortOnError[]
- transactionDefaultIsolationLevels TaskParameterTransactionDefaultIsolationLevel[]
- twoDigitCenturyStarts TaskParameterTwoDigitCenturyStart[]
- unsupportedDdlActions TaskParameterUnsupportedDdlAction[]
- useCachedResults TaskParameterUseCachedResult[]
- userTaskManagedInitialWarehouseSizes TaskParameterUserTaskManagedInitialWarehouseSize[]
- userTaskMinimumTriggerIntervalInSeconds TaskParameterUserTaskMinimumTriggerIntervalInSecond[]
- userTaskTimeoutMs TaskParameterUserTaskTimeoutM[]
- weekOfYearPolicies TaskParameterWeekOfYearPolicy[]
- weekStarts TaskParameterWeekStart[]
- abort_detached_queries Sequence[TaskParameterAbortDetachedQuery]
- autocommits Sequence[TaskParameterAutocommit]
- binary_input_formats Sequence[TaskParameterBinaryInputFormat]
- binary_output_formats Sequence[TaskParameterBinaryOutputFormat]
- client_memory_limits Sequence[TaskParameterClientMemoryLimit]
- client_metadata_request_use_connection_ctxes Sequence[TaskParameterClientMetadataRequestUseConnectionCtx]
- client_prefetch_threads Sequence[TaskParameterClientPrefetchThread]
- client_result_chunk_sizes Sequence[TaskParameterClientResultChunkSize]
- client_result_column_case_insensitives Sequence[TaskParameterClientResultColumnCaseInsensitive]
- client_session_keep_alive_heartbeat_frequencies Sequence[TaskParameterClientSessionKeepAliveHeartbeatFrequency]
- client_session_keep_alives Sequence[TaskParameterClientSessionKeepAlife]
- client_timestamp_type_mappings Sequence[TaskParameterClientTimestampTypeMapping]
- date_input_formats Sequence[TaskParameterDateInputFormat]
- date_output_formats Sequence[TaskParameterDateOutputFormat]
- enable_unload_physical_type_optimizations Sequence[TaskParameterEnableUnloadPhysicalTypeOptimization]
- error_on_nondeterministic_merges Sequence[TaskParameterErrorOnNondeterministicMerge]
- error_on_nondeterministic_updates Sequence[TaskParameterErrorOnNondeterministicUpdate]
- geography_output_formats Sequence[TaskParameterGeographyOutputFormat]
- geometry_output_formats Sequence[TaskParameterGeometryOutputFormat]
- jdbc_treat_timestamp_ntz_as_utcs Sequence[TaskParameterJdbcTreatTimestampNtzAsUtc]
- jdbc_use_session_timezones Sequence[TaskParameterJdbcUseSessionTimezone]
- json_indents Sequence[TaskParameterJsonIndent]
- lock_timeouts Sequence[TaskParameterLockTimeout]
- log_levels Sequence[TaskParameterLogLevel]
- multi_statement_counts Sequence[TaskParameterMultiStatementCount]
- noorder_sequence_as_defaults Sequence[TaskParameterNoorderSequenceAsDefault]
- odbc_treat_decimal_as_ints Sequence[TaskParameterOdbcTreatDecimalAsInt]
- query_tags Sequence[TaskParameterQueryTag]
- quoted_identifiers_ignore_cases Sequence[TaskParameterQuotedIdentifiersIgnoreCase]
- rows_per_resultsets Sequence[TaskParameterRowsPerResultset]
- s3_stage_vpce_dns_names Sequence[TaskParameterS3StageVpceDnsName]
- search_paths Sequence[TaskParameterSearchPath]
- statement_queued_timeout_in_seconds Sequence[TaskParameterStatementQueuedTimeoutInSecond]
- statement_timeout_in_seconds Sequence[TaskParameterStatementTimeoutInSecond]
- strict_json_outputs Sequence[TaskParameterStrictJsonOutput]
- suspend_task_after_num_failures Sequence[TaskParameterSuspendTaskAfterNumFailure]
- task_auto_retry_attempts Sequence[TaskParameterTaskAutoRetryAttempt]
- time_input_formats Sequence[TaskParameterTimeInputFormat]
- time_output_formats Sequence[TaskParameterTimeOutputFormat]
- timestamp_day_is_always24hs Sequence[TaskParameterTimestampDayIsAlways24h]
- timestamp_input_formats Sequence[TaskParameterTimestampInputFormat]
- timestamp_ltz_output_formats Sequence[TaskParameterTimestampLtzOutputFormat]
- timestamp_ntz_output_formats Sequence[TaskParameterTimestampNtzOutputFormat]
- timestamp_output_formats Sequence[TaskParameterTimestampOutputFormat]
- timestamp_type_mappings Sequence[TaskParameterTimestampTypeMapping]
- timestamp_tz_output_formats Sequence[TaskParameterTimestampTzOutputFormat]
- timezones Sequence[TaskParameterTimezone]
- trace_levels Sequence[TaskParameterTraceLevel]
- transaction_abort_on_errors Sequence[TaskParameterTransactionAbortOnError]
- transaction_default_isolation_levels Sequence[TaskParameterTransactionDefaultIsolationLevel]
- two_digit_century_starts Sequence[TaskParameterTwoDigitCenturyStart]
- unsupported_ddl_actions Sequence[TaskParameterUnsupportedDdlAction]
- use_cached_results Sequence[TaskParameterUseCachedResult]
- user_task_managed_initial_warehouse_sizes Sequence[TaskParameterUserTaskManagedInitialWarehouseSize]
- user_task_minimum_trigger_interval_in_seconds Sequence[TaskParameterUserTaskMinimumTriggerIntervalInSecond]
- user_task_timeout_ms Sequence[TaskParameterUserTaskTimeoutM]
- week_of_year_policies Sequence[TaskParameterWeekOfYearPolicy]
- week_starts Sequence[TaskParameterWeekStart]
- abortDetachedQueries List<Property Map>
- autocommits List<Property Map>
- binaryInputFormats List<Property Map>
- binaryOutputFormats List<Property Map>
- clientMemoryLimits List<Property Map>
- clientMetadataRequestUseConnectionCtxes List<Property Map>
- clientPrefetchThreads List<Property Map>
- clientResultChunkSizes List<Property Map>
- clientResultColumnCaseInsensitives List<Property Map>
- clientSessionKeepAliveHeartbeatFrequencies List<Property Map>
- clientSessionKeepAlives List<Property Map>
- clientTimestampTypeMappings List<Property Map>
- dateInputFormats List<Property Map>
- dateOutputFormats List<Property Map>
- enableUnloadPhysicalTypeOptimizations List<Property Map>
- errorOnNondeterministicMerges List<Property Map>
- errorOnNondeterministicUpdates List<Property Map>
- geographyOutputFormats List<Property Map>
- geometryOutputFormats List<Property Map>
- jdbcTreatTimestampNtzAsUtcs List<Property Map>
- jdbcUseSessionTimezones List<Property Map>
- jsonIndents List<Property Map>
- lockTimeouts List<Property Map>
- logLevels List<Property Map>
- multiStatementCounts List<Property Map>
- noorderSequenceAsDefaults List<Property Map>
- odbcTreatDecimalAsInts List<Property Map>
- queryTags List<Property Map>
- quotedIdentifiersIgnoreCases List<Property Map>
- rowsPerResultsets List<Property Map>
- s3StageVpceDnsNames List<Property Map>
- searchPaths List<Property Map>
- statementQueuedTimeoutInSeconds List<Property Map>
- statementTimeoutInSeconds List<Property Map>
- strictJsonOutputs List<Property Map>
- suspendTaskAfterNumFailures List<Property Map>
- taskAutoRetryAttempts List<Property Map>
- timeInputFormats List<Property Map>
- timeOutputFormats List<Property Map>
- timestampDayIsAlways24hs List<Property Map>
- timestampInputFormats List<Property Map>
- timestampLtzOutputFormats List<Property Map>
- timestampNtzOutputFormats List<Property Map>
- timestampOutputFormats List<Property Map>
- timestampTypeMappings List<Property Map>
- timestampTzOutputFormats List<Property Map>
- timezones List<Property Map>
- traceLevels List<Property Map>
- transactionAbortOnErrors List<Property Map>
- transactionDefaultIsolationLevels List<Property Map>
- twoDigitCenturyStarts List<Property Map>
- unsupportedDdlActions List<Property Map>
- useCachedResults List<Property Map>
- userTaskManagedInitialWarehouseSizes List<Property Map>
- userTaskMinimumTriggerIntervalInSeconds List<Property Map>
- userTaskTimeoutMs List<Property Map>
- weekOfYearPolicies List<Property Map>
- weekStarts List<Property Map>
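The TaskParameter type above is the element of the computed parameters output: it mirrors SHOW PARAMETERS IN TASK, and each of its fields (timezones, userTaskTimeoutMs, and so on) is a list of key/value/level/default/description records of the nested types documented below. A hedged Python sketch of reading one of these values, assuming a task adopted by its fully qualified name (all identifiers are hypothetical, and the single-entry/single-record indexing is an assumption about the list shapes shown here):

import pulumi
import pulumi_snowflake as snowflake

# Adopt an existing task into the program by its fully qualified name (hypothetical).
existing = snowflake.Task.get("existing-task", '"MY_DB"."MY_SCHEMA"."NIGHTLY_ROLLUP"')

# Pull the effective TIMEZONE parameter out of the SHOW PARAMETERS IN TASK mirror.
effective_timezone = existing.parameters.apply(
    lambda params: params[0].timezones[0].value
    if params and params[0].timezones
    else None
)
pulumi.export("task_timezone", effective_timezone)

# show_outputs mirrors SHOW TASKS for this task; exporting it can help debugging.
pulumi.export("task_show_output", existing.show_outputs)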
TaskParameterAbortDetachedQuery, TaskParameterAbortDetachedQueryArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterAutocommit, TaskParameterAutocommitArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterBinaryInputFormat, TaskParameterBinaryInputFormatArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterBinaryOutputFormat, TaskParameterBinaryOutputFormatArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterClientMemoryLimit, TaskParameterClientMemoryLimitArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterClientMetadataRequestUseConnectionCtx, TaskParameterClientMetadataRequestUseConnectionCtxArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterClientPrefetchThread, TaskParameterClientPrefetchThreadArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterClientResultChunkSize, TaskParameterClientResultChunkSizeArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterClientResultColumnCaseInsensitive, TaskParameterClientResultColumnCaseInsensitiveArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterClientSessionKeepAlife, TaskParameterClientSessionKeepAlifeArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterClientSessionKeepAliveHeartbeatFrequency, TaskParameterClientSessionKeepAliveHeartbeatFrequencyArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterClientTimestampTypeMapping, TaskParameterClientTimestampTypeMappingArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterDateInputFormat, TaskParameterDateInputFormatArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterDateOutputFormat, TaskParameterDateOutputFormatArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterEnableUnloadPhysicalTypeOptimization, TaskParameterEnableUnloadPhysicalTypeOptimizationArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterErrorOnNondeterministicMerge, TaskParameterErrorOnNondeterministicMergeArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterErrorOnNondeterministicUpdate, TaskParameterErrorOnNondeterministicUpdateArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterGeographyOutputFormat, TaskParameterGeographyOutputFormatArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterGeometryOutputFormat, TaskParameterGeometryOutputFormatArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterJdbcTreatTimestampNtzAsUtc, TaskParameterJdbcTreatTimestampNtzAsUtcArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterJdbcUseSessionTimezone, TaskParameterJdbcUseSessionTimezoneArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterJsonIndent, TaskParameterJsonIndentArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterLockTimeout, TaskParameterLockTimeoutArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterLogLevel, TaskParameterLogLevelArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterMultiStatementCount, TaskParameterMultiStatementCountArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterNoorderSequenceAsDefault, TaskParameterNoorderSequenceAsDefaultArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterOdbcTreatDecimalAsInt, TaskParameterOdbcTreatDecimalAsIntArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterQueryTag, TaskParameterQueryTagArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterQuotedIdentifiersIgnoreCase, TaskParameterQuotedIdentifiersIgnoreCaseArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterRowsPerResultset, TaskParameterRowsPerResultsetArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterS3StageVpceDnsName, TaskParameterS3StageVpceDnsNameArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterSearchPath, TaskParameterSearchPathArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterStatementQueuedTimeoutInSecond, TaskParameterStatementQueuedTimeoutInSecondArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterStatementTimeoutInSecond, TaskParameterStatementTimeoutInSecondArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterStrictJsonOutput, TaskParameterStrictJsonOutputArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterSuspendTaskAfterNumFailure, TaskParameterSuspendTaskAfterNumFailureArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTaskAutoRetryAttempt, TaskParameterTaskAutoRetryAttemptArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTimeInputFormat, TaskParameterTimeInputFormatArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTimeOutputFormat, TaskParameterTimeOutputFormatArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTimestampDayIsAlways24h, TaskParameterTimestampDayIsAlways24hArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTimestampInputFormat, TaskParameterTimestampInputFormatArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTimestampLtzOutputFormat, TaskParameterTimestampLtzOutputFormatArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTimestampNtzOutputFormat, TaskParameterTimestampNtzOutputFormatArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTimestampOutputFormat, TaskParameterTimestampOutputFormatArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTimestampTypeMapping, TaskParameterTimestampTypeMappingArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTimestampTzOutputFormat, TaskParameterTimestampTzOutputFormatArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTimezone, TaskParameterTimezoneArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTraceLevel, TaskParameterTraceLevelArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTransactionAbortOnError, TaskParameterTransactionAbortOnErrorArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTransactionDefaultIsolationLevel, TaskParameterTransactionDefaultIsolationLevelArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterTwoDigitCenturyStart, TaskParameterTwoDigitCenturyStartArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterUnsupportedDdlAction, TaskParameterUnsupportedDdlActionArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterUseCachedResult, TaskParameterUseCachedResultArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterUserTaskManagedInitialWarehouseSize, TaskParameterUserTaskManagedInitialWarehouseSizeArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterUserTaskMinimumTriggerIntervalInSecond, TaskParameterUserTaskMinimumTriggerIntervalInSecondArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterUserTaskTimeoutM, TaskParameterUserTaskTimeoutMArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterWeekOfYearPolicy, TaskParameterWeekOfYearPolicyArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskParameterWeekStart, TaskParameterWeekStartArgs
- Default string
- Description string
- Key string
- Level string
- Value string
- Default string
- Description string
- Key string
- Level string
- Value string
- default_ String
- description String
- key String
- level String
- value String
- default string
- description string
- key string
- level string
- value string
- default str
- description str
- key str
- level str
- value str
- default String
- description String
- key String
- level String
- value String
TaskSchedule, TaskScheduleArgs
- Minutes int - Specifies an interval (in minutes) of wait time inserted between runs of the task. Accepts positive integers only. (conflicts with using_cron)
- UsingCron string - Specifies a cron expression and time zone for periodically running the task. Supports a subset of standard cron utility syntax. (conflicts with minutes)
- Minutes int - Specifies an interval (in minutes) of wait time inserted between runs of the task. Accepts positive integers only. (conflicts with using_cron)
- UsingCron string - Specifies a cron expression and time zone for periodically running the task. Supports a subset of standard cron utility syntax. (conflicts with minutes)
- minutes Integer - Specifies an interval (in minutes) of wait time inserted between runs of the task. Accepts positive integers only. (conflicts with using_cron)
- usingCron String - Specifies a cron expression and time zone for periodically running the task. Supports a subset of standard cron utility syntax. (conflicts with minutes)
- minutes number - Specifies an interval (in minutes) of wait time inserted between runs of the task. Accepts positive integers only. (conflicts with using_cron)
- usingCron string - Specifies a cron expression and time zone for periodically running the task. Supports a subset of standard cron utility syntax. (conflicts with minutes)
- minutes int - Specifies an interval (in minutes) of wait time inserted between runs of the task. Accepts positive integers only. (conflicts with using_cron)
- using_cron str - Specifies a cron expression and time zone for periodically running the task. Supports a subset of standard cron utility syntax. (conflicts with minutes)
- minutes Number - Specifies an interval (in minutes) of wait time inserted between runs of the task. Accepts positive integers only. (conflicts with using_cron)
- usingCron String - Specifies a cron expression and time zone for periodically running the task. Supports a subset of standard cron utility syntax. (conflicts with minutes)
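A schedule accepts either an interval or a cron expression, never both. A minimal TypeScript sketch showing both forms; the database, schema, and CALL statements are placeholders:

import * as snowflake from "@pulumi/snowflake";

// Interval-based schedule; mutually exclusive with usingCron.
const everyTenMinutes = new snowflake.Task("every_ten_minutes", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    sqlStatement: "CALL MY_DB.MY_SCHEMA.REFRESH_PROC()",
    started: true,
    schedule: { minutes: 10 },
});

// Cron-based schedule with an explicit time zone; mutually exclusive with minutes.
const nightly = new snowflake.Task("nightly", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    sqlStatement: "CALL MY_DB.MY_SCHEMA.NIGHTLY_ROLLUP()",
    started: true,
    schedule: { usingCron: "0 3 * * * UTC" },
});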
TaskShowOutput, TaskShowOutputArgs
- AllowOverlappingExecution bool
- Budget string
- Comment string
- Condition string
- Config string
- CreatedOn string
- DatabaseName string
- Definition string
- ErrorIntegration string
- Id string
- LastCommittedOn string
- LastSuspendedOn string
- LastSuspendedReason string
- Name string
- Owner string
- OwnerRoleType string
- Predecessors List<string>
- Schedule string
- SchemaName string
- State string
- TaskRelations List<TaskShowOutputTaskRelation>
- Warehouse string
- AllowOverlappingExecution bool
- Budget string
- Comment string
- Condition string
- Config string
- CreatedOn string
- DatabaseName string
- Definition string
- ErrorIntegration string
- Id string
- LastCommittedOn string
- LastSuspendedOn string
- LastSuspendedReason string
- Name string
- Owner string
- OwnerRoleType string
- Predecessors []string
- Schedule string
- SchemaName string
- State string
- TaskRelations []TaskShowOutputTaskRelation
- Warehouse string
- allowOverlappingExecution Boolean
- budget String
- comment String
- condition String
- config String
- createdOn String
- databaseName String
- definition String
- errorIntegration String
- id String
- lastCommittedOn String
- lastSuspendedOn String
- lastSuspendedReason String
- name String
- owner String
- ownerRoleType String
- predecessors List<String>
- schedule String
- schemaName String
- state String
- taskRelations List<TaskShowOutputTaskRelation>
- warehouse String
- allowOverlappingExecution boolean
- budget string
- comment string
- condition string
- config string
- createdOn string
- databaseName string
- definition string
- errorIntegration string
- id string
- lastCommittedOn string
- lastSuspendedOn string
- lastSuspendedReason string
- name string
- owner string
- ownerRoleType string
- predecessors string[]
- schedule string
- schemaName string
- state string
- taskRelations TaskShowOutputTaskRelation[]
- warehouse string
- allow_overlapping_execution bool
- budget str
- comment str
- condition str
- config str
- created_on str
- database_name str
- definition str
- error_integration str
- id str
- last_committed_on str
- last_suspended_on str
- last_suspended_reason str
- name str
- owner str
- owner_role_type str
- predecessors Sequence[str]
- schedule str
- schema_name str
- state str
- task_relations Sequence[TaskShowOutputTaskRelation]
- warehouse str
- allowOverlappingExecution Boolean
- budget String
- comment String
- condition String
- config String
- createdOn String
- databaseName String
- definition String
- errorIntegration String
- id String
- lastCommittedOn String
- lastSuspendedOn String
- lastSuspendedReason String
- name String
- owner String
- ownerRoleType String
- predecessors List<String>
- schedule String
- schemaName String
- state String
- taskRelations List<Property Map>
- warehouse String
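These fields mirror what Snowflake reports for SHOW TASKS and are computed after the task exists. A minimal TypeScript sketch of reading a couple of them, assuming the resource exposes this data as a showOutputs list; the database, schema, and statement are placeholders:

import * as snowflake from "@pulumi/snowflake";

const task = new snowflake.Task("example", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    sqlStatement: "SELECT 1",
    started: false,
});

// SHOW TASKS data arrives as a single-element list; take the first entry.
export const taskState = task.showOutputs.apply(outs => outs?.[0]?.state);
export const taskOwner = task.showOutputs.apply(outs => outs?.[0]?.owner);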
TaskShowOutputTaskRelation, TaskShowOutputTaskRelationArgs
- FinalizedRootTask string
- Finalizer string
- Predecessors List<string>
- FinalizedRootTask string
- Finalizer string
- Predecessors []string
- finalizedRootTask String
- finalizer String
- predecessors List<String>
- finalizedRootTask string
- finalizer string
- predecessors string[]
- finalized_root_task str
- finalizer str
- predecessors Sequence[str]
- finalizedRootTask String
- finalizer String
- predecessors List<String>
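The relation fields above (predecessors, finalizer, finalizedRootTask) reflect how tasks are wired together through the afters and finalize arguments. A minimal TypeScript sketch of a small task graph; the object names are placeholders, and it assumes each task's fully qualified name is available via a fullyQualifiedName output:

import * as snowflake from "@pulumi/snowflake";

// A scheduled root task, a child that runs after it, and a finalizer attached
// to the root that runs once the graph completes.
const root = new snowflake.Task("root", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    sqlStatement: "CALL MY_DB.MY_SCHEMA.EXTRACT()",
    schedule: { minutes: 60 },
    started: true,
});

const child = new snowflake.Task("child", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    sqlStatement: "CALL MY_DB.MY_SCHEMA.TRANSFORM()",
    afters: [root.fullyQualifiedName],   // this task's predecessors
    started: true,
});

const finalizer = new snowflake.Task("finalizer", {
    database: "MY_DB",
    schema: "MY_SCHEMA",
    sqlStatement: "CALL MY_DB.MY_SCHEMA.CLEANUP()",
    finalize: root.fullyQualifiedName,   // root task this finalizer belongs to
    started: true,
});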
Package Details
- Repository: Snowflake pulumi/pulumi-snowflake
- License: Apache-2.0
- Notes: This Pulumi package is based on the snowflake Terraform Provider.