
airbyte.DestinationRedshift

airbyte 0.8.0-beta2 published on Thursday, Mar 27, 2025 by airbytehq

    DestinationRedshift Resource

    Example Usage

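    Not all language examples have been generated for this resource yet. As a sketch of the missing tabs, the Python program below mirrors the YAML example that follows; the pulumi_airbyte module name is an assumption, and all values are placeholders.

    import pulumi
    import pulumi_airbyte as airbyte  # assumed Python SDK module name for this provider

    # Mirrors the YAML example below: SSH key tunnel plus S3 staging upload.
    my_destination_redshift = airbyte.DestinationRedshift(
        "myDestinationRedshift",
        configuration={
            "database": "...my_database...",
            "disable_type_dedupe": False,
            "drop_cascade": False,
            "host": "...my_host...",
            "jdbc_url_params": "...my_jdbc_url_params...",
            "password": "...my_password...",
            "port": 5439,
            "raw_data_schema": "...my_raw_data_schema...",
            "schema": "public",
            "tunnel_method": {
                "ssh_key_authentication": {
                    "ssh_key": "...my_ssh_key...",
                    "tunnel_host": "...my_tunnel_host...",
                    "tunnel_port": 22,
                    "tunnel_user": "...my_tunnel_user...",
                },
            },
            "uploading_method": {
                "awss3_staging": {
                    "access_key_id": "...my_access_key_id...",
                    "file_name_pattern": "{date}",
                    "purge_staging_data": False,
                    "s3_bucket_name": "airbyte.staging",
                    "s3_bucket_path": "data_sync/test",
                    "s3_bucket_region": "eu-west-2",
                    "secret_access_key": "...my_secret_access_key...",
                },
            },
        },
        definition_id="50bfb2e7-1ca1-4132-b623-8606f328175d",
        workspace_id="e25c2049-8986-4945-a3f6-604de181966d",
    )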
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.airbyte.DestinationRedshift;
    import com.pulumi.airbyte.DestinationRedshiftArgs;
    import com.pulumi.airbyte.inputs.DestinationRedshiftConfigurationArgs;
    import com.pulumi.airbyte.inputs.DestinationRedshiftConfigurationTunnelMethodArgs;
    import com.pulumi.airbyte.inputs.DestinationRedshiftConfigurationTunnelMethodSshKeyAuthenticationArgs;
    import com.pulumi.airbyte.inputs.DestinationRedshiftConfigurationUploadingMethodArgs;
    import com.pulumi.airbyte.inputs.DestinationRedshiftConfigurationUploadingMethodAwss3StagingArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var myDestinationRedshift = new DestinationRedshift("myDestinationRedshift", DestinationRedshiftArgs.builder()
                .configuration(DestinationRedshiftConfigurationArgs.builder()
                    .database("...my_database...")
                    .disableTypeDedupe(false)
                    .dropCascade(false)
                    .host("...my_host...")
                    .jdbcUrlParams("...my_jdbc_url_params...")
                    .password("...my_password...")
                    .port(5439)
                    .rawDataSchema("...my_raw_data_schema...")
                    .schema("public")
                    .tunnelMethod(DestinationRedshiftConfigurationTunnelMethodArgs.builder()
                        .sshKeyAuthentication(DestinationRedshiftConfigurationTunnelMethodSshKeyAuthenticationArgs.builder()
                            .sshKey("...my_ssh_key...")
                            .tunnelHost("...my_tunnel_host...")
                            .tunnelPort(22)
                            .tunnelUser("...my_tunnel_user...")
                            .build())
                        .build())
                    .uploadingMethod(DestinationRedshiftConfigurationUploadingMethodArgs.builder()
                        .awss3Staging(DestinationRedshiftConfigurationUploadingMethodAwss3StagingArgs.builder()
                            .accessKeyId("...my_access_key_id...")
                            .fileNamePattern("{date}")
                            .purgeStagingData(false)
                            .s3BucketName("airbyte.staging")
                            .s3BucketPath("data_sync/test")
                            .s3BucketRegion("eu-west-2")
                            .secretAccessKey("...my_secret_access_key...")
                            .build())
                        .build())
                    .username("...my_username...")
                    .build())
                .definitionId("50bfb2e7-1ca1-4132-b623-8606f328175d")
                .workspaceId("e25c2049-8986-4945-a3f6-604de181966d")
                .build());
    
        }
    }
    
    resources:
      myDestinationRedshift:
        type: airbyte:DestinationRedshift
        properties:
          configuration:
            database: '...my_database...'
            disable_type_dedupe: false
            drop_cascade: false
            host: '...my_host...'
            jdbc_url_params: '...my_jdbc_url_params...'
            password: '...my_password...'
            port: 5439
            raw_data_schema: '...my_raw_data_schema...'
            schema: public
            tunnel_method:
              sshKeyAuthentication:
                sshKey: '...my_ssh_key...'
                tunnelHost: '...my_tunnel_host...'
                tunnelPort: 22
                tunnelUser: '...my_tunnel_user...'
            uploading_method:
              awss3Staging:
                accessKeyId: '...my_access_key_id...'
                fileNamePattern: '{date}'
                purgeStagingData: false
                s3BucketName: airbyte.staging
                s3BucketPath: data_sync/test
                s3BucketRegion: eu-west-2
                secretAccessKey: '...my_secret_access_key...'
            username: '...my_username...'
          definitionId: 50bfb2e7-1ca1-4132-b623-8606f328175d
          workspaceId: e25c2049-8986-4945-a3f6-604de181966d
    

    Create DestinationRedshift Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new DestinationRedshift(name: string, args: DestinationRedshiftArgs, opts?: CustomResourceOptions);
    @overload
    def DestinationRedshift(resource_name: str,
                            args: DestinationRedshiftArgs,
                            opts: Optional[ResourceOptions] = None)
    
    @overload
    def DestinationRedshift(resource_name: str,
                            opts: Optional[ResourceOptions] = None,
                            configuration: Optional[DestinationRedshiftConfigurationArgs] = None,
                            workspace_id: Optional[str] = None,
                            definition_id: Optional[str] = None,
                            name: Optional[str] = None)
    func NewDestinationRedshift(ctx *Context, name string, args DestinationRedshiftArgs, opts ...ResourceOption) (*DestinationRedshift, error)
    public DestinationRedshift(string name, DestinationRedshiftArgs args, CustomResourceOptions? opts = null)
    public DestinationRedshift(String name, DestinationRedshiftArgs args)
    public DestinationRedshift(String name, DestinationRedshiftArgs args, CustomResourceOptions options)
    
    type: airbyte:DestinationRedshift
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args DestinationRedshiftArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args DestinationRedshiftArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args DestinationRedshiftArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args DestinationRedshiftArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args DestinationRedshiftArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var destinationRedshiftResource = new Airbyte.DestinationRedshift("destinationRedshiftResource", new()
    {
        Configuration = new Airbyte.Inputs.DestinationRedshiftConfigurationArgs
        {
            Database = "string",
            Host = "string",
            Password = "string",
            Username = "string",
            DisableTypeDedupe = false,
            DropCascade = false,
            JdbcUrlParams = "string",
            Port = 0,
            RawDataSchema = "string",
            Schema = "string",
            TunnelMethod = new Airbyte.Inputs.DestinationRedshiftConfigurationTunnelMethodArgs
            {
                NoTunnel = null,
                PasswordAuthentication = new Airbyte.Inputs.DestinationRedshiftConfigurationTunnelMethodPasswordAuthenticationArgs
                {
                    TunnelHost = "string",
                    TunnelUser = "string",
                    TunnelUserPassword = "string",
                    TunnelPort = 0,
                },
                SshKeyAuthentication = new Airbyte.Inputs.DestinationRedshiftConfigurationTunnelMethodSshKeyAuthenticationArgs
                {
                    SshKey = "string",
                    TunnelHost = "string",
                    TunnelUser = "string",
                    TunnelPort = 0,
                },
            },
            UploadingMethod = new Airbyte.Inputs.DestinationRedshiftConfigurationUploadingMethodArgs
            {
                Awss3Staging = new Airbyte.Inputs.DestinationRedshiftConfigurationUploadingMethodAwss3StagingArgs
                {
                    AccessKeyId = "string",
                    S3BucketName = "string",
                    SecretAccessKey = "string",
                    FileNamePattern = "string",
                    PurgeStagingData = false,
                    S3BucketPath = "string",
                    S3BucketRegion = "string",
                },
            },
        },
        WorkspaceId = "string",
        DefinitionId = "string",
        Name = "string",
    });
    
    example, err := airbyte.NewDestinationRedshift(ctx, "destinationRedshiftResource", &airbyte.DestinationRedshiftArgs{
        Configuration: &airbyte.DestinationRedshiftConfigurationArgs{
            Database:          pulumi.String("string"),
            Host:              pulumi.String("string"),
            Password:          pulumi.String("string"),
            Username:          pulumi.String("string"),
            DisableTypeDedupe: pulumi.Bool(false),
            DropCascade:       pulumi.Bool(false),
            JdbcUrlParams:     pulumi.String("string"),
            Port:              pulumi.Float64(0),
            RawDataSchema:     pulumi.String("string"),
            Schema:            pulumi.String("string"),
            TunnelMethod: &airbyte.DestinationRedshiftConfigurationTunnelMethodArgs{
                NoTunnel: &airbyte.DestinationRedshiftConfigurationTunnelMethodNoTunnelArgs{},
                PasswordAuthentication: &airbyte.DestinationRedshiftConfigurationTunnelMethodPasswordAuthenticationArgs{
                    TunnelHost:         pulumi.String("string"),
                    TunnelUser:         pulumi.String("string"),
                    TunnelUserPassword: pulumi.String("string"),
                    TunnelPort:         pulumi.Float64(0),
                },
                SshKeyAuthentication: &airbyte.DestinationRedshiftConfigurationTunnelMethodSshKeyAuthenticationArgs{
                    SshKey:     pulumi.String("string"),
                    TunnelHost: pulumi.String("string"),
                    TunnelUser: pulumi.String("string"),
                    TunnelPort: pulumi.Float64(0),
                },
            },
            UploadingMethod: &airbyte.DestinationRedshiftConfigurationUploadingMethodArgs{
                Awss3Staging: &airbyte.DestinationRedshiftConfigurationUploadingMethodAwss3StagingArgs{
                    AccessKeyId:      pulumi.String("string"),
                    S3BucketName:     pulumi.String("string"),
                    SecretAccessKey:  pulumi.String("string"),
                    FileNamePattern:  pulumi.String("string"),
                    PurgeStagingData: pulumi.Bool(false),
                    S3BucketPath:     pulumi.String("string"),
                    S3BucketRegion:   pulumi.String("string"),
                },
            },
        },
        WorkspaceId:  pulumi.String("string"),
        DefinitionId: pulumi.String("string"),
        Name:         pulumi.String("string"),
    })
    
    var destinationRedshiftResource = new DestinationRedshift("destinationRedshiftResource", DestinationRedshiftArgs.builder()
        .configuration(DestinationRedshiftConfigurationArgs.builder()
            .database("string")
            .host("string")
            .password("string")
            .username("string")
            .disableTypeDedupe(false)
            .dropCascade(false)
            .jdbcUrlParams("string")
            .port(0)
            .rawDataSchema("string")
            .schema("string")
            .tunnelMethod(DestinationRedshiftConfigurationTunnelMethodArgs.builder()
                .noTunnel(DestinationRedshiftConfigurationTunnelMethodNoTunnelArgs.builder().build())
                .passwordAuthentication(DestinationRedshiftConfigurationTunnelMethodPasswordAuthenticationArgs.builder()
                    .tunnelHost("string")
                    .tunnelUser("string")
                    .tunnelUserPassword("string")
                    .tunnelPort(0)
                    .build())
                .sshKeyAuthentication(DestinationRedshiftConfigurationTunnelMethodSshKeyAuthenticationArgs.builder()
                    .sshKey("string")
                    .tunnelHost("string")
                    .tunnelUser("string")
                    .tunnelPort(0)
                    .build())
                .build())
            .uploadingMethod(DestinationRedshiftConfigurationUploadingMethodArgs.builder()
                .awss3Staging(DestinationRedshiftConfigurationUploadingMethodAwss3StagingArgs.builder()
                    .accessKeyId("string")
                    .s3BucketName("string")
                    .secretAccessKey("string")
                    .fileNamePattern("string")
                    .purgeStagingData(false)
                    .s3BucketPath("string")
                    .s3BucketRegion("string")
                    .build())
                .build())
            .build())
        .workspaceId("string")
        .definitionId("string")
        .name("string")
        .build());
    
    destination_redshift_resource = airbyte.DestinationRedshift("destinationRedshiftResource",
        configuration={
            "database": "string",
            "host": "string",
            "password": "string",
            "username": "string",
            "disable_type_dedupe": False,
            "drop_cascade": False,
            "jdbc_url_params": "string",
            "port": 0,
            "raw_data_schema": "string",
            "schema": "string",
            "tunnel_method": {
                "no_tunnel": {},
                "password_authentication": {
                    "tunnel_host": "string",
                    "tunnel_user": "string",
                    "tunnel_user_password": "string",
                    "tunnel_port": 0,
                },
                "ssh_key_authentication": {
                    "ssh_key": "string",
                    "tunnel_host": "string",
                    "tunnel_user": "string",
                    "tunnel_port": 0,
                },
            },
            "uploading_method": {
                "awss3_staging": {
                    "access_key_id": "string",
                    "s3_bucket_name": "string",
                    "secret_access_key": "string",
                    "file_name_pattern": "string",
                    "purge_staging_data": False,
                    "s3_bucket_path": "string",
                    "s3_bucket_region": "string",
                },
            },
        },
        workspace_id="string",
        definition_id="string",
        name="string")
    
    const destinationRedshiftResource = new airbyte.DestinationRedshift("destinationRedshiftResource", {
        configuration: {
            database: "string",
            host: "string",
            password: "string",
            username: "string",
            disableTypeDedupe: false,
            dropCascade: false,
            jdbcUrlParams: "string",
            port: 0,
            rawDataSchema: "string",
            schema: "string",
            tunnelMethod: {
                noTunnel: {},
                passwordAuthentication: {
                    tunnelHost: "string",
                    tunnelUser: "string",
                    tunnelUserPassword: "string",
                    tunnelPort: 0,
                },
                sshKeyAuthentication: {
                    sshKey: "string",
                    tunnelHost: "string",
                    tunnelUser: "string",
                    tunnelPort: 0,
                },
            },
            uploadingMethod: {
                awss3Staging: {
                    accessKeyId: "string",
                    s3BucketName: "string",
                    secretAccessKey: "string",
                    fileNamePattern: "string",
                    purgeStagingData: false,
                    s3BucketPath: "string",
                    s3BucketRegion: "string",
                },
            },
        },
        workspaceId: "string",
        definitionId: "string",
        name: "string",
    });
    
    type: airbyte:DestinationRedshift
    properties:
        configuration:
            database: string
            disableTypeDedupe: false
            dropCascade: false
            host: string
            jdbcUrlParams: string
            password: string
            port: 0
            rawDataSchema: string
            schema: string
            tunnelMethod:
                noTunnel: {}
                passwordAuthentication:
                    tunnelHost: string
                    tunnelPort: 0
                    tunnelUser: string
                    tunnelUserPassword: string
                sshKeyAuthentication:
                    sshKey: string
                    tunnelHost: string
                    tunnelPort: 0
                    tunnelUser: string
            uploadingMethod:
                awss3Staging:
                    accessKeyId: string
                    fileNamePattern: string
                    purgeStagingData: false
                    s3BucketName: string
                    s3BucketPath: string
                    s3BucketRegion: string
                    secretAccessKey: string
            username: string
        definitionId: string
        name: string
        workspaceId: string
    

    DestinationRedshift Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
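    For instance, the nested configuration input can be given either way; a minimal Python sketch (the pulumi_airbyte module name is an assumption, values are placeholders):

    import pulumi_airbyte as airbyte  # assumed module name for this provider's Python SDK

    # As a typed args class:
    configuration = airbyte.DestinationRedshiftConfigurationArgs(
        database="...my_database...",
        host="...my_host...",
        password="...my_password...",
        username="...my_username...",
    )

    # Or as an equivalent dictionary literal with snake_case keys:
    configuration = {
        "database": "...my_database...",
        "host": "...my_host...",
        "password": "...my_password...",
        "username": "...my_username...",
    }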

    The DestinationRedshift resource accepts the following input properties:

    Configuration DestinationRedshiftConfiguration
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    Name string
    Name of the destination e.g. dev-mysql-instance.
    Configuration DestinationRedshiftConfigurationArgs
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    Name string
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationRedshiftConfiguration
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    name String
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationRedshiftConfiguration
    workspaceId string
    definitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    name string
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationRedshiftConfigurationArgs
    workspace_id str
    definition_id str
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    name str
    Name of the destination e.g. dev-mysql-instance.
    configuration Property Map
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    name String
    Name of the destination e.g. dev-mysql-instance.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the DestinationRedshift resource produces the following output properties:

    CreatedAt double
    DestinationId string
    DestinationType string
    Id string
    The provider-assigned unique ID for this managed resource.
    ResourceAllocation DestinationRedshiftResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    CreatedAt float64
    DestinationId string
    DestinationType string
    Id string
    The provider-assigned unique ID for this managed resource.
    ResourceAllocation DestinationRedshiftResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    createdAt Double
    destinationId String
    destinationType String
    id String
    The provider-assigned unique ID for this managed resource.
    resourceAllocation DestinationRedshiftResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    createdAt number
    destinationId string
    destinationType string
    id string
    The provider-assigned unique ID for this managed resource.
    resourceAllocation DestinationRedshiftResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    created_at float
    destination_id str
    destination_type str
    id str
    The provider-assigned unique ID for this managed resource.
    resource_allocation DestinationRedshiftResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    createdAt Number
    destinationId String
    destinationType String
    id String
    The provider-assigned unique ID for this managed resource.
    resourceAllocation Property Map
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
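
    For instance, in Python both echoed inputs and provider-computed outputs can be read off an instance (a sketch, reusing the my_destination_redshift resource from the example above):

    # Echoed input:
    pulumi.export("workspaceId", my_destination_redshift.workspace_id)
    # Provider-computed outputs:
    pulumi.export("destinationId", my_destination_redshift.destination_id)
    pulumi.export("createdAt", my_destination_redshift.created_at)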

    Look up Existing DestinationRedshift Resource

    Get an existing DestinationRedshift resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: DestinationRedshiftState, opts?: CustomResourceOptions): DestinationRedshift
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            configuration: Optional[DestinationRedshiftConfigurationArgs] = None,
            created_at: Optional[float] = None,
            definition_id: Optional[str] = None,
            destination_id: Optional[str] = None,
            destination_type: Optional[str] = None,
            name: Optional[str] = None,
            resource_allocation: Optional[DestinationRedshiftResourceAllocationArgs] = None,
            workspace_id: Optional[str] = None) -> DestinationRedshift
    func GetDestinationRedshift(ctx *Context, name string, id IDInput, state *DestinationRedshiftState, opts ...ResourceOption) (*DestinationRedshift, error)
    public static DestinationRedshift Get(string name, Input<string> id, DestinationRedshiftState? state, CustomResourceOptions? opts = null)
    public static DestinationRedshift get(String name, Output<String> id, DestinationRedshiftState state, CustomResourceOptions options)
    resources:
      _:
        type: airbyte:DestinationRedshift
        get:
          id: ${id}
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
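    For example, a minimal Python lookup (a sketch; the ID value is a placeholder and the pulumi_airbyte module name is an assumption):

    import pulumi
    import pulumi_airbyte as airbyte

    # Look up an existing destination by its provider-assigned ID:
    existing = airbyte.DestinationRedshift.get("existing-redshift", "...destination_id...")
    pulumi.export("existingDestinationType", existing.destination_type)
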
    The following state arguments are supported:
    Configuration DestinationRedshiftConfiguration
    CreatedAt double
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    DestinationId string
    DestinationType string
    Name string
    Name of the destination e.g. dev-mysql-instance.
    ResourceAllocation DestinationRedshiftResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    WorkspaceId string
    Configuration DestinationRedshiftConfigurationArgs
    CreatedAt float64
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    DestinationId string
    DestinationType string
    Name string
    Name of the destination e.g. dev-mysql-instance.
    ResourceAllocation DestinationRedshiftResourceAllocationArgs
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    WorkspaceId string
    configuration DestinationRedshiftConfiguration
    createdAt Double
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    destinationId String
    destinationType String
    name String
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation DestinationRedshiftResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspaceId String
    configuration DestinationRedshiftConfiguration
    createdAt number
    definitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    destinationId string
    destinationType string
    name string
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation DestinationRedshiftResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspaceId string
    configuration DestinationRedshiftConfigurationArgs
    created_at float
    definition_id str
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    destination_id str
    destination_type str
    name str
    Name of the destination e.g. dev-mysql-instance.
    resource_allocation DestinationRedshiftResourceAllocationArgs
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspace_id str
    configuration Property Map
    createdAt Number
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    destinationId String
    destinationType String
    name String
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation Property Map
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition. It is overridden by the job-type-specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspaceId String

    Supporting Types

    DestinationRedshiftConfiguration, DestinationRedshiftConfigurationArgs

    Database string
    Name of the database.
    Host string
    Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com)
    Password string
    Password associated with the username.
    Username string
    Username to use to access the database.
    DisableTypeDedupe bool
    Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions. Default: false
    DropCascade bool
    Drop tables with CASCADE. WARNING! This will delete all data in all dependent objects (views, etc.). Use with caution. This option is intended for use cases which can easily rebuild the dependent objects. Default: false
    JdbcUrlParams string
    Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
    Port double
    Port of the database. Default: 5439
    RawDataSchema string
    The schema to write raw tables into (default: airbyte_internal).
    Schema string
    The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public". Default: "public"
    TunnelMethod DestinationRedshiftConfigurationTunnelMethod
    Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
    UploadingMethod DestinationRedshiftConfigurationUploadingMethod
    The way data will be uploaded to Redshift.
    Database string
    Name of the database.
    Host string
    Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com)
    Password string
    Password associated with the username.
    Username string
    Username to use to access the database.
    DisableTypeDedupe bool
    Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions. Default: false
    DropCascade bool
    Drop tables with CASCADE. WARNING! This will delete all data in all dependent objects (views, etc.). Use with caution. This option is intended for use cases which can easily rebuild the dependent objects. Default: false
    JdbcUrlParams string
    Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
    Port float64
    Port of the database. Default: 5439
    RawDataSchema string
    The schema to write raw tables into (default: airbyte_internal).
    Schema string
    The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public". Default: "public"
    TunnelMethod DestinationRedshiftConfigurationTunnelMethod
    Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
    UploadingMethod DestinationRedshiftConfigurationUploadingMethod
    The way data will be uploaded to Redshift.
    database String
    Name of the database.
    host String
    Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com)
    password String
    Password associated with the username.
    username String
    Username to use to access the database.
    disableTypeDedupe Boolean
    Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions. Default: false
    dropCascade Boolean
    Drop tables with CASCADE. WARNING! This will delete all data in all dependent objects (views, etc.). Use with caution. This option is intended for use cases which can easily rebuild the dependent objects. Default: false
    jdbcUrlParams String
    Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
    port Double
    Port of the database. Default: 5439
    rawDataSchema String
    The schema to write raw tables into (default: airbyte_internal).
    schema String
    The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public". Default: "public"
    tunnelMethod DestinationRedshiftConfigurationTunnelMethod
    Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
    uploadingMethod DestinationRedshiftConfigurationUploadingMethod
    The way data will be uploaded to Redshift.
    database string
    Name of the database.
    host string
    Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com)
    password string
    Password associated with the username.
    username string
    Username to use to access the database.
    disableTypeDedupe boolean
    Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions. Default: false
    dropCascade boolean
    Drop tables with CASCADE. WARNING! This will delete all data in all dependent objects (views, etc.). Use with caution. This option is intended for use cases which can easily rebuild the dependent objects. Default: false
    jdbcUrlParams string
    Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
    port number
    Port of the database. Default: 5439
    rawDataSchema string
    The schema to write raw tables into (default: airbyte_internal).
    schema string
    The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public". Default: "public"
    tunnelMethod DestinationRedshiftConfigurationTunnelMethod
    Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
    uploadingMethod DestinationRedshiftConfigurationUploadingMethod
    The way data will be uploaded to Redshift.
    database str
    Name of the database.
    host str
    Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com)
    password str
    Password associated with the username.
    username str
    Username to use to access the database.
    disable_type_dedupe bool
    Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions. Default: false
    drop_cascade bool
    Drop tables with CASCADE. WARNING! This will delete all data in all dependent objects (views, etc.). Use with caution. This option is intended for use cases which can easily rebuild the dependent objects. Default: false
    jdbc_url_params str
    Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
    port float
    Port of the database. Default: 5439
    raw_data_schema str
    The schema to write raw tables into (default: airbyte_internal).
    schema str
    The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public". Default: "public"
    tunnel_method DestinationRedshiftConfigurationTunnelMethod
    Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
    uploading_method DestinationRedshiftConfigurationUploadingMethod
    The way data will be uploaded to Redshift.
    database String
    Name of the database.
    host String
    Host Endpoint of the Redshift Cluster (must include the cluster-id, region and end with .redshift.amazonaws.com)
    password String
    Password associated with the username.
    username String
    Username to use to access the database.
    disableTypeDedupe Boolean
    Disable Writing Final Tables. WARNING! The data format in _airbyte_data is likely stable but there are no guarantees that other metadata columns will remain the same in future versions. Default: false
    dropCascade Boolean
    Drop tables with CASCADE. WARNING! This will delete all data in all dependent objects (views, etc.). Use with caution. This option is intended for use cases which can easily rebuild the dependent objects. Default: false
    jdbcUrlParams String
    Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3).
    port Number
    Port of the database. Default: 5439
    rawDataSchema String
    The schema to write raw tables into (default: airbyte_internal).
    schema String
    The default schema tables are written to if the source does not specify a namespace. Unless specifically configured, the usual value for this field is "public". Default: "public"
    tunnelMethod Property Map
    Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use.
    uploadingMethod Property Map
    The way data will be uploaded to Redshift.

    DestinationRedshiftConfigurationTunnelMethod, DestinationRedshiftConfigurationTunnelMethodArgs

    DestinationRedshiftConfigurationTunnelMethodPasswordAuthentication, DestinationRedshiftConfigurationTunnelMethodPasswordAuthenticationArgs

    TunnelHost string
    Hostname of the jump server host that allows inbound ssh tunnel.
    TunnelUser string
    OS-level username for logging into the jump server host
    TunnelUserPassword string
    OS-level password for logging into the jump server host
    TunnelPort double
    Port on the proxy/jump server that accepts inbound ssh connections. Default: 22
    TunnelHost string
    Hostname of the jump server host that allows inbound ssh tunnel.
    TunnelUser string
    OS-level username for logging into the jump server host
    TunnelUserPassword string
    OS-level password for logging into the jump server host
    TunnelPort float64
    Port on the proxy/jump server that accepts inbound ssh connections. Default: 22
    tunnelHost String
    Hostname of the jump server host that allows inbound ssh tunnel.
    tunnelUser String
    OS-level username for logging into the jump server host
    tunnelUserPassword String
    OS-level password for logging into the jump server host
    tunnelPort Double
    Port on the proxy/jump server that accepts inbound ssh connections. Default: 22
    tunnelHost string
    Hostname of the jump server host that allows inbound ssh tunnel.
    tunnelUser string
    OS-level username for logging into the jump server host
    tunnelUserPassword string
    OS-level password for logging into the jump server host
    tunnelPort number
    Port on the proxy/jump server that accepts inbound ssh connections. Default: 22
    tunnel_host str
    Hostname of the jump server host that allows inbound ssh tunnel.
    tunnel_user str
    OS-level username for logging into the jump server host
    tunnel_user_password str
    OS-level password for logging into the jump server host
    tunnel_port float
    Port on the proxy/jump server that accepts inbound ssh connections. Default: 22
    tunnelHost String
    Hostname of the jump server host that allows inbound ssh tunnel.
    tunnelUser String
    OS-level username for logging into the jump server host
    tunnelUserPassword String
    OS-level password for logging into the jump server host
    tunnelPort Number
    Port on the proxy/jump server that accepts inbound ssh connections. Default: 22

    DestinationRedshiftConfigurationTunnelMethodSshKeyAuthentication, DestinationRedshiftConfigurationTunnelMethodSshKeyAuthenticationArgs

    SshKey string
    OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
    TunnelHost string
    Hostname of the jump server host that allows inbound ssh tunnel.
    TunnelUser string
    OS-level username for logging into the jump server host.
    TunnelPort double
    Port on the proxy/jump server that accepts inbound ssh connections. Default: 22
    SshKey string
    OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
    TunnelHost string
    Hostname of the jump server host that allows inbound ssh tunnel.
    TunnelUser string
    OS-level username for logging into the jump server host.
    TunnelPort float64
    Port on the proxy/jump server that accepts inbound ssh connections. Default: 22
    sshKey String
    OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
    tunnelHost String
    Hostname of the jump server host that allows inbound ssh tunnel.
    tunnelUser String
    OS-level username for logging into the jump server host.
    tunnelPort Double
    Port on the proxy/jump server that accepts inbound ssh connections. Default: 22
    sshKey string
    OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
    tunnelHost string
    Hostname of the jump server host that allows inbound ssh tunnel.
    tunnelUser string
    OS-level username for logging into the jump server host.
    tunnelPort number
    Port on the proxy/jump server that accepts inbound ssh connections. Default: 22
    ssh_key str
    OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
    tunnel_host str
    Hostname of the jump server host that allows inbound ssh tunnel.
    tunnel_user str
    OS-level username for logging into the jump server host.
    tunnel_port float
    Port on the proxy/jump server that accepts inbound ssh connections. Default: 22
    sshKey String
    OS-level user account ssh key credentials in RSA PEM format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )
    tunnelHost String
    Hostname of the jump server host that allows inbound ssh tunnel.
    tunnelUser String
    OS-level username for logging into the jump server host.
    tunnelPort Number
    Port on the proxy/jump server that accepts inbound ssh connections. Default: 22

    DestinationRedshiftConfigurationUploadingMethod, DestinationRedshiftConfigurationUploadingMethodArgs

    Awss3Staging DestinationRedshiftConfigurationUploadingMethodAwss3Staging
    (Recommended) Uploads data to S3 and then uses a COPY to insert the data into Redshift. COPY is recommended for production workloads for better speed and scalability. See the AWS docs for more details.
    Awss3Staging DestinationRedshiftConfigurationUploadingMethodAwss3Staging
    (Recommended) Uploads data to S3 and then uses a COPY to insert the data into Redshift. COPY is recommended for production workloads for better speed and scalability. See the AWS docs for more details.
    awss3Staging DestinationRedshiftConfigurationUploadingMethodAwss3Staging
    (Recommended) Uploads data to S3 and then uses a COPY to insert the data into Redshift. COPY is recommended for production workloads for better speed and scalability. See the AWS docs for more details.
    awss3Staging DestinationRedshiftConfigurationUploadingMethodAwss3Staging
    (Recommended) Uploads data to S3 and then uses a COPY to insert the data into Redshift. COPY is recommended for production workloads for better speed and scalability. See the AWS docs for more details.
    awss3_staging DestinationRedshiftConfigurationUploadingMethodAwss3Staging
    (Recommended) Uploads data to S3 and then uses a COPY to insert the data into Redshift. COPY is recommended for production workloads for better speed and scalability. See the AWS docs for more details.
    awss3Staging Property Map
    (Recommended) Uploads data to S3 and then uses a COPY to insert the data into Redshift. COPY is recommended for production workloads for better speed and scalability. See the AWS docs for more details.

    DestinationRedshiftConfigurationUploadingMethodAwss3Staging, DestinationRedshiftConfigurationUploadingMethodAwss3StagingArgs

    AccessKeyId string
    This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See the AWS docs on how to generate an access key ID and secret access key.
    S3BucketName string
    The name of the staging S3 bucket.
    SecretAccessKey string
    The corresponding secret to the above access key ID. See the AWS docs on how to generate an access key ID and secret access key.
    FileNamePattern string
    The pattern allows you to set the file-name format for the S3 staging file(s).
    PurgeStagingData bool
    Whether to delete the staging files from S3 after completing the sync. See the docs for details. Default: true
    S3BucketPath string
    The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See the path name recommendations for more details.
    S3BucketRegion string
    The region of the S3 staging bucket. Default: ""; must be one of ["", "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", "ap-southeast-4", "ca-central-1", "ca-west-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", "il-central-1", "me-central-1", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", "us-gov-west-1", "us-west-1", "us-west-2"]
    AccessKeyId string
    This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See the AWS docs on how to generate an access key ID and secret access key.
    S3BucketName string
    The name of the staging S3 bucket.
    SecretAccessKey string
    The corresponding secret to the above access key ID. See the AWS docs on how to generate an access key ID and secret access key.
    FileNamePattern string
    The pattern allows you to set the file-name format for the S3 staging file(s).
    PurgeStagingData bool
    Whether to delete the staging files from S3 after completing the sync. See the docs for details. Default: true
    S3BucketPath string
    The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See the path name recommendations for more details.
    S3BucketRegion string
    The region of the S3 staging bucket. Default: ""; must be one of ["", "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", "ap-southeast-4", "ca-central-1", "ca-west-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", "il-central-1", "me-central-1", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", "us-gov-west-1", "us-west-1", "us-west-2"]
    accessKeyId String
    This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See the AWS docs on how to generate an access key ID and secret access key.
    s3BucketName String
    The name of the staging S3 bucket.
    secretAccessKey String
    The corresponding secret to the above access key ID. See the AWS docs on how to generate an access key ID and secret access key.
    fileNamePattern String
    The pattern allows you to set the file-name format for the S3 staging file(s).
    purgeStagingData Boolean
    Whether to delete the staging files from S3 after completing the sync. See the docs for details. Default: true
    s3BucketPath String
    The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See the path name recommendations for more details.
    s3BucketRegion String
    The region of the S3 staging bucket. Default: ""; must be one of ["", "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", "ap-southeast-4", "ca-central-1", "ca-west-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", "il-central-1", "me-central-1", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", "us-gov-west-1", "us-west-1", "us-west-2"]
    accessKeyId string
    This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See the AWS docs on how to generate an access key ID and secret access key.
    s3BucketName string
    The name of the staging S3 bucket.
    secretAccessKey string
    The corresponding secret to the above access key ID. See the AWS docs on how to generate an access key ID and secret access key.
    fileNamePattern string
    The pattern allows you to set the file-name format for the S3 staging file(s).
    purgeStagingData boolean
    Whether to delete the staging files from S3 after completing the sync. See the docs for details. Default: true
    s3BucketPath string
    The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See the path name recommendations for more details.
    s3BucketRegion string
    The region of the S3 staging bucket. Default: ""; must be one of ["", "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", "ap-southeast-4", "ca-central-1", "ca-west-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", "il-central-1", "me-central-1", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", "us-gov-west-1", "us-west-1", "us-west-2"]
    access_key_id str
    This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See the AWS docs on how to generate an access key ID and secret access key.
    s3_bucket_name str
    The name of the staging S3 bucket.
    secret_access_key str
    The corresponding secret to the above access key ID. See the AWS docs on how to generate an access key ID and secret access key.
    file_name_pattern str
    The pattern allows you to set the file-name format for the S3 staging file(s).
    purge_staging_data bool
    Whether to delete the staging files from S3 after completing the sync. See the docs for details. Default: true
    s3_bucket_path str
    The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See the path name recommendations for more details.
    s3_bucket_region str
    The region of the S3 staging bucket. Default: ""; must be one of ["", "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", "ap-southeast-4", "ca-central-1", "ca-west-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", "il-central-1", "me-central-1", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", "us-gov-west-1", "us-west-1", "us-west-2"]
    accessKeyId String
    This ID grants access to the above S3 staging bucket. Airbyte requires Read and Write permissions to the given bucket. See the AWS docs on how to generate an access key ID and secret access key.
    s3BucketName String
    The name of the staging S3 bucket.
    secretAccessKey String
    The corresponding secret to the above access key ID. See the AWS docs on how to generate an access key ID and secret access key.
    fileNamePattern String
    The pattern allows you to set the file-name format for the S3 staging file(s).
    purgeStagingData Boolean
    Whether to delete the staging files from S3 after completing the sync. See the docs for details. Default: true
    s3BucketPath String
    The directory under the S3 bucket where data will be written. If not provided, then defaults to the root directory. See the path name recommendations for more details.
    s3BucketRegion String
    The region of the S3 staging bucket. Default: ""; must be one of ["", "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", "ap-southeast-4", "ca-central-1", "ca-west-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", "il-central-1", "me-central-1", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", "us-gov-west-1", "us-west-1", "us-west-2"]

    DestinationRedshiftResourceAllocation, DestinationRedshiftResourceAllocationArgs

    Default DestinationRedshiftResourceAllocationDefault
    optional resource requirements to run workers (blank for unbounded allocations)
    JobSpecifics List<DestinationRedshiftResourceAllocationJobSpecific>
    Default DestinationRedshiftResourceAllocationDefault
    optional resource requirements to run workers (blank for unbounded allocations)
    JobSpecifics []DestinationRedshiftResourceAllocationJobSpecific
    default_ DestinationRedshiftResourceAllocationDefault
    optional resource requirements to run workers (blank for unbounded allocations)
    jobSpecifics List<DestinationRedshiftResourceAllocationJobSpecific>
    default DestinationRedshiftResourceAllocationDefault
    optional resource requirements to run workers (blank for unbounded allocations)
    jobSpecifics DestinationRedshiftResourceAllocationJobSpecific[]
    default Property Map
    optional resource requirements to run workers (blank for unbounded allocations)
    jobSpecifics List<Property Map>

    DestinationRedshiftResourceAllocationDefault, DestinationRedshiftResourceAllocationDefaultArgs

    DestinationRedshiftResourceAllocationJobSpecific, DestinationRedshiftResourceAllocationJobSpecificArgs

    JobType string
    Enum that describes the different types of jobs that the platform runs. Must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
    ResourceRequirements DestinationRedshiftResourceAllocationJobSpecificResourceRequirements
    optional resource requirements to run workers (blank for unbounded allocations)
    JobType string
    Enum that describes the different types of jobs that the platform runs. Must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
    ResourceRequirements DestinationRedshiftResourceAllocationJobSpecificResourceRequirements
    optional resource requirements to run workers (blank for unbounded allocations)
    jobType String
    Enum that describes the different types of jobs that the platform runs. Must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
    resourceRequirements DestinationRedshiftResourceAllocationJobSpecificResourceRequirements
    optional resource requirements to run workers (blank for unbounded allocations)
    jobType string
    Enum that describes the different types of jobs that the platform runs. Must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
    resourceRequirements DestinationRedshiftResourceAllocationJobSpecificResourceRequirements
    optional resource requirements to run workers (blank for unbounded allocations)
    job_type str
    Enum that describes the different types of jobs that the platform runs. Must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
    resource_requirements DestinationRedshiftResourceAllocationJobSpecificResourceRequirements
    optional resource requirements to run workers (blank for unbounded allocations)
    jobType String
    Enum that describes the different types of jobs that the platform runs. Must be one of ["get_spec", "check_connection", "discover_schema", "sync", "reset_connection", "connection_updater", "replicate"]
    resourceRequirements Property Map
    optional resource requirements to run workers (blank for unbounded allocations)

    DestinationRedshiftResourceAllocationJobSpecificResourceRequirements, DestinationRedshiftResourceAllocationJobSpecificResourceRequirementsArgs

    Import

    $ pulumi import airbyte:index/destinationRedshift:DestinationRedshift my_airbyte_destination_redshift ""
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    airbyte airbytehq/terraform-provider-airbyte
    License
    Notes
    This Pulumi package is based on the airbyte Terraform Provider.