airbyte.DestinationAwsDatalake

airbyte 0.8.0-beta2 published on Thursday, Mar 27, 2025 by airbytehq
    DestinationAwsDatalake Resource

    Example Usage
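    A minimal Python sketch of this destination, mirroring the Java and YAML examples below. It assumes the provider SDK is imported as pulumi_airbyte; the property shapes follow the Python constructor example later on this page, and all values are placeholders.

    import pulumi_airbyte as airbyte

    # Sketch only: every value below is a placeholder taken from the examples that follow.
    my_destination_awsdatalake = airbyte.DestinationAwsDatalake(
        "myDestinationAwsdatalake",
        configuration={
            "aws_account_id": "111111111111",
            "bucket_name": "...my_bucket_name...",
            "bucket_prefix": "...my_bucket_prefix...",
            "credentials": {
                "iam_role": {
                    "role_arn": "...my_role_arn...",
                },
            },
            "format": {
                "json_lines_newline_delimited_json": {
                    "compression_codec": "UNCOMPRESSED",
                    "format_type": "JSONL",
                },
                "parquet_columnar_storage": {
                    "compression_codec": "GZIP",
                    "format_type": "Parquet",
                },
            },
            "glue_catalog_float_as_decimal": True,
            "lakeformation_database_default_tag_key": "pii_level",
            "lakeformation_database_default_tag_values": "private,public",
            "lakeformation_database_name": "...my_lakeformation_database_name...",
            "lakeformation_governed_tables": True,
            "partitioning": "DAY",
            "region": "ap-southeast-4",
        },
        definition_id="aa9c2d01-84b7-4474-ba6f-e45dbbc28cdd",
        workspace_id="3df68150-9956-454d-8144-1645f409cdd1",
    )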

    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.airbyte.DestinationAwsDatalake;
    import com.pulumi.airbyte.DestinationAwsDatalakeArgs;
    import com.pulumi.airbyte.inputs.DestinationAwsDatalakeConfigurationArgs;
    import com.pulumi.airbyte.inputs.DestinationAwsDatalakeConfigurationCredentialsArgs;
    import com.pulumi.airbyte.inputs.DestinationAwsDatalakeConfigurationCredentialsIamRoleArgs;
    import com.pulumi.airbyte.inputs.DestinationAwsDatalakeConfigurationFormatArgs;
    import com.pulumi.airbyte.inputs.DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJsonArgs;
    import com.pulumi.airbyte.inputs.DestinationAwsDatalakeConfigurationFormatParquetColumnarStorageArgs;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var myDestinationAwsdatalake = new DestinationAwsDatalake("myDestinationAwsdatalake", DestinationAwsDatalakeArgs.builder()
                .configuration(DestinationAwsDatalakeConfigurationArgs.builder()
                    .awsAccountId("111111111111")
                    .bucketName("...my_bucket_name...")
                    .bucketPrefix("...my_bucket_prefix...")
                    .credentials(DestinationAwsDatalakeConfigurationCredentialsArgs.builder()
                        .iamRole(DestinationAwsDatalakeConfigurationCredentialsIamRoleArgs.builder()
                            .roleArn("...my_role_arn...")
                            .build())
                        .build())
                    .format(DestinationAwsDatalakeConfigurationFormatArgs.builder()
                        .jsonLinesNewlineDelimitedJson(DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJsonArgs.builder()
                            .compressionCodec("UNCOMPRESSED")
                            .formatType("JSONL")
                            .build())
                        .parquetColumnarStorage(DestinationAwsDatalakeConfigurationFormatParquetColumnarStorageArgs.builder()
                            .compressionCodec("GZIP")
                            .formatType("Parquet")
                            .build())
                        .build())
                    .glueCatalogFloatAsDecimal(true)
                    .lakeformationDatabaseDefaultTagKey("pii_level")
                    .lakeformationDatabaseDefaultTagValues("private,public")
                    .lakeformationDatabaseName("...my_lakeformation_database_name...")
                    .lakeformationGovernedTables(true)
                    .partitioning("DAY")
                    .region("ap-southeast-4")
                    .build())
                .definitionId("aa9c2d01-84b7-4474-ba6f-e45dbbc28cdd")
                .workspaceId("3df68150-9956-454d-8144-1645f409cdd1")
                .build());
    
        }
    }
    
    resources:
      myDestinationAwsdatalake:
        type: airbyte:DestinationAwsDatalake
        properties:
          configuration:
            awsAccountId: '111111111111'
            bucketName: '...my_bucket_name...'
            bucketPrefix: '...my_bucket_prefix...'
            credentials:
              iamRole:
                roleArn: '...my_role_arn...'
            format:
              jsonLinesNewlineDelimitedJson:
                compressionCodec: UNCOMPRESSED
                formatType: JSONL
              parquetColumnarStorage:
                compressionCodec: GZIP
                formatType: Parquet
            glueCatalogFloatAsDecimal: true
            lakeformationDatabaseDefaultTagKey: pii_level
            lakeformationDatabaseDefaultTagValues: private,public
            lakeformationDatabaseName: '...my_lakeformation_database_name...'
            lakeformationGovernedTables: true
            partitioning: DAY
            region: ap-southeast-4
          definitionId: aa9c2d01-84b7-4474-ba6f-e45dbbc28cdd
          workspaceId: 3df68150-9956-454d-8144-1645f409cdd1
    

    Create DestinationAwsDatalake Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new DestinationAwsDatalake(name: string, args: DestinationAwsDatalakeArgs, opts?: CustomResourceOptions);
    @overload
    def DestinationAwsDatalake(resource_name: str,
                               args: DestinationAwsDatalakeArgs,
                               opts: Optional[ResourceOptions] = None)
    
    @overload
    def DestinationAwsDatalake(resource_name: str,
                               opts: Optional[ResourceOptions] = None,
                               configuration: Optional[DestinationAwsDatalakeConfigurationArgs] = None,
                               workspace_id: Optional[str] = None,
                               definition_id: Optional[str] = None,
                               name: Optional[str] = None)
    func NewDestinationAwsDatalake(ctx *Context, name string, args DestinationAwsDatalakeArgs, opts ...ResourceOption) (*DestinationAwsDatalake, error)
    public DestinationAwsDatalake(string name, DestinationAwsDatalakeArgs args, CustomResourceOptions? opts = null)
    public DestinationAwsDatalake(String name, DestinationAwsDatalakeArgs args)
    public DestinationAwsDatalake(String name, DestinationAwsDatalakeArgs args, CustomResourceOptions options)
    
    type: airbyte:DestinationAwsDatalake
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args DestinationAwsDatalakeArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args DestinationAwsDatalakeArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args DestinationAwsDatalakeArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args DestinationAwsDatalakeArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args DestinationAwsDatalakeArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var destinationAwsDatalakeResource = new Airbyte.DestinationAwsDatalake("destinationAwsDatalakeResource", new()
    {
        Configuration = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationArgs
        {
            BucketName = "string",
            Credentials = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationCredentialsArgs
            {
                IamRole = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationCredentialsIamRoleArgs
                {
                    RoleArn = "string",
                },
                IamUser = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationCredentialsIamUserArgs
                {
                    AwsAccessKeyId = "string",
                    AwsSecretAccessKey = "string",
                },
            },
            LakeformationDatabaseName = "string",
            AwsAccountId = "string",
            BucketPrefix = "string",
            Format = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationFormatArgs
            {
                JsonLinesNewlineDelimitedJson = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJsonArgs
                {
                    CompressionCodec = "string",
                    FormatType = "string",
                },
                ParquetColumnarStorage = new Airbyte.Inputs.DestinationAwsDatalakeConfigurationFormatParquetColumnarStorageArgs
                {
                    CompressionCodec = "string",
                    FormatType = "string",
                },
            },
            GlueCatalogFloatAsDecimal = false,
            LakeformationDatabaseDefaultTagKey = "string",
            LakeformationDatabaseDefaultTagValues = "string",
            LakeformationGovernedTables = false,
            Partitioning = "string",
            Region = "string",
        },
        WorkspaceId = "string",
        DefinitionId = "string",
        Name = "string",
    });
    
    example, err := airbyte.NewDestinationAwsDatalake(ctx, "destinationAwsDatalakeResource", &airbyte.DestinationAwsDatalakeArgs{
        Configuration: &airbyte.DestinationAwsDatalakeConfigurationArgs{
            BucketName: pulumi.String("string"),
            Credentials: &airbyte.DestinationAwsDatalakeConfigurationCredentialsArgs{
                IamRole: &airbyte.DestinationAwsDatalakeConfigurationCredentialsIamRoleArgs{
                    RoleArn: pulumi.String("string"),
                },
                IamUser: &airbyte.DestinationAwsDatalakeConfigurationCredentialsIamUserArgs{
                    AwsAccessKeyId:     pulumi.String("string"),
                    AwsSecretAccessKey: pulumi.String("string"),
                },
            },
            LakeformationDatabaseName: pulumi.String("string"),
            AwsAccountId:              pulumi.String("string"),
            BucketPrefix:              pulumi.String("string"),
            Format: &airbyte.DestinationAwsDatalakeConfigurationFormatArgs{
                JsonLinesNewlineDelimitedJson: &airbyte.DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJsonArgs{
                    CompressionCodec: pulumi.String("string"),
                    FormatType:       pulumi.String("string"),
                },
                ParquetColumnarStorage: &airbyte.DestinationAwsDatalakeConfigurationFormatParquetColumnarStorageArgs{
                    CompressionCodec: pulumi.String("string"),
                    FormatType:       pulumi.String("string"),
                },
            },
            GlueCatalogFloatAsDecimal:             pulumi.Bool(false),
            LakeformationDatabaseDefaultTagKey:    pulumi.String("string"),
            LakeformationDatabaseDefaultTagValues: pulumi.String("string"),
            LakeformationGovernedTables:           pulumi.Bool(false),
            Partitioning:                          pulumi.String("string"),
            Region:                                pulumi.String("string"),
        },
        WorkspaceId:  pulumi.String("string"),
        DefinitionId: pulumi.String("string"),
        Name:         pulumi.String("string"),
    })
    
    var destinationAwsDatalakeResource = new DestinationAwsDatalake("destinationAwsDatalakeResource", DestinationAwsDatalakeArgs.builder()
        .configuration(DestinationAwsDatalakeConfigurationArgs.builder()
            .bucketName("string")
            .credentials(DestinationAwsDatalakeConfigurationCredentialsArgs.builder()
                .iamRole(DestinationAwsDatalakeConfigurationCredentialsIamRoleArgs.builder()
                    .roleArn("string")
                    .build())
                .iamUser(DestinationAwsDatalakeConfigurationCredentialsIamUserArgs.builder()
                    .awsAccessKeyId("string")
                    .awsSecretAccessKey("string")
                    .build())
                .build())
            .lakeformationDatabaseName("string")
            .awsAccountId("string")
            .bucketPrefix("string")
            .format(DestinationAwsDatalakeConfigurationFormatArgs.builder()
                .jsonLinesNewlineDelimitedJson(DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJsonArgs.builder()
                    .compressionCodec("string")
                    .formatType("string")
                    .build())
                .parquetColumnarStorage(DestinationAwsDatalakeConfigurationFormatParquetColumnarStorageArgs.builder()
                    .compressionCodec("string")
                    .formatType("string")
                    .build())
                .build())
            .glueCatalogFloatAsDecimal(false)
            .lakeformationDatabaseDefaultTagKey("string")
            .lakeformationDatabaseDefaultTagValues("string")
            .lakeformationGovernedTables(false)
            .partitioning("string")
            .region("string")
            .build())
        .workspaceId("string")
        .definitionId("string")
        .name("string")
        .build());
    
    destination_aws_datalake_resource = airbyte.DestinationAwsDatalake("destinationAwsDatalakeResource",
        configuration={
            "bucket_name": "string",
            "credentials": {
                "iam_role": {
                    "role_arn": "string",
                },
                "iam_user": {
                    "aws_access_key_id": "string",
                    "aws_secret_access_key": "string",
                },
            },
            "lakeformation_database_name": "string",
            "aws_account_id": "string",
            "bucket_prefix": "string",
            "format": {
                "json_lines_newline_delimited_json": {
                    "compression_codec": "string",
                    "format_type": "string",
                },
                "parquet_columnar_storage": {
                    "compression_codec": "string",
                    "format_type": "string",
                },
            },
            "glue_catalog_float_as_decimal": False,
            "lakeformation_database_default_tag_key": "string",
            "lakeformation_database_default_tag_values": "string",
            "lakeformation_governed_tables": False,
            "partitioning": "string",
            "region": "string",
        },
        workspace_id="string",
        definition_id="string",
        name="string")
    
    const destinationAwsDatalakeResource = new airbyte.DestinationAwsDatalake("destinationAwsDatalakeResource", {
        configuration: {
            bucketName: "string",
            credentials: {
                iamRole: {
                    roleArn: "string",
                },
                iamUser: {
                    awsAccessKeyId: "string",
                    awsSecretAccessKey: "string",
                },
            },
            lakeformationDatabaseName: "string",
            awsAccountId: "string",
            bucketPrefix: "string",
            format: {
                jsonLinesNewlineDelimitedJson: {
                    compressionCodec: "string",
                    formatType: "string",
                },
                parquetColumnarStorage: {
                    compressionCodec: "string",
                    formatType: "string",
                },
            },
            glueCatalogFloatAsDecimal: false,
            lakeformationDatabaseDefaultTagKey: "string",
            lakeformationDatabaseDefaultTagValues: "string",
            lakeformationGovernedTables: false,
            partitioning: "string",
            region: "string",
        },
        workspaceId: "string",
        definitionId: "string",
        name: "string",
    });
    
    type: airbyte:DestinationAwsDatalake
    properties:
        configuration:
            awsAccountId: string
            bucketName: string
            bucketPrefix: string
            credentials:
                iamRole:
                    roleArn: string
                iamUser:
                    awsAccessKeyId: string
                    awsSecretAccessKey: string
            format:
                jsonLinesNewlineDelimitedJson:
                    compressionCodec: string
                    formatType: string
                parquetColumnarStorage:
                    compressionCodec: string
                    formatType: string
            glueCatalogFloatAsDecimal: false
            lakeformationDatabaseDefaultTagKey: string
            lakeformationDatabaseDefaultTagValues: string
            lakeformationDatabaseName: string
            lakeformationGovernedTables: false
            partitioning: string
            region: string
        definitionId: string
        name: string
        workspaceId: string
    

    DestinationAwsDatalake Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
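    For example, the credentials object can be passed in either form; a sketch assuming the SDK is imported as pulumi_airbyte and using a hypothetical placeholder ARN:

    import pulumi_airbyte as airbyte

    # As a typed argument class:
    creds_args = airbyte.DestinationAwsDatalakeConfigurationCredentialsArgs(
        iam_role=airbyte.DestinationAwsDatalakeConfigurationCredentialsIamRoleArgs(
            role_arn="arn:aws:iam::111111111111:role/example",  # hypothetical placeholder
        ),
    )

    # As an equivalent dictionary literal with snake_case keys:
    creds_dict = {
        "iam_role": {
            "role_arn": "arn:aws:iam::111111111111:role/example",  # hypothetical placeholder
        },
    }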

    The DestinationAwsDatalake resource accepts the following input properties:

    Configuration DestinationAwsDatalakeConfiguration
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    Name string
    Name of the destination e.g. dev-mysql-instance.
    Configuration DestinationAwsDatalakeConfigurationArgs
    WorkspaceId string
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    Name string
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationAwsDatalakeConfiguration
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    name String
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationAwsDatalakeConfiguration
    workspaceId string
    definitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    name string
    Name of the destination e.g. dev-mysql-instance.
    configuration DestinationAwsDatalakeConfigurationArgs
    workspace_id str
    definition_id str
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    name str
    Name of the destination e.g. dev-mysql-instance.
    configuration Property Map
    workspaceId String
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    name String
    Name of the destination e.g. dev-mysql-instance.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the DestinationAwsDatalake resource produces the following output properties:
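    For instance, a Python sketch of exporting a few of these outputs (the resource arguments are placeholders; output names follow the snake_case forms listed below):

    import pulumi
    import pulumi_airbyte as airbyte

    dest = airbyte.DestinationAwsDatalake(
        "example",
        configuration={
            "bucket_name": "my-bucket",  # placeholder
            "lakeformation_database_name": "my_database",  # placeholder
            "credentials": {"iam_role": {"role_arn": "arn:aws:iam::111111111111:role/example"}},  # hypothetical
        },
        workspace_id="00000000-0000-0000-0000-000000000000",  # placeholder
    )

    # Output properties become available once the resource is created.
    pulumi.export("destination_id", dest.destination_id)
    pulumi.export("destination_type", dest.destination_type)
    pulumi.export("created_at", dest.created_at)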

    CreatedAt double
    DestinationId string
    DestinationType string
    Id string
    The provider-assigned unique ID for this managed resource.
    ResourceAllocation DestinationAwsDatalakeResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; it is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    CreatedAt float64
    DestinationId string
    DestinationType string
    Id string
    The provider-assigned unique ID for this managed resource.
    ResourceAllocation DestinationAwsDatalakeResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; it is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    createdAt Double
    destinationId String
    destinationType String
    id String
    The provider-assigned unique ID for this managed resource.
    resourceAllocation DestinationAwsDatalakeResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; it is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    createdAt number
    destinationId string
    destinationType string
    id string
    The provider-assigned unique ID for this managed resource.
    resourceAllocation DestinationAwsDatalakeResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; it is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    created_at float
    destination_id str
    destination_type str
    id str
    The provider-assigned unique ID for this managed resource.
    resource_allocation DestinationAwsDatalakeResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; it is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    createdAt Number
    destinationId String
    destinationType String
    id String
    The provider-assigned unique ID for this managed resource.
    resourceAllocation Property Map
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; it is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.

    Look up Existing DestinationAwsDatalake Resource

    Get an existing DestinationAwsDatalake resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: DestinationAwsDatalakeState, opts?: CustomResourceOptions): DestinationAwsDatalake
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            configuration: Optional[DestinationAwsDatalakeConfigurationArgs] = None,
            created_at: Optional[float] = None,
            definition_id: Optional[str] = None,
            destination_id: Optional[str] = None,
            destination_type: Optional[str] = None,
            name: Optional[str] = None,
            resource_allocation: Optional[DestinationAwsDatalakeResourceAllocationArgs] = None,
            workspace_id: Optional[str] = None) -> DestinationAwsDatalake
    func GetDestinationAwsDatalake(ctx *Context, name string, id IDInput, state *DestinationAwsDatalakeState, opts ...ResourceOption) (*DestinationAwsDatalake, error)
    public static DestinationAwsDatalake Get(string name, Input<string> id, DestinationAwsDatalakeState? state, CustomResourceOptions? opts = null)
    public static DestinationAwsDatalake get(String name, Output<String> id, DestinationAwsDatalakeState state, CustomResourceOptions options)
    resources:
      _:
        type: airbyte:DestinationAwsDatalake
        get:
          id: ${id}
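    For example, a Python sketch of the lookup (the ID is a placeholder for a real provider-assigned ID):

    import pulumi_airbyte as airbyte

    # Fetch the state of an existing destination by name and provider ID.
    existing = airbyte.DestinationAwsDatalake.get(
        "existingDestination",
        id="00000000-0000-0000-0000-000000000000",  # placeholder ID
    )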
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    Configuration DestinationAwsDatalakeConfiguration
    CreatedAt double
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    DestinationId string
    DestinationType string
    Name string
    Name of the destination e.g. dev-mysql-instance.
    ResourceAllocation DestinationAwsDatalakeResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; it is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    WorkspaceId string
    Configuration DestinationAwsDatalakeConfigurationArgs
    CreatedAt float64
    DefinitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    DestinationId string
    DestinationType string
    Name string
    Name of the destination e.g. dev-mysql-instance.
    ResourceAllocation DestinationAwsDatalakeResourceAllocationArgs
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; it is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    WorkspaceId string
    configuration DestinationAwsDatalakeConfiguration
    createdAt Double
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    destinationId String
    destinationType String
    name String
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation DestinationAwsDatalakeResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; it is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspaceId String
    configuration DestinationAwsDatalakeConfiguration
    createdAt number
    definitionId string
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    destinationId string
    destinationType string
    name string
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation DestinationAwsDatalakeResourceAllocation
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; it is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspaceId string
    configuration DestinationAwsDatalakeConfigurationArgs
    created_at float
    definition_id str
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    destination_id str
    destination_type str
    name str
    Name of the destination e.g. dev-mysql-instance.
    resource_allocation DestinationAwsDatalakeResourceAllocationArgs
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; it is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspace_id str
    configuration Property Map
    createdAt Number
    definitionId String
    The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. Requires replacement if changed.
    destinationId String
    destinationType String
    name String
    Name of the destination e.g. dev-mysql-instance.
    resourceAllocation Property Map
    Actor or actor definition specific resource requirements. If default is set, these are the requirements that should be set for ALL jobs run for this actor definition; it is overridden by the job type specific configurations. If not set, the platform will use defaults. These values will be overridden by configuration at the connection level.
    workspaceId String

    Supporting Types

    DestinationAwsDatalakeConfiguration, DestinationAwsDatalakeConfigurationArgs

    BucketName string
    The name of the S3 bucket. Read more in the AWS documentation.
    Credentials DestinationAwsDatalakeConfigurationCredentials
    Choose How to Authenticate to AWS.
    LakeformationDatabaseName string
    The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace.
    AwsAccountId string
    Target AWS account ID.
    BucketPrefix string
    S3 prefix
    Format DestinationAwsDatalakeConfigurationFormat
    Format of the data output.
    GlueCatalogFloatAsDecimal bool
    Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source. Default: false
    LakeformationDatabaseDefaultTagKey string
    Add a default tag key to databases created by this destination
    LakeformationDatabaseDefaultTagValues string
    Add default values for the Tag Key to databases created by this destination. Comma separate for multiple values.
    LakeformationGovernedTables bool
    Whether to create tables as LF governed tables. Default: false
    Partitioning string
    Partition data by cursor fields when a cursor field is a date. Default: "NO PARTITIONING"; must be one of ["NO PARTITIONING", "DATE", "YEAR", "MONTH", "DAY", "YEAR/MONTH", "YEAR/MONTH/DAY"]
    Region string
    The region of the S3 bucket. See the AWS documentation for all region codes. Default: ""; must be one of ["", "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", "ap-southeast-4", "ca-central-1", "ca-west-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", "il-central-1", "me-central-1", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", "us-gov-west-1", "us-west-1", "us-west-2"]
    BucketName string
    The name of the S3 bucket. Read more in the AWS documentation.
    Credentials DestinationAwsDatalakeConfigurationCredentials
    Choose How to Authenticate to AWS.
    LakeformationDatabaseName string
    The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace.
    AwsAccountId string
    Target AWS account ID.
    BucketPrefix string
    S3 prefix
    Format DestinationAwsDatalakeConfigurationFormat
    Format of the data output.
    GlueCatalogFloatAsDecimal bool
    Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source. Default: false
    LakeformationDatabaseDefaultTagKey string
    Add a default tag key to databases created by this destination
    LakeformationDatabaseDefaultTagValues string
    Add default values for the Tag Key to databases created by this destination. Comma separate for multiple values.
    LakeformationGovernedTables bool
    Whether to create tables as LF governed tables. Default: false
    Partitioning string
    Partition data by cursor fields when a cursor field is a date. Default: "NO PARTITIONING"; must be one of ["NO PARTITIONING", "DATE", "YEAR", "MONTH", "DAY", "YEAR/MONTH", "YEAR/MONTH/DAY"]
    Region string
    The region of the S3 bucket. See the AWS documentation for all region codes. Default: ""; must be one of ["", "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", "ap-southeast-4", "ca-central-1", "ca-west-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", "il-central-1", "me-central-1", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", "us-gov-west-1", "us-west-1", "us-west-2"]
    bucketName String
    The name of the S3 bucket. Read more in the AWS documentation.
    credentials DestinationAwsDatalakeConfigurationCredentials
    Choose How to Authenticate to AWS.
    lakeformationDatabaseName String
    The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace.
    awsAccountId String
    Target AWS account ID.
    bucketPrefix String
    S3 prefix
    format DestinationAwsDatalakeConfigurationFormat
    Format of the data output.
    glueCatalogFloatAsDecimal Boolean
    Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source. Default: false
    lakeformationDatabaseDefaultTagKey String
    Add a default tag key to databases created by this destination
    lakeformationDatabaseDefaultTagValues String
    Add default values for the Tag Key to databases created by this destination. Comma separate for multiple values.
    lakeformationGovernedTables Boolean
    Whether to create tables as LF governed tables. Default: false
    partitioning String
    Partition data by cursor fields when a cursor field is a date. Default: "NO PARTITIONING"; must be one of ["NO PARTITIONING", "DATE", "YEAR", "MONTH", "DAY", "YEAR/MONTH", "YEAR/MONTH/DAY"]
    region String
    The region of the S3 bucket. See the AWS documentation for all region codes. Default: ""; must be one of ["", "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", "ap-southeast-4", "ca-central-1", "ca-west-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", "il-central-1", "me-central-1", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", "us-gov-west-1", "us-west-1", "us-west-2"]
    bucketName string
    The name of the S3 bucket. Read more in the AWS documentation.
    credentials DestinationAwsDatalakeConfigurationCredentials
    Choose How to Authenticate to AWS.
    lakeformationDatabaseName string
    The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace.
    awsAccountId string
    Target AWS account ID.
    bucketPrefix string
    S3 prefix
    format DestinationAwsDatalakeConfigurationFormat
    Format of the data output.
    glueCatalogFloatAsDecimal boolean
    Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source. Default: false
    lakeformationDatabaseDefaultTagKey string
    Add a default tag key to databases created by this destination
    lakeformationDatabaseDefaultTagValues string
    Add default values for the Tag Key to databases created by this destination. Comma separate for multiple values.
    lakeformationGovernedTables boolean
    Whether to create tables as LF governed tables. Default: false
    partitioning string
    Partition data by cursor fields when a cursor field is a date. Default: "NO PARTITIONING"; must be one of ["NO PARTITIONING", "DATE", "YEAR", "MONTH", "DAY", "YEAR/MONTH", "YEAR/MONTH/DAY"]
    region string
    The region of the S3 bucket. See the AWS documentation for all region codes. Default: ""; must be one of ["", "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", "ap-southeast-4", "ca-central-1", "ca-west-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", "il-central-1", "me-central-1", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", "us-gov-west-1", "us-west-1", "us-west-2"]
    bucket_name str
    The name of the S3 bucket. Read more in the AWS documentation.
    credentials DestinationAwsDatalakeConfigurationCredentials
    Choose How to Authenticate to AWS.
    lakeformation_database_name str
    The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace.
    aws_account_id str
    Target AWS account ID.
    bucket_prefix str
    S3 prefix
    format DestinationAwsDatalakeConfigurationFormat
    Format of the data output.
    glue_catalog_float_as_decimal bool
    Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source. Default: false
    lakeformation_database_default_tag_key str
    Add a default tag key to databases created by this destination
    lakeformation_database_default_tag_values str
    Add default values for the Tag Key to databases created by this destination. Comma separate for multiple values.
    lakeformation_governed_tables bool
    Whether to create tables as LF governed tables. Default: false
    partitioning str
    Partition data by cursor fields when a cursor field is a date. Default: "NO PARTITIONING"; must be one of ["NO PARTITIONING", "DATE", "YEAR", "MONTH", "DAY", "YEAR/MONTH", "YEAR/MONTH/DAY"]
    region str
    The region of the S3 bucket. See the AWS documentation for all region codes. Default: ""; must be one of ["", "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", "ap-southeast-4", "ca-central-1", "ca-west-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", "il-central-1", "me-central-1", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", "us-gov-west-1", "us-west-1", "us-west-2"]
    bucketName String
    The name of the S3 bucket. Read more in the AWS documentation.
    credentials Property Map
    Choose How to Authenticate to AWS.
    lakeformationDatabaseName String
    The default database this destination will use to create tables in per stream. Can be changed per connection by customizing the namespace.
    awsAccountId String
    Target AWS account ID.
    bucketPrefix String
    S3 prefix
    format Property Map
    Format of the data output.
    glueCatalogFloatAsDecimal Boolean
    Cast float/double as decimal(38,18). This can help achieve higher accuracy and represent numbers correctly as received from the source. Default: false
    lakeformationDatabaseDefaultTagKey String
    Add a default tag key to databases created by this destination
    lakeformationDatabaseDefaultTagValues String
    Add default values for the Tag Key to databases created by this destination. Comma separate for multiple values.
    lakeformationGovernedTables Boolean
    Whether to create tables as LF governed tables. Default: false
    partitioning String
    Partition data by cursor fields when a cursor field is a date. Default: "NO PARTITIONING"; must be one of ["NO PARTITIONING", "DATE", "YEAR", "MONTH", "DAY", "YEAR/MONTH", "YEAR/MONTH/DAY"]
    region String
    The region of the S3 bucket. See the AWS documentation for all region codes. Default: ""; must be one of ["", "af-south-1", "ap-east-1", "ap-northeast-1", "ap-northeast-2", "ap-northeast-3", "ap-south-1", "ap-south-2", "ap-southeast-1", "ap-southeast-2", "ap-southeast-3", "ap-southeast-4", "ca-central-1", "ca-west-1", "cn-north-1", "cn-northwest-1", "eu-central-1", "eu-central-2", "eu-north-1", "eu-south-1", "eu-south-2", "eu-west-1", "eu-west-2", "eu-west-3", "il-central-1", "me-central-1", "me-south-1", "sa-east-1", "us-east-1", "us-east-2", "us-gov-east-1", "us-gov-west-1", "us-west-1", "us-west-2"]

    DestinationAwsDatalakeConfigurationCredentials, DestinationAwsDatalakeConfigurationCredentialsArgs

    Choose how to authenticate to AWS; in practice only one of the nested iamRole or iamUser objects below is configured.

    DestinationAwsDatalakeConfigurationCredentialsIamRole, DestinationAwsDatalakeConfigurationCredentialsIamRoleArgs

    RoleArn string
    Will assume this role to write data to S3.
    RoleArn string
    Will assume this role to write data to S3.
    roleArn String
    Will assume this role to write data to S3.
    roleArn string
    Will assume this role to write data to S3.
    role_arn str
    Will assume this role to write data to S3.
    roleArn String
    Will assume this role to write data to S3.

    DestinationAwsDatalakeConfigurationCredentialsIamUser, DestinationAwsDatalakeConfigurationCredentialsIamUserArgs

    AwsAccessKeyId string
    AWS User Access Key Id
    AwsSecretAccessKey string
    Secret Access Key
    AwsAccessKeyId string
    AWS User Access Key Id
    AwsSecretAccessKey string
    Secret Access Key
    awsAccessKeyId String
    AWS User Access Key Id
    awsSecretAccessKey String
    Secret Access Key
    awsAccessKeyId string
    AWS User Access Key Id
    awsSecretAccessKey string
    Secret Access Key
    aws_access_key_id str
    AWS User Access Key Id
    aws_secret_access_key str
    Secret Access Key
    awsAccessKeyId String
    AWS User Access Key Id
    awsSecretAccessKey String
    Secret Access Key

    DestinationAwsDatalakeConfigurationFormat, DestinationAwsDatalakeConfigurationFormatArgs

    DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJson, DestinationAwsDatalakeConfigurationFormatJsonLinesNewlineDelimitedJsonArgs

    CompressionCodec string
    The compression algorithm used to compress data. Default: "UNCOMPRESSED"; must be one of ["UNCOMPRESSED", "GZIP"]
    FormatType string
    Default: "JSONL"; must be "JSONL"
    CompressionCodec string
    The compression algorithm used to compress data. Default: "UNCOMPRESSED"; must be one of ["UNCOMPRESSED", "GZIP"]
    FormatType string
    Default: "JSONL"; must be "JSONL"
    compressionCodec String
    The compression algorithm used to compress data. Default: "UNCOMPRESSED"; must be one of ["UNCOMPRESSED", "GZIP"]
    formatType String
    Default: "JSONL"; must be "JSONL"
    compressionCodec string
    The compression algorithm used to compress data. Default: "UNCOMPRESSED"; must be one of ["UNCOMPRESSED", "GZIP"]
    formatType string
    Default: "JSONL"; must be "JSONL"
    compression_codec str
    The compression algorithm used to compress data. Default: "UNCOMPRESSED"; must be one of ["UNCOMPRESSED", "GZIP"]
    format_type str
    Default: "JSONL"; must be "JSONL"
    compressionCodec String
    The compression algorithm used to compress data. Default: "UNCOMPRESSED"; must be one of ["UNCOMPRESSED", "GZIP"]
    formatType String
    Default: "JSONL"; must be "JSONL"

    DestinationAwsDatalakeConfigurationFormatParquetColumnarStorage, DestinationAwsDatalakeConfigurationFormatParquetColumnarStorageArgs

    CompressionCodec string
    The compression algorithm used to compress data. Default: "SNAPPY"; must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]
    FormatType string
    Default: "Parquet"; must be "Parquet"
    CompressionCodec string
    The compression algorithm used to compress data. Default: "SNAPPY"; must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]
    FormatType string
    Default: "Parquet"; must be "Parquet"
    compressionCodec String
    The compression algorithm used to compress data. Default: "SNAPPY"; must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]
    formatType String
    Default: "Parquet"; must be "Parquet"
    compressionCodec string
    The compression algorithm used to compress data. Default: "SNAPPY"; must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]
    formatType string
    Default: "Parquet"; must be "Parquet"
    compression_codec str
    The compression algorithm used to compress data. Default: "SNAPPY"; must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]
    format_type str
    Default: "Parquet"; must be "Parquet"
    compressionCodec String
    The compression algorithm used to compress data. Default: "SNAPPY"; must be one of ["UNCOMPRESSED", "SNAPPY", "GZIP", "ZSTD"]
    formatType String
    Default: "Parquet"; must be "Parquet"

    DestinationAwsDatalakeResourceAllocation, DestinationAwsDatalakeResourceAllocationArgs

    Default DestinationAwsDatalakeResourceAllocationDefault
    Optional resource requirements to run workers (blank for unbounded allocations).
    JobSpecifics []DestinationAwsDatalakeResourceAllocationJobSpecific
    default DestinationAwsDatalakeResourceAllocationDefault
    Optional resource requirements to run workers (blank for unbounded allocations).
    jobSpecifics DestinationAwsDatalakeResourceAllocationJobSpecific[]
    default Property Map
    Optional resource requirements to run workers (blank for unbounded allocations).
    jobSpecifics List<Property Map>

    DestinationAwsDatalakeResourceAllocationDefault, DestinationAwsDatalakeResourceAllocationDefaultArgs

    DestinationAwsDatalakeResourceAllocationJobSpecific, DestinationAwsDatalakeResourceAllocationJobSpecificArgs

    JobType string
    Enum that describes the different types of jobs that the platform runs. Must be one of ["getspec", "checkconnection", "discoverschema", "sync", "resetconnection", "connection_updater", "replicate"].
    ResourceRequirements DestinationAwsDatalakeResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations).
    JobType string
    Enum that describes the different types of jobs that the platform runs. Must be one of ["getspec", "checkconnection", "discoverschema", "sync", "resetconnection", "connection_updater", "replicate"].
    ResourceRequirements DestinationAwsDatalakeResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations).
    jobType String
    Enum that describes the different types of jobs that the platform runs. Must be one of ["getspec", "checkconnection", "discoverschema", "sync", "resetconnection", "connection_updater", "replicate"].
    resourceRequirements DestinationAwsDatalakeResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations).
    jobType string
    Enum that describes the different types of jobs that the platform runs. Must be one of ["getspec", "checkconnection", "discoverschema", "sync", "resetconnection", "connection_updater", "replicate"].
    resourceRequirements DestinationAwsDatalakeResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations).
    job_type str
    Enum that describes the different types of jobs that the platform runs. Must be one of ["getspec", "checkconnection", "discoverschema", "sync", "resetconnection", "connection_updater", "replicate"].
    resource_requirements DestinationAwsDatalakeResourceAllocationJobSpecificResourceRequirements
    Optional resource requirements to run workers (blank for unbounded allocations).
    jobType String
    Enum that describes the different types of jobs that the platform runs. Must be one of ["getspec", "checkconnection", "discoverschema", "sync", "resetconnection", "connection_updater", "replicate"].
    resourceRequirements Property Map
    Optional resource requirements to run workers (blank for unbounded allocations).

    DestinationAwsDatalakeResourceAllocationJobSpecificResourceRequirements, DestinationAwsDatalakeResourceAllocationJobSpecificResourceRequirementsArgs

    Import

    $ pulumi import airbyte:index/destinationAwsDatalake:DestinationAwsDatalake my_airbyte_destination_aws_datalake ""
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    airbyte airbytehq/terraform-provider-airbyte
    License
    Notes
    This Pulumi package is based on the airbyte Terraform Provider.