airbyte.SourceFile
SourceFile Resource
Example Usage
Coming soon! (TypeScript, Python, Go, and C# examples; Java and YAML examples follow below.)
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.airbyte.SourceFile;
import com.pulumi.airbyte.SourceFileArgs;
import com.pulumi.airbyte.inputs.SourceFileConfigurationArgs;
import com.pulumi.airbyte.inputs.SourceFileConfigurationProviderArgs;
import com.pulumi.airbyte.inputs.SourceFileConfigurationProviderAzBlobAzureBlobStorageArgs;
import com.pulumi.airbyte.inputs.SourceFileConfigurationProviderGcsGoogleCloudStorageArgs;
import com.pulumi.airbyte.inputs.SourceFileConfigurationProviderHttpsPublicWebArgs;
import com.pulumi.airbyte.inputs.SourceFileConfigurationProviderLocalFilesystemLimitedArgs;
import com.pulumi.airbyte.inputs.SourceFileConfigurationProviderS3AmazonWebServicesArgs;
import com.pulumi.airbyte.inputs.SourceFileConfigurationProviderScpSecureCopyProtocolArgs;
import com.pulumi.airbyte.inputs.SourceFileConfigurationProviderSftpSecureFileTransferProtocolArgs;
import com.pulumi.airbyte.inputs.SourceFileConfigurationProviderSshSecureShellArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var mySourceFile = new SourceFile("mySourceFile", SourceFileArgs.builder()
            .configuration(SourceFileConfigurationArgs.builder()
                .datasetName("...my_dataset_name...")
                .format("csv")
                .provider(SourceFileConfigurationProviderArgs.builder()
                    .azBlobAzureBlobStorage(SourceFileConfigurationProviderAzBlobAzureBlobStorageArgs.builder()
                        .sasToken("...my_sas_token...")
                        .sharedKey("...my_shared_key...")
                        .storageAccount("...my_storage_account...")
                        .build())
                    .gcsGoogleCloudStorage(SourceFileConfigurationProviderGcsGoogleCloudStorageArgs.builder()
                        .serviceAccountJson("...my_service_account_json...")
                        .build())
                    .httpsPublicWeb(SourceFileConfigurationProviderHttpsPublicWebArgs.builder()
                        .userAgent(false)
                        .build())
                    .localFilesystemLimited(SourceFileConfigurationProviderLocalFilesystemLimitedArgs.builder()
                        .build())
                    .s3AmazonWebServices(SourceFileConfigurationProviderS3AmazonWebServicesArgs.builder()
                        .awsAccessKeyId("...my_aws_access_key_id...")
                        .awsSecretAccessKey("...my_aws_secret_access_key...")
                        .build())
                    .scpSecureCopyProtocol(SourceFileConfigurationProviderScpSecureCopyProtocolArgs.builder()
                        .host("...my_host...")
                        .password("...my_password...")
                        .port("...my_port...")
                        .user("...my_user...")
                        .build())
                    .sftpSecureFileTransferProtocol(SourceFileConfigurationProviderSftpSecureFileTransferProtocolArgs.builder()
                        .host("...my_host...")
                        .password("...my_password...")
                        .port("...my_port...")
                        .user("...my_user...")
                        .build())
                    .sshSecureShell(SourceFileConfigurationProviderSshSecureShellArgs.builder()
                        .host("...my_host...")
                        .password("...my_password...")
                        .port("...my_port...")
                        .user("...my_user...")
                        .build())
                    .build())
                .readerOptions("{}")
                .url("https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv")
                .build())
            .definitionId("a86f29c4-a6d3-472d-a3d8-9e8b8db9cd49")
            .secretId("...my_secret_id...")
            .workspaceId("6c152f5f-2668-4edb-bbeb-b6add70adfbc")
            .build());
    }
}
resources:
  mySourceFile:
    type: airbyte:SourceFile
    properties:
      configuration:
        datasetName: '...my_dataset_name...'
        format: csv
        provider:
          azBlobAzureBlobStorage:
            sasToken: '...my_sas_token...'
            sharedKey: '...my_shared_key...'
            storageAccount: '...my_storage_account...'
          gcsGoogleCloudStorage:
            serviceAccountJson: '...my_service_account_json...'
          httpsPublicWeb:
            userAgent: false
          localFilesystemLimited: {}
          s3AmazonWebServices:
            awsAccessKeyId: '...my_aws_access_key_id...'
            awsSecretAccessKey: '...my_aws_secret_access_key...'
          scpSecureCopyProtocol:
            host: '...my_host...'
            password: '...my_password...'
            port: '...my_port...'
            user: '...my_user...'
          sftpSecureFileTransferProtocol:
            host: '...my_host...'
            password: '...my_password...'
            port: '...my_port...'
            user: '...my_user...'
          sshSecureShell:
            host: '...my_host...'
            password: '...my_password...'
            port: '...my_port...'
            user: '...my_user...'
        readerOptions: '{}'
        url: https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv
      definitionId: a86f29c4-a6d3-472d-a3d8-9e8b8db9cd49
      secretId: '...my_secret_id...'
      workspaceId: 6c152f5f-2668-4edb-bbeb-b6add70adfbc
Create SourceFile Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
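For orientation, here is a minimal sketch in Python that declares a File source reading a public CSV over HTTPS; the workspace ID is a placeholder, and the property names mirror the Python constructor example further down this page.

import pulumi_airbyte as airbyte

# Minimal sketch: a File source that reads a public CSV over HTTPS.
# The workspace ID below is a placeholder, not a real value.
public_csv_source = airbyte.SourceFile("publicCsvSource",
    configuration={
        "dataset_name": "epidemiology",
        "format": "csv",
        "url": "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv",
        "provider": {
            "https_public_web": {
                "user_agent": False,
            },
        },
    },
    workspace_id="00000000-0000-0000-0000-000000000000")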
Constructor syntax
new SourceFile(name: string, args: SourceFileArgs, opts?: CustomResourceOptions);
@overload
def SourceFile(resource_name: str,
               args: SourceFileArgs,
               opts: Optional[ResourceOptions] = None)
@overload
def SourceFile(resource_name: str,
               opts: Optional[ResourceOptions] = None,
               configuration: Optional[SourceFileConfigurationArgs] = None,
               workspace_id: Optional[str] = None,
               definition_id: Optional[str] = None,
               name: Optional[str] = None,
               secret_id: Optional[str] = None)
func NewSourceFile(ctx *Context, name string, args SourceFileArgs, opts ...ResourceOption) (*SourceFile, error)
public SourceFile(string name, SourceFileArgs args, CustomResourceOptions? opts = null)
public SourceFile(String name, SourceFileArgs args)
public SourceFile(String name, SourceFileArgs args, CustomResourceOptions options)
type: airbyte:SourceFile
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args SourceFileArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args SourceFileArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args SourceFileArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args SourceFileArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args SourceFileArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var sourceFileResource = new Airbyte.SourceFile("sourceFileResource", new()
{
    Configuration = new Airbyte.Inputs.SourceFileConfigurationArgs
    {
        DatasetName = "string",
        Provider = new Airbyte.Inputs.SourceFileConfigurationProviderArgs
        {
            AzBlobAzureBlobStorage = new Airbyte.Inputs.SourceFileConfigurationProviderAzBlobAzureBlobStorageArgs
            {
                StorageAccount = "string",
                SasToken = "string",
                SharedKey = "string",
            },
            GcsGoogleCloudStorage = new Airbyte.Inputs.SourceFileConfigurationProviderGcsGoogleCloudStorageArgs
            {
                ServiceAccountJson = "string",
            },
            HttpsPublicWeb = new Airbyte.Inputs.SourceFileConfigurationProviderHttpsPublicWebArgs
            {
                UserAgent = false,
            },
            LocalFilesystemLimited = null,
            S3AmazonWebServices = new Airbyte.Inputs.SourceFileConfigurationProviderS3AmazonWebServicesArgs
            {
                AwsAccessKeyId = "string",
                AwsSecretAccessKey = "string",
            },
            ScpSecureCopyProtocol = new Airbyte.Inputs.SourceFileConfigurationProviderScpSecureCopyProtocolArgs
            {
                Host = "string",
                User = "string",
                Password = "string",
                Port = "string",
            },
            SftpSecureFileTransferProtocol = new Airbyte.Inputs.SourceFileConfigurationProviderSftpSecureFileTransferProtocolArgs
            {
                Host = "string",
                User = "string",
                Password = "string",
                Port = "string",
            },
            SshSecureShell = new Airbyte.Inputs.SourceFileConfigurationProviderSshSecureShellArgs
            {
                Host = "string",
                User = "string",
                Password = "string",
                Port = "string",
            },
        },
        Url = "string",
        Format = "string",
        ReaderOptions = "string",
    },
    WorkspaceId = "string",
    DefinitionId = "string",
    Name = "string",
    SecretId = "string",
});
example, err := airbyte.NewSourceFile(ctx, "sourceFileResource", &airbyte.SourceFileArgs{
    Configuration: &airbyte.SourceFileConfigurationArgs{
        DatasetName: pulumi.String("string"),
        Provider: &airbyte.SourceFileConfigurationProviderArgs{
            AzBlobAzureBlobStorage: &airbyte.SourceFileConfigurationProviderAzBlobAzureBlobStorageArgs{
                StorageAccount: pulumi.String("string"),
                SasToken:       pulumi.String("string"),
                SharedKey:      pulumi.String("string"),
            },
            GcsGoogleCloudStorage: &airbyte.SourceFileConfigurationProviderGcsGoogleCloudStorageArgs{
                ServiceAccountJson: pulumi.String("string"),
            },
            HttpsPublicWeb: &airbyte.SourceFileConfigurationProviderHttpsPublicWebArgs{
                UserAgent: pulumi.Bool(false),
            },
            LocalFilesystemLimited: &airbyte.SourceFileConfigurationProviderLocalFilesystemLimitedArgs{},
            S3AmazonWebServices: &airbyte.SourceFileConfigurationProviderS3AmazonWebServicesArgs{
                AwsAccessKeyId:     pulumi.String("string"),
                AwsSecretAccessKey: pulumi.String("string"),
            },
            ScpSecureCopyProtocol: &airbyte.SourceFileConfigurationProviderScpSecureCopyProtocolArgs{
                Host:     pulumi.String("string"),
                User:     pulumi.String("string"),
                Password: pulumi.String("string"),
                Port:     pulumi.String("string"),
            },
            SftpSecureFileTransferProtocol: &airbyte.SourceFileConfigurationProviderSftpSecureFileTransferProtocolArgs{
                Host:     pulumi.String("string"),
                User:     pulumi.String("string"),
                Password: pulumi.String("string"),
                Port:     pulumi.String("string"),
            },
            SshSecureShell: &airbyte.SourceFileConfigurationProviderSshSecureShellArgs{
                Host:     pulumi.String("string"),
                User:     pulumi.String("string"),
                Password: pulumi.String("string"),
                Port:     pulumi.String("string"),
            },
        },
        Url:           pulumi.String("string"),
        Format:        pulumi.String("string"),
        ReaderOptions: pulumi.String("string"),
    },
    WorkspaceId:  pulumi.String("string"),
    DefinitionId: pulumi.String("string"),
    Name:         pulumi.String("string"),
    SecretId:     pulumi.String("string"),
})
var sourceFileResource = new SourceFile("sourceFileResource", SourceFileArgs.builder()
    .configuration(SourceFileConfigurationArgs.builder()
        .datasetName("string")
        .provider(SourceFileConfigurationProviderArgs.builder()
            .azBlobAzureBlobStorage(SourceFileConfigurationProviderAzBlobAzureBlobStorageArgs.builder()
                .storageAccount("string")
                .sasToken("string")
                .sharedKey("string")
                .build())
            .gcsGoogleCloudStorage(SourceFileConfigurationProviderGcsGoogleCloudStorageArgs.builder()
                .serviceAccountJson("string")
                .build())
            .httpsPublicWeb(SourceFileConfigurationProviderHttpsPublicWebArgs.builder()
                .userAgent(false)
                .build())
            .localFilesystemLimited(SourceFileConfigurationProviderLocalFilesystemLimitedArgs.builder()
                .build())
            .s3AmazonWebServices(SourceFileConfigurationProviderS3AmazonWebServicesArgs.builder()
                .awsAccessKeyId("string")
                .awsSecretAccessKey("string")
                .build())
            .scpSecureCopyProtocol(SourceFileConfigurationProviderScpSecureCopyProtocolArgs.builder()
                .host("string")
                .user("string")
                .password("string")
                .port("string")
                .build())
            .sftpSecureFileTransferProtocol(SourceFileConfigurationProviderSftpSecureFileTransferProtocolArgs.builder()
                .host("string")
                .user("string")
                .password("string")
                .port("string")
                .build())
            .sshSecureShell(SourceFileConfigurationProviderSshSecureShellArgs.builder()
                .host("string")
                .user("string")
                .password("string")
                .port("string")
                .build())
            .build())
        .url("string")
        .format("string")
        .readerOptions("string")
        .build())
    .workspaceId("string")
    .definitionId("string")
    .name("string")
    .secretId("string")
    .build());
source_file_resource = airbyte.SourceFile("sourceFileResource",
    configuration={
        "dataset_name": "string",
        "provider": {
            "az_blob_azure_blob_storage": {
                "storage_account": "string",
                "sas_token": "string",
                "shared_key": "string",
            },
            "gcs_google_cloud_storage": {
                "service_account_json": "string",
            },
            "https_public_web": {
                "user_agent": False,
            },
            "local_filesystem_limited": {},
            "s3_amazon_web_services": {
                "aws_access_key_id": "string",
                "aws_secret_access_key": "string",
            },
            "scp_secure_copy_protocol": {
                "host": "string",
                "user": "string",
                "password": "string",
                "port": "string",
            },
            "sftp_secure_file_transfer_protocol": {
                "host": "string",
                "user": "string",
                "password": "string",
                "port": "string",
            },
            "ssh_secure_shell": {
                "host": "string",
                "user": "string",
                "password": "string",
                "port": "string",
            },
        },
        "url": "string",
        "format": "string",
        "reader_options": "string",
    },
    workspace_id="string",
    definition_id="string",
    name="string",
    secret_id="string")
const sourceFileResource = new airbyte.SourceFile("sourceFileResource", {
    configuration: {
        datasetName: "string",
        provider: {
            azBlobAzureBlobStorage: {
                storageAccount: "string",
                sasToken: "string",
                sharedKey: "string",
            },
            gcsGoogleCloudStorage: {
                serviceAccountJson: "string",
            },
            httpsPublicWeb: {
                userAgent: false,
            },
            localFilesystemLimited: {},
            s3AmazonWebServices: {
                awsAccessKeyId: "string",
                awsSecretAccessKey: "string",
            },
            scpSecureCopyProtocol: {
                host: "string",
                user: "string",
                password: "string",
                port: "string",
            },
            sftpSecureFileTransferProtocol: {
                host: "string",
                user: "string",
                password: "string",
                port: "string",
            },
            sshSecureShell: {
                host: "string",
                user: "string",
                password: "string",
                port: "string",
            },
        },
        url: "string",
        format: "string",
        readerOptions: "string",
    },
    workspaceId: "string",
    definitionId: "string",
    name: "string",
    secretId: "string",
});
type: airbyte:SourceFile
properties:
    configuration:
        datasetName: string
        format: string
        provider:
            azBlobAzureBlobStorage:
                sasToken: string
                sharedKey: string
                storageAccount: string
            gcsGoogleCloudStorage:
                serviceAccountJson: string
            httpsPublicWeb:
                userAgent: false
            localFilesystemLimited: {}
            s3AmazonWebServices:
                awsAccessKeyId: string
                awsSecretAccessKey: string
            scpSecureCopyProtocol:
                host: string
                password: string
                port: string
                user: string
            sftpSecureFileTransferProtocol:
                host: string
                password: string
                port: string
                user: string
            sshSecureShell:
                host: string
                password: string
                port: string
                user: string
        readerOptions: string
        url: string
    definitionId: string
    name: string
    secretId: string
    workspaceId: string
SourceFile Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
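As a sketch of the two equivalent forms, with the nested *Args class names assumed from the provider's usual naming convention:

import pulumi_airbyte as airbyte

# Dictionary-literal form, as used in the Python constructor example above.
configuration_as_dict = {
    "dataset_name": "my_dataset",
    "url": "https://example.com/data.csv",
    "provider": {"https_public_web": {"user_agent": False}},
}

# Equivalent argument-class form; the nested *Args class names are assumed
# from the provider's naming convention.
configuration_as_args = airbyte.SourceFileConfigurationArgs(
    dataset_name="my_dataset",
    url="https://example.com/data.csv",
    provider=airbyte.SourceFileConfigurationProviderArgs(
        https_public_web=airbyte.SourceFileConfigurationProviderHttpsPublicWebArgs(
            user_agent=False,
        ),
    ),
)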
The SourceFile resource accepts the following input properties:
- Configuration
SourceFile Configuration 
- WorkspaceId string
- DefinitionId string
- The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- Name string
- Name of the source e.g. dev-mysql-instance.
- SecretId string
- Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- Configuration
SourceFile Configuration Args 
- WorkspaceId string
- DefinitionId string
- The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- Name string
- Name of the source e.g. dev-mysql-instance.
- SecretId string
- Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- configuration
SourceFile Configuration 
- workspaceId String
- definitionId String
- The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name String
- Name of the source e.g. dev-mysql-instance.
- secretId String
- Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- configuration
SourceFile Configuration 
- workspaceId string
- definitionId string
- The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name string
- Name of the source e.g. dev-mysql-instance.
- secretId string
- Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- configuration
SourceFile Configuration Args 
- workspace_id str
- definition_id str
- The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name str
- Name of the source e.g. dev-mysql-instance.
- secret_id str
- Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- configuration Property Map
- workspaceId String
- definitionId String
- The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name String
- Name of the source e.g. dev-mysql-instance.
- secretId String
- Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
Outputs
All input properties are implicitly available as output properties. Additionally, the SourceFile resource produces the following output properties:
- CreatedAt double
- Id string
- The provider-assigned unique ID for this managed resource.
- SourceId string
- SourceType string
- CreatedAt float64
- Id string
- The provider-assigned unique ID for this managed resource.
- SourceId string
- SourceType string
- createdAt Double
- id String
- The provider-assigned unique ID for this managed resource.
- sourceId String
- sourceType String
- createdAt number
- id string
- The provider-assigned unique ID for this managed resource.
- sourceId string
- sourceType string
- created_at float
- id str
- The provider-assigned unique ID for this managed resource.
- source_id str
- source_type str
- createdAt Number
- id String
- The provider-assigned unique ID for this managed resource.
- sourceId String
- sourceType String
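For example, the computed outputs can be exported from a Python program once the resource is declared; a minimal sketch with placeholder values:

import pulumi
import pulumi_airbyte as airbyte

# Placeholder resource; see the constructor examples above for full configurations.
my_source_file = airbyte.SourceFile("mySourceFile",
    configuration={
        "dataset_name": "my_dataset",
        "url": "https://example.com/data.csv",
        "provider": {"https_public_web": {"user_agent": False}},
    },
    workspace_id="00000000-0000-0000-0000-000000000000")

# The provider-assigned outputs resolve after the resource is created.
pulumi.export("sourceId", my_source_file.source_id)
pulumi.export("sourceType", my_source_file.source_type)
pulumi.export("createdAt", my_source_file.created_at)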
Look up Existing SourceFile Resource
Get an existing SourceFile resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: SourceFileState, opts?: CustomResourceOptions): SourceFile
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        configuration: Optional[SourceFileConfigurationArgs] = None,
        created_at: Optional[float] = None,
        definition_id: Optional[str] = None,
        name: Optional[str] = None,
        secret_id: Optional[str] = None,
        source_id: Optional[str] = None,
        source_type: Optional[str] = None,
        workspace_id: Optional[str] = None) -> SourceFile
func GetSourceFile(ctx *Context, name string, id IDInput, state *SourceFileState, opts ...ResourceOption) (*SourceFile, error)
public static SourceFile Get(string name, Input<string> id, SourceFileState? state, CustomResourceOptions? opts = null)
public static SourceFile get(String name, Output<String> id, SourceFileState state, CustomResourceOptions options)
resources:
  _:
    type: airbyte:SourceFile
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Configuration
SourceFile Configuration 
- CreatedAt double
- DefinitionId string
- The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- Name string
- Name of the source e.g. dev-mysql-instance.
- SecretId string
- Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- SourceId string
- SourceType string
- WorkspaceId string
- Configuration
SourceFile Configuration Args 
- CreatedAt float64
- DefinitionId string
- The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- Name string
- Name of the source e.g. dev-mysql-instance.
- SecretId string
- Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- SourceId string
- SourceType string
- WorkspaceId string
- configuration
SourceFile Configuration 
- createdAt Double
- definitionId String
- The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name String
- Name of the source e.g. dev-mysql-instance.
- secretId String
- Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- sourceId String
- sourceType String
- workspaceId String
- configuration
SourceFile Configuration 
- createdAt number
- definitionId string
- The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name string
- Name of the source e.g. dev-mysql-instance.
- secretId string
- Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- sourceId string
- sourceType string
- workspaceId string
- configuration
SourceFile Configuration Args 
- created_at float
- definition_id str
- The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name str
- Name of the source e.g. dev-mysql-instance.
- secret_id str
- Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- source_id str
- source_type str
- workspace_id str
- configuration Property Map
- createdAt Number
- definitionId String
- The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. Requires replacement if changed.
- name String
- Name of the source e.g. dev-mysql-instance.
- secretId String
- Optional secretID obtained through the public API OAuth redirect flow. Requires replacement if changed.
- sourceId String
- sourceType String
- workspaceId String
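A sketch of the lookup in Python, assuming the source ID is supplied through stack configuration rather than hard-coded:

import pulumi
import pulumi_airbyte as airbyte

# The ID of the existing source; how you obtain it (config, stack reference,
# the Airbyte UI) is up to you.
config = pulumi.Config()
existing_source_id = config.require("existingSourceFileId")

existing_source = airbyte.SourceFile.get("existingSourceFile", existing_source_id)

pulumi.export("existingSourceName", existing_source.name)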
Supporting Types
SourceFileConfiguration, SourceFileConfigurationArgs      
- DatasetName string
- The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).
- Provider
SourceFile Configuration Provider 
- The storage Provider or Location of the file(s) which should be replicated.
- Url string
- The URL path to access the file which should be replicated.
- Format string
- The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs). Default: "csv"; must be one of ["csv", "json", "jsonl", "excel", "excel_binary", "fwf", "feather", "parquet", "yaml"]
- ReaderOptions string
- This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.
- DatasetName string
- The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).
- Provider
SourceFile Configuration Provider 
- The storage Provider or Location of the file(s) which should be replicated.
- Url string
- The URL path to access the file which should be replicated.
- Format string
- The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs). Default: "csv"; must be one of ["csv", "json", "jsonl", "excel", "excel_binary", "fwf", "feather", "parquet", "yaml"]
- ReaderOptions string
- This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.
- datasetName String
- The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).
- provider
SourceFile Configuration Provider 
- The storage Provider or Location of the file(s) which should be replicated.
- url String
- The URL path to access the file which should be replicated.
- format String
- The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs). Default: "csv"; must be one of ["csv", "json", "jsonl", "excel", "excel_binary", "fwf", "feather", "parquet", "yaml"]
- readerOptions String
- This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.
- datasetName string
- The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).
- provider
SourceFile Configuration Provider 
- The storage Provider or Location of the file(s) which should be replicated.
- url string
- The URL path to access the file which should be replicated.
- format string
- The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs). Default: "csv"; must be one of ["csv", "json", "jsonl", "excel", "excel_binary", "fwf", "feather", "parquet", "yaml"]
- readerOptions string
- This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.
- dataset_name str
- The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).
- provider
SourceFile Configuration Provider 
- The storage Provider or Location of the file(s) which should be replicated.
- url str
- The URL path to access the file which should be replicated.
- format str
- The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs). Default: "csv"; must be one of ["csv", "json", "jsonl", "excel", "excel_binary", "fwf", "feather", "parquet", "yaml"]
- reader_options str
- This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.
- datasetName String
- The Name of the final table to replicate this file into (should include letters, numbers dash and underscores only).
- provider Property Map
- The storage Provider or Location of the file(s) which should be replicated.
- url String
- The URL path to access the file which should be replicated.
- format String
- The Format of the file which should be replicated (Warning: some formats may be experimental, please refer to the docs). Default: "csv"; must be one of ["csv", "json", "jsonl", "excel", "excel_binary", "fwf", "feather", "parquet", "yaml"]
- readerOptions String
- This should be a string in JSON format. It depends on the chosen file format to provide additional options and tune its behavior.
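Because reader_options is a JSON-encoded string, one readable pattern is to build it with json.dumps; a sketch in which the specific option keys are illustrative and depend on the chosen format:

import json

import pulumi_airbyte as airbyte

# The option keys below (sep, nrows) are illustrative; valid keys depend on
# the chosen format and are passed through as a JSON string.
reader_options = json.dumps({"sep": ";", "nrows": 1000})

csv_source = airbyte.SourceFile("csvSource",
    configuration={
        "dataset_name": "my_dataset",
        "format": "csv",
        "reader_options": reader_options,
        "url": "https://example.com/data.csv",
        "provider": {"https_public_web": {"user_agent": False}},
    },
    workspace_id="00000000-0000-0000-0000-000000000000")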
SourceFileConfigurationProvider, SourceFileConfigurationProviderArgs        
- AzBlobAzureBlobStorage SourceFileConfigurationProviderAzBlobAzureBlobStorage
- GcsGoogleCloudStorage SourceFileConfigurationProviderGcsGoogleCloudStorage
- HttpsPublicWeb SourceFileConfigurationProviderHttpsPublicWeb
- LocalFilesystemLimited SourceFileConfigurationProviderLocalFilesystemLimited
- S3AmazonWebServices SourceFileConfigurationProviderS3AmazonWebServices
- ScpSecureCopyProtocol SourceFileConfigurationProviderScpSecureCopyProtocol
- SftpSecureFileTransferProtocol SourceFileConfigurationProviderSftpSecureFileTransferProtocol
- SshSecureShell SourceFileConfigurationProviderSshSecureShell
- AzBlobAzureBlobStorage SourceFileConfigurationProviderAzBlobAzureBlobStorage
- GcsGoogleCloudStorage SourceFileConfigurationProviderGcsGoogleCloudStorage
- HttpsPublicWeb SourceFileConfigurationProviderHttpsPublicWeb
- LocalFilesystemLimited SourceFileConfigurationProviderLocalFilesystemLimited
- S3AmazonWebServices SourceFileConfigurationProviderS3AmazonWebServices
- ScpSecureCopyProtocol SourceFileConfigurationProviderScpSecureCopyProtocol
- SftpSecureFileTransferProtocol SourceFileConfigurationProviderSftpSecureFileTransferProtocol
- SshSecureShell SourceFileConfigurationProviderSshSecureShell
- azBlobAzureBlobStorage SourceFileConfigurationProviderAzBlobAzureBlobStorage
- gcsGoogleCloudStorage SourceFileConfigurationProviderGcsGoogleCloudStorage
- httpsPublicWeb SourceFileConfigurationProviderHttpsPublicWeb
- localFilesystemLimited SourceFileConfigurationProviderLocalFilesystemLimited
- s3AmazonWebServices SourceFileConfigurationProviderS3AmazonWebServices
- scpSecureCopyProtocol SourceFileConfigurationProviderScpSecureCopyProtocol
- sftpSecureFileTransferProtocol SourceFileConfigurationProviderSftpSecureFileTransferProtocol
- sshSecureShell SourceFileConfigurationProviderSshSecureShell
- azBlobAzureBlobStorage SourceFileConfigurationProviderAzBlobAzureBlobStorage
- gcsGoogleCloudStorage SourceFileConfigurationProviderGcsGoogleCloudStorage
- httpsPublicWeb SourceFileConfigurationProviderHttpsPublicWeb
- localFilesystemLimited SourceFileConfigurationProviderLocalFilesystemLimited
- s3AmazonWebServices SourceFileConfigurationProviderS3AmazonWebServices
- scpSecureCopyProtocol SourceFileConfigurationProviderScpSecureCopyProtocol
- sftpSecureFileTransferProtocol SourceFileConfigurationProviderSftpSecureFileTransferProtocol
- sshSecureShell SourceFileConfigurationProviderSshSecureShell
- az_blob_azure_blob_storage SourceFileConfigurationProviderAzBlobAzureBlobStorage
- gcs_google_cloud_storage SourceFileConfigurationProviderGcsGoogleCloudStorage
- https_public_web SourceFileConfigurationProviderHttpsPublicWeb
- local_filesystem_limited SourceFileConfigurationProviderLocalFilesystemLimited
- s3_amazon_web_services SourceFileConfigurationProviderS3AmazonWebServices
- scp_secure_copy_protocol SourceFileConfigurationProviderScpSecureCopyProtocol
- sftp_secure_file_transfer_protocol SourceFileConfigurationProviderSftpSecureFileTransferProtocol
- ssh_secure_shell SourceFileConfigurationProviderSshSecureShell
SourceFileConfigurationProviderAzBlobAzureBlobStorage, SourceFileConfigurationProviderAzBlobAzureBlobStorageArgs                  
- StorageAccount string
- The globally unique name of the storage account that the desired blob sits within. See here for more details.
- SasToken string
- To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
- SharedKey string
- To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
- StorageAccount string
- The globally unique name of the storage account that the desired blob sits within. See here for more details.
- SasToken string
- To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
- SharedKey string
- To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
- storageAccount String
- The globally unique name of the storage account that the desired blob sits within. See here for more details.
- sasToken String
- To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
- sharedKey String
- To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
- storageAccount string
- The globally unique name of the storage account that the desired blob sits within. See here for more details.
- sasToken string
- To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
- sharedKey string
- To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
- storage_account str
- The globally unique name of the storage account that the desired blob sits within. See here for more details.
- sas_token str
- To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
- shared_key str
- To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
- storageAccount String
- The globally unique name of the storage account that the desired blob sits within. See here for more details.
- sasToken String
- To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a SAS (Shared Access Signature) token. If accessing publicly available data, this field is not necessary.
- sharedKey String
- To access Azure Blob Storage, this connector would need credentials with the proper permissions. One option is a storage account shared key (aka account key or access key). If accessing publicly available data, this field is not necessary.
SourceFileConfigurationProviderGcsGoogleCloudStorage, SourceFileConfigurationProviderGcsGoogleCloudStorageArgs                
- ServiceAccountJson string
- In order to access private Buckets stored on Google Cloud, this connector needs service account JSON credentials with the proper permissions, as described here. Please generate the credentials.json file and copy/paste its content into this field (JSON format expected). If accessing publicly available data, this field is not necessary.
- ServiceAccountJson string
- In order to access private Buckets stored on Google Cloud, this connector needs service account JSON credentials with the proper permissions, as described here. Please generate the credentials.json file and copy/paste its content into this field (JSON format expected). If accessing publicly available data, this field is not necessary.
- serviceAccountJson String
- In order to access private Buckets stored on Google Cloud, this connector needs service account JSON credentials with the proper permissions, as described here. Please generate the credentials.json file and copy/paste its content into this field (JSON format expected). If accessing publicly available data, this field is not necessary.
- serviceAccountJson string
- In order to access private Buckets stored on Google Cloud, this connector needs service account JSON credentials with the proper permissions, as described here. Please generate the credentials.json file and copy/paste its content into this field (JSON format expected). If accessing publicly available data, this field is not necessary.
- service_account_json str
- In order to access private Buckets stored on Google Cloud, this connector needs service account JSON credentials with the proper permissions, as described here. Please generate the credentials.json file and copy/paste its content into this field (JSON format expected). If accessing publicly available data, this field is not necessary.
- serviceAccountJson String
- In order to access private Buckets stored on Google Cloud, this connector needs service account JSON credentials with the proper permissions, as described here. Please generate the credentials.json file and copy/paste its content into this field (JSON format expected). If accessing publicly available data, this field is not necessary.
SourceFileConfigurationProviderHttpsPublicWeb, SourceFileConfigurationProviderHttpsPublicWebArgs              
- UserAgent bool
- Add User-Agent to request. Default: false
- UserAgent bool
- Add User-Agent to request. Default: false
- userAgent Boolean
- Add User-Agent to request. Default: false
- userAgent boolean
- Add User-Agent to request. Default: false
- user_agent bool
- Add User-Agent to request. Default: false
- userAgent Boolean
- Add User-Agent to request. Default: false
SourceFileConfigurationProviderS3AmazonWebServices, SourceFileConfigurationProviderS3AmazonWebServicesArgs              
- AwsAccessKeyId string
- In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- AwsSecretAccessKey string
- In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- AwsAccessKeyId string
- In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- AwsSecretAccessKey string
- In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- awsAccessKeyId String
- In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- awsSecretAccessKey String
- In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- awsAccessKeyId string
- In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- awsSecretAccessKey string
- In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- aws_access_key_id str
- In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- aws_secret_access_key str
- In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- awsAccessKeyId String
- In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
- awsSecretAccessKey String
- In order to access private Buckets stored on AWS S3, this connector would need credentials with the proper permissions. If accessing publicly available data, this field is not necessary.
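A sketch of supplying the S3 credentials from Pulumi config secrets instead of hard-coding them; the config keys, bucket, and workspace ID are placeholders, and the s3:// URL form is an assumption for a private object.

import pulumi
import pulumi_airbyte as airbyte

# Set with: pulumi config set --secret awsAccessKeyId ... (and likewise for the secret key).
cfg = pulumi.Config()
aws_access_key_id = cfg.require_secret("awsAccessKeyId")
aws_secret_access_key = cfg.require_secret("awsSecretAccessKey")

s3_source = airbyte.SourceFile("s3Source",
    configuration={
        "dataset_name": "my_dataset",
        "format": "csv",
        "url": "s3://my-bucket/data.csv",  # assumed URL form for a private S3 object
        "provider": {
            "s3_amazon_web_services": {
                "aws_access_key_id": aws_access_key_id,
                "aws_secret_access_key": aws_secret_access_key,
            },
        },
    },
    workspace_id="00000000-0000-0000-0000-000000000000")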
SourceFileConfigurationProviderScpSecureCopyProtocol, SourceFileConfigurationProviderScpSecureCopyProtocolArgs
- Host string
- Password string
- Port string
- User string
SourceFileConfigurationProviderSftpSecureFileTransferProtocol, SourceFileConfigurationProviderSftpSecureFileTransferProtocolArgs
- Host string
- Password string
- Port string
- User string
SourceFileConfigurationProviderSshSecureShell, SourceFileConfigurationProviderSshSecureShellArgs
- Host string
- Password string
- Port string
- User string
Import
$ pulumi import airbyte:index/sourceFile:SourceFile my_airbyte_source_file ""
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- airbyte airbytehq/terraform-provider-airbyte
- License
- Notes
- This Pulumi package is based on the airbyte Terraform Provider.