From 489a530489a59b2cfcac751ea4df437284d222d3 Mon Sep 17 00:00:00 2001 From: Edward Gao Date: Fri, 10 Dec 2021 16:09:04 -0800 Subject: [PATCH 1/5] bump versions --- airbyte-integrations/connectors/destination-redshift/Dockerfile | 2 +- airbyte-integrations/connectors/destination-s3/Dockerfile | 2 +- .../connectors/destination-snowflake/Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/airbyte-integrations/connectors/destination-redshift/Dockerfile b/airbyte-integrations/connectors/destination-redshift/Dockerfile index 8d5274dbdfbaa..bb5be9d62c0a9 100644 --- a/airbyte-integrations/connectors/destination-redshift/Dockerfile +++ b/airbyte-integrations/connectors/destination-redshift/Dockerfile @@ -8,5 +8,5 @@ COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar RUN tar xf ${APPLICATION}.tar --strip-components=1 -LABEL io.airbyte.version=0.3.20 +LABEL io.airbyte.version=0.3.21 LABEL io.airbyte.name=airbyte/destination-redshift diff --git a/airbyte-integrations/connectors/destination-s3/Dockerfile b/airbyte-integrations/connectors/destination-s3/Dockerfile index ef60785987f47..e68bba6018a1f 100644 --- a/airbyte-integrations/connectors/destination-s3/Dockerfile +++ b/airbyte-integrations/connectors/destination-s3/Dockerfile @@ -7,5 +7,5 @@ COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar RUN tar xf ${APPLICATION}.tar --strip-components=1 -LABEL io.airbyte.version=0.1.15 +LABEL io.airbyte.version=0.1.16 LABEL io.airbyte.name=airbyte/destination-s3 diff --git a/airbyte-integrations/connectors/destination-snowflake/Dockerfile b/airbyte-integrations/connectors/destination-snowflake/Dockerfile index 5fe4592ffb1f2..827233b3d5fde 100644 --- a/airbyte-integrations/connectors/destination-snowflake/Dockerfile +++ b/airbyte-integrations/connectors/destination-snowflake/Dockerfile @@ -18,5 +18,5 @@ COPY build/distributions/${APPLICATION}*.tar ${APPLICATION}.tar RUN tar xf ${APPLICATION}.tar --strip-components=1 -LABEL 
io.airbyte.version=0.3.19 +LABEL io.airbyte.version=0.3.20 LABEL io.airbyte.name=airbyte/destination-snowflake From 154e1e672fdbe37208e7f80e6128a7f1d27f57d6 Mon Sep 17 00:00:00 2001 From: Edward Gao Date: Fri, 10 Dec 2021 16:19:29 -0800 Subject: [PATCH 2/5] update changelogs --- docs/integrations/destinations/redshift.md | 5 +++-- docs/integrations/destinations/s3.md | 3 ++- docs/integrations/destinations/snowflake.md | 3 ++- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/docs/integrations/destinations/redshift.md b/docs/integrations/destinations/redshift.md index 60b22043c70ee..36b7d3cab0676 100644 --- a/docs/integrations/destinations/redshift.md +++ b/docs/integrations/destinations/redshift.md @@ -6,7 +6,7 @@ The Airbyte Redshift destination allows you to sync data to Redshift. This Redshift destination connector has two replication strategies: -1. INSERT: Replicates data via SQL INSERT queries. This is built on top of the destination-jdbc code base and is configured to rely on JDBC 4.2 standard drivers provided by Amazon via Mulesoft [here](https://mvnrepository.com/artifact/com.amazon.redshift/redshift-jdbc42) as described in Redshift documentation [here](https://docs.aws.amazon.com/redshift/latest/mgmt/jdbc20-install.html). **Not recommended for production workloads as this does not scale well**. +1. INSERT: Replicates data via SQL INSERT queries. This is built on top of the destination-jdbc code base and is configured to rely on JDBC 4.2 standard drivers provided by Amazon via Mulesoft [here](https://mvnrepository.com/artifact/com.amazon.redshift/redshift-jdbc42) as described in Redshift documentation [here](https://docs.aws.amazon.com/redshift/latest/mgmt/jdbc20-install.html). **Not recommended for production workloads as this does not scale well**. 2. COPY: Replicates data by first uploading data to an S3 bucket and issuing a COPY command. 
This is the recommended loading approach described by Redshift [best practices](https://docs.aws.amazon.com/redshift/latest/dg/c_loading-data-best-practices.html). Requires an S3 bucket and credentials. Airbyte automatically picks an approach depending on the given configuration - if S3 configuration is present, Airbyte will use the COPY strategy and vice versa. @@ -79,7 +79,7 @@ Provide the required S3 info. * Place the S3 bucket and the Redshift cluster in the same region to save on networking costs. * **Access Key Id** * See [this](https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html#access-keys-and-secret-access-keys) on how to generate an access key. - * We recommend creating an Airbyte-specific user. This user will require [read and write permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the staging bucket. + * We recommend creating an Airbyte-specific user. This user will require [read and write permissions](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_policies_examples_s3_rw-bucket.html) to objects in the staging bucket. * **Secret Access Key** * Corresponding key to the above key id. 
* **Part Size** @@ -118,6 +118,7 @@ All Redshift connections are encrypted using SSL | Version | Date | Pull Request | Subject | | :------ | :-------- | :----- | :------ | +| 0.3.21 | 2021-12-10 | [#8562](https://github.com/airbytehq/airbyte/pull/8562) | Moving classes around for better dependency management | | 0.3.20 | 2021-11-08 | [#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | | 0.3.19 | 2021-10-21 | [7234](https://github.com/airbytehq/airbyte/pull/7234) | Allow SSL traffic only | | 0.3.17 | 2021-10-12 | [6965](https://github.com/airbytehq/airbyte/pull/6965) | Added SSL Support | diff --git a/docs/integrations/destinations/s3.md b/docs/integrations/destinations/s3.md index baf37448d5477..e23b6777c3633 100644 --- a/docs/integrations/destinations/s3.md +++ b/docs/integrations/destinations/s3.md @@ -223,7 +223,8 @@ Under the hood, an Airbyte data stream in Json schema is first converted to an A | Version | Date | Pull Request | Subject | | :--- | :--- | :--- | :--- | -| 0.1.15 | 2021-12-03 | [\#9999](https://github.com/airbytehq/airbyte/pull/9999) | Remove excessive logging for Avro and Parquet invalid date strings. | +| 0.1.16 | 2021-12-10 | [\#8562](https://github.com/airbytehq/airbyte/pull/8562) | Swap dependencies with destination-jdbc. | +| 0.1.15 | 2021-12-03 | [\#8501](https://github.com/airbytehq/airbyte/pull/8501) | Remove excessive logging for Avro and Parquet invalid date strings. | | 0.1.14 | 2021-11-09 | [\#7732](https://github.com/airbytehq/airbyte/pull/7732) | Support timestamp in Avro and Parquet | | 0.1.13 | 2021-11-03 | [\#7288](https://github.com/airbytehq/airbyte/issues/7288) | Support Json `additionalProperties`. | | 0.1.12 | 2021-09-13 | [\#5720](https://github.com/airbytehq/airbyte/issues/5720) | Added configurable block size for stream. 
Each stream is limited to 10,000 by S3 | diff --git a/docs/integrations/destinations/snowflake.md b/docs/integrations/destinations/snowflake.md index 0329ba751a382..b84bbc84f841d 100644 --- a/docs/integrations/destinations/snowflake.md +++ b/docs/integrations/destinations/snowflake.md @@ -162,7 +162,7 @@ First you will need to create a GCS bucket. Then you will need to run the script below: -* You must run the script as the account admin for Snowflake. +* You must run the script as the account admin for Snowflake. * You should replace `AIRBYTE_ROLE` with the role you used for Airbyte's Snowflake configuration. * Replace `YOURBUCKETNAME` with your bucket name * The stage name can be modified to any valid name. @@ -194,6 +194,7 @@ Finally, you need to add read/write permissions to your bucket with that email. | Version | Date | Pull Request | Subject | | :------ | :-------- | :----- | :------ | +| 0.3.20 | 2021-12-10 | [#8562](https://github.com/airbytehq/airbyte/pull/8562) | Moving classes around for better dependency management; compatibility fix for Java 17 | | 0.3.19 | 2021-12-06 | [#8528](https://github.com/airbytehq/airbyte/pull/8528) | Set Internal Staging as default choice | | 0.3.18 | 2021-11-26 | [#8253](https://github.com/airbytehq/airbyte/pull/8253) | Snowflake Internal Staging Support | | 0.3.17 | 2021-11-08 | [#7719](https://github.com/airbytehq/airbyte/pull/7719) | Improve handling of wide rows by buffering records based on their byte size rather than their count | From 0406787f0edb395d7aa8b2b9adc78ebe1b645b2b Mon Sep 17 00:00:00 2001 From: Edward Gao Date: Fri, 10 Dec 2021 16:22:00 -0800 Subject: [PATCH 3/5] update connector index --- .../src/main/resources/seed/destination_definitions.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml index 571c9e910d1ab..6cbfba046e5f1 100644 --- 
a/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_definitions.yaml @@ -139,13 +139,13 @@ - name: Redshift destinationDefinitionId: f7a7d195-377f-cf5b-70a5-be6b819019dc dockerRepository: airbyte/destination-redshift - dockerImageTag: 0.3.20 + dockerImageTag: 0.3.21 documentationUrl: https://docs.airbyte.io/integrations/destinations/redshift icon: redshift.svg - name: S3 destinationDefinitionId: 4816b78f-1489-44c1-9060-4b19d5fa9362 dockerRepository: airbyte/destination-s3 - dockerImageTag: 0.1.15 + dockerImageTag: 0.1.16 documentationUrl: https://docs.airbyte.io/integrations/destinations/s3 icon: s3.svg - name: SFTP-JSON @@ -157,7 +157,7 @@ - name: Snowflake destinationDefinitionId: 424892c4-daac-4491-b35d-c6688ba547ba dockerRepository: airbyte/destination-snowflake - dockerImageTag: 0.3.19 + dockerImageTag: 0.3.20 documentationUrl: https://docs.airbyte.io/integrations/destinations/snowflake icon: snowflake.svg - name: MariaDB ColumnStore From ce9ed3e05287a5b3668545dee666ab247a7a0645 Mon Sep 17 00:00:00 2001 From: Edward Gao Date: Fri, 10 Dec 2021 18:49:47 -0800 Subject: [PATCH 4/5] regenerate seed --- .../resources/seed/destination_specs.yaml | 4349 +++++++++-------- 1 file changed, 2259 insertions(+), 2090 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index 2ea15ed073df1..d73cc400d4693 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -9,75 +9,78 @@ title: "AzureBlobStorage Destination Spec" type: "object" required: - - "azure_blob_storage_account_name" - - "azure_blob_storage_account_key" - - "format" + - "azure_blob_storage_account_name" + - "azure_blob_storage_account_key" + - "format" additionalProperties: false properties: 
azure_blob_storage_endpoint_domain_name: title: "Endpoint Domain Name" type: "string" default: "blob.core.windows.net" - description: "This is Azure Blob Storage endpoint domain name. Leave default\ + description: + "This is Azure Blob Storage endpoint domain name. Leave default\ \ value (or leave it empty if run container from command line) to use\ \ Microsoft native from example." examples: - - "blob.core.windows.net" + - "blob.core.windows.net" azure_blob_storage_container_name: title: "Azure blob storage container (Bucket) Name" type: "string" - description: "The name of the Azure blob storage container. If not exists\ + description: + "The name of the Azure blob storage container. If not exists\ \ - will be created automatically. May be empty, then will be created\ \ automatically airbytecontainer+timestamp" examples: - - "airbytetescontainername" + - "airbytetescontainername" azure_blob_storage_account_name: title: "Azure Blob Storage account name" type: "string" description: "The account's name of the Azure Blob Storage." examples: - - "airbyte5storage" + - "airbyte5storage" azure_blob_storage_account_key: description: "The Azure blob storage account key." airbyte_secret: true type: "string" examples: - - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" format: title: "Output Format" type: "object" description: "Output data format" oneOf: - - title: "CSV: Comma-Separated Values" - required: - - "format_type" - - "flattening" - properties: - format_type: - type: "string" - const: "CSV" - flattening: - type: "string" - title: "Normalization (Flattening)" - description: "Whether the input json data should be normalized (flattened)\ - \ in the output CSV. Please refer to docs for details." 
- default: "No flattening" - enum: - - "No flattening" - - "Root level flattening" - - title: "JSON Lines: newline-delimited JSON" - required: - - "format_type" - properties: - format_type: - type: "string" - const: "JSONL" + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + const: "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + const: "JSONL" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-amazon-sqs:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/amazon-sqs" @@ -86,8 +89,8 @@ title: "Destination Amazon Sqs" type: "object" required: - - "queue_url" - - "region" + - "queue_url" + - "region" additionalProperties: false properties: queue_url: @@ -95,54 +98,56 @@ description: "URL of the SQS Queue" type: "string" examples: - - "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" + - "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" order: 0 region: title: "AWS Region" description: "AWS Region of the SQS Queue" type: "string" enum: - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-north-1" - - "eu-south-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - 
"sa-east-1" - - "me-south-1" - - "us-gov-east-1" - - "us-gov-west-1" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" + - "us-gov-east-1" + - "us-gov-west-1" order: 1 message_delay: title: "Message Delay" - description: "Modify the Message Delay of the individual message from the\ + description: + "Modify the Message Delay of the individual message from the\ \ Queue's default (seconds)." type: "integer" examples: - - "15" + - "15" order: 2 access_key: title: "AWS IAM Access Key ID" - description: "The Access Key ID of the AWS IAM Role to use for sending \ + description: + "The Access Key ID of the AWS IAM Role to use for sending \ \ messages" type: "string" examples: - - "xxxxxHRNxxx3TBxxxxxx" + - "xxxxxHRNxxx3TBxxxxxx" order: 3 airbyte_secret: true secret_key: @@ -150,32 +155,34 @@ description: "The Secret Key of the AWS IAM Role to use for sending messages" type: "string" examples: - - "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz" + - "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz" order: 4 airbyte_secret: true message_body_key: title: "Message Body Key" - description: "Use this property to extract the contents of the named key\ + description: + "Use this property to extract the contents of the named key\ \ in the input record to use as the SQS message body. If not set, the\ \ entire content of the input record data is used as the message body." 
type: "string" examples: - - "myDataPath" + - "myDataPath" order: 5 message_group_id: title: "Message Group Id" - description: "The tag that specifies that a message belongs to a specific\ + description: + "The tag that specifies that a message belongs to a specific\ \ message group. This parameter applies only to, and is REQUIRED by, FIFO\ \ queues." type: "string" examples: - - "my-fifo-group" + - "my-fifo-group" order: 6 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-bigquery:0.5.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" @@ -184,13 +191,14 @@ title: "BigQuery Destination Spec" type: "object" required: - - "project_id" - - "dataset_id" + - "project_id" + - "dataset_id" additionalProperties: true properties: big_query_client_buffer_size_mb: title: "Google BigQuery client chunk size" - description: "Google BigQuery client's chunk(buffer) size (MIN=1, MAX =\ + description: + "Google BigQuery client's chunk(buffer) size (MIN=1, MAX =\ \ 15) for each table. The default 15MiB value is used if not set explicitly.\ \ It's recommended to decrease value for big data sets migration for less\ \ HEAP memory consumption and avoiding crashes. For more details refer\ @@ -200,56 +208,60 @@ maximum: 15 default: 15 examples: - - "15" + - "15" project_id: type: "string" - description: "The GCP project ID for the project containing the target BigQuery\ + description: + "The GCP project ID for the project containing the target BigQuery\ \ dataset." title: "Project ID" dataset_id: type: "string" - description: "Default BigQuery Dataset ID tables are replicated to if the\ + description: + "Default BigQuery Dataset ID tables are replicated to if the\ \ source does not specify a namespace." title: "Default Dataset ID" dataset_location: type: "string" - description: "The location of the dataset. 
Warning: Changes made after creation\ + description: + "The location of the dataset. Warning: Changes made after creation\ \ will not be applied." title: "Dataset Location" default: "US" enum: - - "US" - - "EU" - - "asia-east1" - - "asia-east2" - - "asia-northeast1" - - "asia-northeast2" - - "asia-northeast3" - - "asia-south1" - - "asia-southeast1" - - "asia-southeast2" - - "australia-southeast1" - - "europe-central1" - - "europe-central2" - - "europe-north1" - - "europe-west1" - - "europe-west2" - - "europe-west3" - - "europe-west4" - - "europe-west5" - - "europe-west6" - - "northamerica-northeast1" - - "southamerica-east1" - - "us-central1" - - "us-east1" - - "us-east4" - - "us-west-1" - - "us-west-2" - - "us-west-3" - - "us-west-4" + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west5" + - "europe-west6" + - "northamerica-northeast1" + - "southamerica-east1" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west-1" + - "us-west-2" + - "us-west-3" + - "us-west-4" credentials_json: type: "string" - description: "The contents of the JSON service account key. Check out the\ + description: + "The contents of the JSON service account key. Check out the\ \ docs if you need help generating this key. Default credentials will\ \ be used if this field is left empty." @@ -257,101 +269,107 @@ airbyte_secret: true transformation_priority: type: "string" - description: "When running custom transformations or Basic normalization,\ + description: + "When running custom transformations or Basic normalization,\ \ running queries on interactive mode can hit BQ limits, choosing batch\ \ will solve those limitss." 
title: "Transformation Query Run Type" default: "interactive" enum: - - "interactive" - - "batch" + - "interactive" + - "batch" loading_method: type: "object" title: "Loading Method" - description: "Loading method used to send select the way data will be uploaded\ + description: + "Loading method used to send select the way data will be uploaded\ \ to BigQuery." oneOf: - - title: "Standard Inserts" - additionalProperties: false - description: "Direct uploading using streams." - required: - - "method" - properties: - method: - type: "string" - const: "Standard" - - title: "GCS Staging" - additionalProperties: false - description: "Writes large batches of records to a file, uploads the file\ - \ to GCS, then uses
COPY INTO table
to upload the file. Recommended\ - \ for large production workloads for better speed and scalability." - required: - - "method" - - "gcs_bucket_name" - - "gcs_bucket_path" - - "credential" - properties: - method: - type: "string" - const: "GCS Staging" - gcs_bucket_name: - title: "GCS Bucket Name" - type: "string" - description: "The name of the GCS bucket." - examples: - - "airbyte_sync" - gcs_bucket_path: - description: "Directory under the GCS bucket where data will be written." - type: "string" - examples: - - "data_sync/test" - keep_files_in_gcs-bucket: - type: "string" - description: "This upload method is supposed to temporary store records\ - \ in GCS bucket. What do you want to do with data in GCS bucket\ - \ when migration has finished?" - title: "GCS tmp files afterward processing" - default: "Delete all tmp files from GCS" - enum: - - "Delete all tmp files from GCS" - - "Keep all tmp files in GCS" - credential: - title: "Credential" - type: "object" - oneOf: - - title: "HMAC key" - required: - - "credential_type" - - "hmac_key_access_id" - - "hmac_key_secret" - properties: - credential_type: - type: "string" - const: "HMAC_KEY" - hmac_key_access_id: - type: "string" - description: "HMAC key access ID. When linked to a service account,\ - \ this ID is 61 characters long; when linked to a user account,\ - \ it is 24 characters long." - title: "HMAC Key Access ID" - airbyte_secret: true - examples: - - "1234567890abcdefghij1234" - hmac_key_secret: - type: "string" - description: "The corresponding secret for the access ID. It\ - \ is a 40-character base-64 encoded string." - title: "HMAC Key Secret" - airbyte_secret: true - examples: - - "1234567890abcdefghij1234567890ABCDEFGHIJ" + - title: "Standard Inserts" + additionalProperties: false + description: "Direct uploading using streams." 
+ required: + - "method" + properties: + method: + type: "string" + const: "Standard" + - title: "GCS Staging" + additionalProperties: false + description: + "Writes large batches of records to a file, uploads the file\ + \ to GCS, then uses
COPY INTO table
to upload the file. Recommended\ + \ for large production workloads for better speed and scalability." + required: + - "method" + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + properties: + method: + type: "string" + const: "GCS Staging" + gcs_bucket_name: + title: "GCS Bucket Name" + type: "string" + description: "The name of the GCS bucket." + examples: + - "airbyte_sync" + gcs_bucket_path: + description: "Directory under the GCS bucket where data will be written." + type: "string" + examples: + - "data_sync/test" + keep_files_in_gcs-bucket: + type: "string" + description: + "This upload method is supposed to temporary store records\ + \ in GCS bucket. What do you want to do with data in GCS bucket\ + \ when migration has finished?" + title: "GCS tmp files afterward processing" + default: "Delete all tmp files from GCS" + enum: + - "Delete all tmp files from GCS" + - "Keep all tmp files in GCS" + credential: + title: "Credential" + type: "object" + oneOf: + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + const: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It\ + \ is a 40-character base-64 encoded string." 
+ title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" + - "overwrite" + - "append" + - "append_dedup" - dockerImage: "airbyte/destination-bigquery-denormalized:0.1.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" @@ -360,59 +378,63 @@ title: "BigQuery Denormalized Typed Struct Destination Spec" type: "object" required: - - "project_id" - - "dataset_id" + - "project_id" + - "dataset_id" additionalProperties: true properties: project_id: type: "string" - description: "The GCP project ID for the project containing the target BigQuery\ + description: + "The GCP project ID for the project containing the target BigQuery\ \ dataset." title: "Project ID" dataset_id: type: "string" - description: "Default BigQuery Dataset ID tables are replicated to if the\ + description: + "Default BigQuery Dataset ID tables are replicated to if the\ \ source does not specify a namespace." title: "Default Dataset ID" dataset_location: type: "string" - description: "The location of the dataset. Warning: Changes made after creation\ + description: + "The location of the dataset. Warning: Changes made after creation\ \ will not be applied." 
title: "Dataset Location" default: "US" enum: - - "US" - - "EU" - - "asia-east1" - - "asia-east2" - - "asia-northeast1" - - "asia-northeast2" - - "asia-northeast3" - - "asia-south1" - - "asia-southeast1" - - "asia-southeast2" - - "australia-southeast1" - - "europe-central1" - - "europe-central2" - - "europe-north1" - - "europe-west1" - - "europe-west2" - - "europe-west3" - - "europe-west4" - - "europe-west5" - - "europe-west6" - - "northamerica-northeast1" - - "southamerica-east1" - - "us-central1" - - "us-east1" - - "us-east4" - - "us-west-1" - - "us-west-2" - - "us-west-3" - - "us-west-4" + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west5" + - "europe-west6" + - "northamerica-northeast1" + - "southamerica-east1" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west-1" + - "us-west-2" + - "us-west-3" + - "us-west-4" credentials_json: type: "string" - description: "The contents of the JSON service account key. Check out the\ + description: + "The contents of the JSON service account key. Check out the\ \ docs if you need help generating this key. Default credentials will\ \ be used if this field is left empty." 
@@ -422,8 +444,8 @@ supportsNormalization: false supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-cassandra:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/cassandra" @@ -432,11 +454,11 @@ title: "Cassandra Destination Spec" type: "object" required: - - "keyspace" - - "username" - - "password" - - "address" - - "port" + - "keyspace" + - "username" + - "password" + - "address" + - "port" additionalProperties: true properties: keyspace: @@ -460,7 +482,7 @@ description: "Address to connect to." type: "string" examples: - - "localhost,127.0.0.1" + - "localhost,127.0.0.1" order: 3 port: title: "Port" @@ -479,7 +501,8 @@ replication: title: "Replication factor" type: "integer" - description: "Indicates to how many nodes the data should be replicated\ + description: + "Indicates to how many nodes the data should be replicated\ \ to." default: 1 order: 6 @@ -487,8 +510,8 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-keen:0.2.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/keen" @@ -497,25 +520,26 @@ title: "Keen Spec" type: "object" required: - - "project_id" - - "api_key" + - "project_id" + - "api_key" additionalProperties: false properties: project_id: description: "Keen Project ID" type: "string" examples: - - "58b4acc22ba938934e888322e" + - "58b4acc22ba938934e888322e" api_key: title: "API Key" description: "Keen Master API key" type: "string" examples: - - "ABCDEFGHIJKLMNOPRSTUWXYZ" + - "ABCDEFGHIJKLMNOPRSTUWXYZ" airbyte_secret: true infer_timestamp: title: "Infer Timestamp" - description: "Allow connector to guess keen.timestamp value based on the\ + description: + "Allow connector to guess keen.timestamp value based on the\ \ streamed data" type: "boolean" default: true @@ 
-523,8 +547,8 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-dynamodb:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/dynamodb" @@ -533,79 +557,81 @@ title: "DynamoDB Destination Spec" type: "object" required: - - "dynamodb_table_name" - - "dynamodb_region" - - "access_key_id" - - "secret_access_key" + - "dynamodb_table_name" + - "dynamodb_region" + - "access_key_id" + - "secret_access_key" additionalProperties: false properties: dynamodb_endpoint: title: "Endpoint" type: "string" default: "" - description: "This is your DynamoDB endpoint url.(if you are working with\ + description: + "This is your DynamoDB endpoint url.(if you are working with\ \ AWS DynamoDB, just leave empty)." examples: - - "http://localhost:9000" + - "http://localhost:9000" dynamodb_table_name: title: "DynamoDB Table Name" type: "string" description: "The name of the DynamoDB table." examples: - - "airbyte_sync" + - "airbyte_sync" dynamodb_region: title: "DynamoDB Region" type: "string" default: "" description: "The region of the DynamoDB." 
enum: - - "" - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-north-1" - - "eu-south-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - "sa-east-1" - - "me-south-1" - - "us-gov-east-1" - - "us-gov-west-1" + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" + - "us-gov-east-1" + - "us-gov-west-1" access_key_id: type: "string" - description: "The access key id to access the DynamoDB. Airbyte requires\ + description: + "The access key id to access the DynamoDB. Airbyte requires\ \ Read and Write permissions to the DynamoDB." title: "DynamoDB Key Id" airbyte_secret: true examples: - - "A012345678910EXAMPLE" + - "A012345678910EXAMPLE" secret_access_key: type: "string" description: "The corresponding secret to the access key id." 
title: "DynamoDB Access Key" airbyte_secret: true examples: - - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-elasticsearch:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/elasticsearch" @@ -614,7 +640,7 @@ title: "Elasticsearch Connection Configuration" type: "object" required: - - "endpoint" + - "endpoint" additionalProperties: false properties: endpoint: @@ -624,7 +650,8 @@ upsert: type: "boolean" title: "Upsert Records" - description: "If a primary key identifier is defined in the source, an upsert\ + description: + "If a primary key identifier is defined in the source, an upsert\ \ will be performed using the primary key value as the elasticsearch doc\ \ id. Does not support composite primary keys." default: true @@ -633,64 +660,67 @@ type: "object" description: "The type of authentication to be used" oneOf: - - title: "None" - additionalProperties: false - description: "No authentication will be used" - required: - - "method" - properties: - method: - type: "string" - const: "none" - - title: "Api Key/Secret" - additionalProperties: false - description: "Use a api key and secret combination to authenticate" - required: - - "method" - - "apiKeyId" - - "apiKeySecret" - properties: - method: - type: "string" - const: "secret" - apiKeyId: - title: "API Key ID" - description: "The Key ID to used when accessing an enterprise Elasticsearch\ - \ instance." - type: "string" - apiKeySecret: - title: "API Key Secret" - description: "The secret associated with the API Key ID." 
- type: "string" - airbyte_secret: true - - title: "Username/Password" - additionalProperties: false - description: "Basic auth header with a username and password" - required: - - "method" - - "username" - - "password" - properties: - method: - type: "string" - const: "basic" - username: - title: "Username" - description: "Basic auth username to access a secure Elasticsearch\ - \ server" - type: "string" - password: - title: "Password" - description: "Basic auth password to access a secure Elasticsearch\ - \ server" - type: "string" - airbyte_secret: true + - title: "None" + additionalProperties: false + description: "No authentication will be used" + required: + - "method" + properties: + method: + type: "string" + const: "none" + - title: "Api Key/Secret" + additionalProperties: false + description: "Use a api key and secret combination to authenticate" + required: + - "method" + - "apiKeyId" + - "apiKeySecret" + properties: + method: + type: "string" + const: "secret" + apiKeyId: + title: "API Key ID" + description: + "The Key ID to used when accessing an enterprise Elasticsearch\ + \ instance." + type: "string" + apiKeySecret: + title: "API Key Secret" + description: "The secret associated with the API Key ID." 
+ type: "string" + airbyte_secret: true + - title: "Username/Password" + additionalProperties: false + description: "Basic auth header with a username and password" + required: + - "method" + - "username" + - "password" + properties: + method: + type: "string" + const: "basic" + username: + title: "Username" + description: + "Basic auth username to access a secure Elasticsearch\ + \ server" + type: "string" + password: + title: "Password" + description: + "Basic auth password to access a secure Elasticsearch\ + \ server" + type: "string" + airbyte_secret: true supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" supportsNamespaces: true - dockerImage: "airbyte/destination-gcs:0.1.15" spec: @@ -700,11 +730,11 @@ title: "GCS Destination Spec" type: "object" required: - - "gcs_bucket_name" - - "gcs_bucket_path" - - "gcs_bucket_region" - - "credential" - - "format" + - "gcs_bucket_name" + - "gcs_bucket_path" + - "gcs_bucket_region" + - "credential" + - "format" additionalProperties: false properties: gcs_bucket_name: @@ -712,329 +742,343 @@ type: "string" description: "The name of the GCS bucket." examples: - - "airbyte_sync" + - "airbyte_sync" gcs_bucket_path: description: "Directory under the GCS bucket where data will be written." type: "string" examples: - - "data_sync/test" + - "data_sync/test" gcs_bucket_region: title: "GCS Bucket Region" type: "string" default: "" description: "The region of the GCS bucket." 
enum: - - "" - - "-- North America --" - - "northamerica-northeast1" - - "northamerica-northeast2" - - "us-central1" - - "us-east1" - - "us-east4" - - "us-west1" - - "us-west2" - - "us-west3" - - "us-west4" - - "-- South America --" - - "southamerica-east1" - - "southamerica-west1" - - "-- Europe --" - - "europe-central2" - - "europe-north1" - - "europe-west1" - - "europe-west2" - - "europe-west3" - - "europe-west4" - - "europe-west6" - - "-- Asia --" - - "asia-east1" - - "asia-east2" - - "asia-northeast1" - - "asia-northeast2" - - "asia-northeast3" - - "asia-south1" - - "asia-south2" - - "asia-southeast1" - - "asia-southeast2" - - "-- Australia --" - - "australia-southeast1" - - "australia-southeast2" - - "-- Multi-regions --" - - "asia" - - "eu" - - "us" - - "-- Dual-regions --" - - "asia1" - - "eur4" - - "nam4" + - "" + - "-- North America --" + - "northamerica-northeast1" + - "northamerica-northeast2" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + - "-- South America --" + - "southamerica-east1" + - "southamerica-west1" + - "-- Europe --" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "-- Asia --" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "-- Australia --" + - "australia-southeast1" + - "australia-southeast2" + - "-- Multi-regions --" + - "asia" + - "eu" + - "us" + - "-- Dual-regions --" + - "asia1" + - "eur4" + - "nam4" credential: title: "Credential" type: "object" oneOf: - - title: "HMAC key" - required: - - "credential_type" - - "hmac_key_access_id" - - "hmac_key_secret" - properties: - credential_type: - type: "string" - enum: - - "HMAC_KEY" - default: "HMAC_KEY" - hmac_key_access_id: - type: "string" - description: "HMAC key access ID. 
When linked to a service account,\ - \ this ID is 61 characters long; when linked to a user account,\ - \ it is 24 characters long." - title: "HMAC Key Access ID" - airbyte_secret: true - examples: - - "1234567890abcdefghij1234" - hmac_key_secret: - type: "string" - description: "The corresponding secret for the access ID. It is a\ - \ 40-character base-64 encoded string." - title: "HMAC Key Secret" - airbyte_secret: true - examples: - - "1234567890abcdefghij1234567890ABCDEFGHIJ" + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + enum: + - "HMAC_KEY" + default: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It is a\ + \ 40-character base-64 encoded string." + title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" format: title: "Output Format" type: "object" description: "Output data format" oneOf: - - title: "Avro: Apache Avro" - required: - - "format_type" - - "compression_codec" - properties: - format_type: - type: "string" - enum: - - "Avro" - default: "Avro" - compression_codec: - title: "Compression Codec" - description: "The compression algorithm used to compress data. Default\ - \ to no compression." 
- type: "object" - oneOf: - - title: "no compression" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "no compression" - default: "no compression" - - title: "Deflate" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "Deflate" - default: "Deflate" - compression_level: - title: "Deflate level" - description: "0: no compression & fastest, 9: best compression\ - \ & slowest." - type: "integer" - default: 0 - minimum: 0 - maximum: 9 - - title: "bzip2" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "bzip2" - default: "bzip2" - - title: "xz" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "xz" - default: "xz" - compression_level: - title: "Compression level" - description: "See here for details." - type: "integer" - default: 6 - minimum: 0 - maximum: 9 - - title: "zstandard" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "zstandard" - default: "zstandard" - compression_level: - title: "Compression level" - description: "Negative levels are 'fast' modes akin to lz4 or\ - \ snappy, levels above 9 are generally for archival purposes,\ - \ and levels above 18 use a lot of memory." - type: "integer" - default: 3 - minimum: -5 - maximum: 22 - include_checksum: - title: "Include checksum" - description: "If true, include a checksum with each data block." - type: "boolean" - default: false - - title: "snappy" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "snappy" - default: "snappy" - part_size_mb: - title: "Block Size (MB) for GCS multipart upload" - description: "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." 
- type: "integer" - default: 5 - examples: - - 5 - - title: "CSV: Comma-Separated Values" - required: - - "format_type" - - "flattening" - properties: - format_type: - type: "string" - enum: - - "CSV" - default: "CSV" - flattening: - type: "string" - title: "Normalization (Flattening)" - description: "Whether the input json data should be normalized (flattened)\ - \ in the output CSV. Please refer to docs for details." - default: "No flattening" - enum: - - "No flattening" - - "Root level flattening" - part_size_mb: - title: "Block Size (MB) for GCS multipart upload" - description: "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - - title: "JSON Lines: newline-delimited JSON" - required: - - "format_type" - properties: - format_type: - type: "string" - enum: - - "JSONL" - default: "JSONL" - part_size_mb: - title: "Block Size (MB) for GCS multipart upload" - description: "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - - title: "Parquet: Columnar Storage" - required: - - "format_type" - properties: - format_type: - type: "string" - enum: - - "Parquet" - default: "Parquet" - compression_codec: - title: "Compression Codec" - description: "The compression algorithm used to compress data pages." 
- type: "string" - enum: - - "UNCOMPRESSED" - - "SNAPPY" - - "GZIP" - - "LZO" - - "BROTLI" - - "LZ4" - - "ZSTD" - default: "UNCOMPRESSED" - block_size_mb: - title: "Block Size (Row Group Size) (MB)" - description: "This is the size of a row group being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will improve\ - \ the IO when reading, but consume more memory when writing. Default:\ - \ 128 MB." - type: "integer" - default: 128 - examples: - - 128 - max_padding_size_mb: - title: "Max Padding Size (MB)" - description: "Maximum size allowed as padding to align row groups.\ - \ This is also the minimum size of a row group. Default: 8 MB." - type: "integer" - default: 8 - examples: - - 8 - page_size_kb: - title: "Page Size (KB)" - description: "The page size is for compression. A block is composed\ - \ of pages. A page is the smallest unit that must be read fully\ - \ to access a single record. If this value is too small, the compression\ - \ will deteriorate. Default: 1024 KB." - type: "integer" - default: 1024 - examples: - - 1024 - dictionary_page_size_kb: - title: "Dictionary Page Size (KB)" - description: "There is one dictionary page per column per row group\ - \ when dictionary encoding is used. The dictionary page size works\ - \ like the page size but for dictionary. Default: 1024 KB." - type: "integer" - default: 1024 - examples: - - 1024 - dictionary_encoding: - title: "Dictionary Encoding" - description: "Default: true." - type: "boolean" - default: true + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + type: "string" + enum: + - "Avro" + default: "Avro" + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." 
+ type: "object" + oneOf: + - title: "no compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate level" + description: + "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression level" + description: + "See here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + part_size_mb: + title: "Block Size (MB) for GCS multipart upload" + description: + "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." 
+ type: "integer" + default: 5 + examples: + - 5 + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + part_size_mb: + title: "Block Size (MB) for GCS multipart upload" + description: + "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "JSONL" + default: "JSONL" + part_size_mb: + title: "Block Size (MB) for GCS multipart upload" + description: + "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." 
+ type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + default: "UNCOMPRESSED" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." + type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." 
+ type: "boolean" + default: true supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" $schema: "http://json-schema.org/draft-07/schema#" - dockerImage: "airbyte/destination-pubsub:0.1.1" spec: @@ -1044,9 +1088,9 @@ title: "Google PubSub Destination Spec" type: "object" required: - - "project_id" - - "topic_id" - - "credentials_json" + - "project_id" + - "topic_id" + - "credentials_json" additionalProperties: true properties: project_id: @@ -1059,7 +1103,8 @@ title: "PubSub Topic ID" credentials_json: type: "string" - description: "The contents of the JSON service account key. Check out the\ + description: + "The contents of the JSON service account key. Check out the\ \ docs if you need help generating this key." title: "Credentials JSON" @@ -1068,7 +1113,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-kafka:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/kafka" @@ -1077,31 +1122,32 @@ title: "Kafka Destination Spec" type: "object" required: - - "bootstrap_servers" - - "topic_pattern" - - "protocol" - - "acks" - - "enable_idempotence" - - "compression_type" - - "batch_size" - - "linger_ms" - - "max_in_flight_requests_per_connection" - - "client_dns_lookup" - - "buffer_memory" - - "max_request_size" - - "retries" - - "socket_connection_setup_timeout_ms" - - "socket_connection_setup_timeout_max_ms" - - "max_block_ms" - - "request_timeout_ms" - - "delivery_timeout_ms" - - "send_buffer_bytes" - - "receive_buffer_bytes" + - "bootstrap_servers" + - "topic_pattern" + - "protocol" + - "acks" + - "enable_idempotence" + - "compression_type" + - "batch_size" + - "linger_ms" + - "max_in_flight_requests_per_connection" + - "client_dns_lookup" + - "buffer_memory" + - "max_request_size" + - "retries" + - 
"socket_connection_setup_timeout_ms" + - "socket_connection_setup_timeout_max_ms" + - "max_block_ms" + - "request_timeout_ms" + - "delivery_timeout_ms" + - "send_buffer_bytes" + - "receive_buffer_bytes" additionalProperties: true properties: bootstrap_servers: title: "Bootstrap servers" - description: "A list of host/port pairs to use for establishing the initial\ + description: + "A list of host/port pairs to use for establishing the initial\ \ connection to the Kafka cluster. The client will make use of all servers\ \ irrespective of which servers are specified here for bootstrapping—this\ \ list only impacts the initial hosts used to discover the full set of\ @@ -1112,23 +1158,24 @@ \ though, in case a server is down)." type: "string" examples: - - "kafka-broker1:9092,kafka-broker2:9092" + - "kafka-broker1:9092,kafka-broker2:9092" topic_pattern: title: "Topic pattern" - description: "Topic pattern in which the records will be sent. You can use\ + description: + "Topic pattern in which the records will be sent. You can use\ \ patterns like '{namespace}' and/or '{stream}' to send the message to\ \ a specific topic based on these values. Notice that the topic name will\ \ be transformed to a standard naming convention." type: "string" examples: - - "sample.topic" - - "{namespace}.{stream}.sample" + - "sample.topic" + - "{namespace}.{stream}.sample" test_topic: title: "Test topic" description: "Topic to test if Airbyte can produce messages." type: "string" examples: - - "test.topic" + - "test.topic" sync_producer: title: "Sync producer" description: "Wait synchronously until the record has been sent to Kafka." @@ -1139,92 +1186,99 @@ type: "object" description: "Protocol used to communicate with brokers." 
oneOf: - - title: "PLAINTEXT" - required: - - "security_protocol" - properties: - security_protocol: - type: "string" - enum: - - "PLAINTEXT" - default: "PLAINTEXT" - - title: "SASL PLAINTEXT" - required: - - "security_protocol" - - "sasl_mechanism" - - "sasl_jaas_config" - properties: - security_protocol: - type: "string" - enum: - - "SASL_PLAINTEXT" - default: "SASL_PLAINTEXT" - sasl_mechanism: - title: "SASL mechanism" - description: "SASL mechanism used for client connections. This may\ - \ be any mechanism for which a security provider is available." - type: "string" - default: "PLAIN" - enum: - - "PLAIN" - sasl_jaas_config: - title: "SASL JAAS config" - description: "JAAS login context parameters for SASL connections in\ - \ the format used by JAAS configuration files." - type: "string" - default: "" - airbyte_secret: true - - title: "SASL SSL" - required: - - "security_protocol" - - "sasl_mechanism" - - "sasl_jaas_config" - properties: - security_protocol: - type: "string" - enum: - - "SASL_SSL" - default: "SASL_SSL" - sasl_mechanism: - title: "SASL mechanism" - description: "SASL mechanism used for client connections. This may\ - \ be any mechanism for which a security provider is available." - type: "string" - default: "GSSAPI" - enum: - - "GSSAPI" - - "OAUTHBEARER" - - "SCRAM-SHA-256" - sasl_jaas_config: - title: "SASL JAAS config" - description: "JAAS login context parameters for SASL connections in\ - \ the format used by JAAS configuration files." 
- type: "string" - default: "" - airbyte_secret: true + - title: "PLAINTEXT" + required: + - "security_protocol" + properties: + security_protocol: + type: "string" + enum: + - "PLAINTEXT" + default: "PLAINTEXT" + - title: "SASL PLAINTEXT" + required: + - "security_protocol" + - "sasl_mechanism" + - "sasl_jaas_config" + properties: + security_protocol: + type: "string" + enum: + - "SASL_PLAINTEXT" + default: "SASL_PLAINTEXT" + sasl_mechanism: + title: "SASL mechanism" + description: + "SASL mechanism used for client connections. This may\ + \ be any mechanism for which a security provider is available." + type: "string" + default: "PLAIN" + enum: + - "PLAIN" + sasl_jaas_config: + title: "SASL JAAS config" + description: + "JAAS login context parameters for SASL connections in\ + \ the format used by JAAS configuration files." + type: "string" + default: "" + airbyte_secret: true + - title: "SASL SSL" + required: + - "security_protocol" + - "sasl_mechanism" + - "sasl_jaas_config" + properties: + security_protocol: + type: "string" + enum: + - "SASL_SSL" + default: "SASL_SSL" + sasl_mechanism: + title: "SASL mechanism" + description: + "SASL mechanism used for client connections. This may\ + \ be any mechanism for which a security provider is available." + type: "string" + default: "GSSAPI" + enum: + - "GSSAPI" + - "OAUTHBEARER" + - "SCRAM-SHA-256" + sasl_jaas_config: + title: "SASL JAAS config" + description: + "JAAS login context parameters for SASL connections in\ + \ the format used by JAAS configuration files." + type: "string" + default: "" + airbyte_secret: true client_id: title: "Client ID" - description: "An id string to pass to the server when making requests. The\ + description: + "An id string to pass to the server when making requests. The\ \ purpose of this is to be able to track the source of requests beyond\ \ just ip/port by allowing a logical application name to be included in\ \ server-side request logging." 
type: "string" examples: - - "airbyte-producer" + - "airbyte-producer" acks: title: "ACKs" - description: "The number of acknowledgments the producer requires the leader\ + description: + "The number of acknowledgments the producer requires the leader\ \ to have received before considering a request complete. This controls\ \ the durability of records that are sent." type: "string" default: "1" enum: - - "0" - - "1" - - "all" + - "0" + - "1" + - "all" enable_idempotence: title: "Enable idempotence" - description: "When set to 'true', the producer will ensure that exactly\ + description: + "When set to 'true', the producer will ensure that exactly\ \ one copy of each message is written in the stream. If 'false', producer\ \ retries due to broker failures, etc., may write duplicates of the retried\ \ message in the stream." @@ -1236,35 +1290,39 @@ type: "string" default: "none" enum: - - "none" - - "gzip" - - "snappy" - - "lz4" - - "zstd" + - "none" + - "gzip" + - "snappy" + - "lz4" + - "zstd" batch_size: title: "Batch size" - description: "The producer will attempt to batch records together into fewer\ + description: + "The producer will attempt to batch records together into fewer\ \ requests whenever multiple records are being sent to the same partition." type: "integer" examples: - - 16384 + - 16384 linger_ms: title: "Linger ms" - description: "The producer groups together any records that arrive in between\ + description: + "The producer groups together any records that arrive in between\ \ request transmissions into a single batched request." type: "string" examples: - - 0 + - 0 max_in_flight_requests_per_connection: title: "Max in flight requests per connection" - description: "The maximum number of unacknowledged requests the client will\ + description: + "The maximum number of unacknowledged requests the client will\ \ send on a single connection before blocking." 
type: "integer" examples: - - 5 + - 5 client_dns_lookup: title: "Client DNS lookup" - description: "Controls how the client uses DNS lookups. If set to use_all_dns_ips,\ + description: + "Controls how the client uses DNS lookups. If set to use_all_dns_ips,\ \ connect to each returned IP address in sequence until a successful connection\ \ is established. After a disconnection, the next IP is used. Once all\ \ IPs have been used once, the client resolves the IP(s) from the hostname\ @@ -1276,13 +1334,14 @@ type: "string" default: "use_all_dns_ips" enum: - - "default" - - "use_all_dns_ips" - - "resolve_canonical_bootstrap_servers_only" - - "use_all_dns_ips" + - "default" + - "use_all_dns_ips" + - "resolve_canonical_bootstrap_servers_only" + - "use_all_dns_ips" buffer_memory: title: "Buffer memory" - description: "The total bytes of memory the producer can use to buffer records\ + description: + "The total bytes of memory the producer can use to buffer records\ \ waiting to be sent to the server." type: "string" examples: 33554432 @@ -1291,72 +1350,80 @@ description: "The maximum size of a request in bytes." type: "integer" examples: - - 1048576 + - 1048576 retries: title: "Retries" - description: "Setting a value greater than zero will cause the client to\ + description: + "Setting a value greater than zero will cause the client to\ \ resend any record whose send fails with a potentially transient error." type: "integer" examples: - - 2147483647 + - 2147483647 socket_connection_setup_timeout_ms: title: "Socket connection setup timeout" - description: "The amount of time the client will wait for the socket connection\ + description: + "The amount of time the client will wait for the socket connection\ \ to be established." 
type: "string" examples: - - 10000 + - 10000 socket_connection_setup_timeout_max_ms: title: "Socket connection setup max timeout" - description: "The maximum amount of time the client will wait for the socket\ + description: + "The maximum amount of time the client will wait for the socket\ \ connection to be established. The connection setup timeout will increase\ \ exponentially for each consecutive connection failure up to this maximum." type: "string" examples: - - 30000 + - 30000 max_block_ms: title: "Max block ms" - description: "The configuration controls how long the KafkaProducer's send(),\ + description: + "The configuration controls how long the KafkaProducer's send(),\ \ partitionsFor(), initTransactions(), sendOffsetsToTransaction(), commitTransaction()\ \ and abortTransaction() methods will block." type: "string" examples: - - 60000 + - 60000 request_timeout_ms: title: "Request timeout" - description: "The configuration controls the maximum amount of time the\ + description: + "The configuration controls the maximum amount of time the\ \ client will wait for the response of a request. If the response is not\ \ received before the timeout elapses the client will resend the request\ \ if necessary or fail the request if retries are exhausted." type: "integer" examples: - - 30000 + - 30000 delivery_timeout_ms: title: "Delivery timeout" - description: "An upper bound on the time to report success or failure after\ + description: + "An upper bound on the time to report success or failure after\ \ a call to 'send()' returns." type: "integer" examples: - - 120000 + - 120000 send_buffer_bytes: title: "Send buffer bytes" - description: "The size of the TCP send buffer (SO_SNDBUF) to use when sending\ + description: + "The size of the TCP send buffer (SO_SNDBUF) to use when sending\ \ data. If the value is -1, the OS default will be used." 
type: "integer" examples: - - 131072 + - 131072 receive_buffer_bytes: title: "Receive buffer bytes" - description: "The size of the TCP receive buffer (SO_RCVBUF) to use when\ + description: + "The size of the TCP receive buffer (SO_RCVBUF) to use when\ \ reading data. If the value is -1, the OS default will be used." type: "integer" examples: - - 32768 + - 32768 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-kinesis:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/kinesis" @@ -1365,10 +1432,10 @@ title: "Kinesis Destination Spec" type: "object" required: - - "shardCount" - - "accessKey" - - "privateKey" - - "bufferSize" + - "shardCount" + - "accessKey" + - "privateKey" + - "bufferSize" additionalProperties: true properties: endpoint: @@ -1401,7 +1468,8 @@ order: 4 bufferSize: title: "bufferSize" - description: "Buffer size for storing kinesis records before being batch\ + description: + "Buffer size for storing kinesis records before being batch\ \ streamed." type: "integer" minimum: 1 @@ -1412,7 +1480,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-csv:0.2.8" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/local-csv" @@ -1421,24 +1489,25 @@ title: "CSV Destination Spec" type: "object" required: - - "destination_path" + - "destination_path" additionalProperties: false properties: destination_path: - description: "Path to the directory where csv files will be written. The\ + description: + "Path to the directory where csv files will be written. The\ \ destination uses the local mount \"/local\" and any data files will\ \ be placed inside that local mount. 
For more information check out our\ \ docs" type: "string" examples: - - "/local" + - "/local" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-local-json:0.2.8" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/local-json" @@ -1447,23 +1516,24 @@ title: "Local Json Destination Spec" type: "object" required: - - "destination_path" + - "destination_path" additionalProperties: false properties: destination_path: - description: "Path to the directory where json files will be written. The\ + description: + "Path to the directory where json files will be written. The\ \ files will be placed inside that local mount. For more information check\ \ out our docs" type: "string" examples: - - "/json_data" + - "/json_data" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-mqtt:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mqtt" @@ -1472,16 +1542,16 @@ title: "MQTT Destination Spec" type: "object" required: - - "broker_host" - - "broker_port" - - "use_tls" - - "topic_pattern" - - "publisher_sync" - - "connect_timeout" - - "automatic_reconnect" - - "clean_session" - - "message_retained" - - "message_qos" + - "broker_host" + - "broker_port" + - "use_tls" + - "topic_pattern" + - "publisher_sync" + - "connect_timeout" + - "automatic_reconnect" + - "clean_session" + - "message_retained" + - "message_qos" additionalProperties: true properties: broker_host: @@ -1507,27 +1577,29 @@ type: "string" topic_pattern: title: "Topic pattern" - description: "Topic pattern in which the records will be sent. You can use\ + description: + "Topic pattern in which the records will be sent. 
You can use\ \ patterns like '{namespace}' and/or '{stream}' to send the message to\ \ a specific topic based on these values. Notice that the topic name will\ \ be transformed to a standard naming convention." type: "string" examples: - - "sample.topic" - - "{namespace}/{stream}/sample" + - "sample.topic" + - "{namespace}/{stream}/sample" topic_test: title: "Test topic" description: "Topic to test if Airbyte can produce messages." type: "string" examples: - - "test/topic" + - "test/topic" client: title: "Client ID" - description: "A client identifier that is unique on the server being connected\ + description: + "A client identifier that is unique on the server being connected\ \ to." type: "string" examples: - - "airbyte-client1" + - "airbyte-client1" publisher_sync: title: "Sync publisher" description: "Wait synchronously until the record has been sent to the broker." @@ -1535,25 +1607,29 @@ default: false connect_timeout: title: "Connect timeout" - description: " Maximum time interval (in seconds) the client will wait for\ + description: + " Maximum time interval (in seconds) the client will wait for\ \ the network connection to the MQTT server to be established." type: "integer" default: 30 automatic_reconnect: title: "Automatic reconnect" - description: "Whether the client will automatically attempt to reconnect\ + description: + "Whether the client will automatically attempt to reconnect\ \ to the server if the connection is lost." type: "boolean" default: true clean_session: title: "Clean session" - description: "Whether the client and server should remember state across\ + description: + "Whether the client and server should remember state across\ \ restarts and reconnects." type: "boolean" default: true message_retained: title: "Message retained" - description: "Whether or not the publish message should be retained by the\ + description: + "Whether or not the publish message should be retained by the\ \ messaging engine." 
type: "boolean" default: false @@ -1562,14 +1638,14 @@ description: "Quality of service used for each message to be delivered." default: "AT_LEAST_ONCE" enum: - - "AT_MOST_ONCE" - - "AT_LEAST_ONCE" - - "EXACTLY_ONCE" + - "AT_MOST_ONCE" + - "AT_LEAST_ONCE" + - "EXACTLY_ONCE" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-mssql:0.1.12" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mssql" @@ -1578,11 +1654,11 @@ title: "MS SQL Server Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "database" - - "schema" + - "host" + - "port" + - "username" + - "database" + - "schema" additionalProperties: true properties: host: @@ -1598,7 +1674,7 @@ maximum: 65536 default: 1433 examples: - - "1433" + - "1433" order: 1 database: title: "DB Name" @@ -1607,12 +1683,13 @@ order: 2 schema: title: "Default Schema" - description: "The default schema tables are written to if the source does\ + description: + "The default schema tables are written to if the source does\ \ not specify a namespace. The usual value for this field is \"public\"\ ." type: "string" examples: - - "public" + - "public" default: "public" order: 3 username: @@ -1632,159 +1709,169 @@ description: "Encryption method to use when communicating with the database" order: 6 oneOf: - - title: "Unencrypted" - additionalProperties: false - description: "Data transfer will not be encrypted." 
- required: - - "ssl_method" - type: "object" - properties: - ssl_method: - type: "string" - enum: - - "unencrypted" - default: "unencrypted" - - title: "Encrypted (trust server certificate)" - additionalProperties: false - description: "Use the cert provided by the server without verification.\ - \ (For testing purposes only!)" - required: - - "ssl_method" - type: "object" - properties: - ssl_method: - type: "string" - enum: - - "encrypted_trust_server_certificate" - default: "encrypted_trust_server_certificate" - - title: "Encrypted (verify certificate)" - additionalProperties: false - description: "Verify and use the cert provided by the server." - required: - - "ssl_method" - - "trustStoreName" - - "trustStorePassword" - type: "object" - properties: - ssl_method: - type: "string" - enum: - - "encrypted_verify_certificate" - default: "encrypted_verify_certificate" - hostNameInCertificate: - title: "Host Name In Certificate" - type: "string" - description: "Specifies the host name of the server. The value of\ - \ this property must match the subject property of the certificate." - order: 7 + - title: "Unencrypted" + additionalProperties: false + description: "Data transfer will not be encrypted." + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Encrypted (trust server certificate)" + additionalProperties: false + description: + "Use the cert provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + enum: + - "encrypted_trust_server_certificate" + default: "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + additionalProperties: false + description: "Verify and use the cert provided by the server." 
+ required: + - "ssl_method" + - "trustStoreName" + - "trustStorePassword" + type: "object" + properties: + ssl_method: + type: "string" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: + "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." + order: 7 tunnel_method: type: "object" title: "SSH Tunnel Method" - description: "Whether to initiate an SSH tunnel before connecting to the\ + description: + "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." 
- type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" + - "overwrite" + - "append" + - "append_dedup" - dockerImage: "airbyte/destination-meilisearch:0.2.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/meilisearch" @@ -1793,7 +1880,7 @@ title: "MeiliSearch Destination Spec" type: "object" required: - - "host" + - "host" additionalProperties: true properties: host: @@ -1811,8 +1898,8 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-mongodb:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mongodb" @@ -1821,91 +1908,94 @@ title: "MongoDB Destination Spec" type: "object" required: - - "database" - - "auth_type" + - "database" + - "auth_type" additionalProperties: true properties: instance_type: - description: "MongoDb instance to connect to. For MongoDB Atlas and Replica\ + description: + "MongoDb instance to connect to. For MongoDB Atlas and Replica\ \ Set TLS connection is used by default." title: "MongoDb instance type" type: "object" order: 0 oneOf: - - title: "Standalone MongoDb Instance" - required: - - "instance" - - "host" - - "port" - properties: - instance: - type: "string" - enum: - - "standalone" - default: "standalone" - host: - title: "Host" - type: "string" - description: "Host of a Mongo database to be replicated." 
- order: 0 - port: - title: "Port" - type: "integer" - description: "Port of a Mongo database to be replicated." - minimum: 0 - maximum: 65536 - default: 27017 - examples: - - "27017" - order: 1 - tls: - title: "TLS connection" - type: "boolean" - description: "Indicates whether TLS encryption protocol will be used\ - \ to connect to MongoDB. It is recommended to use TLS connection\ - \ if possible. For more information see documentation." - default: false - order: 2 - - title: "Replica Set" - required: - - "instance" - - "server_addresses" - properties: - instance: - type: "string" - enum: - - "replica" - default: "replica" - server_addresses: - title: "Server addresses" - type: "string" - description: "The members of a replica set. Please specify `host`:`port`\ - \ of each member seperated by comma." - examples: - - "host1:27017,host2:27017,host3:27017" - order: 0 - replica_set: - title: "Replica Set" - type: "string" - description: "A replica set name." - order: 1 - - title: "MongoDB Atlas" - additionalProperties: false - required: - - "instance" - - "cluster_url" - properties: - instance: - type: "string" - enum: - - "atlas" - default: "atlas" - cluster_url: - title: "Cluster URL" - type: "string" - description: "URL of a cluster to connect to." - order: 0 + - title: "Standalone MongoDb Instance" + required: + - "instance" + - "host" + - "port" + properties: + instance: + type: "string" + enum: + - "standalone" + default: "standalone" + host: + title: "Host" + type: "string" + description: "Host of a Mongo database to be replicated." + order: 0 + port: + title: "Port" + type: "integer" + description: "Port of a Mongo database to be replicated." + minimum: 0 + maximum: 65536 + default: 27017 + examples: + - "27017" + order: 1 + tls: + title: "TLS connection" + type: "boolean" + description: + "Indicates whether TLS encryption protocol will be used\ + \ to connect to MongoDB. It is recommended to use TLS connection\ + \ if possible. 
For more information see documentation." + default: false + order: 2 + - title: "Replica Set" + required: + - "instance" + - "server_addresses" + properties: + instance: + type: "string" + enum: + - "replica" + default: "replica" + server_addresses: + title: "Server addresses" + type: "string" + description: + "The members of a replica set. Please specify `host`:`port`\ + \ of each member seperated by comma." + examples: + - "host1:27017,host2:27017,host3:27017" + order: 0 + replica_set: + title: "Replica Set" + type: "string" + description: "A replica set name." + order: 1 + - title: "MongoDB Atlas" + additionalProperties: false + required: + - "instance" + - "cluster_url" + properties: + instance: + type: "string" + enum: + - "atlas" + default: "atlas" + cluster_url: + title: "Cluster URL" + type: "string" + description: "URL of a cluster to connect to." + order: 0 database: title: "DB Name" description: "Name of the database." @@ -1916,45 +2006,45 @@ type: "object" description: "Authorization type." oneOf: - - title: "None" - additionalProperties: false - description: "None." - required: - - "authorization" - type: "object" - properties: - authorization: - type: "string" - const: "none" - - title: "Login/Password" - additionalProperties: false - description: "Login/Password." - required: - - "authorization" - - "username" - - "password" - type: "object" - properties: - authorization: - type: "string" - const: "login/password" - username: - title: "User" - description: "Username to use to access the database." - type: "string" - order: 1 - password: - title: "Password" - description: "Password associated with the username." - type: "string" - airbyte_secret: true - order: 2 + - title: "None" + additionalProperties: false + description: "None." + required: + - "authorization" + type: "object" + properties: + authorization: + type: "string" + const: "none" + - title: "Login/Password" + additionalProperties: false + description: "Login/Password." 
+ required: + - "authorization" + - "username" + - "password" + type: "object" + properties: + authorization: + type: "string" + const: "login/password" + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 2 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-mysql:0.1.15" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mysql" @@ -1963,10 +2053,10 @@ title: "MySQL Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "database" + - "host" + - "port" + - "username" + - "database" additionalProperties: true properties: host: @@ -1982,7 +2072,7 @@ maximum: 65536 default: 3306 examples: - - "3306" + - "3306" order: 1 database: title: "DB Name" @@ -2009,110 +2099,118 @@ tunnel_method: type: "object" title: "SSH Tunnel Method" - description: "Whether to initiate an SSH tunnel before connecting to the\ + description: + "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." 
oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." - type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." 
- type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-oracle:0.1.12" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/oracle" @@ -2121,10 +2219,10 @@ title: "Oracle Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "sid" + - "host" + - "port" + - "username" + - "sid" additionalProperties: true properties: host: @@ -2140,7 +2238,7 @@ maximum: 65536 default: 1521 examples: - - "1521" + - "1521" order: 1 sid: title: "SID" @@ -2149,7 +2247,8 @@ order: 2 
username: title: "User" - description: "Username to use to access the database. This user must have\ + description: + "Username to use to access the database. This user must have\ \ CREATE USER privileges in the database." type: "string" order: 3 @@ -2161,14 +2260,15 @@ order: 4 schema: title: "Default Schema" - description: "The default schema tables are written to if the source does\ + description: + "The default schema tables are written to if the source does\ \ not specify a namespace. The usual value for this field is \"airbyte\"\ . In Oracle, schemas and users are the same thing, so the \"user\" parameter\ \ is used as the login credentials and this is used for the default Airbyte\ \ message schema." type: "string" examples: - - "airbyte" + - "airbyte" default: "airbyte" order: 5 encryption: @@ -2177,169 +2277,180 @@ description: "Encryption method to use when communicating with the database" order: 6 oneOf: - - title: "Unencrypted" - additionalProperties: false - description: "Data transfer will not be encrypted." - required: - - "encryption_method" - properties: - encryption_method: - type: "string" - const: "unencrypted" - enum: - - "unencrypted" - default: "unencrypted" - - title: "Native Network Ecryption (NNE)" - additionalProperties: false - description: "Native network encryption gives you the ability to encrypt\ - \ database connections, without the configuration overhead of TCP/IP\ - \ and SSL/TLS and without the need to open and listen on different ports." 
- required: - - "encryption_method" - properties: - encryption_method: - type: "string" - const: "client_nne" - enum: - - "client_nne" - default: "client_nne" - encryption_algorithm: - type: "string" - description: "This parameter defines the encryption algorithm to be\ - \ used" - title: "Encryption Algorithm" - default: "AES256" - enum: - - "AES256" - - "RC4_56" - - "3DES168" - - title: "TLS Encrypted (verify certificate)" - additionalProperties: false - description: "Verify and use the cert provided by the server." - required: - - "encryption_method" - - "ssl_certificate" - properties: - encryption_method: - type: "string" - const: "encrypted_verify_certificate" - enum: - - "encrypted_verify_certificate" - default: "encrypted_verify_certificate" - ssl_certificate: - title: "SSL PEM file" - description: "Privacy Enhanced Mail (PEM) files are concatenated certificate\ - \ containers frequently used in certificate installations" - type: "string" - airbyte_secret: true - multiline: true + - title: "Unencrypted" + additionalProperties: false + description: "Data transfer will not be encrypted." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Native Network Ecryption (NNE)" + additionalProperties: false + description: + "Native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." 
+ required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + default: "client_nne" + encryption_algorithm: + type: "string" + description: + "This parameter defines the encryption algorithm to be\ + \ used" + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + additionalProperties: false + description: "Verify and use the cert provided by the server." + required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM file" + description: + "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations" + type: "string" + airbyte_secret: true + multiline: true tunnel_method: type: "object" title: "SSH Tunnel Method" - description: "Whether to initiate an SSH tunnel before connecting to the\ + description: + "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." 
- type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." - type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." 
- type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: false supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-postgres:0.3.13" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/postgres" @@ -2348,11 +2459,11 @@ title: "Postgres Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "database" - - "schema" + - "host" + - "port" + - "username" + - "database" + - "schema" additionalProperties: true properties: host: @@ -2368,7 +2479,7 @@ maximum: 65536 default: 5432 examples: - - "5432" + - "5432" order: 1 
database: title: "DB Name" @@ -2377,12 +2488,13 @@ order: 2 schema: title: "Default Schema" - description: "The default schema tables are written to if the source does\ + description: + "The default schema tables are written to if the source does\ \ not specify a namespace. The usual value for this field is \"public\"\ ." type: "string" examples: - - "public" + - "public" default: "public" order: 3 username: @@ -2405,111 +2517,119 @@ tunnel_method: type: "object" title: "SSH Tunnel Method" - description: "Whether to initiate an SSH tunnel before connecting to the\ + description: + "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." 
- type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" + - "overwrite" + - "append" + - "append_dedup" - dockerImage: "airbyte/destination-pulsar:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/pulsar" @@ -2518,29 +2638,30 @@ title: "Pulsar Destination Spec" type: "object" required: - - "brokers" - - "use_tls" - - "topic_type" - - "topic_tenant" - - "topic_namespace" - - "topic_pattern" - - "compression_type" - - "send_timeout_ms" - - "max_pending_messages" - - "max_pending_messages_across_partitions" - - "batching_enabled" - - "batching_max_messages" - - "batching_max_publish_delay" - - "block_if_queue_full" + - "brokers" + - "use_tls" + - "topic_type" + - "topic_tenant" + - "topic_namespace" + - "topic_pattern" + - "compression_type" + - "send_timeout_ms" + - "max_pending_messages" + - "max_pending_messages_across_partitions" + - "batching_enabled" + - "batching_max_messages" + - "batching_max_publish_delay" + - "block_if_queue_full" additionalProperties: true properties: brokers: title: "Pulsar brokers" - description: "A list of host/port pairs to use for establishing the initial\ + description: + "A list of host/port pairs to use for establishing the initial\ \ connection to the Pulsar cluster." type: "string" examples: - - "broker1:6650,broker2:6650" + - "broker1:6650,broker2:6650" use_tls: title: "Use TLS" description: "Whether to use TLS encryption on the connection." 
@@ -2548,7 +2669,8 @@ default: false topic_type: title: "Topic type" - description: "It identifies type of topic. Pulsar supports two kind of topics:\ + description: + "It identifies the type of topic. Pulsar supports two kinds of topics:\ \ persistent and non-persistent. In persistent topic, all messages are\ \ durably persisted on disk (that means on multiple disks unless the broker\ \ is standalone), whereas non-persistent topic does not persist message\ @@ -2556,48 +2678,52 @@ type: "string" default: "persistent" enum: - - "persistent" - - "non-persistent" + - "persistent" + - "non-persistent" topic_tenant: title: "Topic tenant" - description: "The topic tenant within the instance. Tenants are essential\ + description: + "The topic tenant within the instance. Tenants are essential\ \ to multi-tenancy in Pulsar, and spread across clusters." type: "string" default: "public" examples: - - "public" + - "public" topic_namespace: title: "Topic namespace" - description: "The administrative unit of the topic, which acts as a grouping\ + description: + "The administrative unit of the topic, which acts as a grouping\ \ mechanism for related topics. Most topic configuration is performed\ \ at the namespace level. Each tenant has one or multiple namespaces." type: "string" default: "default" examples: - - "default" + - "default" topic_pattern: title: "Topic pattern" - description: "Topic pattern in which the records will be sent. You can use\ + description: + "Topic pattern in which the records will be sent. You can use\ \ patterns like '{namespace}' and/or '{stream}' to send the message to\ \ a specific topic based on these values. Notice that the topic name will\ \ be transformed to a standard naming convention." type: "string" examples: - - "sample.topic" - - "{namespace}.{stream}.sample" + - "sample.topic" + - "{namespace}.{stream}.sample" topic_test: title: "Test topic" description: "Topic to test if Airbyte can produce messages."
type: "string" examples: - - "test.topic" + - "test.topic" producer_name: title: "Producer name" - description: "Name for the producer. If not filled, the system will generate\ + description: + "Name for the producer. If not filled, the system will generate\ \ a globally unique name which can be accessed with." type: "string" examples: - - "airbyte-producer" + - "airbyte-producer" producer_sync: title: "Sync producer" description: "Wait synchronously until the record has been sent to Pulsar." @@ -2609,14 +2735,15 @@ type: "string" default: "NONE" enum: - - "NONE" - - "LZ4" - - "ZLIB" - - "ZSTD" - - "SNAPPY" + - "NONE" + - "LZ4" + - "ZLIB" + - "ZSTD" + - "SNAPPY" send_timeout_ms: title: "Message send timeout" - description: "If a message is not acknowledged by a server before the send-timeout\ + description: + "If a message is not acknowledged by a server before the send-timeout\ \ expires, an error occurs (in ms)." type: "integer" default: 30000 @@ -2632,7 +2759,8 @@ default: 50000 batching_enabled: title: "Enable batching" - description: "Control whether automatic batching of messages is enabled\ + description: + "Control whether automatic batching of messages is enabled\ \ for the producer." type: "boolean" default: true @@ -2643,13 +2771,15 @@ default: 1000 batching_max_publish_delay: title: "Batching max publish delay" - description: " Time period in milliseconds within which the messages sent\ + description: + " Time period in milliseconds within which the messages sent\ \ will be batched." type: "integer" default: 1 block_if_queue_full: title: "Block if queue is full" - description: "If the send operation should block when the outgoing message\ + description: + "If the send operation should block when the outgoing message\ \ queue is full." 
type: "boolean" default: false @@ -2657,7 +2787,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-redis:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/redis" @@ -2666,11 +2796,11 @@ title: "Redis Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "password" - - "cache_type" + - "host" + - "port" + - "username" + - "password" + - "cache_type" additionalProperties: false properties: host: @@ -2678,7 +2808,7 @@ description: "Redis host to connect to." type: "string" examples: - - "localhost,127.0.0.1" + - "localhost,127.0.0.1" order: 1 port: title: "Port" @@ -2705,15 +2835,15 @@ default: "hash" description: "Redis cache type to store data in." enum: - - "hash" + - "hash" order: 5 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" -- dockerImage: "airbyte/destination-redshift:0.3.20" + - "overwrite" + - "append" +- dockerImage: "airbyte/destination-redshift:0.3.21" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/redshift" connectionSpecification: @@ -2721,16 +2851,17 @@ title: "Redshift Destination Spec" type: "object" required: - - "host" - - "port" - - "database" - - "username" - - "password" - - "schema" + - "host" + - "port" + - "database" + - "username" + - "password" + - "schema" additionalProperties: true properties: host: - description: "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + description: + "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ \ region and end with .redshift.amazonaws.com)" type: "string" title: "Host" @@ -2741,7 +2872,7 @@ maximum: 65536 default: 5439 examples: - - "5439" + - "5439" title: "Port" username: description: "Username to use to access the database." 
@@ -2757,57 +2888,61 @@ type: "string" title: "Database" schema: - description: "The default schema tables are written to if the source does\ + description: + "The default schema tables are written to if the source does\ \ not specify a namespace. Unless specifically configured, the usual value\ \ for this field is \"public\"." type: "string" examples: - - "public" + - "public" default: "public" title: "Default Schema" s3_bucket_name: title: "S3 Bucket Name" type: "string" - description: "The name of the staging S3 bucket to use if utilising a COPY\ + description: + "The name of the staging S3 bucket to use if utilising a COPY\ \ strategy. COPY is recommended for production workloads for better speed\ \ and scalability. See AWS docs for more details." examples: - - "airbyte.staging" + - "airbyte.staging" s3_bucket_region: title: "S3 Bucket Region" type: "string" default: "" - description: "The region of the S3 staging bucket to use if utilising a\ + description: + "The region of the S3 staging bucket to use if utilising a\ \ copy strategy." 
enum: - - "" - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-north-1" - - "eu-south-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - "sa-east-1" - - "me-south-1" + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" access_key_id: type: "string" - description: "The Access Key Id granting allow one to access the above S3\ + description: + "The Access Key Id granting allow one to access the above S3\ \ staging bucket. Airbyte requires Read and Write permissions to the given\ \ bucket." title: "S3 Key Id" @@ -2822,8 +2957,9 @@ minimum: 10 maximum: 100 examples: - - "10" - description: "Optional. Increase this if syncing tables larger than 100GB.\ + - "10" + description: + "Optional. Increase this if syncing tables larger than 100GB.\ \ Only relevant for COPY. Files are streamed to S3 in parts. This determines\ \ the size of each part, in MBs. As S3 has a limit of 10,000 parts per\ \ file, part size affects the table size. 
This is 10MB by default, resulting\ @@ -2835,10 +2971,10 @@ supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" -- dockerImage: "airbyte/destination-s3:0.1.15" + - "overwrite" + - "append" + - "append_dedup" +- dockerImage: "airbyte/destination-s3:0.1.16" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3" connectionSpecification: @@ -2846,316 +2982,330 @@ title: "S3 Destination Spec" type: "object" required: - - "s3_bucket_name" - - "s3_bucket_path" - - "s3_bucket_region" - - "access_key_id" - - "secret_access_key" - - "format" + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "access_key_id" + - "secret_access_key" + - "format" additionalProperties: false properties: s3_endpoint: title: "Endpoint" type: "string" default: "" - description: "This is your S3 endpoint url.(if you are working with AWS\ + description: + "This is your S3 endpoint url.(if you are working with AWS\ \ S3, just leave empty)." examples: - - "http://localhost:9000" + - "http://localhost:9000" s3_bucket_name: title: "S3 Bucket Name" type: "string" description: "The name of the S3 bucket." examples: - - "airbyte_sync" + - "airbyte_sync" s3_bucket_path: description: "Directory under the S3 bucket where data will be written." type: "string" examples: - - "data_sync/test" + - "data_sync/test" s3_bucket_region: title: "S3 Bucket Region" type: "string" default: "" description: "The region of the S3 bucket." 
enum: - - "" - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-north-1" - - "eu-south-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - "sa-east-1" - - "me-south-1" - - "us-gov-east-1" - - "us-gov-west-1" + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" + - "us-gov-east-1" + - "us-gov-west-1" access_key_id: type: "string" - description: "The access key id to access the S3 bucket. Airbyte requires\ + description: + "The access key id to access the S3 bucket. Airbyte requires\ \ Read and Write permissions to the given bucket." title: "S3 Key Id" airbyte_secret: true examples: - - "A012345678910EXAMPLE" + - "A012345678910EXAMPLE" secret_access_key: type: "string" description: "The corresponding secret to the access key id." title: "S3 Access Key" airbyte_secret: true examples: - - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" format: title: "Output Format" type: "object" description: "Output data format" oneOf: - - title: "Avro: Apache Avro" - required: - - "format_type" - - "compression_codec" - properties: - format_type: - type: "string" - enum: - - "Avro" - default: "Avro" - compression_codec: - title: "Compression Codec" - description: "The compression algorithm used to compress data. Default\ - \ to no compression." 
- type: "object" - oneOf: - - title: "no compression" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "no compression" - default: "no compression" - - title: "Deflate" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "Deflate" - default: "Deflate" - compression_level: - title: "Deflate level" - description: "0: no compression & fastest, 9: best compression\ - \ & slowest." - type: "integer" - default: 0 - minimum: 0 - maximum: 9 - - title: "bzip2" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "bzip2" - default: "bzip2" - - title: "xz" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "xz" - default: "xz" - compression_level: - title: "Compression level" - description: "See here for details." - type: "integer" - default: 6 - minimum: 0 - maximum: 9 - - title: "zstandard" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "zstandard" - default: "zstandard" - compression_level: - title: "Compression level" - description: "Negative levels are 'fast' modes akin to lz4 or\ - \ snappy, levels above 9 are generally for archival purposes,\ - \ and levels above 18 use a lot of memory." - type: "integer" - default: 3 - minimum: -5 - maximum: 22 - include_checksum: - title: "Include checksum" - description: "If true, include a checksum with each data block." - type: "boolean" - default: false - - title: "snappy" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "snappy" - default: "snappy" - part_size_mb: - title: "Block Size (MB) for Amazon S3 multipart upload" - description: "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." 
- type: "integer" - default: 5 - examples: - - 5 - - title: "CSV: Comma-Separated Values" - required: - - "format_type" - - "flattening" - properties: - format_type: - type: "string" - enum: - - "CSV" - default: "CSV" - flattening: - type: "string" - title: "Normalization (Flattening)" - description: "Whether the input json data should be normalized (flattened)\ - \ in the output CSV. Please refer to docs for details." - default: "No flattening" - enum: - - "No flattening" - - "Root level flattening" - part_size_mb: - title: "Block Size (MB) for Amazon S3 multipart upload" - description: "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - - title: "JSON Lines: newline-delimited JSON" - required: - - "format_type" - properties: - format_type: - type: "string" - enum: - - "JSONL" - default: "JSONL" - part_size_mb: - title: "Block Size (MB) for Amazon S3 multipart upload" - description: "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - - title: "Parquet: Columnar Storage" - required: - - "format_type" - properties: - format_type: - type: "string" - enum: - - "Parquet" - default: "Parquet" - compression_codec: - title: "Compression Codec" - description: "The compression algorithm used to compress data pages." 
- type: "string" - enum: - - "UNCOMPRESSED" - - "SNAPPY" - - "GZIP" - - "LZO" - - "BROTLI" - - "LZ4" - - "ZSTD" - default: "UNCOMPRESSED" - block_size_mb: - title: "Block Size (Row Group Size) (MB)" - description: "This is the size of a row group being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will improve\ - \ the IO when reading, but consume more memory when writing. Default:\ - \ 128 MB." - type: "integer" - default: 128 - examples: - - 128 - max_padding_size_mb: - title: "Max Padding Size (MB)" - description: "Maximum size allowed as padding to align row groups.\ - \ This is also the minimum size of a row group. Default: 8 MB." - type: "integer" - default: 8 - examples: - - 8 - page_size_kb: - title: "Page Size (KB)" - description: "The page size is for compression. A block is composed\ - \ of pages. A page is the smallest unit that must be read fully\ - \ to access a single record. If this value is too small, the compression\ - \ will deteriorate. Default: 1024 KB." - type: "integer" - default: 1024 - examples: - - 1024 - dictionary_page_size_kb: - title: "Dictionary Page Size (KB)" - description: "There is one dictionary page per column per row group\ - \ when dictionary encoding is used. The dictionary page size works\ - \ like the page size but for dictionary. Default: 1024 KB." - type: "integer" - default: 1024 - examples: - - 1024 - dictionary_encoding: - title: "Dictionary Encoding" - description: "Default: true." - type: "boolean" - default: true + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + type: "string" + enum: + - "Avro" + default: "Avro" + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." 
+ type: "object" + oneOf: + - title: "no compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate level" + description: + "0: no compression & fastest, 9: best compression\ \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression level" + description: + "See here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ \ snappy, levels above 9 are generally for archival purposes,\ \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + part_size_mb: + title: "Block Size (MB) for Amazon S3 multipart upload" + description: + "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ you to upload bigger files and improve the speed, but consume\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." 
+ type: "integer" + default: 5 + examples: + - 5 + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + part_size_mb: + title: "Block Size (MB) for Amazon S3 multipart upload" + description: + "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ you to upload bigger files and improve the speed, but consume\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "JSONL" + default: "JSONL" + part_size_mb: + title: "Block Size (MB) for Amazon S3 multipart upload" + description: + "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ you to upload bigger files and improve the speed, but consume\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." 
+ type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + default: "UNCOMPRESSED" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." + type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." 
+ type: "boolean" + default: true supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-sftp-json:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/sftp-json" @@ -3164,10 +3314,10 @@ title: "Destination SFTP JSON" type: "object" required: - - "host" - - "username" - - "password" - - "destination_path" + - "host" + - "username" + - "password" + - "destination_path" additionalProperties: false properties: host: @@ -3183,7 +3333,7 @@ maximum: 65536 default: 22 examples: - - 22 + - 22 order: 1 username: title: "User" @@ -3201,15 +3351,15 @@ type: "string" description: "Path to the directory where json files will be written." examples: - - "/json_data" + - "/json_data" order: 4 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" -- dockerImage: "airbyte/destination-snowflake:0.3.19" + - "overwrite" + - "append" +- dockerImage: "airbyte/destination-snowflake:0.3.20" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake" connectionSpecification: @@ -3217,56 +3367,58 @@ title: "Snowflake Destination Spec" type: "object" required: - - "host" - - "role" - - "warehouse" - - "database" - - "schema" - - "username" - - "password" + - "host" + - "role" + - "warehouse" + - "database" + - "schema" + - "username" + - "password" additionalProperties: true properties: host: - description: "Host domain of the snowflake instance (must include the account,\ + description: + "Host domain of the snowflake instance (must include the account,\ \ region, cloud environment, and end with snowflakecomputing.com)." 
examples: - - "accountname.us-east-2.aws.snowflakecomputing.com" + - "accountname.us-east-2.aws.snowflakecomputing.com" type: "string" title: "Host" order: 0 role: description: "The role you created for Airbyte to access Snowflake." examples: - - "AIRBYTE_ROLE" + - "AIRBYTE_ROLE" type: "string" title: "Role" order: 1 warehouse: description: "The warehouse you created for Airbyte to sync data into." examples: - - "AIRBYTE_WAREHOUSE" + - "AIRBYTE_WAREHOUSE" type: "string" title: "Warehouse" order: 2 database: description: "The database you created for Airbyte to sync data into." examples: - - "AIRBYTE_DATABASE" + - "AIRBYTE_DATABASE" type: "string" title: "Database" order: 3 schema: - description: "The default Snowflake schema tables are written to if the\ + description: + "The default Snowflake schema tables are written to if the\ \ source does not specify a namespace." examples: - - "AIRBYTE_SCHEMA" + - "AIRBYTE_SCHEMA" type: "string" title: "Default Schema" order: 4 username: description: "The username you created to allow Airbyte to access the database." examples: - - "AIRBYTE_USER" + - "AIRBYTE_USER" type: "string" title: "Username" order: 5 @@ -3282,156 +3434,165 @@ description: "Loading method used to send data to Snowflake." order: 7 oneOf: - - title: "[Recommended] Internal Staging" - additionalProperties: false - description: "Writes large batches of records to a file, uploads the file\ - \ to Snowflake, then uses
COPY INTO table
to upload the file.\ - \ Recommended for large production workloads for better speed and scalability." - required: - - "method" - properties: - method: - type: "string" - enum: - - "Internal Staging" - default: "Internal Staging" - - title: "Standard Inserts" - additionalProperties: false - description: "Uses
INSERT
statements to send batches of records\ - \ to Snowflake. Easiest (no setup) but not recommended for large production\ - \ workloads due to slow speed." - required: - - "method" - properties: - method: - type: "string" - enum: - - "Standard" - default: "Standard" - - title: "AWS S3 Staging" - additionalProperties: false - description: "Writes large batches of records to a file, uploads the file\ - \ to S3, then uses
COPY INTO table
to upload the file. Recommended\ - \ for large production workloads for better speed and scalability." - required: - - "method" - - "s3_bucket_name" - - "access_key_id" - - "secret_access_key" - properties: - method: - type: "string" - enum: - - "S3 Staging" - default: "S3 Staging" - order: 0 - s3_bucket_name: - title: "S3 Bucket Name" - type: "string" - description: "The name of the staging S3 bucket. Airbyte will write\ - \ files to this bucket and read them via
COPY
statements\ - \ on Snowflake." - examples: - - "airbyte.staging" - order: 1 - s3_bucket_region: - title: "S3 Bucket Region" - type: "string" - default: "" - description: "The region of the S3 staging bucket to use if utilising\ - \ a copy strategy." - enum: - - "" - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - "eu-south-1" - - "eu-north-1" - - "sa-east-1" - - "me-south-1" - order: 2 - access_key_id: - type: "string" - description: "The Access Key Id granting allow one to access the above\ - \ S3 staging bucket. Airbyte requires Read and Write permissions\ - \ to the given bucket." - title: "S3 Key Id" - airbyte_secret: true - order: 3 - secret_access_key: - type: "string" - description: "The corresponding secret to the above access key id." - title: "S3 Access Key" - airbyte_secret: true - order: 4 - - title: "GCS Staging" - additionalProperties: false - description: "Writes large batches of records to a file, uploads the file\ - \ to GCS, then uses
COPY INTO table
to upload the file. Recommended\ - \ for large production workloads for better speed and scalability." - required: - - "method" - - "project_id" - - "bucket_name" - - "credentials_json" - properties: - method: - type: "string" - enum: - - "GCS Staging" - default: "GCS Staging" - order: 0 - project_id: - title: "GCP Project ID" - type: "string" - description: "The name of the GCP project ID for your credentials." - examples: - - "my-project" - order: 1 - bucket_name: - title: "GCS Bucket Name" - type: "string" - description: "The name of the staging GCS bucket. Airbyte will write\ - \ files to this bucket and read them via
COPY
statements\ - \ on Snowflake." - examples: - - "airbyte-staging" - order: 2 - credentials_json: - title: "Google Application Credentials" - type: "string" - description: "The contents of the JSON key file that has read/write\ - \ permissions to the staging GCS bucket. You will separately need\ - \ to grant bucket access to your Snowflake GCP service account.\ - \ See the GCP docs for more information on how to generate a JSON key\ - \ for your service account." - airbyte_secret: true - multiline: true - order: 3 + - title: "[Recommended] Internal Staging" + additionalProperties: false + description: + "Writes large batches of records to a file, uploads the file\ + \ to Snowflake, then uses
COPY INTO table
to upload the file.\ + \ Recommended for large production workloads for better speed and scalability." + required: + - "method" + properties: + method: + type: "string" + enum: + - "Internal Staging" + default: "Internal Staging" + - title: "Standard Inserts" + additionalProperties: false + description: + "Uses
INSERT
statements to send batches of records\ + \ to Snowflake. Easiest (no setup) but not recommended for large production\ + \ workloads due to slow speed." + required: + - "method" + properties: + method: + type: "string" + enum: + - "Standard" + default: "Standard" + - title: "AWS S3 Staging" + additionalProperties: false + description: + "Writes large batches of records to a file, uploads the file\ + \ to S3, then uses
COPY INTO table
to upload the file. Recommended\ + \ for large production workloads for better speed and scalability." + required: + - "method" + - "s3_bucket_name" + - "access_key_id" + - "secret_access_key" + properties: + method: + type: "string" + enum: + - "S3 Staging" + default: "S3 Staging" + order: 0 + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the staging S3 bucket. Airbyte will write\ + \ files to this bucket and read them via
COPY
statements\ + \ on Snowflake." + examples: + - "airbyte.staging" + order: 1 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 staging bucket to use if utilising\ + \ a copy strategy." + enum: + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "eu-south-1" + - "eu-north-1" + - "sa-east-1" + - "me-south-1" + order: 2 + access_key_id: + type: "string" + description: + "The Access Key Id granting allow one to access the above\ + \ S3 staging bucket. Airbyte requires Read and Write permissions\ + \ to the given bucket." + title: "S3 Key Id" + airbyte_secret: true + order: 3 + secret_access_key: + type: "string" + description: "The corresponding secret to the above access key id." + title: "S3 Access Key" + airbyte_secret: true + order: 4 + - title: "GCS Staging" + additionalProperties: false + description: + "Writes large batches of records to a file, uploads the file\ + \ to GCS, then uses
COPY INTO table
to upload the file. Recommended\ + \ for large production workloads for better speed and scalability." + required: + - "method" + - "project_id" + - "bucket_name" + - "credentials_json" + properties: + method: + type: "string" + enum: + - "GCS Staging" + default: "GCS Staging" + order: 0 + project_id: + title: "GCP Project ID" + type: "string" + description: "The name of the GCP project ID for your credentials." + examples: + - "my-project" + order: 1 + bucket_name: + title: "GCS Bucket Name" + type: "string" + description: + "The name of the staging GCS bucket. Airbyte will write\ + \ files to this bucket and read them via
COPY
statements\ + \ on Snowflake." + examples: + - "airbyte-staging" + order: 2 + credentials_json: + title: "Google Application Credentials" + type: "string" + description: + "The contents of the JSON key file that has read/write\ + \ permissions to the staging GCS bucket. You will separately need\ + \ to grant bucket access to your Snowflake GCP service account.\ + \ See the GCP docs for more information on how to generate a JSON key\ + \ for your service account." + airbyte_secret: true + multiline: true + order: 3 supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" + - "overwrite" + - "append" + - "append_dedup" - dockerImage: "airbyte/destination-mariadb-columnstore:0.1.1" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mariadb-columnstore" @@ -3440,10 +3601,10 @@ title: "MariaDB Columnstore Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "database" + - "host" + - "port" + - "username" + - "database" additionalProperties: true properties: host: @@ -3459,7 +3620,7 @@ maximum: 65536 default: 3306 examples: - - "3306" + - "3306" order: 1 database: title: "Database" @@ -3480,107 +3641,115 @@ tunnel_method: type: "object" title: "SSH Tunnel Method" - description: "Whether to initiate an SSH tunnel before connecting to the\ + description: + "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." 
oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." - type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." 
- type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" From 58b025e5e20302e7d36cb168b5660c86eddc3df5 Mon Sep 17 00:00:00 2001 From: Edward Gao Date: Fri, 10 Dec 2021 19:10:25 -0800 Subject: [PATCH 5/5] regenerate seed, skip autoformatter? 
--- .../resources/seed/destination_specs.yaml | 4343 ++++++++--------- 1 file changed, 2087 insertions(+), 2256 deletions(-) diff --git a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml index d73cc400d4693..86739d566e2fe 100644 --- a/airbyte-config/init/src/main/resources/seed/destination_specs.yaml +++ b/airbyte-config/init/src/main/resources/seed/destination_specs.yaml @@ -9,78 +9,75 @@ title: "AzureBlobStorage Destination Spec" type: "object" required: - - "azure_blob_storage_account_name" - - "azure_blob_storage_account_key" - - "format" + - "azure_blob_storage_account_name" + - "azure_blob_storage_account_key" + - "format" additionalProperties: false properties: azure_blob_storage_endpoint_domain_name: title: "Endpoint Domain Name" type: "string" default: "blob.core.windows.net" - description: - "This is Azure Blob Storage endpoint domain name. Leave default\ + description: "This is Azure Blob Storage endpoint domain name. Leave default\ \ value (or leave it empty if run container from command line) to use\ \ Microsoft native from example." examples: - - "blob.core.windows.net" + - "blob.core.windows.net" azure_blob_storage_container_name: title: "Azure blob storage container (Bucket) Name" type: "string" - description: - "The name of the Azure blob storage container. If not exists\ + description: "The name of the Azure blob storage container. If not exists\ \ - will be created automatically. May be empty, then will be created\ \ automatically airbytecontainer+timestamp" examples: - - "airbytetescontainername" + - "airbytetescontainername" azure_blob_storage_account_name: title: "Azure Blob Storage account name" type: "string" description: "The account's name of the Azure Blob Storage." examples: - - "airbyte5storage" + - "airbyte5storage" azure_blob_storage_account_key: description: "The Azure blob storage account key." 
airbyte_secret: true type: "string" examples: - - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" format: title: "Output Format" type: "object" description: "Output data format" oneOf: - - title: "CSV: Comma-Separated Values" - required: - - "format_type" - - "flattening" - properties: - format_type: - type: "string" - const: "CSV" - flattening: - type: "string" - title: "Normalization (Flattening)" - description: - "Whether the input json data should be normalized (flattened)\ - \ in the output CSV. Please refer to docs for details." - default: "No flattening" - enum: - - "No flattening" - - "Root level flattening" - - title: "JSON Lines: newline-delimited JSON" - required: - - "format_type" - properties: - format_type: - type: "string" - const: "JSONL" + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + const: "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." 
+ default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + const: "JSONL" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-amazon-sqs:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/amazon-sqs" @@ -89,8 +86,8 @@ title: "Destination Amazon Sqs" type: "object" required: - - "queue_url" - - "region" + - "queue_url" + - "region" additionalProperties: false properties: queue_url: @@ -98,56 +95,54 @@ description: "URL of the SQS Queue" type: "string" examples: - - "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" + - "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" order: 0 region: title: "AWS Region" description: "AWS Region of the SQS Queue" type: "string" enum: - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-north-1" - - "eu-south-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - "sa-east-1" - - "me-south-1" - - "us-gov-east-1" - - "us-gov-west-1" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" + - "us-gov-east-1" + - "us-gov-west-1" order: 1 message_delay: title: "Message Delay" - description: 
- "Modify the Message Delay of the individual message from the\ + description: "Modify the Message Delay of the individual message from the\ \ Queue's default (seconds)." type: "integer" examples: - - "15" + - "15" order: 2 access_key: title: "AWS IAM Access Key ID" - description: - "The Access Key ID of the AWS IAM Role to use for sending \ + description: "The Access Key ID of the AWS IAM Role to use for sending \ \ messages" type: "string" examples: - - "xxxxxHRNxxx3TBxxxxxx" + - "xxxxxHRNxxx3TBxxxxxx" order: 3 airbyte_secret: true secret_key: @@ -155,34 +150,32 @@ description: "The Secret Key of the AWS IAM Role to use for sending messages" type: "string" examples: - - "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz" + - "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz" order: 4 airbyte_secret: true message_body_key: title: "Message Body Key" - description: - "Use this property to extract the contents of the named key\ + description: "Use this property to extract the contents of the named key\ \ in the input record to use as the SQS message body. If not set, the\ \ entire content of the input record data is used as the message body." type: "string" examples: - - "myDataPath" + - "myDataPath" order: 5 message_group_id: title: "Message Group Id" - description: - "The tag that specifies that a message belongs to a specific\ + description: "The tag that specifies that a message belongs to a specific\ \ message group. This parameter applies only to, and is REQUIRED by, FIFO\ \ queues." 
type: "string" examples: - - "my-fifo-group" + - "my-fifo-group" order: 6 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-bigquery:0.5.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" @@ -191,14 +184,13 @@ title: "BigQuery Destination Spec" type: "object" required: - - "project_id" - - "dataset_id" + - "project_id" + - "dataset_id" additionalProperties: true properties: big_query_client_buffer_size_mb: title: "Google BigQuery client chunk size" - description: - "Google BigQuery client's chunk(buffer) size (MIN=1, MAX =\ + description: "Google BigQuery client's chunk(buffer) size (MIN=1, MAX =\ \ 15) for each table. The default 15MiB value is used if not set explicitly.\ \ It's recommended to decrease value for big data sets migration for less\ \ HEAP memory consumption and avoiding crashes. For more details refer\ @@ -208,60 +200,56 @@ maximum: 15 default: 15 examples: - - "15" + - "15" project_id: type: "string" - description: - "The GCP project ID for the project containing the target BigQuery\ + description: "The GCP project ID for the project containing the target BigQuery\ \ dataset." title: "Project ID" dataset_id: type: "string" - description: - "Default BigQuery Dataset ID tables are replicated to if the\ + description: "Default BigQuery Dataset ID tables are replicated to if the\ \ source does not specify a namespace." title: "Default Dataset ID" dataset_location: type: "string" - description: - "The location of the dataset. Warning: Changes made after creation\ + description: "The location of the dataset. Warning: Changes made after creation\ \ will not be applied." 
title: "Dataset Location" default: "US" enum: - - "US" - - "EU" - - "asia-east1" - - "asia-east2" - - "asia-northeast1" - - "asia-northeast2" - - "asia-northeast3" - - "asia-south1" - - "asia-southeast1" - - "asia-southeast2" - - "australia-southeast1" - - "europe-central1" - - "europe-central2" - - "europe-north1" - - "europe-west1" - - "europe-west2" - - "europe-west3" - - "europe-west4" - - "europe-west5" - - "europe-west6" - - "northamerica-northeast1" - - "southamerica-east1" - - "us-central1" - - "us-east1" - - "us-east4" - - "us-west-1" - - "us-west-2" - - "us-west-3" - - "us-west-4" + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west5" + - "europe-west6" + - "northamerica-northeast1" + - "southamerica-east1" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west-1" + - "us-west-2" + - "us-west-3" + - "us-west-4" credentials_json: type: "string" - description: - "The contents of the JSON service account key. Check out the\ + description: "The contents of the JSON service account key. Check out the\ \ docs if you need help generating this key. Default credentials will\ \ be used if this field is left empty." @@ -269,107 +257,101 @@ airbyte_secret: true transformation_priority: type: "string" - description: - "When running custom transformations or Basic normalization,\ + description: "When running custom transformations or Basic normalization,\ \ running queries on interactive mode can hit BQ limits, choosing batch\ \ will solve those limitss." 
title: "Transformation Query Run Type" default: "interactive" enum: - - "interactive" - - "batch" + - "interactive" + - "batch" loading_method: type: "object" title: "Loading Method" - description: - "Loading method used to send select the way data will be uploaded\ + description: "Loading method used to send select the way data will be uploaded\ \ to BigQuery." oneOf: - - title: "Standard Inserts" - additionalProperties: false - description: "Direct uploading using streams." - required: - - "method" - properties: - method: - type: "string" - const: "Standard" - - title: "GCS Staging" - additionalProperties: false - description: - "Writes large batches of records to a file, uploads the file\ - \ to GCS, then uses
COPY INTO table
to upload the file. Recommended\ - \ for large production workloads for better speed and scalability." - required: - - "method" - - "gcs_bucket_name" - - "gcs_bucket_path" - - "credential" - properties: - method: - type: "string" - const: "GCS Staging" - gcs_bucket_name: - title: "GCS Bucket Name" - type: "string" - description: "The name of the GCS bucket." - examples: - - "airbyte_sync" - gcs_bucket_path: - description: "Directory under the GCS bucket where data will be written." - type: "string" - examples: - - "data_sync/test" - keep_files_in_gcs-bucket: - type: "string" - description: - "This upload method is supposed to temporary store records\ - \ in GCS bucket. What do you want to do with data in GCS bucket\ - \ when migration has finished?" - title: "GCS tmp files afterward processing" - default: "Delete all tmp files from GCS" - enum: - - "Delete all tmp files from GCS" - - "Keep all tmp files in GCS" - credential: - title: "Credential" - type: "object" - oneOf: - - title: "HMAC key" - required: - - "credential_type" - - "hmac_key_access_id" - - "hmac_key_secret" - properties: - credential_type: - type: "string" - const: "HMAC_KEY" - hmac_key_access_id: - type: "string" - description: - "HMAC key access ID. When linked to a service account,\ - \ this ID is 61 characters long; when linked to a user account,\ - \ it is 24 characters long." - title: "HMAC Key Access ID" - airbyte_secret: true - examples: - - "1234567890abcdefghij1234" - hmac_key_secret: - type: "string" - description: - "The corresponding secret for the access ID. It\ - \ is a 40-character base-64 encoded string." - title: "HMAC Key Secret" - airbyte_secret: true - examples: - - "1234567890abcdefghij1234567890ABCDEFGHIJ" + - title: "Standard Inserts" + additionalProperties: false + description: "Direct uploading using streams." 
+ required: + - "method" + properties: + method: + type: "string" + const: "Standard" + - title: "GCS Staging" + additionalProperties: false + description: "Writes large batches of records to a file, uploads the file\ + \ to GCS, then uses
COPY INTO table
to upload the file. Recommended\ + \ for large production workloads for better speed and scalability." + required: + - "method" + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + properties: + method: + type: "string" + const: "GCS Staging" + gcs_bucket_name: + title: "GCS Bucket Name" + type: "string" + description: "The name of the GCS bucket." + examples: + - "airbyte_sync" + gcs_bucket_path: + description: "Directory under the GCS bucket where data will be written." + type: "string" + examples: + - "data_sync/test" + keep_files_in_gcs-bucket: + type: "string" + description: "This upload method is supposed to temporary store records\ + \ in GCS bucket. What do you want to do with data in GCS bucket\ + \ when migration has finished?" + title: "GCS tmp files afterward processing" + default: "Delete all tmp files from GCS" + enum: + - "Delete all tmp files from GCS" + - "Keep all tmp files in GCS" + credential: + title: "Credential" + type: "object" + oneOf: + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + const: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + hmac_key_secret: + type: "string" + description: "The corresponding secret for the access ID. It\ + \ is a 40-character base-64 encoded string." 
+ title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" + - "overwrite" + - "append" + - "append_dedup" - dockerImage: "airbyte/destination-bigquery-denormalized:0.1.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/bigquery" @@ -378,63 +360,59 @@ title: "BigQuery Denormalized Typed Struct Destination Spec" type: "object" required: - - "project_id" - - "dataset_id" + - "project_id" + - "dataset_id" additionalProperties: true properties: project_id: type: "string" - description: - "The GCP project ID for the project containing the target BigQuery\ + description: "The GCP project ID for the project containing the target BigQuery\ \ dataset." title: "Project ID" dataset_id: type: "string" - description: - "Default BigQuery Dataset ID tables are replicated to if the\ + description: "Default BigQuery Dataset ID tables are replicated to if the\ \ source does not specify a namespace." title: "Default Dataset ID" dataset_location: type: "string" - description: - "The location of the dataset. Warning: Changes made after creation\ + description: "The location of the dataset. Warning: Changes made after creation\ \ will not be applied." 
title: "Dataset Location" default: "US" enum: - - "US" - - "EU" - - "asia-east1" - - "asia-east2" - - "asia-northeast1" - - "asia-northeast2" - - "asia-northeast3" - - "asia-south1" - - "asia-southeast1" - - "asia-southeast2" - - "australia-southeast1" - - "europe-central1" - - "europe-central2" - - "europe-north1" - - "europe-west1" - - "europe-west2" - - "europe-west3" - - "europe-west4" - - "europe-west5" - - "europe-west6" - - "northamerica-northeast1" - - "southamerica-east1" - - "us-central1" - - "us-east1" - - "us-east4" - - "us-west-1" - - "us-west-2" - - "us-west-3" - - "us-west-4" + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west5" + - "europe-west6" + - "northamerica-northeast1" + - "southamerica-east1" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west-1" + - "us-west-2" + - "us-west-3" + - "us-west-4" credentials_json: type: "string" - description: - "The contents of the JSON service account key. Check out the\ + description: "The contents of the JSON service account key. Check out the\ \ docs if you need help generating this key. Default credentials will\ \ be used if this field is left empty." 
@@ -444,8 +422,8 @@ supportsNormalization: false supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-cassandra:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/cassandra" @@ -454,11 +432,11 @@ title: "Cassandra Destination Spec" type: "object" required: - - "keyspace" - - "username" - - "password" - - "address" - - "port" + - "keyspace" + - "username" + - "password" + - "address" + - "port" additionalProperties: true properties: keyspace: @@ -482,7 +460,7 @@ description: "Address to connect to." type: "string" examples: - - "localhost,127.0.0.1" + - "localhost,127.0.0.1" order: 3 port: title: "Port" @@ -501,8 +479,7 @@ replication: title: "Replication factor" type: "integer" - description: - "Indicates to how many nodes the data should be replicated\ + description: "Indicates to how many nodes the data should be replicated\ \ to." default: 1 order: 6 @@ -510,8 +487,8 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-keen:0.2.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/keen" @@ -520,26 +497,25 @@ title: "Keen Spec" type: "object" required: - - "project_id" - - "api_key" + - "project_id" + - "api_key" additionalProperties: false properties: project_id: description: "Keen Project ID" type: "string" examples: - - "58b4acc22ba938934e888322e" + - "58b4acc22ba938934e888322e" api_key: title: "API Key" description: "Keen Master API key" type: "string" examples: - - "ABCDEFGHIJKLMNOPRSTUWXYZ" + - "ABCDEFGHIJKLMNOPRSTUWXYZ" airbyte_secret: true infer_timestamp: title: "Infer Timestamp" - description: - "Allow connector to guess keen.timestamp value based on the\ + description: "Allow connector to guess keen.timestamp value based on the\ \ streamed data" type: "boolean" default: true @@ 
-547,8 +523,8 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-dynamodb:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/dynamodb" @@ -557,81 +533,79 @@ title: "DynamoDB Destination Spec" type: "object" required: - - "dynamodb_table_name" - - "dynamodb_region" - - "access_key_id" - - "secret_access_key" + - "dynamodb_table_name" + - "dynamodb_region" + - "access_key_id" + - "secret_access_key" additionalProperties: false properties: dynamodb_endpoint: title: "Endpoint" type: "string" default: "" - description: - "This is your DynamoDB endpoint url.(if you are working with\ + description: "This is your DynamoDB endpoint url.(if you are working with\ \ AWS DynamoDB, just leave empty)." examples: - - "http://localhost:9000" + - "http://localhost:9000" dynamodb_table_name: title: "DynamoDB Table Name" type: "string" description: "The name of the DynamoDB table." examples: - - "airbyte_sync" + - "airbyte_sync" dynamodb_region: title: "DynamoDB Region" type: "string" default: "" description: "The region of the DynamoDB." 
enum: - - "" - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-north-1" - - "eu-south-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - "sa-east-1" - - "me-south-1" - - "us-gov-east-1" - - "us-gov-west-1" + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" + - "us-gov-east-1" + - "us-gov-west-1" access_key_id: type: "string" - description: - "The access key id to access the DynamoDB. Airbyte requires\ + description: "The access key id to access the DynamoDB. Airbyte requires\ \ Read and Write permissions to the DynamoDB." title: "DynamoDB Key Id" airbyte_secret: true examples: - - "A012345678910EXAMPLE" + - "A012345678910EXAMPLE" secret_access_key: type: "string" description: "The corresponding secret to the access key id." 
title: "DynamoDB Access Key" airbyte_secret: true examples: - - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-elasticsearch:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/elasticsearch" @@ -640,7 +614,7 @@ title: "Elasticsearch Connection Configuration" type: "object" required: - - "endpoint" + - "endpoint" additionalProperties: false properties: endpoint: @@ -650,8 +624,7 @@ upsert: type: "boolean" title: "Upsert Records" - description: - "If a primary key identifier is defined in the source, an upsert\ + description: "If a primary key identifier is defined in the source, an upsert\ \ will be performed using the primary key value as the elasticsearch doc\ \ id. Does not support composite primary keys." default: true @@ -660,67 +633,64 @@ type: "object" description: "The type of authentication to be used" oneOf: - - title: "None" - additionalProperties: false - description: "No authentication will be used" - required: - - "method" - properties: - method: - type: "string" - const: "none" - - title: "Api Key/Secret" - additionalProperties: false - description: "Use a api key and secret combination to authenticate" - required: - - "method" - - "apiKeyId" - - "apiKeySecret" - properties: - method: - type: "string" - const: "secret" - apiKeyId: - title: "API Key ID" - description: - "The Key ID to used when accessing an enterprise Elasticsearch\ - \ instance." - type: "string" - apiKeySecret: - title: "API Key Secret" - description: "The secret associated with the API Key ID." 
- type: "string" - airbyte_secret: true - - title: "Username/Password" - additionalProperties: false - description: "Basic auth header with a username and password" - required: - - "method" - - "username" - - "password" - properties: - method: - type: "string" - const: "basic" - username: - title: "Username" - description: - "Basic auth username to access a secure Elasticsearch\ - \ server" - type: "string" - password: - title: "Password" - description: - "Basic auth password to access a secure Elasticsearch\ - \ server" - type: "string" - airbyte_secret: true + - title: "None" + additionalProperties: false + description: "No authentication will be used" + required: + - "method" + properties: + method: + type: "string" + const: "none" + - title: "Api Key/Secret" + additionalProperties: false + description: "Use a api key and secret combination to authenticate" + required: + - "method" + - "apiKeyId" + - "apiKeySecret" + properties: + method: + type: "string" + const: "secret" + apiKeyId: + title: "API Key ID" + description: "The Key ID to used when accessing an enterprise Elasticsearch\ + \ instance." + type: "string" + apiKeySecret: + title: "API Key Secret" + description: "The secret associated with the API Key ID." 
+ type: "string" + airbyte_secret: true + - title: "Username/Password" + additionalProperties: false + description: "Basic auth header with a username and password" + required: + - "method" + - "username" + - "password" + properties: + method: + type: "string" + const: "basic" + username: + title: "Username" + description: "Basic auth username to access a secure Elasticsearch\ + \ server" + type: "string" + password: + title: "Password" + description: "Basic auth password to access a secure Elasticsearch\ + \ server" + type: "string" + airbyte_secret: true supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" supportsNamespaces: true - dockerImage: "airbyte/destination-gcs:0.1.15" spec: @@ -730,11 +700,11 @@ title: "GCS Destination Spec" type: "object" required: - - "gcs_bucket_name" - - "gcs_bucket_path" - - "gcs_bucket_region" - - "credential" - - "format" + - "gcs_bucket_name" + - "gcs_bucket_path" + - "gcs_bucket_region" + - "credential" + - "format" additionalProperties: false properties: gcs_bucket_name: @@ -742,343 +712,329 @@ type: "string" description: "The name of the GCS bucket." examples: - - "airbyte_sync" + - "airbyte_sync" gcs_bucket_path: description: "Directory under the GCS bucket where data will be written." type: "string" examples: - - "data_sync/test" + - "data_sync/test" gcs_bucket_region: title: "GCS Bucket Region" type: "string" default: "" description: "The region of the GCS bucket." 
enum: - - "" - - "-- North America --" - - "northamerica-northeast1" - - "northamerica-northeast2" - - "us-central1" - - "us-east1" - - "us-east4" - - "us-west1" - - "us-west2" - - "us-west3" - - "us-west4" - - "-- South America --" - - "southamerica-east1" - - "southamerica-west1" - - "-- Europe --" - - "europe-central2" - - "europe-north1" - - "europe-west1" - - "europe-west2" - - "europe-west3" - - "europe-west4" - - "europe-west6" - - "-- Asia --" - - "asia-east1" - - "asia-east2" - - "asia-northeast1" - - "asia-northeast2" - - "asia-northeast3" - - "asia-south1" - - "asia-south2" - - "asia-southeast1" - - "asia-southeast2" - - "-- Australia --" - - "australia-southeast1" - - "australia-southeast2" - - "-- Multi-regions --" - - "asia" - - "eu" - - "us" - - "-- Dual-regions --" - - "asia1" - - "eur4" - - "nam4" + - "" + - "-- North America --" + - "northamerica-northeast1" + - "northamerica-northeast2" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + - "-- South America --" + - "southamerica-east1" + - "southamerica-west1" + - "-- Europe --" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "-- Asia --" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "-- Australia --" + - "australia-southeast1" + - "australia-southeast2" + - "-- Multi-regions --" + - "asia" + - "eu" + - "us" + - "-- Dual-regions --" + - "asia1" + - "eur4" + - "nam4" credential: title: "Credential" type: "object" oneOf: - - title: "HMAC key" - required: - - "credential_type" - - "hmac_key_access_id" - - "hmac_key_secret" - properties: - credential_type: - type: "string" - enum: - - "HMAC_KEY" - default: "HMAC_KEY" - hmac_key_access_id: - type: "string" - description: - "HMAC key access ID. 
When linked to a service account,\ - \ this ID is 61 characters long; when linked to a user account,\ - \ it is 24 characters long." - title: "HMAC Key Access ID" - airbyte_secret: true - examples: - - "1234567890abcdefghij1234" - hmac_key_secret: - type: "string" - description: - "The corresponding secret for the access ID. It is a\ - \ 40-character base-64 encoded string." - title: "HMAC Key Secret" - airbyte_secret: true - examples: - - "1234567890abcdefghij1234567890ABCDEFGHIJ" + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + enum: + - "HMAC_KEY" + default: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + hmac_key_secret: + type: "string" + description: "The corresponding secret for the access ID. It is a\ + \ 40-character base-64 encoded string." + title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" format: title: "Output Format" type: "object" description: "Output data format" oneOf: - - title: "Avro: Apache Avro" - required: - - "format_type" - - "compression_codec" - properties: - format_type: - type: "string" - enum: - - "Avro" - default: "Avro" - compression_codec: - title: "Compression Codec" - description: - "The compression algorithm used to compress data. Default\ - \ to no compression." 
- type: "object" - oneOf: - - title: "no compression" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "no compression" - default: "no compression" - - title: "Deflate" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "Deflate" - default: "Deflate" - compression_level: - title: "Deflate level" - description: - "0: no compression & fastest, 9: best compression\ - \ & slowest." - type: "integer" - default: 0 - minimum: 0 - maximum: 9 - - title: "bzip2" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "bzip2" - default: "bzip2" - - title: "xz" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "xz" - default: "xz" - compression_level: - title: "Compression level" - description: - "See here for details." - type: "integer" - default: 6 - minimum: 0 - maximum: 9 - - title: "zstandard" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "zstandard" - default: "zstandard" - compression_level: - title: "Compression level" - description: - "Negative levels are 'fast' modes akin to lz4 or\ - \ snappy, levels above 9 are generally for archival purposes,\ - \ and levels above 18 use a lot of memory." - type: "integer" - default: 3 - minimum: -5 - maximum: 22 - include_checksum: - title: "Include checksum" - description: "If true, include a checksum with each data block." - type: "boolean" - default: false - - title: "snappy" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "snappy" - default: "snappy" - part_size_mb: - title: "Block Size (MB) for GCS multipart upload" - description: - "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." 
- type: "integer" - default: 5 - examples: - - 5 - - title: "CSV: Comma-Separated Values" - required: - - "format_type" - - "flattening" - properties: - format_type: - type: "string" - enum: - - "CSV" - default: "CSV" - flattening: - type: "string" - title: "Normalization (Flattening)" - description: - "Whether the input json data should be normalized (flattened)\ - \ in the output CSV. Please refer to docs for details." - default: "No flattening" - enum: - - "No flattening" - - "Root level flattening" - part_size_mb: - title: "Block Size (MB) for GCS multipart upload" - description: - "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - - title: "JSON Lines: newline-delimited JSON" - required: - - "format_type" - properties: - format_type: - type: "string" - enum: - - "JSONL" - default: "JSONL" - part_size_mb: - title: "Block Size (MB) for GCS multipart upload" - description: - "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - - title: "Parquet: Columnar Storage" - required: - - "format_type" - properties: - format_type: - type: "string" - enum: - - "Parquet" - default: "Parquet" - compression_codec: - title: "Compression Codec" - description: "The compression algorithm used to compress data pages." 
- type: "string" - enum: - - "UNCOMPRESSED" - - "SNAPPY" - - "GZIP" - - "LZO" - - "BROTLI" - - "LZ4" - - "ZSTD" - default: "UNCOMPRESSED" - block_size_mb: - title: "Block Size (Row Group Size) (MB)" - description: - "This is the size of a row group being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will improve\ - \ the IO when reading, but consume more memory when writing. Default:\ - \ 128 MB." - type: "integer" - default: 128 - examples: - - 128 - max_padding_size_mb: - title: "Max Padding Size (MB)" - description: - "Maximum size allowed as padding to align row groups.\ - \ This is also the minimum size of a row group. Default: 8 MB." - type: "integer" - default: 8 - examples: - - 8 - page_size_kb: - title: "Page Size (KB)" - description: - "The page size is for compression. A block is composed\ - \ of pages. A page is the smallest unit that must be read fully\ - \ to access a single record. If this value is too small, the compression\ - \ will deteriorate. Default: 1024 KB." - type: "integer" - default: 1024 - examples: - - 1024 - dictionary_page_size_kb: - title: "Dictionary Page Size (KB)" - description: - "There is one dictionary page per column per row group\ - \ when dictionary encoding is used. The dictionary page size works\ - \ like the page size but for dictionary. Default: 1024 KB." - type: "integer" - default: 1024 - examples: - - 1024 - dictionary_encoding: - title: "Dictionary Encoding" - description: "Default: true." - type: "boolean" - default: true + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + type: "string" + enum: + - "Avro" + default: "Avro" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data. Default\ + \ to no compression." 
+ type: "object" + oneOf: + - title: "no compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate level" + description: "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression level" + description: "See here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression level" + description: "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + part_size_mb: + title: "Block Size (MB) for GCS multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." 
+ type: "integer" + default: 5 + examples: + - 5 + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + part_size_mb: + title: "Block Size (MB) for GCS multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "JSONL" + default: "JSONL" + part_size_mb: + title: "Block Size (MB) for GCS multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." 
+ type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + default: "UNCOMPRESSED" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." + type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." 
+ type: "boolean" + default: true supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" $schema: "http://json-schema.org/draft-07/schema#" - dockerImage: "airbyte/destination-pubsub:0.1.1" spec: @@ -1088,9 +1044,9 @@ title: "Google PubSub Destination Spec" type: "object" required: - - "project_id" - - "topic_id" - - "credentials_json" + - "project_id" + - "topic_id" + - "credentials_json" additionalProperties: true properties: project_id: @@ -1103,8 +1059,7 @@ title: "PubSub Topic ID" credentials_json: type: "string" - description: - "The contents of the JSON service account key. Check out the\ + description: "The contents of the JSON service account key. Check out the\ \ docs if you need help generating this key." title: "Credentials JSON" @@ -1113,7 +1068,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-kafka:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/kafka" @@ -1122,32 +1077,31 @@ title: "Kafka Destination Spec" type: "object" required: - - "bootstrap_servers" - - "topic_pattern" - - "protocol" - - "acks" - - "enable_idempotence" - - "compression_type" - - "batch_size" - - "linger_ms" - - "max_in_flight_requests_per_connection" - - "client_dns_lookup" - - "buffer_memory" - - "max_request_size" - - "retries" - - "socket_connection_setup_timeout_ms" - - "socket_connection_setup_timeout_max_ms" - - "max_block_ms" - - "request_timeout_ms" - - "delivery_timeout_ms" - - "send_buffer_bytes" - - "receive_buffer_bytes" + - "bootstrap_servers" + - "topic_pattern" + - "protocol" + - "acks" + - "enable_idempotence" + - "compression_type" + - "batch_size" + - "linger_ms" + - "max_in_flight_requests_per_connection" + - "client_dns_lookup" + - "buffer_memory" + - "max_request_size" + - "retries" + - 
"socket_connection_setup_timeout_ms" + - "socket_connection_setup_timeout_max_ms" + - "max_block_ms" + - "request_timeout_ms" + - "delivery_timeout_ms" + - "send_buffer_bytes" + - "receive_buffer_bytes" additionalProperties: true properties: bootstrap_servers: title: "Bootstrap servers" - description: - "A list of host/port pairs to use for establishing the initial\ + description: "A list of host/port pairs to use for establishing the initial\ \ connection to the Kafka cluster. The client will make use of all servers\ \ irrespective of which servers are specified here for bootstrapping—this\ \ list only impacts the initial hosts used to discover the full set of\ @@ -1158,24 +1112,23 @@ \ though, in case a server is down)." type: "string" examples: - - "kafka-broker1:9092,kafka-broker2:9092" + - "kafka-broker1:9092,kafka-broker2:9092" topic_pattern: title: "Topic pattern" - description: - "Topic pattern in which the records will be sent. You can use\ + description: "Topic pattern in which the records will be sent. You can use\ \ patterns like '{namespace}' and/or '{stream}' to send the message to\ \ a specific topic based on these values. Notice that the topic name will\ \ be transformed to a standard naming convention." type: "string" examples: - - "sample.topic" - - "{namespace}.{stream}.sample" + - "sample.topic" + - "{namespace}.{stream}.sample" test_topic: title: "Test topic" description: "Topic to test if Airbyte can produce messages." type: "string" examples: - - "test.topic" + - "test.topic" sync_producer: title: "Sync producer" description: "Wait synchronously until the record has been sent to Kafka." @@ -1186,99 +1139,92 @@ type: "object" description: "Protocol used to communicate with brokers." 
oneOf: - - title: "PLAINTEXT" - required: - - "security_protocol" - properties: - security_protocol: - type: "string" - enum: - - "PLAINTEXT" - default: "PLAINTEXT" - - title: "SASL PLAINTEXT" - required: - - "security_protocol" - - "sasl_mechanism" - - "sasl_jaas_config" - properties: - security_protocol: - type: "string" - enum: - - "SASL_PLAINTEXT" - default: "SASL_PLAINTEXT" - sasl_mechanism: - title: "SASL mechanism" - description: - "SASL mechanism used for client connections. This may\ - \ be any mechanism for which a security provider is available." - type: "string" - default: "PLAIN" - enum: - - "PLAIN" - sasl_jaas_config: - title: "SASL JAAS config" - description: - "JAAS login context parameters for SASL connections in\ - \ the format used by JAAS configuration files." - type: "string" - default: "" - airbyte_secret: true - - title: "SASL SSL" - required: - - "security_protocol" - - "sasl_mechanism" - - "sasl_jaas_config" - properties: - security_protocol: - type: "string" - enum: - - "SASL_SSL" - default: "SASL_SSL" - sasl_mechanism: - title: "SASL mechanism" - description: - "SASL mechanism used for client connections. This may\ - \ be any mechanism for which a security provider is available." - type: "string" - default: "GSSAPI" - enum: - - "GSSAPI" - - "OAUTHBEARER" - - "SCRAM-SHA-256" - sasl_jaas_config: - title: "SASL JAAS config" - description: - "JAAS login context parameters for SASL connections in\ - \ the format used by JAAS configuration files." 
- type: "string" - default: "" - airbyte_secret: true + - title: "PLAINTEXT" + required: + - "security_protocol" + properties: + security_protocol: + type: "string" + enum: + - "PLAINTEXT" + default: "PLAINTEXT" + - title: "SASL PLAINTEXT" + required: + - "security_protocol" + - "sasl_mechanism" + - "sasl_jaas_config" + properties: + security_protocol: + type: "string" + enum: + - "SASL_PLAINTEXT" + default: "SASL_PLAINTEXT" + sasl_mechanism: + title: "SASL mechanism" + description: "SASL mechanism used for client connections. This may\ + \ be any mechanism for which a security provider is available." + type: "string" + default: "PLAIN" + enum: + - "PLAIN" + sasl_jaas_config: + title: "SASL JAAS config" + description: "JAAS login context parameters for SASL connections in\ + \ the format used by JAAS configuration files." + type: "string" + default: "" + airbyte_secret: true + - title: "SASL SSL" + required: + - "security_protocol" + - "sasl_mechanism" + - "sasl_jaas_config" + properties: + security_protocol: + type: "string" + enum: + - "SASL_SSL" + default: "SASL_SSL" + sasl_mechanism: + title: "SASL mechanism" + description: "SASL mechanism used for client connections. This may\ + \ be any mechanism for which a security provider is available." + type: "string" + default: "GSSAPI" + enum: + - "GSSAPI" + - "OAUTHBEARER" + - "SCRAM-SHA-256" + sasl_jaas_config: + title: "SASL JAAS config" + description: "JAAS login context parameters for SASL connections in\ + \ the format used by JAAS configuration files." + type: "string" + default: "" + airbyte_secret: true client_id: title: "Client ID" - description: - "An id string to pass to the server when making requests. The\ + description: "An id string to pass to the server when making requests. The\ \ purpose of this is to be able to track the source of requests beyond\ \ just ip/port by allowing a logical application name to be included in\ \ server-side request logging." 
type: "string" examples: - - "airbyte-producer" + - "airbyte-producer" acks: title: "ACKs" - description: - "The number of acknowledgments the producer requires the leader\ + description: "The number of acknowledgments the producer requires the leader\ \ to have received before considering a request complete. This controls\ \ the durability of records that are sent." type: "string" default: "1" enum: - - "0" - - "1" - - "all" + - "0" + - "1" + - "all" enable_idempotence: title: "Enable idempotence" - description: - "When set to 'true', the producer will ensure that exactly\ + description: "When set to 'true', the producer will ensure that exactly\ \ one copy of each message is written in the stream. If 'false', producer\ \ retries due to broker failures, etc., may write duplicates of the retried\ \ message in the stream." @@ -1290,39 +1236,35 @@ type: "string" default: "none" enum: - - "none" - - "gzip" - - "snappy" - - "lz4" - - "zstd" + - "none" + - "gzip" + - "snappy" + - "lz4" + - "zstd" batch_size: title: "Batch size" - description: - "The producer will attempt to batch records together into fewer\ + description: "The producer will attempt to batch records together into fewer\ \ requests whenever multiple records are being sent to the same partition." type: "integer" examples: - - 16384 + - 16384 linger_ms: title: "Linger ms" - description: - "The producer groups together any records that arrive in between\ + description: "The producer groups together any records that arrive in between\ \ request transmissions into a single batched request." type: "string" examples: - - 0 + - 0 max_in_flight_requests_per_connection: title: "Max in flight requests per connection" - description: - "The maximum number of unacknowledged requests the client will\ + description: "The maximum number of unacknowledged requests the client will\ \ send on a single connection before blocking." 
type: "integer" examples: - - 5 + - 5 client_dns_lookup: title: "Client DNS lookup" - description: - "Controls how the client uses DNS lookups. If set to use_all_dns_ips,\ + description: "Controls how the client uses DNS lookups. If set to use_all_dns_ips,\ \ connect to each returned IP address in sequence until a successful connection\ \ is established. After a disconnection, the next IP is used. Once all\ \ IPs have been used once, the client resolves the IP(s) from the hostname\ @@ -1334,14 +1276,13 @@ type: "string" default: "use_all_dns_ips" enum: - - "default" - - "use_all_dns_ips" - - "resolve_canonical_bootstrap_servers_only" - - "use_all_dns_ips" + - "default" + - "use_all_dns_ips" + - "resolve_canonical_bootstrap_servers_only" + - "use_all_dns_ips" buffer_memory: title: "Buffer memory" - description: - "The total bytes of memory the producer can use to buffer records\ + description: "The total bytes of memory the producer can use to buffer records\ \ waiting to be sent to the server." type: "string" examples: 33554432 @@ -1350,80 +1291,72 @@ description: "The maximum size of a request in bytes." type: "integer" examples: - - 1048576 + - 1048576 retries: title: "Retries" - description: - "Setting a value greater than zero will cause the client to\ + description: "Setting a value greater than zero will cause the client to\ \ resend any record whose send fails with a potentially transient error." type: "integer" examples: - - 2147483647 + - 2147483647 socket_connection_setup_timeout_ms: title: "Socket connection setup timeout" - description: - "The amount of time the client will wait for the socket connection\ + description: "The amount of time the client will wait for the socket connection\ \ to be established." 
type: "string" examples: - - 10000 + - 10000 socket_connection_setup_timeout_max_ms: title: "Socket connection setup max timeout" - description: - "The maximum amount of time the client will wait for the socket\ + description: "The maximum amount of time the client will wait for the socket\ \ connection to be established. The connection setup timeout will increase\ \ exponentially for each consecutive connection failure up to this maximum." type: "string" examples: - - 30000 + - 30000 max_block_ms: title: "Max block ms" - description: - "The configuration controls how long the KafkaProducer's send(),\ + description: "The configuration controls how long the KafkaProducer's send(),\ \ partitionsFor(), initTransactions(), sendOffsetsToTransaction(), commitTransaction()\ \ and abortTransaction() methods will block." type: "string" examples: - - 60000 + - 60000 request_timeout_ms: title: "Request timeout" - description: - "The configuration controls the maximum amount of time the\ + description: "The configuration controls the maximum amount of time the\ \ client will wait for the response of a request. If the response is not\ \ received before the timeout elapses the client will resend the request\ \ if necessary or fail the request if retries are exhausted." type: "integer" examples: - - 30000 + - 30000 delivery_timeout_ms: title: "Delivery timeout" - description: - "An upper bound on the time to report success or failure after\ + description: "An upper bound on the time to report success or failure after\ \ a call to 'send()' returns." type: "integer" examples: - - 120000 + - 120000 send_buffer_bytes: title: "Send buffer bytes" - description: - "The size of the TCP send buffer (SO_SNDBUF) to use when sending\ + description: "The size of the TCP send buffer (SO_SNDBUF) to use when sending\ \ data. If the value is -1, the OS default will be used." 
type: "integer" examples: - - 131072 + - 131072 receive_buffer_bytes: title: "Receive buffer bytes" - description: - "The size of the TCP receive buffer (SO_RCVBUF) to use when\ + description: "The size of the TCP receive buffer (SO_RCVBUF) to use when\ \ reading data. If the value is -1, the OS default will be used." type: "integer" examples: - - 32768 + - 32768 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-kinesis:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/kinesis" @@ -1432,10 +1365,10 @@ title: "Kinesis Destination Spec" type: "object" required: - - "shardCount" - - "accessKey" - - "privateKey" - - "bufferSize" + - "shardCount" + - "accessKey" + - "privateKey" + - "bufferSize" additionalProperties: true properties: endpoint: @@ -1468,8 +1401,7 @@ order: 4 bufferSize: title: "bufferSize" - description: - "Buffer size for storing kinesis records before being batch\ + description: "Buffer size for storing kinesis records before being batch\ \ streamed." type: "integer" minimum: 1 @@ -1480,7 +1412,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-csv:0.2.8" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/local-csv" @@ -1489,25 +1421,24 @@ title: "CSV Destination Spec" type: "object" required: - - "destination_path" + - "destination_path" additionalProperties: false properties: destination_path: - description: - "Path to the directory where csv files will be written. The\ + description: "Path to the directory where csv files will be written. The\ \ destination uses the local mount \"/local\" and any data files will\ \ be placed inside that local mount. 
For more information check out our\ \ docs" type: "string" examples: - - "/local" + - "/local" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-local-json:0.2.8" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/local-json" @@ -1516,24 +1447,23 @@ title: "Local Json Destination Spec" type: "object" required: - - "destination_path" + - "destination_path" additionalProperties: false properties: destination_path: - description: - "Path to the directory where json files will be written. The\ + description: "Path to the directory where json files will be written. The\ \ files will be placed inside that local mount. For more information check\ \ out our docs" type: "string" examples: - - "/json_data" + - "/json_data" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-mqtt:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mqtt" @@ -1542,16 +1472,16 @@ title: "MQTT Destination Spec" type: "object" required: - - "broker_host" - - "broker_port" - - "use_tls" - - "topic_pattern" - - "publisher_sync" - - "connect_timeout" - - "automatic_reconnect" - - "clean_session" - - "message_retained" - - "message_qos" + - "broker_host" + - "broker_port" + - "use_tls" + - "topic_pattern" + - "publisher_sync" + - "connect_timeout" + - "automatic_reconnect" + - "clean_session" + - "message_retained" + - "message_qos" additionalProperties: true properties: broker_host: @@ -1577,29 +1507,27 @@ type: "string" topic_pattern: title: "Topic pattern" - description: - "Topic pattern in which the records will be sent. You can use\ + description: "Topic pattern in which the records will be sent. 
You can use\ \ patterns like '{namespace}' and/or '{stream}' to send the message to\ \ a specific topic based on these values. Notice that the topic name will\ \ be transformed to a standard naming convention." type: "string" examples: - - "sample.topic" - - "{namespace}/{stream}/sample" + - "sample.topic" + - "{namespace}/{stream}/sample" topic_test: title: "Test topic" description: "Topic to test if Airbyte can produce messages." type: "string" examples: - - "test/topic" + - "test/topic" client: title: "Client ID" - description: - "A client identifier that is unique on the server being connected\ + description: "A client identifier that is unique on the server being connected\ \ to." type: "string" examples: - - "airbyte-client1" + - "airbyte-client1" publisher_sync: title: "Sync publisher" description: "Wait synchronously until the record has been sent to the broker." @@ -1607,29 +1535,25 @@ default: false connect_timeout: title: "Connect timeout" - description: - " Maximum time interval (in seconds) the client will wait for\ + description: " Maximum time interval (in seconds) the client will wait for\ \ the network connection to the MQTT server to be established." type: "integer" default: 30 automatic_reconnect: title: "Automatic reconnect" - description: - "Whether the client will automatically attempt to reconnect\ + description: "Whether the client will automatically attempt to reconnect\ \ to the server if the connection is lost." type: "boolean" default: true clean_session: title: "Clean session" - description: - "Whether the client and server should remember state across\ + description: "Whether the client and server should remember state across\ \ restarts and reconnects." type: "boolean" default: true message_retained: title: "Message retained" - description: - "Whether or not the publish message should be retained by the\ + description: "Whether or not the publish message should be retained by the\ \ messaging engine." 
type: "boolean" default: false @@ -1638,14 +1562,14 @@ description: "Quality of service used for each message to be delivered." default: "AT_LEAST_ONCE" enum: - - "AT_MOST_ONCE" - - "AT_LEAST_ONCE" - - "EXACTLY_ONCE" + - "AT_MOST_ONCE" + - "AT_LEAST_ONCE" + - "EXACTLY_ONCE" supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-mssql:0.1.12" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mssql" @@ -1654,11 +1578,11 @@ title: "MS SQL Server Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "database" - - "schema" + - "host" + - "port" + - "username" + - "database" + - "schema" additionalProperties: true properties: host: @@ -1674,7 +1598,7 @@ maximum: 65536 default: 1433 examples: - - "1433" + - "1433" order: 1 database: title: "DB Name" @@ -1683,13 +1607,12 @@ order: 2 schema: title: "Default Schema" - description: - "The default schema tables are written to if the source does\ + description: "The default schema tables are written to if the source does\ \ not specify a namespace. The usual value for this field is \"public\"\ ." type: "string" examples: - - "public" + - "public" default: "public" order: 3 username: @@ -1709,169 +1632,159 @@ description: "Encryption method to use when communicating with the database" order: 6 oneOf: - - title: "Unencrypted" - additionalProperties: false - description: "Data transfer will not be encrypted." 
- required: - - "ssl_method" - type: "object" - properties: - ssl_method: - type: "string" - enum: - - "unencrypted" - default: "unencrypted" - - title: "Encrypted (trust server certificate)" - additionalProperties: false - description: - "Use the cert provided by the server without verification.\ - \ (For testing purposes only!)" - required: - - "ssl_method" - type: "object" - properties: - ssl_method: - type: "string" - enum: - - "encrypted_trust_server_certificate" - default: "encrypted_trust_server_certificate" - - title: "Encrypted (verify certificate)" - additionalProperties: false - description: "Verify and use the cert provided by the server." - required: - - "ssl_method" - - "trustStoreName" - - "trustStorePassword" - type: "object" - properties: - ssl_method: - type: "string" - enum: - - "encrypted_verify_certificate" - default: "encrypted_verify_certificate" - hostNameInCertificate: - title: "Host Name In Certificate" - type: "string" - description: - "Specifies the host name of the server. The value of\ - \ this property must match the subject property of the certificate." - order: 7 + - title: "Unencrypted" + additionalProperties: false + description: "Data transfer will not be encrypted." + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Encrypted (trust server certificate)" + additionalProperties: false + description: "Use the cert provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + enum: + - "encrypted_trust_server_certificate" + default: "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + additionalProperties: false + description: "Verify and use the cert provided by the server." 
+ required: + - "ssl_method" + - "trustStoreName" + - "trustStorePassword" + type: "object" + properties: + ssl_method: + type: "string" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." + order: 7 tunnel_method: type: "object" title: "SSH Tunnel Method" - description: - "Whether to initiate an SSH tunnel before connecting to the\ + description: "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." 
- type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: - "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" + - "overwrite" + - "append" + - "append_dedup" - dockerImage: "airbyte/destination-meilisearch:0.2.10" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/meilisearch" @@ -1880,7 +1793,7 @@ title: "MeiliSearch Destination Spec" type: "object" required: - - "host" + - "host" additionalProperties: true properties: host: @@ -1898,8 +1811,8 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-mongodb:0.1.2" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mongodb" @@ -1908,94 +1821,91 @@ title: "MongoDB Destination Spec" type: "object" required: - - "database" - - "auth_type" + - "database" + - "auth_type" additionalProperties: true properties: instance_type: - description: - "MongoDb instance to connect to. For MongoDB Atlas and Replica\ + description: "MongoDb instance to connect to. For MongoDB Atlas and Replica\ \ Set TLS connection is used by default." title: "MongoDb instance type" type: "object" order: 0 oneOf: - - title: "Standalone MongoDb Instance" - required: - - "instance" - - "host" - - "port" - properties: - instance: - type: "string" - enum: - - "standalone" - default: "standalone" - host: - title: "Host" - type: "string" - description: "Host of a Mongo database to be replicated." 
- order: 0 - port: - title: "Port" - type: "integer" - description: "Port of a Mongo database to be replicated." - minimum: 0 - maximum: 65536 - default: 27017 - examples: - - "27017" - order: 1 - tls: - title: "TLS connection" - type: "boolean" - description: - "Indicates whether TLS encryption protocol will be used\ - \ to connect to MongoDB. It is recommended to use TLS connection\ - \ if possible. For more information see documentation." - default: false - order: 2 - - title: "Replica Set" - required: - - "instance" - - "server_addresses" - properties: - instance: - type: "string" - enum: - - "replica" - default: "replica" - server_addresses: - title: "Server addresses" - type: "string" - description: - "The members of a replica set. Please specify `host`:`port`\ - \ of each member seperated by comma." - examples: - - "host1:27017,host2:27017,host3:27017" - order: 0 - replica_set: - title: "Replica Set" - type: "string" - description: "A replica set name." - order: 1 - - title: "MongoDB Atlas" - additionalProperties: false - required: - - "instance" - - "cluster_url" - properties: - instance: - type: "string" - enum: - - "atlas" - default: "atlas" - cluster_url: - title: "Cluster URL" - type: "string" - description: "URL of a cluster to connect to." - order: 0 + - title: "Standalone MongoDb Instance" + required: + - "instance" + - "host" + - "port" + properties: + instance: + type: "string" + enum: + - "standalone" + default: "standalone" + host: + title: "Host" + type: "string" + description: "Host of a Mongo database to be replicated." + order: 0 + port: + title: "Port" + type: "integer" + description: "Port of a Mongo database to be replicated." + minimum: 0 + maximum: 65536 + default: 27017 + examples: + - "27017" + order: 1 + tls: + title: "TLS connection" + type: "boolean" + description: "Indicates whether TLS encryption protocol will be used\ + \ to connect to MongoDB. It is recommended to use TLS connection\ + \ if possible. 
For more information see documentation." + default: false + order: 2 + - title: "Replica Set" + required: + - "instance" + - "server_addresses" + properties: + instance: + type: "string" + enum: + - "replica" + default: "replica" + server_addresses: + title: "Server addresses" + type: "string" + description: "The members of a replica set. Please specify `host`:`port`\ + \ of each member seperated by comma." + examples: + - "host1:27017,host2:27017,host3:27017" + order: 0 + replica_set: + title: "Replica Set" + type: "string" + description: "A replica set name." + order: 1 + - title: "MongoDB Atlas" + additionalProperties: false + required: + - "instance" + - "cluster_url" + properties: + instance: + type: "string" + enum: + - "atlas" + default: "atlas" + cluster_url: + title: "Cluster URL" + type: "string" + description: "URL of a cluster to connect to." + order: 0 database: title: "DB Name" description: "Name of the database." @@ -2006,45 +1916,45 @@ type: "object" description: "Authorization type." oneOf: - - title: "None" - additionalProperties: false - description: "None." - required: - - "authorization" - type: "object" - properties: - authorization: - type: "string" - const: "none" - - title: "Login/Password" - additionalProperties: false - description: "Login/Password." - required: - - "authorization" - - "username" - - "password" - type: "object" - properties: - authorization: - type: "string" - const: "login/password" - username: - title: "User" - description: "Username to use to access the database." - type: "string" - order: 1 - password: - title: "Password" - description: "Password associated with the username." - type: "string" - airbyte_secret: true - order: 2 + - title: "None" + additionalProperties: false + description: "None." + required: + - "authorization" + type: "object" + properties: + authorization: + type: "string" + const: "none" + - title: "Login/Password" + additionalProperties: false + description: "Login/Password." 
+ required: + - "authorization" + - "username" + - "password" + type: "object" + properties: + authorization: + type: "string" + const: "login/password" + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 2 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-mysql:0.1.15" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mysql" @@ -2053,10 +1963,10 @@ title: "MySQL Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "database" + - "host" + - "port" + - "username" + - "database" additionalProperties: true properties: host: @@ -2072,7 +1982,7 @@ maximum: 65536 default: 3306 examples: - - "3306" + - "3306" order: 1 database: title: "DB Name" @@ -2099,118 +2009,110 @@ tunnel_method: type: "object" title: "SSH Tunnel Method" - description: - "Whether to initiate an SSH tunnel before connecting to the\ + description: "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." 
oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." - type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: - "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." 
- type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-oracle:0.1.12" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/oracle" @@ -2219,10 +2121,10 @@ title: "Oracle Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "sid" + - "host" + - "port" + - "username" + - "sid" additionalProperties: true properties: host: @@ -2238,7 +2140,7 @@ maximum: 65536 default: 1521 examples: - - "1521" + - "1521" order: 1 sid: title: "SID" @@ -2247,8 +2149,7 @@ order: 2 
username: title: "User" - description: - "Username to use to access the database. This user must have\ + description: "Username to use to access the database. This user must have\ \ CREATE USER privileges in the database." type: "string" order: 3 @@ -2260,15 +2161,14 @@ order: 4 schema: title: "Default Schema" - description: - "The default schema tables are written to if the source does\ + description: "The default schema tables are written to if the source does\ \ not specify a namespace. The usual value for this field is \"airbyte\"\ . In Oracle, schemas and users are the same thing, so the \"user\" parameter\ \ is used as the login credentials and this is used for the default Airbyte\ \ message schema." type: "string" examples: - - "airbyte" + - "airbyte" default: "airbyte" order: 5 encryption: @@ -2277,180 +2177,169 @@ description: "Encryption method to use when communicating with the database" order: 6 oneOf: - - title: "Unencrypted" - additionalProperties: false - description: "Data transfer will not be encrypted." - required: - - "encryption_method" - properties: - encryption_method: - type: "string" - const: "unencrypted" - enum: - - "unencrypted" - default: "unencrypted" - - title: "Native Network Ecryption (NNE)" - additionalProperties: false - description: - "Native network encryption gives you the ability to encrypt\ - \ database connections, without the configuration overhead of TCP/IP\ - \ and SSL/TLS and without the need to open and listen on different ports." 
- required: - - "encryption_method" - properties: - encryption_method: - type: "string" - const: "client_nne" - enum: - - "client_nne" - default: "client_nne" - encryption_algorithm: - type: "string" - description: - "This parameter defines the encryption algorithm to be\ - \ used" - title: "Encryption Algorithm" - default: "AES256" - enum: - - "AES256" - - "RC4_56" - - "3DES168" - - title: "TLS Encrypted (verify certificate)" - additionalProperties: false - description: "Verify and use the cert provided by the server." - required: - - "encryption_method" - - "ssl_certificate" - properties: - encryption_method: - type: "string" - const: "encrypted_verify_certificate" - enum: - - "encrypted_verify_certificate" - default: "encrypted_verify_certificate" - ssl_certificate: - title: "SSL PEM file" - description: - "Privacy Enhanced Mail (PEM) files are concatenated certificate\ - \ containers frequently used in certificate installations" - type: "string" - airbyte_secret: true - multiline: true + - title: "Unencrypted" + additionalProperties: false + description: "Data transfer will not be encrypted." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Native Network Ecryption (NNE)" + additionalProperties: false + description: "Native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." 
+ required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + default: "client_nne" + encryption_algorithm: + type: "string" + description: "This parameter defines the encryption algorithm to be\ + \ used" + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + additionalProperties: false + description: "Verify and use the cert provided by the server." + required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM file" + description: "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations" + type: "string" + airbyte_secret: true + multiline: true tunnel_method: type: "object" title: "SSH Tunnel Method" - description: - "Whether to initiate an SSH tunnel before connecting to the\ + description: "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." 
- type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." - type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: - "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." 
- type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: false supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-postgres:0.3.13" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/postgres" @@ -2459,11 +2348,11 @@ title: "Postgres Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "database" - - "schema" + - "host" + - "port" + - "username" + - "database" + - "schema" additionalProperties: true properties: host: @@ -2479,7 +2368,7 @@ maximum: 65536 default: 5432 examples: - - "5432" + - "5432" order: 1 database: 
title: "DB Name" @@ -2488,13 +2377,12 @@ order: 2 schema: title: "Default Schema" - description: - "The default schema tables are written to if the source does\ + description: "The default schema tables are written to if the source does\ \ not specify a namespace. The usual value for this field is \"public\"\ ." type: "string" examples: - - "public" + - "public" default: "public" order: 3 username: @@ -2517,119 +2405,111 @@ tunnel_method: type: "object" title: "SSH Tunnel Method" - description: - "Whether to initiate an SSH tunnel before connecting to the\ + description: "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." 
- type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: - "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" + - "overwrite" + - "append" + - "append_dedup" - dockerImage: "airbyte/destination-pulsar:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/pulsar" @@ -2638,30 +2518,29 @@ title: "Pulsar Destination Spec" type: "object" required: - - "brokers" - - "use_tls" - - "topic_type" - - "topic_tenant" - - "topic_namespace" - - "topic_pattern" - - "compression_type" - - "send_timeout_ms" - - "max_pending_messages" - - "max_pending_messages_across_partitions" - - "batching_enabled" - - "batching_max_messages" - - "batching_max_publish_delay" - - "block_if_queue_full" + - "brokers" + - "use_tls" + - "topic_type" + - "topic_tenant" + - "topic_namespace" + - "topic_pattern" + - "compression_type" + - "send_timeout_ms" + - "max_pending_messages" + - "max_pending_messages_across_partitions" + - "batching_enabled" + - "batching_max_messages" + - "batching_max_publish_delay" + - "block_if_queue_full" additionalProperties: true properties: brokers: title: "Pulsar brokers" - description: - "A list of host/port pairs to use for establishing the initial\ + description: "A list of host/port pairs to use for establishing the initial\ \ connection to the Pulsar cluster." type: "string" examples: - - "broker1:6650,broker2:6650" + - "broker1:6650,broker2:6650" use_tls: title: "Use TLS" description: "Whether to use TLS encryption on the connection." 
@@ -2669,8 +2548,7 @@ default: false topic_type: title: "Topic type" - description: - "It identifies type of topic. Pulsar supports two kind of topics:\ + description: "It identifies type of topic. Pulsar supports two kind of topics:\ \ persistent and non-persistent. In persistent topic, all messages are\ \ durably persisted on disk (that means on multiple disks unless the broker\ \ is standalone), whereas non-persistent topic does not persist message\ @@ -2678,52 +2556,48 @@ type: "string" default: "persistent" enum: - - "persistent" - - "non-persistent" + - "persistent" + - "non-persistent" topic_tenant: title: "Topic tenant" - description: - "The topic tenant within the instance. Tenants are essential\ + description: "The topic tenant within the instance. Tenants are essential\ \ to multi-tenancy in Pulsar, and spread across clusters." type: "string" default: "public" examples: - - "public" + - "public" topic_namespace: title: "Topic namespace" - description: - "The administrative unit of the topic, which acts as a grouping\ + description: "The administrative unit of the topic, which acts as a grouping\ \ mechanism for related topics. Most topic configuration is performed\ \ at the namespace level. Each tenant has one or multiple namespaces." type: "string" default: "default" examples: - - "default" + - "default" topic_pattern: title: "Topic pattern" - description: - "Topic pattern in which the records will be sent. You can use\ + description: "Topic pattern in which the records will be sent. You can use\ \ patterns like '{namespace}' and/or '{stream}' to send the message to\ \ a specific topic based on these values. Notice that the topic name will\ \ be transformed to a standard naming convention." type: "string" examples: - - "sample.topic" - - "{namespace}.{stream}.sample" + - "sample.topic" + - "{namespace}.{stream}.sample" topic_test: title: "Test topic" description: "Topic to test if Airbyte can produce messages." 
type: "string" examples: - - "test.topic" + - "test.topic" producer_name: title: "Producer name" - description: - "Name for the producer. If not filled, the system will generate\ + description: "Name for the producer. If not filled, the system will generate\ \ a globally unique name which can be accessed with." type: "string" examples: - - "airbyte-producer" + - "airbyte-producer" producer_sync: title: "Sync producer" description: "Wait synchronously until the record has been sent to Pulsar." @@ -2735,15 +2609,14 @@ type: "string" default: "NONE" enum: - - "NONE" - - "LZ4" - - "ZLIB" - - "ZSTD" - - "SNAPPY" + - "NONE" + - "LZ4" + - "ZLIB" + - "ZSTD" + - "SNAPPY" send_timeout_ms: title: "Message send timeout" - description: - "If a message is not acknowledged by a server before the send-timeout\ + description: "If a message is not acknowledged by a server before the send-timeout\ \ expires, an error occurs (in ms)." type: "integer" default: 30000 @@ -2759,8 +2632,7 @@ default: 50000 batching_enabled: title: "Enable batching" - description: - "Control whether automatic batching of messages is enabled\ + description: "Control whether automatic batching of messages is enabled\ \ for the producer." type: "boolean" default: true @@ -2771,15 +2643,13 @@ default: 1000 batching_max_publish_delay: title: "Batching max publish delay" - description: - " Time period in milliseconds within which the messages sent\ + description: " Time period in milliseconds within which the messages sent\ \ will be batched." type: "integer" default: 1 block_if_queue_full: title: "Block if queue is full" - description: - "If the send operation should block when the outgoing message\ + description: "If the send operation should block when the outgoing message\ \ queue is full." 
type: "boolean" default: false @@ -2787,7 +2657,7 @@ supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "append" + - "append" - dockerImage: "airbyte/destination-redis:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/redis" @@ -2796,11 +2666,11 @@ title: "Redis Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "password" - - "cache_type" + - "host" + - "port" + - "username" + - "password" + - "cache_type" additionalProperties: false properties: host: @@ -2808,7 +2678,7 @@ description: "Redis host to connect to." type: "string" examples: - - "localhost,127.0.0.1" + - "localhost,127.0.0.1" order: 1 port: title: "Port" @@ -2835,14 +2705,14 @@ default: "hash" description: "Redis cache type to store data in." enum: - - "hash" + - "hash" order: 5 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-redshift:0.3.21" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/redshift" @@ -2851,17 +2721,16 @@ title: "Redshift Destination Spec" type: "object" required: - - "host" - - "port" - - "database" - - "username" - - "password" - - "schema" + - "host" + - "port" + - "database" + - "username" + - "password" + - "schema" additionalProperties: true properties: host: - description: - "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + description: "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ \ region and end with .redshift.amazonaws.com)" type: "string" title: "Host" @@ -2872,7 +2741,7 @@ maximum: 65536 default: 5439 examples: - - "5439" + - "5439" title: "Port" username: description: "Username to use to access the database." 
@@ -2888,61 +2757,57 @@ type: "string" title: "Database" schema: - description: - "The default schema tables are written to if the source does\ + description: "The default schema tables are written to if the source does\ \ not specify a namespace. Unless specifically configured, the usual value\ \ for this field is \"public\"." type: "string" examples: - - "public" + - "public" default: "public" title: "Default Schema" s3_bucket_name: title: "S3 Bucket Name" type: "string" - description: - "The name of the staging S3 bucket to use if utilising a COPY\ + description: "The name of the staging S3 bucket to use if utilising a COPY\ \ strategy. COPY is recommended for production workloads for better speed\ \ and scalability. See AWS docs for more details." examples: - - "airbyte.staging" + - "airbyte.staging" s3_bucket_region: title: "S3 Bucket Region" type: "string" default: "" - description: - "The region of the S3 staging bucket to use if utilising a\ + description: "The region of the S3 staging bucket to use if utilising a\ \ copy strategy." 
enum: - - "" - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-north-1" - - "eu-south-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - "sa-east-1" - - "me-south-1" + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" access_key_id: type: "string" - description: - "The Access Key Id granting allow one to access the above S3\ + description: "The Access Key Id granting allow one to access the above S3\ \ staging bucket. Airbyte requires Read and Write permissions to the given\ \ bucket." title: "S3 Key Id" @@ -2957,9 +2822,8 @@ minimum: 10 maximum: 100 examples: - - "10" - description: - "Optional. Increase this if syncing tables larger than 100GB.\ + - "10" + description: "Optional. Increase this if syncing tables larger than 100GB.\ \ Only relevant for COPY. Files are streamed to S3 in parts. This determines\ \ the size of each part, in MBs. As S3 has a limit of 10,000 parts per\ \ file, part size affects the table size. 
This is 10MB by default, resulting\ @@ -2971,9 +2835,9 @@ supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" + - "overwrite" + - "append" + - "append_dedup" - dockerImage: "airbyte/destination-s3:0.1.16" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/s3" @@ -2982,330 +2846,316 @@ title: "S3 Destination Spec" type: "object" required: - - "s3_bucket_name" - - "s3_bucket_path" - - "s3_bucket_region" - - "access_key_id" - - "secret_access_key" - - "format" + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "access_key_id" + - "secret_access_key" + - "format" additionalProperties: false properties: s3_endpoint: title: "Endpoint" type: "string" default: "" - description: - "This is your S3 endpoint url.(if you are working with AWS\ + description: "This is your S3 endpoint url.(if you are working with AWS\ \ S3, just leave empty)." examples: - - "http://localhost:9000" + - "http://localhost:9000" s3_bucket_name: title: "S3 Bucket Name" type: "string" description: "The name of the S3 bucket." examples: - - "airbyte_sync" + - "airbyte_sync" s3_bucket_path: description: "Directory under the S3 bucket where data will be written." type: "string" examples: - - "data_sync/test" + - "data_sync/test" s3_bucket_region: title: "S3 Bucket Region" type: "string" default: "" description: "The region of the S3 bucket." 
enum: - - "" - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-north-1" - - "eu-south-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - "sa-east-1" - - "me-south-1" - - "us-gov-east-1" - - "us-gov-west-1" + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-north-1" + - "eu-south-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "sa-east-1" + - "me-south-1" + - "us-gov-east-1" + - "us-gov-west-1" access_key_id: type: "string" - description: - "The access key id to access the S3 bucket. Airbyte requires\ + description: "The access key id to access the S3 bucket. Airbyte requires\ \ Read and Write permissions to the given bucket." title: "S3 Key Id" airbyte_secret: true examples: - - "A012345678910EXAMPLE" + - "A012345678910EXAMPLE" secret_access_key: type: "string" description: "The corresponding secret to the access key id." title: "S3 Access Key" airbyte_secret: true examples: - - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" format: title: "Output Format" type: "object" description: "Output data format" oneOf: - - title: "Avro: Apache Avro" - required: - - "format_type" - - "compression_codec" - properties: - format_type: - type: "string" - enum: - - "Avro" - default: "Avro" - compression_codec: - title: "Compression Codec" - description: - "The compression algorithm used to compress data. Default\ - \ to no compression." 
- type: "object" - oneOf: - - title: "no compression" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "no compression" - default: "no compression" - - title: "Deflate" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "Deflate" - default: "Deflate" - compression_level: - title: "Deflate level" - description: - "0: no compression & fastest, 9: best compression\ - \ & slowest." - type: "integer" - default: 0 - minimum: 0 - maximum: 9 - - title: "bzip2" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "bzip2" - default: "bzip2" - - title: "xz" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "xz" - default: "xz" - compression_level: - title: "Compression level" - description: - "See here for details." - type: "integer" - default: 6 - minimum: 0 - maximum: 9 - - title: "zstandard" - required: - - "codec" - - "compression_level" - properties: - codec: - type: "string" - enum: - - "zstandard" - default: "zstandard" - compression_level: - title: "Compression level" - description: - "Negative levels are 'fast' modes akin to lz4 or\ - \ snappy, levels above 9 are generally for archival purposes,\ - \ and levels above 18 use a lot of memory." - type: "integer" - default: 3 - minimum: -5 - maximum: 22 - include_checksum: - title: "Include checksum" - description: "If true, include a checksum with each data block." - type: "boolean" - default: false - - title: "snappy" - required: - - "codec" - properties: - codec: - type: "string" - enum: - - "snappy" - default: "snappy" - part_size_mb: - title: "Block Size (MB) for Amazon S3 multipart upload" - description: - "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." 
- type: "integer" - default: 5 - examples: - - 5 - - title: "CSV: Comma-Separated Values" - required: - - "format_type" - - "flattening" - properties: - format_type: - type: "string" - enum: - - "CSV" - default: "CSV" - flattening: - type: "string" - title: "Normalization (Flattening)" - description: - "Whether the input json data should be normalized (flattened)\ - \ in the output CSV. Please refer to docs for details." - default: "No flattening" - enum: - - "No flattening" - - "Root level flattening" - part_size_mb: - title: "Block Size (MB) for Amazon S3 multipart upload" - description: - "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - - title: "JSON Lines: newline-delimited JSON" - required: - - "format_type" - properties: - format_type: - type: "string" - enum: - - "JSONL" - default: "JSONL" - part_size_mb: - title: "Block Size (MB) for Amazon S3 multipart upload" - description: - "This is the size of a \"Part\" being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will allow\ - \ to upload a bigger files and improve the speed, but consumes9\ - \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." - type: "integer" - default: 5 - examples: - - 5 - - title: "Parquet: Columnar Storage" - required: - - "format_type" - properties: - format_type: - type: "string" - enum: - - "Parquet" - default: "Parquet" - compression_codec: - title: "Compression Codec" - description: "The compression algorithm used to compress data pages." 
- type: "string" - enum: - - "UNCOMPRESSED" - - "SNAPPY" - - "GZIP" - - "LZO" - - "BROTLI" - - "LZ4" - - "ZSTD" - default: "UNCOMPRESSED" - block_size_mb: - title: "Block Size (Row Group Size) (MB)" - description: - "This is the size of a row group being buffered in memory.\ - \ It limits the memory usage when writing. Larger values will improve\ - \ the IO when reading, but consume more memory when writing. Default:\ - \ 128 MB." - type: "integer" - default: 128 - examples: - - 128 - max_padding_size_mb: - title: "Max Padding Size (MB)" - description: - "Maximum size allowed as padding to align row groups.\ - \ This is also the minimum size of a row group. Default: 8 MB." - type: "integer" - default: 8 - examples: - - 8 - page_size_kb: - title: "Page Size (KB)" - description: - "The page size is for compression. A block is composed\ - \ of pages. A page is the smallest unit that must be read fully\ - \ to access a single record. If this value is too small, the compression\ - \ will deteriorate. Default: 1024 KB." - type: "integer" - default: 1024 - examples: - - 1024 - dictionary_page_size_kb: - title: "Dictionary Page Size (KB)" - description: - "There is one dictionary page per column per row group\ - \ when dictionary encoding is used. The dictionary page size works\ - \ like the page size but for dictionary. Default: 1024 KB." - type: "integer" - default: 1024 - examples: - - 1024 - dictionary_encoding: - title: "Dictionary Encoding" - description: "Default: true." - type: "boolean" - default: true + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + type: "string" + enum: + - "Avro" + default: "Avro" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data. Default\ + \ to no compression." 
+ type: "object" + oneOf: + - title: "no compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate level" + description: "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression level" + description: "See here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression level" + description: "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + part_size_mb: + title: "Block Size (MB) for Amazon S3 multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." 
+ type: "integer" + default: 5 + examples: + - 5 + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + part_size_mb: + title: "Block Size (MB) for Amazon S3 multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "JSONL" + default: "JSONL" + part_size_mb: + title: "Block Size (MB) for Amazon S3 multipart upload" + description: "This is the size of a \"Part\" being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will allow\ + \ to upload a bigger files and improve the speed, but consumes9\ + \ more memory. Allowed values: min=5MB, max=525MB Default: 5MB." + type: "integer" + default: 5 + examples: + - 5 + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." 
+ type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + default: "UNCOMPRESSED" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." + type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." 
+ type: "boolean" + default: true supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-sftp-json:0.1.0" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/sftp-json" @@ -3314,10 +3164,10 @@ title: "Destination SFTP JSON" type: "object" required: - - "host" - - "username" - - "password" - - "destination_path" + - "host" + - "username" + - "password" + - "destination_path" additionalProperties: false properties: host: @@ -3333,7 +3183,7 @@ maximum: 65536 default: 22 examples: - - 22 + - 22 order: 1 username: title: "User" @@ -3351,14 +3201,14 @@ type: "string" description: "Path to the directory where json files will be written." examples: - - "/json_data" + - "/json_data" order: 4 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append" - dockerImage: "airbyte/destination-snowflake:0.3.20" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/snowflake" @@ -3367,58 +3217,56 @@ title: "Snowflake Destination Spec" type: "object" required: - - "host" - - "role" - - "warehouse" - - "database" - - "schema" - - "username" - - "password" + - "host" + - "role" + - "warehouse" + - "database" + - "schema" + - "username" + - "password" additionalProperties: true properties: host: - description: - "Host domain of the snowflake instance (must include the account,\ + description: "Host domain of the snowflake instance (must include the account,\ \ region, cloud environment, and end with snowflakecomputing.com)." examples: - - "accountname.us-east-2.aws.snowflakecomputing.com" + - "accountname.us-east-2.aws.snowflakecomputing.com" type: "string" title: "Host" order: 0 role: description: "The role you created for Airbyte to access Snowflake." 
examples: - - "AIRBYTE_ROLE" + - "AIRBYTE_ROLE" type: "string" title: "Role" order: 1 warehouse: description: "The warehouse you created for Airbyte to sync data into." examples: - - "AIRBYTE_WAREHOUSE" + - "AIRBYTE_WAREHOUSE" type: "string" title: "Warehouse" order: 2 database: description: "The database you created for Airbyte to sync data into." examples: - - "AIRBYTE_DATABASE" + - "AIRBYTE_DATABASE" type: "string" title: "Database" order: 3 schema: - description: - "The default Snowflake schema tables are written to if the\ + description: "The default Snowflake schema tables are written to if the\ \ source does not specify a namespace." examples: - - "AIRBYTE_SCHEMA" + - "AIRBYTE_SCHEMA" type: "string" title: "Default Schema" order: 4 username: description: "The username you created to allow Airbyte to access the database." examples: - - "AIRBYTE_USER" + - "AIRBYTE_USER" type: "string" title: "Username" order: 5 @@ -3434,165 +3282,156 @@ description: "Loading method used to send data to Snowflake." order: 7 oneOf: - - title: "[Recommended] Internal Staging" - additionalProperties: false - description: - "Writes large batches of records to a file, uploads the file\ - \ to Snowflake, then uses
COPY INTO table
to upload the file.\ - \ Recommended for large production workloads for better speed and scalability." - required: - - "method" - properties: - method: - type: "string" - enum: - - "Internal Staging" - default: "Internal Staging" - - title: "Standard Inserts" - additionalProperties: false - description: - "Uses
INSERT
statements to send batches of records\ - \ to Snowflake. Easiest (no setup) but not recommended for large production\ - \ workloads due to slow speed." - required: - - "method" - properties: - method: - type: "string" - enum: - - "Standard" - default: "Standard" - - title: "AWS S3 Staging" - additionalProperties: false - description: - "Writes large batches of records to a file, uploads the file\ - \ to S3, then uses
COPY INTO table
to upload the file. Recommended\ - \ for large production workloads for better speed and scalability." - required: - - "method" - - "s3_bucket_name" - - "access_key_id" - - "secret_access_key" - properties: - method: - type: "string" - enum: - - "S3 Staging" - default: "S3 Staging" - order: 0 - s3_bucket_name: - title: "S3 Bucket Name" - type: "string" - description: - "The name of the staging S3 bucket. Airbyte will write\ - \ files to this bucket and read them via
COPY
statements\ - \ on Snowflake." - examples: - - "airbyte.staging" - order: 1 - s3_bucket_region: - title: "S3 Bucket Region" - type: "string" - default: "" - description: - "The region of the S3 staging bucket to use if utilising\ - \ a copy strategy." - enum: - - "" - - "us-east-1" - - "us-east-2" - - "us-west-1" - - "us-west-2" - - "af-south-1" - - "ap-east-1" - - "ap-south-1" - - "ap-northeast-1" - - "ap-northeast-2" - - "ap-northeast-3" - - "ap-southeast-1" - - "ap-southeast-2" - - "ca-central-1" - - "cn-north-1" - - "cn-northwest-1" - - "eu-central-1" - - "eu-west-1" - - "eu-west-2" - - "eu-west-3" - - "eu-south-1" - - "eu-north-1" - - "sa-east-1" - - "me-south-1" - order: 2 - access_key_id: - type: "string" - description: - "The Access Key Id granting allow one to access the above\ - \ S3 staging bucket. Airbyte requires Read and Write permissions\ - \ to the given bucket." - title: "S3 Key Id" - airbyte_secret: true - order: 3 - secret_access_key: - type: "string" - description: "The corresponding secret to the above access key id." - title: "S3 Access Key" - airbyte_secret: true - order: 4 - - title: "GCS Staging" - additionalProperties: false - description: - "Writes large batches of records to a file, uploads the file\ - \ to GCS, then uses
COPY INTO table
to upload the file. Recommended\ - \ for large production workloads for better speed and scalability." - required: - - "method" - - "project_id" - - "bucket_name" - - "credentials_json" - properties: - method: - type: "string" - enum: - - "GCS Staging" - default: "GCS Staging" - order: 0 - project_id: - title: "GCP Project ID" - type: "string" - description: "The name of the GCP project ID for your credentials." - examples: - - "my-project" - order: 1 - bucket_name: - title: "GCS Bucket Name" - type: "string" - description: - "The name of the staging GCS bucket. Airbyte will write\ - \ files to this bucket and read them via
COPY
statements\ - \ on Snowflake." - examples: - - "airbyte-staging" - order: 2 - credentials_json: - title: "Google Application Credentials" - type: "string" - description: - "The contents of the JSON key file that has read/write\ - \ permissions to the staging GCS bucket. You will separately need\ - \ to grant bucket access to your Snowflake GCP service account.\ - \ See the GCP docs for more information on how to generate a JSON key\ - \ for your service account." - airbyte_secret: true - multiline: true - order: 3 + - title: "[Recommended] Internal Staging" + additionalProperties: false + description: "Writes large batches of records to a file, uploads the file\ + \ to Snowflake, then uses
COPY INTO table
to upload the file.\ + \ Recommended for large production workloads for better speed and scalability." + required: + - "method" + properties: + method: + type: "string" + enum: + - "Internal Staging" + default: "Internal Staging" + - title: "Standard Inserts" + additionalProperties: false + description: "Uses
INSERT
statements to send batches of records\ + \ to Snowflake. Easiest (no setup) but not recommended for large production\ + \ workloads due to slow speed." + required: + - "method" + properties: + method: + type: "string" + enum: + - "Standard" + default: "Standard" + - title: "AWS S3 Staging" + additionalProperties: false + description: "Writes large batches of records to a file, uploads the file\ + \ to S3, then uses
COPY INTO table
to upload the file. Recommended\ + \ for large production workloads for better speed and scalability." + required: + - "method" + - "s3_bucket_name" + - "access_key_id" + - "secret_access_key" + properties: + method: + type: "string" + enum: + - "S3 Staging" + default: "S3 Staging" + order: 0 + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: "The name of the staging S3 bucket. Airbyte will write\ + \ files to this bucket and read them via
COPY
statements\ + \ on Snowflake." + examples: + - "airbyte.staging" + order: 1 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: "The region of the S3 staging bucket to use if utilising\ + \ a copy strategy." + enum: + - "" + - "us-east-1" + - "us-east-2" + - "us-west-1" + - "us-west-2" + - "af-south-1" + - "ap-east-1" + - "ap-south-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-southeast-1" + - "ap-southeast-2" + - "ca-central-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "eu-south-1" + - "eu-north-1" + - "sa-east-1" + - "me-south-1" + order: 2 + access_key_id: + type: "string" + description: "The Access Key Id granting allow one to access the above\ + \ S3 staging bucket. Airbyte requires Read and Write permissions\ + \ to the given bucket." + title: "S3 Key Id" + airbyte_secret: true + order: 3 + secret_access_key: + type: "string" + description: "The corresponding secret to the above access key id." + title: "S3 Access Key" + airbyte_secret: true + order: 4 + - title: "GCS Staging" + additionalProperties: false + description: "Writes large batches of records to a file, uploads the file\ + \ to GCS, then uses
COPY INTO table
to upload the file. Recommended\ + \ for large production workloads for better speed and scalability." + required: + - "method" + - "project_id" + - "bucket_name" + - "credentials_json" + properties: + method: + type: "string" + enum: + - "GCS Staging" + default: "GCS Staging" + order: 0 + project_id: + title: "GCP Project ID" + type: "string" + description: "The name of the GCP project ID for your credentials." + examples: + - "my-project" + order: 1 + bucket_name: + title: "GCS Bucket Name" + type: "string" + description: "The name of the staging GCS bucket. Airbyte will write\ + \ files to this bucket and read them via
COPY
statements\ + \ on Snowflake." + examples: + - "airbyte-staging" + order: 2 + credentials_json: + title: "Google Application Credentials" + type: "string" + description: "The contents of the JSON key file that has read/write\ + \ permissions to the staging GCS bucket. You will separately need\ + \ to grant bucket access to your Snowflake GCP service account.\ + \ See the GCP docs for more information on how to generate a JSON key\ + \ for your service account." + airbyte_secret: true + multiline: true + order: 3 supportsIncremental: true supportsNormalization: true supportsDBT: true supported_destination_sync_modes: - - "overwrite" - - "append" - - "append_dedup" + - "overwrite" + - "append" + - "append_dedup" - dockerImage: "airbyte/destination-mariadb-columnstore:0.1.1" spec: documentationUrl: "https://docs.airbyte.io/integrations/destinations/mariadb-columnstore" @@ -3601,10 +3440,10 @@ title: "MariaDB Columnstore Destination Spec" type: "object" required: - - "host" - - "port" - - "username" - - "database" + - "host" + - "port" + - "username" + - "database" additionalProperties: true properties: host: @@ -3620,7 +3459,7 @@ maximum: 65536 default: 3306 examples: - - "3306" + - "3306" order: 1 database: title: "Database" @@ -3641,115 +3480,107 @@ tunnel_method: type: "object" title: "SSH Tunnel Method" - description: - "Whether to initiate an SSH tunnel before connecting to the\ + description: "Whether to initiate an SSH tunnel before connecting to the\ \ database, and if so, which kind of authentication to use." 
oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." - type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: - "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: - "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: - "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: - "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." 
- type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 supportsIncremental: true supportsNormalization: false supportsDBT: false supported_destination_sync_modes: - - "overwrite" - - "append" + - "overwrite" + - "append"