Skip to content

Commit 34dd5f0

Browse files
committed — "combine S3Config and S3DestinationConfig"
1 parent 9ab7fa7 · commit 34dd5f0

File tree

12 files changed

+136
-189
lines changed

12 files changed

+136
-189
lines changed

airbyte-integrations/connectors/destination-databricks/src/main/java/io/airbyte/integrations/destination/databricks/DatabricksDestination.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ public AirbyteMessageConsumer getConsumer(final JsonNode config,
4747
@Override
4848
public void checkPersistence(final JsonNode config) {
4949
final DatabricksDestinationConfig databricksConfig = DatabricksDestinationConfig.get(config);
50-
S3StreamCopier.attemptS3WriteAndDelete(databricksConfig.getS3DestinationConfig().getS3Config());
50+
S3StreamCopier.attemptS3WriteAndDelete(databricksConfig.getS3DestinationConfig());
5151
}
5252

5353
@Override

airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopier.java

+16-16
Original file line numberDiff line numberDiff line change
@@ -14,8 +14,8 @@
1414
import io.airbyte.integrations.destination.jdbc.SqlOperations;
1515
import io.airbyte.integrations.destination.jdbc.StagingFilenameGenerator;
1616
import io.airbyte.integrations.destination.jdbc.copy.StreamCopier;
17-
import io.airbyte.integrations.destination.s3.S3Config;
1817
import io.airbyte.integrations.destination.s3.S3Destination;
18+
import io.airbyte.integrations.destination.s3.S3DestinationConfig;
1919
import io.airbyte.protocol.models.AirbyteRecordMessage;
2020
import io.airbyte.protocol.models.DestinationSyncMode;
2121
import java.io.IOException;
@@ -50,7 +50,7 @@ public abstract class S3StreamCopier implements StreamCopier {
5050
public static final int MAX_PARTS_PER_FILE = 1000;
5151

5252
protected final AmazonS3 s3Client;
53-
protected final S3Config s3Config;
53+
protected final S3DestinationConfig s3Config;
5454
protected final String tmpTableName;
5555
private final DestinationSyncMode destSyncMode;
5656
protected final String schemaName;
@@ -67,15 +67,15 @@ public abstract class S3StreamCopier implements StreamCopier {
6767
private final StagingFilenameGenerator filenameGenerator;
6868

6969
public S3StreamCopier(final String stagingFolder,
70-
final DestinationSyncMode destSyncMode,
71-
final String schema,
72-
final String streamName,
73-
final String s3FileName,
74-
final AmazonS3 client,
75-
final JdbcDatabase db,
76-
final S3Config s3Config,
77-
final ExtendedNameTransformer nameTransformer,
78-
final SqlOperations sqlOperations) {
70+
final DestinationSyncMode destSyncMode,
71+
final String schema,
72+
final String streamName,
73+
final String s3FileName,
74+
final AmazonS3 client,
75+
final JdbcDatabase db,
76+
final S3DestinationConfig s3Config,
77+
final ExtendedNameTransformer nameTransformer,
78+
final SqlOperations sqlOperations) {
7979
this.destSyncMode = destSyncMode;
8080
this.schemaName = schema;
8181
this.streamName = streamName;
@@ -224,15 +224,15 @@ private void closeAndWaitForUpload() throws IOException {
224224
LOGGER.info("All data for {} stream uploaded.", streamName);
225225
}
226226

227-
public static void attemptS3WriteAndDelete(final S3Config s3Config) {
227+
public static void attemptS3WriteAndDelete(final S3DestinationConfig s3Config) {
228228
S3Destination.attemptS3WriteAndDelete(s3Config, "");
229229
}
230230

231231
public abstract void copyS3CsvFileIntoTable(JdbcDatabase database,
232-
String s3FileLocation,
233-
String schema,
234-
String tableName,
235-
S3Config s3Config)
232+
String s3FileLocation,
233+
String schema,
234+
String tableName,
235+
S3DestinationConfig s3Config)
236236
throws SQLException;
237237

238238
}

airbyte-integrations/connectors/destination-jdbc/src/main/java/io/airbyte/integrations/destination/jdbc/copy/s3/S3StreamCopierFactory.java

+16-16
Original file line numberDiff line numberDiff line change
@@ -10,25 +10,25 @@
1010
import io.airbyte.integrations.destination.jdbc.SqlOperations;
1111
import io.airbyte.integrations.destination.jdbc.copy.StreamCopier;
1212
import io.airbyte.integrations.destination.jdbc.copy.StreamCopierFactory;
13-
import io.airbyte.integrations.destination.s3.S3Config;
1413
import io.airbyte.integrations.destination.s3.S3Destination;
14+
import io.airbyte.integrations.destination.s3.S3DestinationConfig;
1515
import io.airbyte.protocol.models.AirbyteStream;
1616
import io.airbyte.protocol.models.ConfiguredAirbyteStream;
1717
import io.airbyte.protocol.models.DestinationSyncMode;
1818

19-
public abstract class S3StreamCopierFactory implements StreamCopierFactory<S3Config> {
19+
public abstract class S3StreamCopierFactory implements StreamCopierFactory<S3DestinationConfig> {
2020

2121
/**
2222
* Used by the copy consumer.
2323
*/
2424
@Override
2525
public StreamCopier create(final String configuredSchema,
26-
final S3Config s3Config,
27-
final String stagingFolder,
28-
final ConfiguredAirbyteStream configuredStream,
29-
final ExtendedNameTransformer nameTransformer,
30-
final JdbcDatabase db,
31-
final SqlOperations sqlOperations) {
26+
final S3DestinationConfig s3Config,
27+
final String stagingFolder,
28+
final ConfiguredAirbyteStream configuredStream,
29+
final ExtendedNameTransformer nameTransformer,
30+
final JdbcDatabase db,
31+
final SqlOperations sqlOperations) {
3232
try {
3333
final AirbyteStream stream = configuredStream.getStream();
3434
final DestinationSyncMode syncMode = configuredStream.getDestinationSyncMode();
@@ -45,14 +45,14 @@ public StreamCopier create(final String configuredSchema,
4545
* For specific copier suppliers to implement.
4646
*/
4747
public abstract StreamCopier create(String stagingFolder,
48-
DestinationSyncMode syncMode,
49-
String schema,
50-
String streamName,
51-
AmazonS3 s3Client,
52-
JdbcDatabase db,
53-
S3Config s3Config,
54-
ExtendedNameTransformer nameTransformer,
55-
SqlOperations sqlOperations)
48+
DestinationSyncMode syncMode,
49+
String schema,
50+
String streamName,
51+
AmazonS3 s3Client,
52+
JdbcDatabase db,
53+
S3DestinationConfig s3Config,
54+
ExtendedNameTransformer nameTransformer,
55+
SqlOperations sqlOperations)
5656
throws Exception;
5757

5858
}

airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftCopyS3Destination.java

+5-5
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
import io.airbyte.integrations.destination.jdbc.copy.CopyConsumerFactory;
1515
import io.airbyte.integrations.destination.jdbc.copy.CopyDestination;
1616
import io.airbyte.integrations.destination.jdbc.copy.s3.S3StreamCopier;
17-
import io.airbyte.integrations.destination.s3.S3Config;
17+
import io.airbyte.integrations.destination.s3.S3DestinationConfig;
1818
import io.airbyte.protocol.models.AirbyteMessage;
1919
import io.airbyte.protocol.models.ConfiguredAirbyteCatalog;
2020
import java.util.function.Consumer;
@@ -42,15 +42,15 @@ public AirbyteMessageConsumer getConsumer(final JsonNode config,
4242
getDatabase(config),
4343
getSqlOperations(),
4444
getNameTransformer(),
45-
getS3Config(config),
45+
getS3DestinationConfig(config),
4646
catalog,
4747
new RedshiftStreamCopierFactory(),
4848
getConfiguredSchema(config));
4949
}
5050

5151
@Override
5252
public void checkPersistence(final JsonNode config) throws Exception {
53-
S3StreamCopier.attemptS3WriteAndDelete(getS3Config(config));
53+
S3StreamCopier.attemptS3WriteAndDelete(getS3DestinationConfig(config));
5454
}
5555

5656
@Override
@@ -72,8 +72,8 @@ private String getConfiguredSchema(final JsonNode config) {
7272
return config.get("schema").asText();
7373
}
7474

75-
private S3Config getS3Config(final JsonNode config) {
76-
return S3Config.getS3Config(config);
75+
private S3DestinationConfig getS3DestinationConfig(final JsonNode config) {
76+
return S3DestinationConfig.getS3DestinationConfig(config);
7777
}
7878

7979
}

airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopier.java

+15-15
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
import io.airbyte.integrations.destination.jdbc.copy.s3.S3StreamCopier;
1515
import io.airbyte.integrations.destination.redshift.manifest.Entry;
1616
import io.airbyte.integrations.destination.redshift.manifest.Manifest;
17-
import io.airbyte.integrations.destination.s3.S3Config;
17+
import io.airbyte.integrations.destination.s3.S3DestinationConfig;
1818
import io.airbyte.protocol.models.DestinationSyncMode;
1919
import java.util.Optional;
2020
import java.util.UUID;
@@ -31,14 +31,14 @@ public class RedshiftStreamCopier extends S3StreamCopier {
3131
private String manifestFilePath = null;
3232

3333
public RedshiftStreamCopier(final String stagingFolder,
34-
final DestinationSyncMode destSyncMode,
35-
final String schema,
36-
final String streamName,
37-
final AmazonS3 client,
38-
final JdbcDatabase db,
39-
final S3Config s3Config,
40-
final ExtendedNameTransformer nameTransformer,
41-
final SqlOperations sqlOperations) {
34+
final DestinationSyncMode destSyncMode,
35+
final String schema,
36+
final String streamName,
37+
final AmazonS3 client,
38+
final JdbcDatabase db,
39+
final S3DestinationConfig s3Config,
40+
final ExtendedNameTransformer nameTransformer,
41+
final SqlOperations sqlOperations) {
4242
super(stagingFolder, destSyncMode, schema, streamName, Strings.addRandomSuffix("", "", FILE_PREFIX_LENGTH) + "_" + streamName,
4343
client, db, s3Config, nameTransformer, sqlOperations);
4444
objectMapper = new ObjectMapper();
@@ -56,11 +56,11 @@ public void copyStagingFileToTemporaryTable() {
5656

5757
@Override
5858
public void copyS3CsvFileIntoTable(
59-
final JdbcDatabase database,
60-
final String s3FileLocation,
61-
final String schema,
62-
final String tableName,
63-
final S3Config s3Config) {
59+
final JdbcDatabase database,
60+
final String s3FileLocation,
61+
final String schema,
62+
final String tableName,
63+
final S3DestinationConfig s3Config) {
6464
throw new RuntimeException("Redshift Stream Copier should not copy individual files without use of a manifest");
6565
}
6666

@@ -127,7 +127,7 @@ private void executeCopy(final String manifestPath) {
127127
getFullS3Path(s3Config.getBucketName(), manifestPath),
128128
s3Config.getAccessKeyId(),
129129
s3Config.getSecretAccessKey(),
130-
s3Config.getRegion());
130+
s3Config.getBucketRegion());
131131

132132
Exceptions.toRuntime(() -> db.execute(copyQuery));
133133
}

airbyte-integrations/connectors/destination-redshift/src/main/java/io/airbyte/integrations/destination/redshift/RedshiftStreamCopierFactory.java

+9-9
Original file line numberDiff line numberDiff line change
@@ -10,21 +10,21 @@
1010
import io.airbyte.integrations.destination.jdbc.SqlOperations;
1111
import io.airbyte.integrations.destination.jdbc.copy.StreamCopier;
1212
import io.airbyte.integrations.destination.jdbc.copy.s3.S3StreamCopierFactory;
13-
import io.airbyte.integrations.destination.s3.S3Config;
13+
import io.airbyte.integrations.destination.s3.S3DestinationConfig;
1414
import io.airbyte.protocol.models.DestinationSyncMode;
1515

1616
public class RedshiftStreamCopierFactory extends S3StreamCopierFactory {
1717

1818
@Override
1919
public StreamCopier create(final String stagingFolder,
20-
final DestinationSyncMode syncMode,
21-
final String schema,
22-
final String streamName,
23-
final AmazonS3 s3Client,
24-
final JdbcDatabase db,
25-
final S3Config s3Config,
26-
final ExtendedNameTransformer nameTransformer,
27-
final SqlOperations sqlOperations)
20+
final DestinationSyncMode syncMode,
21+
final String schema,
22+
final String streamName,
23+
final AmazonS3 s3Client,
24+
final JdbcDatabase db,
25+
final S3DestinationConfig s3Config,
26+
final ExtendedNameTransformer nameTransformer,
27+
final SqlOperations sqlOperations)
2828
throws Exception {
2929
return new RedshiftStreamCopier(stagingFolder, syncMode, schema, streamName, s3Client, db, s3Config, nameTransformer, sqlOperations);
3030
}

airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Config.java

-76
This file was deleted.

airbyte-integrations/connectors/destination-s3/src/main/java/io/airbyte/integrations/destination/s3/S3Destination.java

+6-6
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ public static void main(final String[] args) throws Exception {
3737
@Override
3838
public AirbyteConnectionStatus check(final JsonNode config) {
3939
try {
40-
attemptS3WriteAndDelete(S3Config.getS3Config(config), config.get("s3_bucket_path").asText());
40+
attemptS3WriteAndDelete(S3DestinationConfig.getS3DestinationConfig(config), config.get("s3_bucket_path").asText());
4141
return new AirbyteConnectionStatus().withStatus(Status.SUCCEEDED);
4242
} catch (final Exception e) {
4343
LOGGER.error("Exception attempting to access the S3 bucket: ", e);
@@ -56,23 +56,23 @@ public AirbyteMessageConsumer getConsumer(final JsonNode config,
5656
return new S3Consumer(S3DestinationConfig.getS3DestinationConfig(config), configuredCatalog, formatterFactory, outputRecordCollector);
5757
}
5858

59-
public static void attemptS3WriteAndDelete(final S3Config s3Config, final String bucketPath) {
59+
public static void attemptS3WriteAndDelete(final S3DestinationConfig s3Config, final String bucketPath) {
6060
final var prefix = bucketPath.isEmpty() ? "" : bucketPath + (bucketPath.endsWith("/") ? "" : "/");
6161
final String outputTableName = prefix + "_airbyte_connection_test_" + UUID.randomUUID().toString().replaceAll("-", "");
6262
attemptWriteAndDeleteS3Object(s3Config, outputTableName);
6363
}
6464

65-
private static void attemptWriteAndDeleteS3Object(final S3Config s3Config, final String outputTableName) {
65+
private static void attemptWriteAndDeleteS3Object(final S3DestinationConfig s3Config, final String outputTableName) {
6666
final var s3 = getAmazonS3(s3Config);
6767
final var s3Bucket = s3Config.getBucketName();
6868

6969
s3.putObject(s3Bucket, outputTableName, "check-content");
7070
s3.deleteObject(s3Bucket, outputTableName);
7171
}
7272

73-
public static AmazonS3 getAmazonS3(final S3Config s3Config) {
73+
public static AmazonS3 getAmazonS3(final S3DestinationConfig s3Config) {
7474
final var endpoint = s3Config.getEndpoint();
75-
final var region = s3Config.getRegion();
75+
final var region = s3Config.getBucketRegion();
7676
final var accessKeyId = s3Config.getAccessKeyId();
7777
final var secretAccessKey = s3Config.getSecretAccessKey();
7878

@@ -81,7 +81,7 @@ public static AmazonS3 getAmazonS3(final S3Config s3Config) {
8181
if (endpoint.isEmpty()) {
8282
return AmazonS3ClientBuilder.standard()
8383
.withCredentials(new AWSStaticCredentialsProvider(awsCreds))
84-
.withRegion(s3Config.getRegion())
84+
.withRegion(s3Config.getBucketRegion())
8585
.build();
8686

8787
} else {

0 commit comments

Comments (0)