From 2f966b258abe320e15bffbd6bafd190b63f8def4 Mon Sep 17 00:00:00 2001 From: davidradl Date: Thu, 4 Sep 2025 14:26:15 +0100 Subject: [PATCH] HTTP155 Initial Flink contribution Signed-off-by: davidradl --- .checkstyle | 15 + .github/boring-cyborg.yml | 86 ++ .github/pull_request_template.md | 9 - .github/workflows/build.yml | 51 - .github/workflows/prepare_release_branch.yml | 73 - .github/workflows/publish.yml | 53 - .github/workflows/push_pr.yml | 37 + .github/workflows/weekly.yml | 41 + .gitignore | 35 +- .mvn/jvm.config | 7 + .mvn/wrapper/maven-wrapper.properties | 22 + CHANGELOG.md | 281 ---- CONTRIBUTING.md | 9 - NOTICE | 17 + README.md | 726 +--------- dev/README.md | 36 - dev/checkstyle-suppressions.xml | 31 - dev/checkstyle.xml | 218 --- docs/JoinTable.PNG | Bin 64354 -> 0 bytes docs/content/docs/datastream/http.md | 179 +++ docs/content/docs/table/http.md | 545 ++++++++ flink-connector-http/pom.xml | 487 +++++++ .../http/HttpPostRequestCallback.java | 46 + .../http/HttpPostRequestCallbackFactory.java | 79 ++ .../apache/flink/connector/http/HttpSink.java | 90 ++ .../connector}/http/HttpSinkBuilder.java | 106 +- ...tpStatusCodeValidationFailedException.java | 33 + .../flink/connector/http/LookupArg.java | 33 + .../connector/http/LookupQueryCreator.java | 43 + .../http/LookupQueryCreatorFactory.java | 68 + .../SchemaLifecycleAwareElementConverter.java | 28 +- .../http}/auth/OidcAccessTokenManager.java | 87 +- .../connector/http/clients/PollingClient.java | 42 + .../http/clients/PollingClientFactory.java | 36 + .../http/clients/SinkHttpClient.java | 43 + .../http/clients/SinkHttpClientBuilder.java | 40 + .../http/clients/SinkHttpClientResponse.java | 42 + .../http/config/ConfigException.java | 52 + .../config/HttpConnectorConfigConstants.java | 159 +++ .../http/config/SinkRequestSubmitMode.java | 34 + .../BasicAuthHeaderValuePreprocessor.java | 38 +- .../ComposeHeaderPreprocessor.java | 60 + .../http/preprocessor/HeaderPreprocessor.java | 33 + 
.../preprocessor/HeaderValuePreprocessor.java | 35 + .../OIDCAuthHeaderValuePreprocessor.java | 64 + .../http/retry/HttpClientWithRetry.java | 93 ++ .../http}/retry/RetryConfigProvider.java | 43 +- .../http/retry/RetryStrategyType.java | 45 + .../http}/security/SecurityContext.java | 122 +- .../security/SelfSignedTrustManager.java | 37 +- .../connector/http/sink/HttpSinkInternal.java | 202 +++ .../http/sink/HttpSinkRequestEntry.java | 48 + .../connector/http/sink/HttpSinkWriter.java | 161 +++ .../sink/HttpSinkWriterStateSerializer.java | 53 + .../httpclient/AbstractRequestSubmitter.java | 64 + .../httpclient/BatchRequestSubmitter.java | 85 +- .../BatchRequestSubmitterFactory.java | 89 ++ .../http/sink/httpclient/HttpRequest.java | 33 + .../JavaNetHttpResponseWrapper.java | 46 + .../httpclient/JavaNetSinkHttpClient.java | 137 ++ .../PerRequestRequestSubmitterFactory.java | 50 + .../sink/httpclient/PerRequestSubmitter.java | 94 ++ .../sink/httpclient/RequestSubmitter.java | 30 + .../httpclient/RequestSubmitterFactory.java | 26 + .../status/ComposeHttpStatusCodeChecker.java | 176 +++ .../http/status/HttpCodesParser.java | 85 ++ .../http}/status/HttpResponseChecker.java | 38 +- .../http/status/HttpResponseCodeType.java | 63 + .../http/status/HttpStatusCodeChecker.java | 34 + .../IncludeListHttpStatusCodeChecker.java | 39 + .../SingleValueHttpStatusCodeChecker.java | 44 + .../http/status/TypeStatusCodeChecker.java | 61 + .../SerializationSchemaElementConverter.java | 61 + .../lookup/AsyncHttpTableLookupFunction.java | 131 ++ .../table/lookup/BodyBasedRequestFactory.java | 88 ++ .../http/table/lookup/GetRequestFactory.java | 90 ++ .../http/table/lookup/HttpLookupConfig.java | 48 + .../lookup/HttpLookupConnectorOptions.java | 200 +++ .../lookup/HttpLookupSourceRequestEntry.java | 47 + .../table/lookup/HttpLookupTableSource.java | 169 ++- .../lookup/HttpLookupTableSourceFactory.java | 232 ++++ .../http/table/lookup/HttpRequestFactory.java | 35 + 
.../table/lookup/HttpTableLookupFunction.java | 45 +- .../lookup/JavaNetHttpPollingClient.java | 151 +- .../JavaNetHttpPollingClientFactory.java | 46 + .../http/table/lookup/LookupQueryInfo.java | 86 ++ .../http}/table/lookup/LookupRow.java | 43 +- .../http/table/lookup/LookupSchemaEntry.java | 43 + .../table/lookup/RequestFactoryBase.java | 93 +- .../lookup/RowDataLookupSchemaEntryBase.java | 48 + .../RowDataSingleValueLookupSchemaEntry.java | 49 +- .../lookup/RowTypeLookupSchemaEntry.java | 46 +- .../Slf4JHttpLookupPostRequestCallback.java | 84 ++ ...jHttpLookupPostRequestCallbackFactory.java | 53 + .../http}/table/lookup/TableSourceHelper.java | 35 +- .../ElasticSearchLiteQueryCreator.java | 69 + .../ElasticSearchLiteQueryCreatorFactory.java | 56 + .../querycreators/GenericGetQueryCreator.java | 58 + .../GenericGetQueryCreatorFactory.java | 56 + .../GenericJsonAndUrlQueryCreator.java | 218 +-- .../GenericJsonAndUrlQueryCreatorFactory.java | 157 +++ .../GenericJsonQueryCreator.java | 71 + .../GenericJsonQueryCreatorFactory.java | 92 ++ .../querycreators/ObjectMapperAdapter.java | 42 + .../querycreators/PrefixedConfigOption.java | 102 ++ .../QueryFormatAwareConfiguration.java | 31 +- .../http}/table/sink/HttpDynamicSink.java | 200 +-- .../sink/HttpDynamicSinkConnectorOptions.java | 44 + .../sink/HttpDynamicTableSinkFactory.java | 116 ++ .../sink/Slf4jHttpPostRequestCallback.java | 69 + .../Slf4jHttpPostRequestCallbackFactory.java | 53 + .../connector/http}/utils/ConfigUtils.java | 85 +- .../connector/http/utils/ExceptionUtils.java | 46 + .../connector/http/utils/HttpHeaderUtils.java | 150 ++ .../http}/utils/JavaNetHttpClientFactory.java | 160 ++- .../connector/http/utils/ProxyConfig.java | 61 + .../http/utils/SerializationSchemaUtils.java | 67 + .../SynchronizedSerializationSchema.java | 19 +- .../connector/http/utils/ThreadUtils.java | 37 + .../http}/utils/uri/CharArrayBuffer.java | 54 +- .../http/utils/uri/NameValuePair.java | 43 + 
.../http/utils/uri/ParserCursor.java | 72 + .../http}/utils/uri/TokenParser.java | 92 +- .../connector/http}/utils/uri/URIBuilder.java | 69 +- .../http}/utils/uri/URLEncodedUtils.java | 145 +- .../org.apache.flink.table.factories.Factory | 28 + .../connector/http/ExceptionUtilsTest.java | 38 + .../HttpPostRequestCallbackFactoryTest.java | 194 +++ .../http/HttpsConnectionTestBase.java | 62 + .../flink/connector/http/StreamTableJob.java | 77 + .../flink/connector/http/TestHelper.java | 65 + .../TestLookupPostRequestCallbackFactory.java | 53 + .../http/TestPostRequestCallbackFactory.java | 51 + .../http/WireMockServerPortAllocator.java | 36 + .../flink/connector/http/app/HttpStubApp.java | 57 + .../connector/http/app/JsonTransform.java | 125 ++ .../auth/OidcAccessTokenManagerTest.java | 51 +- .../http/config/ConfigExceptionTest.java | 40 + .../http}/retry/HttpClientWithRetryTest.java | 86 +- .../http/retry/RetryConfigProviderTest.java | 89 ++ .../http}/retry/RetryStrategyTypeTest.java | 37 +- .../http/sink/HttpSinkBuilderTest.java | 93 ++ .../http/sink/HttpSinkConnectionTest.java | 378 +++++ .../HttpSinkWriterStateSerializerTest.java | 52 + .../http}/sink/HttpSinkWriterTest.java | 90 +- .../BatchRequestSubmitterFactoryTest.java | 96 ++ .../httpclient/BatchRequestSubmitterTest.java | 103 ++ .../JavaNetSinkHttpClientConnectionTest.java | 329 +++++ .../httpclient/JavaNetSinkHttpClientTest.java | 102 +- .../ComposeHttpStatusCodeCheckerTest.java | 181 +++ .../http}/status/HttpCodesParserTest.java | 79 +- .../http}/status/HttpResponseCheckerTest.java | 55 +- .../BasicAuthHeaderValuePreprocessorTest.java | 46 + .../table/ComposeHeaderPreprocessorTest.java | 43 + .../AsyncHttpTableLookupFunctionTest.java | 188 +-- .../lookup/BodyBasedRequestFactoryTest.java | 89 +- .../HttpLookupTableSourceFactoryTest.java | 139 ++ .../HttpLookupTableSourceITCaseTest.java | 1118 +++++++++++++++ .../lookup/HttpLookupTableSourceTest.java | 258 ++++ ...avaNetHttpPollingClientConnectionTest.java 
| 256 ++-- .../JavaNetHttpPollingClientFactoryTest.java | 51 + ...tHttpPollingClientHttpsConnectionTest.java | 363 +++++ .../lookup/JavaNetHttpPollingClientTest.java | 237 ++++ .../JavaNetHttpPollingClientWithWireTest.java | 191 +++ .../http/table/lookup/JsonTransform.java | 126 ++ .../table/lookup/LookupQueryInfoTest.java | 43 +- ...wDataSingleValueLookupSchemaEntryTest.java | 47 + .../lookup/RowTypeLookupSchemaEntryTest.java | 177 +++ .../table/lookup/TableSourceHelperTest.java | 53 + .../querycreators/CustomFormatFactory.java | 32 +- .../CustomJsonFormatFactory.java | 32 +- .../ElasticSearchLiteQueryCreatorTest.java | 158 +++ .../GenericGetQueryCreatorTest.java | 208 +++ ...ericJsonAndUrlQueryCreatorFactoryTest.java | 138 ++ .../GenericJsonAndUrlQueryCreatorTest.java | 229 +-- .../GenericJsonQueryCreatorFactoryTest.java | 111 ++ .../GenericJsonQueryCreatorTest.java | 49 +- .../lookup/querycreators/PersonBean.java | 28 + .../querycreators/QueryCreatorUtils.java | 54 +- .../QueryFormatAwareConfigurationTest.java | 59 + ...BatchRequestHttpDynamicSinkInsertTest.java | 364 +++++ .../http/table/sink/HttpDynamicSinkTest.java | 134 ++ .../sink/HttpDynamicTableSinkFactoryTest.java | 113 ++ .../PerRequestHttpDynamicSinkInsertTest.java | 331 +++++ .../connector/http/utils/ConfigUtilsTest.java | 292 ++++ .../http/utils/HttpHeaderUtilsTest.java | 47 + .../utils/JavaNetHttpClientFactoryTest.java | 141 ++ .../http/utils/uri/CharArrayBufferTest.java | 96 ++ .../http/utils/uri/ParserCursorTest.java | 49 + .../http/utils/uri/TokenParserTest.java | 41 + .../http}/utils/uri/URIBuilderTest.java | 183 ++- .../http}/utils/uri/URLEncodedUtilsTest.java | 57 +- .../org.apache.flink.table.factories.Factory | 24 + .../src}/test/resources/auth/AuthResult.json | 0 .../HttpResult.json | 0 .../http-array-result/HttpResult.json | 0 .../src}/test/resources/http/HttpResult.json | 0 .../resources/json/sink/allInOneBatch.txt | 0 .../json/sink/fourSingleEventBatches.txt | 0 
.../test/resources/json/sink/threeBatches.txt | 0 .../test/resources/json/sink/twoBatches.txt | 0 .../src}/test/resources/security/certs/ca.crt | 0 .../src}/test/resources/security/certs/ca.key | 0 .../test/resources/security/certs/ca.pass.key | 0 .../src}/test/resources/security/certs/ca.srl | 0 .../security/certs/ca_server_bundle.cert.pem | 0 .../test/resources/security/certs/client.crt | 0 .../test/resources/security/certs/client.csr | 0 .../test/resources/security/certs/client.key | 0 .../resources/security/certs/client.pass.key | 0 .../security/certs/clientPrivateKey.der | Bin .../security/certs/clientPrivateKey.pem | 0 .../security/certs/client_keyStore.p12 | Bin .../test/resources/security/certs/server.crt | 0 .../test/resources/security/certs/server.csr | 0 .../test/resources/security/certs/server.key | 0 .../resources/security/certs/server.pass.key | 0 .../security/certs/serverKeyStore.jks | Bin .../security/certs/serverTrustStore.jks | Bin flink-sql-connector-http/pom.xml | 147 ++ .../src/main/resources/META-INF/NOTICE | 12 + .../flink/connector/http/PackagingITCase.java | 42 + pom.xml | 1235 ++++++++--------- .../http/HttpPostRequestCallback.java | 29 - .../http/HttpPostRequestCallbackFactory.java | 67 - .../getindata/connectors/http/HttpSink.java | 74 - ...tpStatusCodeValidationFailedException.java | 15 - .../getindata/connectors/http/LookupArg.java | 22 - .../connectors/http/LookupQueryCreator.java | 27 - .../http/LookupQueryCreatorFactory.java | 49 - .../internal/ComposeHeaderPreprocessor.java | 44 - .../http/internal/HeaderPreprocessor.java | 17 - .../internal/HeaderValuePreprocessor.java | 18 - .../OIDCAuthHeaderValuePreprocessor.java | 50 - .../http/internal/PollingClient.java | 25 - .../http/internal/PollingClientFactory.java | 16 - .../http/internal/SinkHttpClient.java | 28 - .../http/internal/SinkHttpClientBuilder.java | 27 - .../http/internal/SinkHttpClientResponse.java | 31 - .../http/internal/config/ConfigException.java | 30 - 
.../config/HttpConnectorConfigConstants.java | 143 -- .../config/SinkRequestSubmitMode.java | 17 - .../internal/retry/HttpClientWithRetry.java | 71 - .../internal/retry/RetryStrategyType.java | 27 - .../http/internal/sink/HttpSinkInternal.java | 188 --- .../internal/sink/HttpSinkRequestEntry.java | 36 - .../http/internal/sink/HttpSinkWriter.java | 131 -- .../sink/HttpSinkWriterStateSerializer.java | 36 - .../httpclient/AbstractRequestSubmitter.java | 49 - .../BatchRequestSubmitterFactory.java | 76 - .../internal/sink/httpclient/HttpRequest.java | 16 - .../JavaNetHttpResponseWrapper.java | 33 - .../httpclient/JavaNetSinkHttpClient.java | 121 -- .../PerRequestRequestSubmitterFactory.java | 33 - .../sink/httpclient/PerRequestSubmitter.java | 80 -- .../sink/httpclient/RequestSubmitter.java | 16 - .../httpclient/RequestSubmitterFactory.java | 8 - .../status/ComposeHttpStatusCodeChecker.java | 161 --- .../http/internal/status/HttpCodesParser.java | 63 - .../internal/status/HttpResponseCodeType.java | 49 - .../status/HttpStatusCodeChecker.java | 16 - .../SingleValueHttpStatusCodeChecker.java | 28 - .../status/TypeStatusCodeChecker.java | 43 - .../WhiteListHttpStatusCodeChecker.java | 23 - .../SerializationSchemaElementConverter.java | 47 - .../lookup/AsyncHttpTableLookupFunction.java | 111 -- .../table/lookup/BodyBasedRequestFactory.java | 71 - .../table/lookup/GetRequestFactory.java | 70 - .../table/lookup/HttpLookupConfig.java | 33 - .../lookup/HttpLookupConnectorOptions.java | 161 --- .../lookup/HttpLookupSourceRequestEntry.java | 32 - .../lookup/HttpLookupTableSourceFactory.java | 194 --- .../table/lookup/HttpRequestFactory.java | 20 - .../JavaNetHttpPollingClientFactory.java | 34 - .../table/lookup/LookupQueryInfo.java | 73 - .../table/lookup/LookupSchemaEntry.java | 28 - .../lookup/RowDataLookupSchemaEntryBase.java | 36 - .../Slf4JHttpLookupPostRequestCallback.java | 70 - ...jHttpLookupPostRequestCallbackFactory.java | 38 - .../ElasticSearchLiteQueryCreator.java | 
50 - .../ElasticSearchLiteQueryCreatorFactory.java | 44 - .../querycreators/GenericGetQueryCreator.java | 42 - .../GenericGetQueryCreatorFactory.java | 43 - .../GenericJsonAndUrlQueryCreatorFactory.java | 142 -- .../GenericJsonQueryCreator.java | 59 - .../GenericJsonQueryCreatorFactory.java | 78 -- .../querycreators/ObjectMapperAdapter.java | 28 - .../querycreators/PrefixedConfigOption.java | 85 -- .../sink/HttpDynamicSinkConnectorOptions.java | 27 - .../sink/HttpDynamicTableSinkFactory.java | 100 -- .../sink/Slf4jHttpPostRequestCallback.java | 53 - .../Slf4jHttpPostRequestCallbackFactory.java | 39 - .../http/internal/utils/ExceptionUtils.java | 26 - .../http/internal/utils/HttpHeaderUtils.java | 137 -- .../http/internal/utils/ProxyConfig.java | 40 - .../utils/SerializationSchemaUtils.java | 50 - .../http/internal/utils/ThreadUtils.java | 17 - .../internal/utils/uri/NameValuePair.java | 56 - .../http/internal/utils/uri/ParserCursor.java | 92 -- .../org.apache.flink.table.factories.Factory | 8 - .../java/com/getindata/StreamTableJob.java | 56 - .../connectors/http/ExceptionUtilsTest.java | 18 - .../HttpPostRequestCallbackFactoryTest.java | 172 --- .../getindata/connectors/http/TestHelper.java | 48 - .../TestLookupPostRequestCallbackFactory.java | 29 - .../http/TestPostRequestCallbackFactory.java | 27 - .../connectors/http/app/HttpStubApp.java | 35 - .../connectors/http/app/JsonTransform.java | 108 -- .../BasicAuthHeaderValuePreprocessorTest.java | 26 - .../ComposeHeaderPreprocessorTest.java | 21 - .../internal/HttpsConnectionTestBase.java | 47 - .../internal/config/ConfigExceptionTest.java | 20 - .../retry/RetryConfigProviderTest.java | 63 - .../internal/sink/HttpSinkBuilderTest.java | 74 - .../internal/sink/HttpSinkConnectionTest.java | 325 ----- .../HttpSinkWriterStateSerializerTest.java | 30 - .../BatchRequestSubmitterFactoryTest.java | 86 -- .../httpclient/BatchRequestSubmitterTest.java | 96 -- .../JavaNetSinkHttpClientConnectionTest.java | 369 ----- 
.../ComposeHttpStatusCodeCheckerTest.java | 171 --- .../HttpLookupTableSourceFactoryTest.java | 110 -- .../HttpLookupTableSourceITCaseTest.java | 1071 -------------- .../lookup/HttpLookupTableSourceTest.java | 253 ---- .../JavaNetHttpPollingClientFactoryTest.java | 30 - ...tHttpPollingClientHttpsConnectionTest.java | 356 ----- .../lookup/JavaNetHttpPollingClientTest.java | 227 --- .../JavaNetHttpPollingClientWithWireTest.java | 163 --- .../internal/table/lookup/JsonTransform.java | 108 -- ...wDataSingleValueLookupSchemaEntryTest.java | 28 - .../lookup/RowTypeLookupSchemaEntryTest.java | 144 -- .../table/lookup/TableSourceHelperTest.java | 34 - .../ElasticSearchLiteQueryCreatorTest.java | 139 -- .../GenericGetQueryCreatorTest.java | 181 --- ...ericJsonAndUrlQueryCreatorFactoryTest.java | 117 -- .../GenericJsonQueryCreatorFactoryTest.java | 88 -- .../table/lookup/querycreators/PathBean.java | 11 - .../lookup/querycreators/PersonBean.java | 12 - .../QueryFormatAwareConfigurationTest.java | 38 - ...BatchRequestHttpDynamicSinkInsertTest.java | 341 ----- .../table/sink/HttpDynamicSinkTest.java | 109 -- .../sink/HttpDynamicTableSinkFactoryTest.java | 94 -- .../PerRequestHttpDynamicSinkInsertTest.java | 309 ----- .../http/internal/utils/ConfigUtilsTest.java | 244 ---- .../internal/utils/HttpHeaderUtilsTest.java | 27 - .../utils/JavaNetHttpClientFactoryTest.java | 112 -- .../utils/uri/CharArrayBufferTest.java | 73 - .../internal/utils/uri/ParserCursorTest.java | 29 - .../internal/utils/uri/TokenParserTest.java | 25 - .../org.apache.flink.table.factories.Factory | 4 - tools/ci/log4j.properties | 72 + tools/maven/checkstyle.xml | 564 ++++++++ tools/maven/suppressions.xml | 27 + 352 files changed, 18096 insertions(+), 14314 deletions(-) create mode 100644 .checkstyle create mode 100644 .github/boring-cyborg.yml delete mode 100644 .github/pull_request_template.md delete mode 100644 .github/workflows/build.yml delete mode 100644 .github/workflows/prepare_release_branch.yml 
delete mode 100644 .github/workflows/publish.yml create mode 100644 .github/workflows/push_pr.yml create mode 100644 .github/workflows/weekly.yml create mode 100644 .mvn/jvm.config create mode 100644 .mvn/wrapper/maven-wrapper.properties delete mode 100644 CHANGELOG.md delete mode 100644 CONTRIBUTING.md create mode 100644 NOTICE delete mode 100644 dev/README.md delete mode 100644 dev/checkstyle-suppressions.xml delete mode 100644 dev/checkstyle.xml delete mode 100644 docs/JoinTable.PNG create mode 100644 docs/content/docs/datastream/http.md create mode 100644 docs/content/docs/table/http.md create mode 100644 flink-connector-http/pom.xml create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpPostRequestCallback.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpPostRequestCallbackFactory.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpSink.java rename {src/main/java/com/getindata/connectors => flink-connector-http/src/main/java/org/apache/flink/connector}/http/HttpSinkBuilder.java (60%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpStatusCodeValidationFailedException.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/LookupArg.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/LookupQueryCreator.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/LookupQueryCreatorFactory.java rename {src/main/java/com/getindata/connectors => flink-connector-http/src/main/java/org/apache/flink/connector}/http/SchemaLifecycleAwareElementConverter.java (52%) rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/auth/OidcAccessTokenManager.java (66%) create mode 100644 
flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/PollingClient.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/PollingClientFactory.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/SinkHttpClient.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/SinkHttpClientBuilder.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/SinkHttpClientResponse.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/config/ConfigException.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/config/HttpConnectorConfigConstants.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/config/SinkRequestSubmitMode.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http/preprocessor}/BasicAuthHeaderValuePreprocessor.java (51%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/preprocessor/ComposeHeaderPreprocessor.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/preprocessor/HeaderPreprocessor.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/preprocessor/HeaderValuePreprocessor.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/preprocessor/OIDCAuthHeaderValuePreprocessor.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/retry/HttpClientWithRetry.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/retry/RetryConfigProvider.java (50%) create mode 100644 
flink-connector-http/src/main/java/org/apache/flink/connector/http/retry/RetryStrategyType.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/security/SecurityContext.java (68%) rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/security/SelfSignedTrustManager.java (59%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkInternal.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkRequestEntry.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkWriter.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkWriterStateSerializer.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/AbstractRequestSubmitter.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/sink/httpclient/BatchRequestSubmitter.java (60%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitterFactory.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/HttpRequest.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/JavaNetHttpResponseWrapper.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/JavaNetSinkHttpClient.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/PerRequestRequestSubmitterFactory.java create mode 100644 
flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/PerRequestSubmitter.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/RequestSubmitter.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/RequestSubmitterFactory.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/status/ComposeHttpStatusCodeChecker.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpCodesParser.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/status/HttpResponseChecker.java (56%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpResponseCodeType.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpStatusCodeChecker.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/status/IncludeListHttpStatusCodeChecker.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/status/SingleValueHttpStatusCodeChecker.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/status/TypeStatusCodeChecker.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/SerializationSchemaElementConverter.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/AsyncHttpTableLookupFunction.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/BodyBasedRequestFactory.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/GetRequestFactory.java create mode 100644 
flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupConfig.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupConnectorOptions.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupSourceRequestEntry.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/table/lookup/HttpLookupTableSource.java (59%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceFactory.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpRequestFactory.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/table/lookup/HttpTableLookupFunction.java (59%) rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/table/lookup/JavaNetHttpPollingClient.java (58%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientFactory.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/LookupQueryInfo.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/table/lookup/LookupRow.java (59%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/LookupSchemaEntry.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/table/lookup/RequestFactoryBase.java (55%) create mode 100644 
flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/RowDataLookupSchemaEntryBase.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/table/lookup/RowDataSingleValueLookupSchemaEntry.java (57%) rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/table/lookup/RowTypeLookupSchemaEntry.java (63%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/Slf4JHttpLookupPostRequestCallback.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/Slf4jHttpLookupPostRequestCallbackFactory.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/table/lookup/TableSourceHelper.java (64%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/ElasticSearchLiteQueryCreator.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/ElasticSearchLiteQueryCreatorFactory.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericGetQueryCreator.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericGetQueryCreatorFactory.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/table/lookup/querycreators/GenericJsonAndUrlQueryCreator.java (53%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactory.java create mode 100644 
flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreator.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreatorFactory.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/ObjectMapperAdapter.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/PrefixedConfigOption.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/table/lookup/querycreators/QueryFormatAwareConfiguration.java (53%) rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/table/sink/HttpDynamicSink.java (57%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/HttpDynamicSinkConnectorOptions.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/HttpDynamicTableSinkFactory.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/Slf4jHttpPostRequestCallback.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/Slf4jHttpPostRequestCallbackFactory.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/utils/ConfigUtils.java (59%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ExceptionUtils.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/HttpHeaderUtils.java rename {src/main/java/com/getindata/connectors/http/internal => 
flink-connector-http/src/main/java/org/apache/flink/connector/http}/utils/JavaNetHttpClientFactory.java (53%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ProxyConfig.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/SerializationSchemaUtils.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/utils/SynchronizedSerializationSchema.java (51%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ThreadUtils.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/utils/uri/CharArrayBuffer.java (66%) create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/NameValuePair.java create mode 100644 flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/ParserCursor.java rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/utils/uri/TokenParser.java (58%) rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/utils/uri/URIBuilder.java (68%) rename {src/main/java/com/getindata/connectors/http/internal => flink-connector-http/src/main/java/org/apache/flink/connector/http}/utils/uri/URLEncodedUtils.java (77%) create mode 100644 flink-connector-http/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/ExceptionUtilsTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/HttpPostRequestCallbackFactoryTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/HttpsConnectionTestBase.java 
create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/StreamTableJob.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/TestHelper.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/TestLookupPostRequestCallbackFactory.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/TestPostRequestCallbackFactory.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/WireMockServerPortAllocator.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/app/HttpStubApp.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/app/JsonTransform.java rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/auth/OidcAccessTokenManagerTest.java (81%) create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/config/ConfigExceptionTest.java rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/retry/HttpClientWithRetryTest.java (65%) create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/retry/RetryConfigProviderTest.java rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/retry/RetryStrategyTypeTest.java (57%) create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkBuilderTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkConnectionTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkWriterStateSerializerTest.java rename {src/test/java/com/getindata/connectors/http/internal => 
flink-connector-http/src/test/java/org/apache/flink/connector/http}/sink/HttpSinkWriterTest.java (56%) create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitterFactoryTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitterTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/JavaNetSinkHttpClientConnectionTest.java rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/sink/httpclient/JavaNetSinkHttpClientTest.java (51%) create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/status/ComposeHttpStatusCodeCheckerTest.java rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/status/HttpCodesParserTest.java (55%) rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/status/HttpResponseCheckerTest.java (70%) create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/BasicAuthHeaderValuePreprocessorTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/ComposeHeaderPreprocessorTest.java rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/table/lookup/AsyncHttpTableLookupFunctionTest.java (52%) rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/table/lookup/BodyBasedRequestFactoryTest.java (57%) create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceFactoryTest.java create mode 100644 
flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceITCaseTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceTest.java rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/table/lookup/JavaNetHttpPollingClientConnectionTest.java (67%) create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientFactoryTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientHttpsConnectionTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientWithWireTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JsonTransform.java rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/table/lookup/LookupQueryInfoTest.java (69%) create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/RowDataSingleValueLookupSchemaEntryTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/RowTypeLookupSchemaEntryTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/TableSourceHelperTest.java rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/table/lookup/querycreators/CustomFormatFactory.java (60%) rename {src/test/java/com/getindata/connectors/http/internal => 
flink-connector-http/src/test/java/org/apache/flink/connector/http}/table/lookup/querycreators/CustomJsonFormatFactory.java (60%) create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/ElasticSearchLiteQueryCreatorTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericGetQueryCreatorTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactoryTest.java rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorTest.java (52%) create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreatorFactoryTest.java rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/table/lookup/querycreators/GenericJsonQueryCreatorTest.java (57%) create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/PersonBean.java rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/table/lookup/querycreators/QueryCreatorUtils.java (54%) create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/QueryFormatAwareConfigurationTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/BatchRequestHttpDynamicSinkInsertTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/HttpDynamicSinkTest.java create mode 100644 
flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/HttpDynamicTableSinkFactoryTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/PerRequestHttpDynamicSinkInsertTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/ConfigUtilsTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/HttpHeaderUtilsTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/JavaNetHttpClientFactoryTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/CharArrayBufferTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/ParserCursorTest.java create mode 100644 flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/TokenParserTest.java rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/utils/uri/URIBuilderTest.java (54%) rename {src/test/java/com/getindata/connectors/http/internal => flink-connector-http/src/test/java/org/apache/flink/connector/http}/utils/uri/URLEncodedUtilsTest.java (53%) create mode 100644 flink-connector-http/src/test/resources/META-INF/services/org.apache.flink.table.factories.Factory rename {src => flink-connector-http/src}/test/resources/auth/AuthResult.json (100%) rename {src => flink-connector-http/src}/test/resources/http-array-result-with-nulls/HttpResult.json (100%) rename {src => flink-connector-http/src}/test/resources/http-array-result/HttpResult.json (100%) rename {src => flink-connector-http/src}/test/resources/http/HttpResult.json (100%) rename {src => flink-connector-http/src}/test/resources/json/sink/allInOneBatch.txt (100%) rename {src => flink-connector-http/src}/test/resources/json/sink/fourSingleEventBatches.txt (100%) rename {src 
=> flink-connector-http/src}/test/resources/json/sink/threeBatches.txt (100%) rename {src => flink-connector-http/src}/test/resources/json/sink/twoBatches.txt (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/ca.crt (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/ca.key (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/ca.pass.key (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/ca.srl (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/ca_server_bundle.cert.pem (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/client.crt (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/client.csr (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/client.key (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/client.pass.key (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/clientPrivateKey.der (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/clientPrivateKey.pem (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/client_keyStore.p12 (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/server.crt (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/server.csr (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/server.key (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/server.pass.key (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/serverKeyStore.jks (100%) rename {src => flink-connector-http/src}/test/resources/security/certs/serverTrustStore.jks (100%) create mode 100644 flink-sql-connector-http/pom.xml create mode 100644 flink-sql-connector-http/src/main/resources/META-INF/NOTICE create mode 100644 
flink-sql-connector-http/src/test/java/org/apache/flink/connector/http/PackagingITCase.java delete mode 100644 src/main/java/com/getindata/connectors/http/HttpPostRequestCallback.java delete mode 100644 src/main/java/com/getindata/connectors/http/HttpPostRequestCallbackFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/HttpSink.java delete mode 100644 src/main/java/com/getindata/connectors/http/HttpStatusCodeValidationFailedException.java delete mode 100644 src/main/java/com/getindata/connectors/http/LookupArg.java delete mode 100644 src/main/java/com/getindata/connectors/http/LookupQueryCreator.java delete mode 100644 src/main/java/com/getindata/connectors/http/LookupQueryCreatorFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/ComposeHeaderPreprocessor.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/HeaderPreprocessor.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/HeaderValuePreprocessor.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/OIDCAuthHeaderValuePreprocessor.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/PollingClient.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/PollingClientFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/SinkHttpClient.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/SinkHttpClientBuilder.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/SinkHttpClientResponse.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/config/ConfigException.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/config/HttpConnectorConfigConstants.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/config/SinkRequestSubmitMode.java delete mode 100644 
src/main/java/com/getindata/connectors/http/internal/retry/HttpClientWithRetry.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/retry/RetryStrategyType.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkInternal.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkRequestEntry.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkWriter.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkWriterStateSerializer.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/sink/httpclient/AbstractRequestSubmitter.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitterFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/sink/httpclient/HttpRequest.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetHttpResponseWrapper.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetSinkHttpClient.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/sink/httpclient/PerRequestRequestSubmitterFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/sink/httpclient/PerRequestSubmitter.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/sink/httpclient/RequestSubmitter.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/sink/httpclient/RequestSubmitterFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/status/ComposeHttpStatusCodeChecker.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/status/HttpCodesParser.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/status/HttpResponseCodeType.java delete mode 100644 
src/main/java/com/getindata/connectors/http/internal/status/HttpStatusCodeChecker.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/status/SingleValueHttpStatusCodeChecker.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/status/TypeStatusCodeChecker.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/status/WhiteListHttpStatusCodeChecker.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/SerializationSchemaElementConverter.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/AsyncHttpTableLookupFunction.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/BodyBasedRequestFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/GetRequestFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupConfig.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupConnectorOptions.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupSourceRequestEntry.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpRequestFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/LookupQueryInfo.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/LookupSchemaEntry.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/RowDataLookupSchemaEntryBase.java delete mode 100644 
src/main/java/com/getindata/connectors/http/internal/table/lookup/Slf4JHttpLookupPostRequestCallback.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/Slf4jHttpLookupPostRequestCallbackFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ElasticSearchLiteQueryCreator.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ElasticSearchLiteQueryCreatorFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreator.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreatorFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreator.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreatorFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ObjectMapperAdapter.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/PrefixedConfigOption.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicSinkConnectorOptions.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicTableSinkFactory.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/sink/Slf4jHttpPostRequestCallback.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/table/sink/Slf4jHttpPostRequestCallbackFactory.java delete mode 100644 
src/main/java/com/getindata/connectors/http/internal/utils/ExceptionUtils.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/utils/HttpHeaderUtils.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/utils/ProxyConfig.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/utils/SerializationSchemaUtils.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/utils/ThreadUtils.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/utils/uri/NameValuePair.java delete mode 100644 src/main/java/com/getindata/connectors/http/internal/utils/uri/ParserCursor.java delete mode 100644 src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory delete mode 100644 src/test/java/com/getindata/StreamTableJob.java delete mode 100644 src/test/java/com/getindata/connectors/http/ExceptionUtilsTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/HttpPostRequestCallbackFactoryTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/TestHelper.java delete mode 100644 src/test/java/com/getindata/connectors/http/TestLookupPostRequestCallbackFactory.java delete mode 100644 src/test/java/com/getindata/connectors/http/TestPostRequestCallbackFactory.java delete mode 100644 src/test/java/com/getindata/connectors/http/app/HttpStubApp.java delete mode 100644 src/test/java/com/getindata/connectors/http/app/JsonTransform.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/BasicAuthHeaderValuePreprocessorTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/ComposeHeaderPreprocessorTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/HttpsConnectionTestBase.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/config/ConfigExceptionTest.java delete mode 100644 
src/test/java/com/getindata/connectors/http/internal/retry/RetryConfigProviderTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkBuilderTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkConnectionTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkWriterStateSerializerTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitterFactoryTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitterTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetSinkHttpClientConnectionTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/sink/httpclient/status/ComposeHttpStatusCodeCheckerTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceFactoryTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceITCaseTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientFactoryTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientHttpsConnectionTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientWithWireTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/JsonTransform.java delete mode 100644 
src/test/java/com/getindata/connectors/http/internal/table/lookup/RowDataSingleValueLookupSchemaEntryTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/RowTypeLookupSchemaEntryTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/TableSourceHelperTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ElasticSearchLiteQueryCreatorTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreatorTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactoryTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreatorFactoryTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/PathBean.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/PersonBean.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/QueryFormatAwareConfigurationTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/sink/BatchRequestHttpDynamicSinkInsertTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicSinkTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicTableSinkFactoryTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/table/sink/PerRequestHttpDynamicSinkInsertTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/utils/ConfigUtilsTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/utils/HttpHeaderUtilsTest.java delete mode 100644 
src/test/java/com/getindata/connectors/http/internal/utils/JavaNetHttpClientFactoryTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/utils/uri/CharArrayBufferTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/utils/uri/ParserCursorTest.java delete mode 100644 src/test/java/com/getindata/connectors/http/internal/utils/uri/TokenParserTest.java delete mode 100644 src/test/resources/META-INF/services/org.apache.flink.table.factories.Factory create mode 100644 tools/ci/log4j.properties create mode 100644 tools/maven/checkstyle.xml create mode 100644 tools/maven/suppressions.xml diff --git a/.checkstyle b/.checkstyle new file mode 100644 index 00000000..8a4c73c4 --- /dev/null +++ b/.checkstyle @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml new file mode 100644 index 00000000..f8bc1067 --- /dev/null +++ b/.github/boring-cyborg.yml @@ -0,0 +1,86 @@ +################################################################################ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +labelPRBasedOnFilePath: + component=BuildSystem: + - .github/**/* + - tools/maven/* + + component=Documentation: + - docs/**/* + + component=Connectors/Http: + - flink-connector-http*/**/* + +###### IssueLink Adder ################################################################################################# +# Insert Issue (Jira/Github etc) link in PR description based on the Issue ID in PR title. +insertIssueLinkInPrDescription: + # specify the placeholder for the issue link that should be present in the description + descriptionIssuePlaceholderRegexp: "^Issue link: (.*)$" + matchers: + # you can have several matches - for different types of issues + # only the first matching entry is replaced + jiraIssueMatch: + # specify the regexp of issue id that you can find in the title of the PR + # the match groups can be used to build the issue id (${1}, ${2}, etc.). + titleIssueIdRegexp: \[(FLINK-[0-9]+)\] + # the issue link to be added. ${1}, ${2} ... are replaced with the match groups from the + # title match (remember to use quotes) + descriptionIssueLink: "[${1}](https://issues.apache.org/jira/browse/${1}/)" + docOnlyIssueMatch: + titleIssueIdRegexp: \[hotfix\] + descriptionIssueLink: "`Documentation only change, no JIRA issue`" + +###### Title Validator ################################################################################################# +# Verifies if commit/PR titles match the regexp specified +verifyTitles: + # Regular expression that should be matched by titles of commits or PR + titleRegexp: ^\[FLINK-[0-9]+\].*$|^\[FLINK-XXXXX\].*$|^\[hotfix].*$ + # If set to true, it will always check the PR title (as opposed to the individual commits). + alwaysUsePrTitle: false + # If set to true, it will only check the commit in case there is a single commit. + # In case of multiple commits it will check PR title. 
+ # This reflects the standard behaviour of Github that for `Squash & Merge` GitHub + # uses the PR title rather than commit messages for the squashed commit ¯\_(ツ)_/¯ + # For single-commit PRs it takes the squashed commit message from the commit as expected. + # + # If set to false it will check all commit messages. This is useful when you do not squash commits at merge. + validateEitherPrOrSingleCommitTitle: true + # The title the GitHub status should appear from. + statusTitle: "Title Validator" + # A custom message to be displayed when the title passes validation. + successMessage: "Validation successful!" + # A custom message to be displayed when the title fails validation. + # Allows insertion of ${type} (commit/PR), ${title} (the title validated) and ${regex} (the titleRegexp above). + failureMessage: "Wrong ${type} title: ${title}" + +# Various Flags to control behaviour of the "Labeler" +labelerFlags: + # If this flag is changed to 'false', labels would only be added when the PR is first created + # and not when existing PR is updated. + # The default is 'true' which means the labels would be added when PR is updated even if they + # were removed by the user + labelOnPRUpdates: true + +# Comment to be posted to welcome users when they open their first PR +firstPRWelcomeComment: > + Thanks for opening this pull request! Please check out our contributing guidelines. (https://flink.apache.org/contributing/how-to-contribute.html) + +# Comment to be posted to congratulate user on their first merged PR +firstPRMergeComment: > + Awesome work, congrats on your first merged pull request! 
\ No newline at end of file diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md deleted file mode 100644 index 44b483a3..00000000 --- a/.github/pull_request_template.md +++ /dev/null @@ -1,9 +0,0 @@ -#### Description - -`describe the purpose of the change here` - -Resolves `` - -##### PR Checklist -- [ ] Tests added -- [ ] [Changelog](CHANGELOG.md) updated diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml deleted file mode 100644 index b20dd4d3..00000000 --- a/.github/workflows/build.yml +++ /dev/null @@ -1,51 +0,0 @@ -name: Build - -on: - push: - branches: - - 'main' - - 'release/**' - pull_request: - -env: - MAVEN_CLI_OPTS: "--batch-mode" - MAVEN_OPTS: "-Dmaven.repo.local=.m2/repository" - JAVA_ADDITIONAL_OPTS: "-Dorg.slf4j.simpleLogger.showDateTime=true -Dorg.slf4j.simpleLogger.dateTimeFormat=HH:mm:ss,SSS" - FF_USE_FASTZIP: "true" - -jobs: - build: - runs-on: ubuntu-latest - strategy: - matrix: - flink: ["1.18.1", "1.19.1", "1.20.0"] - steps: - - uses: actions/checkout@v3 - - - name: Set up JDK 11 - uses: actions/setup-java@v3 - with: - java-version: '11' - distribution: 'adopt' - cache: maven - - - name: Build for Flink ${{ matrix.flink }} - run: mvn $MAVEN_CLI_OPTS $JAVA_ADDITIONAL_OPTS -Dflink.version=${{ matrix.flink }} compile - - - name: Tests for Flink ${{ matrix.flink }} - run: | - mvn $MAVEN_CLI_OPTS $JAVA_ADDITIONAL_OPTS -Dflink.version=${{ matrix.flink }} test integration-test - - - name: Test JavaDoc - run: mvn $MAVEN_CLI_OPTS $JAVA_ADDITIONAL_OPTS javadoc:javadoc - if: startsWith(matrix.flink, '1.20') - - - name: Add coverage to PR - id: jacoco - uses: madrapps/jacoco-report@v1.7.1 - with: - paths: ${{ github.workspace }}/target/site/jacoco/jacoco.xml - token: ${{ secrets.GITHUB_TOKEN }} - min-coverage-overall: 40 - min-coverage-changed-files: 60 - if: startsWith(matrix.flink, '1.20') && github.event.pull_request.head.repo.fork == false diff --git a/.github/workflows/prepare_release_branch.yml 
b/.github/workflows/prepare_release_branch.yml deleted file mode 100644 index ff2993fb..00000000 --- a/.github/workflows/prepare_release_branch.yml +++ /dev/null @@ -1,73 +0,0 @@ -name: Prepare release branch - -on: - workflow_dispatch: - inputs: - version_part: - description: The part of the version to update (Patch, Minor or Major) - required: true - type: choice - options: - - Patch - - Minor - - Major - default: 'Minor' - -jobs: - prepare-branch: - runs-on: ubuntu-latest - - permissions: # Permissions to create a new branch and a new PR. - contents: write - pull-requests: write - - steps: - - uses: actions/checkout@v3 - - - name: Set up JDK 11 - uses: actions/setup-java@v3 - with: - java-version: '11' - distribution: 'adopt' - cache: maven - - - name: Validate inputs - run: | - echo "INPUT_VERSION_PART: ${{ github.event.inputs.version_part }}" - python -c "if '${{ github.event.inputs.version_part }}' not in ['Patch', 'Minor', 'Major']: raise ValueError(\"'${{ github.event.inputs.version_part }}' must be one of ['Patch', 'Minor', 'Major'])\")" - - - name: Save current version - id: save_current_version - run: | - mvn versions:set -DremoveSnapshot -DgenerateBackupPoms=false - echo "::set-output name=current_version::$(mvn -B help:evaluate -Dexpression=project.version -q -DforceStdout)" - - - name: Update the CHANGELOG according to 'Keep a Changelog' guidelines - uses: thomaseizinger/keep-a-changelog-new-release@v1 - with: - version: ${{ steps.save_current_version.outputs.current_version }} - - - name: Create a new release branch - run: | - git config user.name github-actions - git config user.email github-actions@github.com - git commit -am "Bump CHANGELOG for release ${{ steps.save_current_version.outputs.current_version }}" - git checkout -b release/${{ steps.save_current_version.outputs.current_version }} - git push -u origin release/${{ steps.save_current_version.outputs.current_version }} - - - name: Bump development version - run: | - git checkout -b 
bump-version-after-${{ steps.save_current_version.outputs.current_version }} - mvn validate -D 'bump${{ github.event.inputs.version_part }}' -DgenerateBackupPoms=false - git commit -am "Bump development version to $(mvn -B help:evaluate -Dexpression=project.version -q -DforceStdout)" - git push -u origin bump-version-after-${{ steps.save_current_version.outputs.current_version }} - - - name: Open a PR to bump development version to main - id: open_pr - uses: vsoch/pull-request-action@1.1.0 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - PULL_REQUEST_BRANCH: main - PULL_REQUEST_FROM_BRANCH: bump-version-after-${{ steps.save_current_version.outputs.current_version }} - PULL_REQUEST_TITLE: "Bump development version after release of ${{ steps.save_current_version.outputs.current_version }}" - PULL_REQUEST_BODY: "Bump SNAPSHOT version and CHANGELOG for subsequent development." diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml deleted file mode 100644 index a994f28f..00000000 --- a/.github/workflows/publish.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: Publish - -on: - release: - types: [published] - -env: - MAVEN_CLI_OPTS: "--batch-mode" - MAVEN_OPTS: "-Dmaven.repo.local=.m2/repository" - JAVA_ADDITIONAL_OPTS: "-Dorg.slf4j.simpleLogger.showDateTime=true -Dorg.slf4j.simpleLogger.dateTimeFormat=HH:mm:ss,SSS" - FF_USE_FASTZIP: "true" - -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Check release tag match # ... 
and fail fast if they do not - run: diff <(echo "${{ github.ref_name }}") <(echo "$(mvn -B help:evaluate -Dexpression=project.version -q -DforceStdout)") - - - name: Set up JDK 11 - uses: actions/setup-java@v4 - with: - java-version: '11' - distribution: 'temurin' - cache: maven - - - name: Build - run: mvn $MAVEN_CLI_OPTS $JAVA_ADDITIONAL_OPTS compile - - - name: Tests - run: | - mvn $MAVEN_CLI_OPTS $JAVA_ADDITIONAL_OPTS test integration-test - - - name: Set up Apache Maven Central - uses: actions/setup-java@v4 - with: - java-version: '11' - distribution: 'temurin' - server-id: ossrh - server-username: MAVEN_USERNAME - server-password: MAVEN_PASSWORD - gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} - gpg-passphrase: GPG_PRIVATE_KEY_PASSWORD - - - name: Publish to Apache Maven Central - if: github.event.release - run: mvn deploy -P release - env: - MAVEN_USERNAME: ${{ secrets.SONATYPE_TOKEN_USERNAME }} - MAVEN_PASSWORD: ${{ secrets.SONATYPE_TOKEN_PASSWORD }} - GPG_PRIVATE_KEY_PASSWORD: ${{ secrets.GPG_PRIVATE_KEY_PASSWORD }} diff --git a/.github/workflows/push_pr.yml b/.github/workflows/push_pr.yml new file mode 100644 index 00000000..5a211143 --- /dev/null +++ b/.github/workflows/push_pr.yml @@ -0,0 +1,37 @@ +################################################################################ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +# We need to specify repo related information here since Apache INFRA doesn't differentiate +# between several workflows with the same names while preparing a report for GHA usage +# https://infra-reports.apache.org/#ghactions +name: Flink Connector HTTP CI +on: [push, pull_request] +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true +jobs: + compile_and_test: + strategy: + matrix: + flink: [ 1.20.0 ] + jdk: [ '11', '17', '21' ] + uses: apache/flink-connector-shared-utils/.github/workflows/ci.yml@ci_utils + with: + flink_version: ${{ matrix.flink }} + jdk_version: ${{ matrix.jdk }} + diff --git a/.github/workflows/weekly.yml b/.github/workflows/weekly.yml new file mode 100644 index 00000000..c2e7d408 --- /dev/null +++ b/.github/workflows/weekly.yml @@ -0,0 +1,41 @@ +################################################################################ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +# We need to specify repo related information here since Apache INFRA doesn't differentiate +# between several workflows with the same names while preparing a report for GHA usage +# https://infra-reports.apache.org/#ghactions +name: Weekly Flink Connector HTTP CI +on: + schedule: + - cron: "0 0 * * 0" + workflow_dispatch: +jobs: + compile_and_test: + if: github.repository_owner == 'apache' + strategy: + matrix: + flink_branches: [{ + flink: 1.20.0, + branch: main + }] + uses: apache/flink-connector-shared-utils/.github/workflows/ci.yml@ci_utils + with: + flink_version: ${{ matrix.flink_branches.flink }} + connector_branch: ${{ matrix.flink_branches.branch }} + jdk_version: ${{ matrix.flink_branches.jdk || '11, 17, 21' }} + run_dependency_convergence: false diff --git a/.gitignore b/.gitignore index 637d5966..bbad0811 100644 --- a/.gitignore +++ b/.gitignore @@ -5,12 +5,33 @@ .gitignore.swp .project .settings +/.java-version +.eslintcache +.cache +scalastyle-output.xml +.idea/* +./bobmodes/* +!.idea/vcs.xml +.metadata +.version.properties +filter.properties +logs.zip +.mvn/wrapper/*.jar +.mvn/maven.config +target +tmp target bin -/flink.http.connector.iml -/src/main/flink-http-connector.iml -/src/main/main.iml -/src/test/test.iml -/flink-http-connector.iml -/dependency-reduced-pom.xml -/.java-version +*.class +*.iml +*.swp +*.jar +*.zip +*.log +*.pyc +.DS_Store +build-target +atlassian-ide-plugin.xml +out/ +/docs/api +tools/japicmp-output diff --git a/.mvn/jvm.config b/.mvn/jvm.config new file mode 100644 index 00000000..7a099aa8 --- /dev/null +++ b/.mvn/jvm.config @@ -0,0 +1,7 @@ +-XX:+IgnoreUnrecognizedVMOptions +--add-exports=java.security.jgss/sun.security.krb5=ALL-UNNAMED +--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED +--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED +--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED 
+--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED +--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties new file mode 100644 index 00000000..0fe40ae8 --- /dev/null +++ b/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +# updating the Maven version requires updates to certain documentation and verification logic +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.6/apache-maven-3.8.6-bin.zip +distributionSha256Sum=ccf20a80e75a17ffc34d47c5c95c98c39d426ca17d670f09cd91e877072a9309 +wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.3.2/maven-wrapper-3.3.2.jar +wrapperSha256Sum=3d8f20ce6103913be8b52aef6d994e0c54705fb527324ceb9b835b338739c7a8 diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 7810d9b8..00000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,281 +0,0 @@ -# Changelog - -## [Unreleased] - -- Added option to define a proxy for the lookup source (including authentication) - -- Added support for generic json and URL query creator - -- Retries support for source table: - - Auto retry on IOException and user-defined http codes - parameter `gid.connector.http.source.lookup.retry-codes`. - - Parameters `gid.connector.http.source.lookup.error.code.exclude"` and `gid.connector.http.source.lookup.error.code` were replaced by `gid.connector.http.source.lookup.ignored-response-codes`. - - Added connection timeout for source table - `gid.connector.http.source.lookup.connection.timeout`. - -## [0.19.0] - 2025-03-20 - -- OIDC token request to not flow during explain - -## [0.18.0] - 2025-01-15 - -### Fixed - -- Ignore Eclipse files in .gitignore -- Support Flink 1.20 - -## [0.17.0] - 2024-11-28 - -### Added - -- Allow to fetch multiple results from REST API endpoint (`gid.connector.http.source.lookup.result-type`). - -## [0.16.0] - 2024-10-18 - -### Added - -- Added support for built in JVM certificates if no security is configured. -- Added support for OIDC Bearer tokens. - -### Fixed - -- Ensured SerializationSchema is used in thread-safe way. - -## [0.15.0] - 2024-07-30 - -### Added - -- Added support for caching of lookup joins. 
- -### Fixed - -- Fixed issue in the logging code of the `JavaNetHttpPollingClient` which prevents showing the status code and response body when the log level is configured at DEBUG (or lower) level. - -## [0.14.0] - 2024-05-10 - -### Added - -- Added support for optionally using a custom SLF4J logger to trace HTTP lookup queries. - New configuration parameter: `gid.connector.http.source.lookup.request-callback` with default value - `slf4j-lookup-logger`. If this parameter is not provided then the default SLF4J logger - [Slf4JHttpLookupPostRequestCallback](https://github.com/getindata/flink-http-connector/blob/main/src/main/java/com/getindata/connectors/http/internal/table/lookup/Slf4JHttpLookupPostRequestCallback.java) - is used instead. - -## [0.13.0] - 2024-04-03 - -### Added - -- Added support for using the result of a lookup join operation in a subsequent select query that adds - or removes columns (project pushdown operation). - -### Changed - -- Changed [LookupQueryInfo](src/main/java/com/getindata/connectors/http/internal/table/lookup/LookupQueryInfo.java) - Any custom implementation of this interface that aims to provide path-based requests is able to provide - the lookup query url with parameters surrounded by curly brackets. For example the supplied - URL `http://service/{customerId}`, will result in the lookup parameter `customerId` value being used - in the url. - -### Fixed - -- Moved junit support to junit 5, allowing junits to be run against flink 1.17 and 1.18. - -## [0.12.0] - 2024-03-22 - -### Added - -- Added support for passing `Authorization` headers for other purposes than Basic Authentication. - New configuration parameter: `gid.connector.http.source.lookup.use-raw-authorization-header`. - If set to `'true'`, the connector uses the raw value set for the `Authorization` header, without - transformation for Basic Authentication (base64, addition of "Basic " prefix). - If not specified, defaults to `'false'`. 
- -### Changed - -- Changed API for `LookupQueryCreator`. The method `createLookupQuery` no longer returns a String but a - [LookupQueryInfo](src/main/java/com/getindata/connectors/http/internal/table/lookup/LookupQueryInfo.java) - Any custom implementation of this interface that aims to provide body-based request is able to provide - the lookup query as the payload and an optional formatted string representing the query parameters. - -## [0.11.0] - 2023-11-20 - -## [0.10.0] - 2023-07-05 - -### Fixed - - Fixed an issue where SQL Client did not work with the connector at Flink 1.16. - - This required a change to use a different classloader in the lookup join processing. - As well as the classloader change, a change to the PrefixedConfigOption implementation was - required, because it was implemented as an extension to ConfigOption; which produced - access errors when trying to access the parent class protected methods (the parent class was loaded - using a different classloader). The new implementation is not an extension; instead it holds an - instance of the ConfigOption as a private variable and uses reflection to instantiate a cloned - ConfigOption object with the prefixed key. - -### Added - -- Add support for batch request submission in HTTP sink. The mode can be changed by setting - `gid.connector.http.sink.writer.request.mode` with value `single` or `batch`. The default value - is `batch` bode which is breaking change comparing to previous versions. Additionally, - `gid.connector.http.sink.request.batch.size` option can be used to set batch size. By default, - batch size is 500 which is same as default value of HttpSink `maxBatchSize` parameter. - -### Changed - -- Changed API for public HttpSink builder. The `setHttpPostRequestCallback` expects a `PostRequestCallback` - of generic type [HttpRequest](src/main/java/com/getindata/connectors/http/internal/sink/httpclient/HttpRequest.java) - instead `HttpSinkRequestEntry`. 
-- Changed HTTP sink request and response processing thread pool sizes from 16 to 1. - -## [0.9.0] - 2023-02-10 - -- Add support for Flink 1.16. -- Add [SchemaLifecycleAwareElementConverter](src/main/java/com/getindata/connectors/http/SchemaLifecycleAwareElementConverter.java) that can be used for createing - schema lifecycle aware Element converters for Http Sink. - -## [0.8.1] - 2022-12-22 - -### Fixed - -- Fixed issue with not printing HttpRequest body/parameters for Lookup Source by - [Slf4JHttpLookupPostRequestCallback](src/main/java/com/getindata/connectors/http/internal/table/lookup/Slf4JHttpLookupPostRequestCallback.java) - - -### Removed - -- Removed unused reference to EncodingFormat from HttpLookupTableSource - -## [0.8.0] - 2022-12-06 - -### Added - -- Add new parameters for HTTP timeout configuration and thread pool size for Sink and Lookup source http requests. - -### Fixed - -- Fix issue with not cleaning Flink's internal task queue for AsyncIO requests after HTTP timeout in - Lookup source - - -## [0.7.0] - 2022-10-27 - -- Add to Lookup Source support for performing lookup on columns with complex types such as ROW, Map etc. -- Add support for custom Json Serialization format for SQL Lookup Source when using [GenericJsonQueryCreator](src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreator.java) - The custom format can be defined using Flink's Factory mechanism. The format name can be defined using - `lookup-request.format` option. The default format is `json` which means that connector will use FLink's [json-format](https://nightlies.apache.org/flink/flink-docs-master/docs/connectors/table/formats/json/) - -## [0.6.0] - 2022-10-05 - -### Added - -- Add support for other REST methods like PUT and POST to lookup source connector. The request method can be set using - new optional lookup-source property `lookup-method`. 
If property is not specified in table DDL, GET method will be used for - lookup queries. - -## [0.5.0] - 2022-09-22 - -### Added - -- Add Http Header value preprocessor mechanism, that can preprocess defined header value before setting it on the request. -- Allow user to specify `Authorization` header for Basic Authentication. The value will be converted to Base64, - or if it starts from prefix `Basic `, it will be used as is (without any extra modification). -- Add TLS and mTLS support for Http Sink and Lookup Source connectors. - New properties are: - - `gid.connector.http.security.cert.server` - path to server's certificate. - - `gid.connector.http.security.cert.client` - path to connector's certificate. - - `gid.connector.http.security.key.client` - path to connector's private key. - - `gid.connector.http.security.cert.server.allowSelfSigned` - allowing for self-signed certificates without adding them to KeyStore (not recommended for a production). -- Add [LookupQueryCreator](src/main/java/com/getindata/connectors/http/LookupQueryCreator.java) and - [LookupQueryCreatorFactory](src/main/java/com/getindata/connectors/http/LookupQueryCreatorFactory.java) interfaces - (along with a "default" - [GenericGetQueryCreator](src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreator.java) - implementation) for customization of queries prepared by Lookup Source for its HTTP requests. -- Add [ElasticSearchLiteQueryCreator](src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ElasticSearchLiteQueryCreator.java) - that prepares [`q` parameter query](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-search.html#search-api-query-params-q) - using Lucene query string syntax (in first versions of ElasticSearch called - [Search _Lite_](https://www.elastic.co/guide/en/elasticsearch/guide/current/search-lite.html)). 
- -## [0.4.0] - 2022-08-31 - -### Added - -- Add new properties `gid.connector.http.sink.error.code`,`gid.connector.http.sink.error.code.exclude`, - `gid.connector.http.source.lookup.error.code` and `gid.connector.http.source.lookup.error.code.exclude` - to set HTTP status codes that should be interpreted as errors both for HTTP Sink and HTTP Lookup Source. -- Use Flink's format support to Http Lookup Source. -- Add HTTP Lookup source client header configuration via properties. -- Add [HttpPostRequestCallback](src/main/java/com/getindata/connectors/http/HttpPostRequestCallback.java) and - [HttpPostRequestCallbackFactory](src/main/java/com/getindata/connectors/http/HttpPostRequestCallbackFactory.java) - interfaces (along with a "default" - [Slf4jHttpPostRequestCallback](src/main/java/com/getindata/connectors/http/internal/table/sink/Slf4jHttpPostRequestCallback.java) - implementation) for customizable processing of HTTP Sink requests and responses in Table API. - -### Changed - -- Change dependency scope for `org.apache.flink.flink-connector-base` from `compile` to `provided`. -- Changed DDL of `rest-lookup` connector. Dropped `json-path` properties, and add mandatory `format` property. - -### Removed - -- Remove dependency on `org.apache.httpcomponents.httpclient`from production code. Dependency is only for test scope. -- Removed dependency on `com.jayway.jsonpath.json-path` - -### Fixed - -- Fix JavaDoc errors. - -## [0.3.0] - 2022-07-21 - -- Package refactoring. Hide internal classes that does not have to be used by API users under "internal" package. - Methods defined in classes located outside "internal" package are considered "public API". - Any changes to those methods should be communicated as "not backward compatible" and should be avoided. -- Add checkstyle configuration to "dev" folder. Add checkstyle check during maven build -- Add HTTP sink client header configuration via properties. 
- -## [0.2.0] - 2022-07-06 - -- Implement [HttpSink](src/main/java/com/getindata/connectors/http/HttpSink.java) deriving from [AsyncSinkBase](https://cwiki.apache.org/confluence/display/FLINK/FLIP-171%3A+Async+Sink) introduced in Flink 1.15. -- Add support for Table API in HttpSink in the form of [HttpDynamicSink](src/main/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicSink.java). - -## [0.1.0] - 2022-05-26 - -- Implement basic support for Http connector for Flink SQL - -[Unreleased]: https://github.com/getindata/flink-http-connector/compare/0.19.0...HEAD - -[0.19.0]: https://github.com/getindata/flink-http-connector/compare/0.18.0...0.19.0 - -[0.18.0]: https://github.com/getindata/flink-http-connector/compare/0.17.0...0.18.0 - -[0.17.0]: https://github.com/getindata/flink-http-connector/compare/0.16.0...0.17.0 - -[0.16.0]: https://github.com/getindata/flink-http-connector/compare/0.15.0...0.16.0 - -[0.15.0]: https://github.com/getindata/flink-http-connector/compare/0.14.0...0.15.0 - -[0.14.0]: https://github.com/getindata/flink-http-connector/compare/0.13.0...0.14.0 - -[0.13.0]: https://github.com/getindata/flink-http-connector/compare/0.12.0...0.13.0 - -[0.12.0]: https://github.com/getindata/flink-http-connector/compare/0.11.0...0.12.0 - -[0.11.0]: https://github.com/getindata/flink-http-connector/compare/0.10.0...0.11.0 - -[0.10.0]: https://github.com/getindata/flink-http-connector/compare/0.9.0...0.10.0 - -[0.9.0]: https://github.com/getindata/flink-http-connector/compare/0.8.1...0.9.0 - -[0.8.1]: https://github.com/getindata/flink-http-connector/compare/0.8.0...0.8.1 - -[0.8.0]: https://github.com/getindata/flink-http-connector/compare/0.7.0...0.8.0 - -[0.7.0]: https://github.com/getindata/flink-http-connector/compare/0.6.0...0.7.0 - -[0.6.0]: https://github.com/getindata/flink-http-connector/compare/0.5.0...0.6.0 - -[0.5.0]: https://github.com/getindata/flink-http-connector/compare/0.4.0...0.5.0 - -[0.4.0]: 
https://github.com/getindata/flink-http-connector/compare/0.3.0...0.4.0 - -[0.3.0]: https://github.com/getindata/flink-http-connector/compare/0.2.0...0.3.0 - -[0.2.0]: https://github.com/getindata/flink-http-connector/compare/0.1.0...0.2.0 - -[0.1.0]: https://github.com/getindata/flink-http-connector/compare/dfe9bfeaa73e77b1de14cd0cb0546a925583e23e...0.1.0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 4c0b7d8e..00000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,9 +0,0 @@ -## PR Guidelines -1. Fork branch from `main`. -1. Ensure to provide unit tests for new functionality. -1. Update documentation accordingly. -1. Update [changelog](CHANGELOG.md) according to ["Keep a changelog"](https://keepachangelog.com/en/1.0.0/) guidelines. -1. Squash changes with a single commit as much as possible and ensure verbose PR name. -1. Open a PR against `main` - -*We reserve the right to take over and modify or abandon PRs that do not match the workflow or are abandoned.* diff --git a/NOTICE b/NOTICE new file mode 100644 index 00000000..bba5e33c --- /dev/null +++ b/NOTICE @@ -0,0 +1,17 @@ +Apache Flink HTTP Connector +Copyright 2025 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby +granted, provided that this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING +ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
+ + + diff --git a/README.md b/README.md index 211eff4a..72e900f7 100644 --- a/README.md +++ b/README.md @@ -1,714 +1,68 @@ -# flink-http-connector +# Apache Flink HTTP Connector -[![Maven Central](https://img.shields.io/maven-central/v/com.getindata/flink-http-connector)](https://mvnrepository.com/artifact/com.getindata/flink-http-connector) -[![javadoc](https://javadoc.io/badge2/com.getindata/flink-http-connector/javadoc.svg)](https://javadoc.io/doc/com.getindata/flink-http-connector) +This repository contains the official Apache Flink HTTP connector. -The HTTP TableLookup connector that allows for pulling data from external system via HTTP GET method and HTTP Sink that allows for sending data to external system via HTTP requests. +## Apache Flink -**Note**: The `main` branch may be in an *unstable or even broken state* during development. -Please use [releases](https://github.com/getindata/flink-http-connector/releases) instead of the `main` branch in order to get a stable set of binaries. +Apache Flink is an open source stream processing framework with powerful stream- and batch-processing capabilities. -The goal for HTTP TableLookup connector was to use it in Flink SQL statement as a standard table that can be later joined with other stream using pure SQL Flink. - -Currently, HTTP source connector supports only Lookup Joins (TableLookup) [1] in Table/SQL API. -`HttpSink` supports both Streaming API (when using [HttpSink](src/main/java/com/getindata/connectors/http/internal/sink/HttpSink.java) built using [HttpSinkBuilder](src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkBuilder.java)) and the Table API (using connector created in [HttpDynamicTableSinkFactory](src/main/java/com/getindata/connectors/http/internal/table/HttpDynamicTableSinkFactory.java)). 
+Learn more about Flink at [https://flink.apache.org/](https://flink.apache.org/) -## Updating the connector -In case of updating http-connector please see [Breaking changes](#breaking-changes) section. +## Building the Apache Flink HTTP Connector from Source -## Prerequisites -* Java 11 -* Maven 3 -* Flink 1.18+. Recommended Flink 1.20.* - - - -## Runtime dependencies -This connector has few Flink's runtime dependencies, that are expected to be provided. -* `org.apache.flink.flink-java` -* `org.apache.flink.flink-clients` -* `org.apache.flink.flink-connector-base` - -## Installation - -In order to use the `flink-http-connector` the following dependencies are required for both projects using a build automation tool (such as Maven or SBT) and SQL Client with SQL JAR bundles. For build automation tool reference, look into Maven Central: [https://mvnrepository.com/artifact/com.getindata/flink-http-connector](https://mvnrepository.com/artifact/com.getindata/flink-http-connector). - -## Documentation - -You can read the official JavaDoc documentation of the latest release at [https://javadoc.io/doc/com.getindata/flink-http-connector](https://javadoc.io/doc/com.getindata/flink-http-connector). 
- -## Usage - -### HTTP TableLookup Source -Flink SQL table definition: - -Enrichment Lookup Table -```roomsql -CREATE TABLE Customers ( - id STRING, - id2 STRING, - msg STRING, - uuid STRING, - details ROW< - isActive BOOLEAN, - nestedDetails ROW< - balance STRING - > - > -) WITH ( -'connector' = 'rest-lookup', -'format' = 'json', -'url' = 'http://localhost:8080/client', -'asyncPolling' = 'true' -) -``` - -Data Source Table -```roomsql -CREATE TABLE Orders ( - id STRING, - id2 STRING, - proc_time AS PROCTIME() -) WITH ( -'connector' = 'datagen', -'rows-per-second' = '1', -'fields.id.kind' = 'sequence', -'fields.id.start' = '1', -'fields.id.end' = '120', -'fields.id2.kind' = 'sequence', -'fields.id2.start' = '2', -'fields.id2.end' = '120' -); -``` - -Using _Customers_ table in Flink SQL Lookup Join with _Orders_ table: - -```roomsql -SELECT o.id, o.id2, c.msg, c.uuid, c.isActive, c.balance FROM Orders AS o -JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c ON o.id = c.id AND o.id2 = c.id2 -``` - -The columns and their values used for JOIN `ON` condition will be used as HTTP GET parameters where the column name will be used as a request parameter name. - -For Example: -`` -http://localhost:8080/client/service?id=1&uuid=2 -`` - -Or for REST POST method they will be converted to Json and used as request body. In this case, json request body will look like this: -```json -{ - "id": "1", - "uuid": "2" -} -``` - -#### Http headers -It is possible to set HTTP headers that will be added to HTTP request send by lookup source connector. -Headers are defined via property key `gid.connector.http.source.lookup.header.HEADER_NAME = header value` for example: -`gid.connector.http.source.lookup.header.X-Content-Type-Options = nosniff`. - -Headers can be set using http lookup source table DDL. 
In example below, HTTP request done for `http-lookup` table will contain three headers: -- `Origin` -- `X-Content-Type-Options` -- `Content-Type` - -```roomsql -CREATE TABLE http-lookup ( - id bigint, - some_field string -) WITH ( - 'connector' = 'rest-lookup', - 'format' = 'json', - 'url' = 'http://localhost:8080/client', - 'asyncPolling' = 'true', - 'gid.connector.http.source.lookup.header.Origin' = '*', - 'gid.connector.http.source.lookup.header.X-Content-Type-Options' = 'nosniff', - 'gid.connector.http.source.lookup.header.Content-Type' = 'application/json' -) -``` - -#### Custom REST query -Http Lookup Source builds queries out of `JOIN` clauses. One can customize how those queries are built by implementing -[LookupQueryCreator](src/main/java/com/getindata/connectors/http/LookupQueryCreator.java) and -[LookupQueryCreatorFactory](src/main/java/com/getindata/connectors/http/LookupQueryCreatorFactory.java) interfaces. -Custom implementations of `LookupQueryCreatorFactory` can be registered along other factories in -`resources/META-INF.services/org.apache.flink.table.factories.Factory` file and then referenced by their identifiers in -the Http Lookup Source DDL property field `gid.connector.http.source.lookup.query-creator`. - -A default implementation that builds an "ordinary" GET query, i.e. adds `?joinColumn1=value1&joinColumn2=value2&...` -to the URI of the endpoint, - -For body based queries such as POST/PUT requests, the -([GenericGetQueryCreator](src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreator.java)) -is provided as a default query creator. This implementation uses Flink's [json-format](https://nightlies.apache.org/flink/flink-docs-master/docs/connectors/table/formats/json/) to convert RowData object into Json String. - -The `GenericGetQueryCreator` allows for using custom formats that will perform serialization to Json. 
Thanks to this, users can create their own logic for converting RowData to Json Strings suitable for their HTTP endpoints and use this logic as custom format -with HTTP Lookup connector and SQL queries. -To create a custom format user has to implement Flink's `SerializationSchema` and `SerializationFormatFactory` interfaces and register custom format factory along other factories in -`resources/META-INF.services/org.apache.flink.table.factories.Factory` file. This is common Flink mechanism for providing custom implementations for various factories. - -The most flexible query creator is the [GenericJsonAndUrlQueryCreator](src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreator.java) -which allows column content to be mapped as URL, path, body and query parameter request values; it supports -POST, PUT and GET operations. This query creator allows you to issue json requests without needing to code -your own custom http connector. The mappings from columns to the json request are supplied in the query creator configuration -parameters `gid.connector.http.request.query-param-fields`, `gid.connector.http.request.body-fields` and `gid.connector.http.request.url-map`. - -In order to use custom format, user has to specify option `'lookup-request.format' = 'customFormatName'`, where `customFormatName` is the identifier of custom format factory. - -Additionally, it is possible to pass query format options from table's DDL. -This can be done by using option like so: `'lookup-request.format.customFormatName.customFormatProperty' = 'propertyValue'`, for example -`'lookup-request.format.customFormatName.fail-on-missing-field' = 'true'`. - -It is important that `customFormatName` part match `SerializationFormatFactory` identifier used for custom format implementation. 
-In this case, the `fail-on-missing-field` will be passed to `SerializationFormatFactory::createEncodingFormat( -DynamicTableFactory.Context context, ReadableConfig formatOptions)` method in `ReadableConfig` object. - -With default configuration, Flink-Json format is used for `GenericGetQueryCreator`, all options defined in [json-format](https://nightlies.apache.org/flink/flink-docs-master/docs/connectors/table/formats/json/) -can be passed through table DDL. For example `'lookup-request.format.json.fail-on-missing-field' = 'true'`. In this case, format identifier is `json`. - -#### Timeouts -Lookup Source is guarded by two timeout timers. First one is specified by Flink's AsyncIO operator that executes `AsyncTableFunction`. -The default value of this timer is set to 3 minutes and can be changed via `table.exec.async-lookup.timeout` [option](https://nightlies.apache.org/flink/flink-docs-master/docs/dev/table/config/#table-exec-async-lookup-timeout). - -The second one is set per individual HTTP requests by HTTP client. Its default value is set currently to 30 seconds and can be changed via `gid.connector.http.source.lookup.request.timeout` option. - -Flink's current implementation of `AsyncTableFunction` does not allow specifying custom logic for handling Flink AsyncIO timeouts as it is for Java API. -Because of that, if AsyncIO timer passes, Flink will throw TimeoutException which will cause job restart. - -#### Retries (Lookup source) -Lookup source handles auto-retries for two scenarios: -1. IOException occurs (e.g. temporary network outage) -2. The response contains a HTTP error code that indicates a retriable error. These codes are defined in the table configuration (see `gid.connector.http.source.lookup.retry-codes`). -Retries are executed silently, without restarting the job. After reaching max retries attempts (per request) operation will fail and restart job. 
- -Notice that HTTP codes are categorized into into 3 groups: -- successful responses - response is returned immediately for further processing -- temporary errors - request will be retried up to the retry limit -- error responses - unexpected responses are not retried and will fail the job. Any HTTP error code which is not configured as successful or temporary error is treated as an unretriable error. - -##### Retry strategy -User can choose retry strategy type for source table: -- fixed-delay - http request will be re-sent after specified delay. -- exponential-delay - request will be re-sent with exponential backoff strategy, limited by `lookup.max-retries` attempts. The delay for each retry is calculated as the previous attempt's delay multiplied by the backoff multiplier (parameter `gid.connector.http.source.lookup.retry-strategy.exponential-delay.backoff-multiplier`) up to `gid.connector.http.source.lookup.retry-strategy.exponential-delay.max-backoff`. The initial delay value is defined in the table configuration as `gid.connector.http.source.lookup.retry-strategy.exponential-delay.initial-backoff`. - - -#### Lookup multiple results - -Typically, join can return zero, one or more results. What is more, there are lots of possible REST API designs and -pagination methods. Currently, the connector supports only two simple approaches (`gid.connector.http.source.lookup.result-type`): - -- `single-value` - REST API returns single object. -- `array` - REST API returns array of objects. Pagination is not supported yet. - -Please be informed that the mechanism will be enhanced in the future. See [HTTP-118](https://github.com/getindata/flink-http-connector/issues/118). 
- -### HTTP Sink -The following example shows the minimum Table API example to create a [HttpDynamicSink](src/main/java/com/getindata/connectors/http/internal/table/HttpDynamicSink.java) that writes JSON values to an HTTP endpoint using POST method, assuming Flink has JAR of [JSON serializer](https://nightlies.apache.org/flink/flink-docs-release-1.15/docs/connectors/table/formats/json/) installed: - -```roomsql -CREATE TABLE http ( - id bigint, - some_field string -) WITH ( - 'connector' = 'http-sink', - 'url' = 'http://example.com/myendpoint', - 'format' = 'json' -) -``` - -Then use `INSERT` SQL statement to send data to your HTTP endpoint: - -```roomsql -INSERT INTO http VALUES (1, 'Ninette'), (2, 'Hedy') -``` - -Due to the fact that `HttpSink` sends bytes inside HTTP request's body, one can easily swap `'format' = 'json'` for some other [format](https://nightlies.apache.org/flink/flink-docs-release-1.15/docs/connectors/table/formats/overview/). - -Other examples of usage of the Table API can be found in [some tests](src/test/java/com/getindata/connectors/http/table/HttpDynamicSinkInsertTest.java). - -### Request submission -Starting from version 0.10 HTTP Sink by default submits events in batch. Before version 0.10 the default and only submission type was `single`. -This is a breaking compatibility change. - -The submission mode can be changed using `gid.connector.http.sink.writer.request.mode` property using `single` or `batch` as property value. - -#### Batch submission mode -In batch mode, a number of events (processed elements) will be batched and submitted in one HTTP request. -In this mode, HTTP PUT/POST request's body contains a Json array, where every element of this array represents -individual event. 
- -An example of Http Sink batch request body containing data for three events: -```json -[ - { - "id": 1, - "first_name": "Ninette", - "last_name": "Clee", - "gender": "Female", - "stock": "CDZI", - "currency": "RUB", - "tx_date": "2021-08-24 15:22:59" - }, - { - "id": 2, - "first_name": "Rob", - "last_name": "Zombie", - "gender": "Male", - "stock": "DGICA", - "currency": "GBP", - "tx_date": "2021-10-25 20:53:54" - }, - { - "id": 3, - "first_name": "Adam", - "last_name": "Jones", - "gender": "Male", - "stock": "DGICA", - "currency": "PLN", - "tx_date": "2021-10-26 20:53:54" - } -] -``` - -By default, batch size is set to 500 which is the same as Http Sink's `maxBatchSize` property and has value of 500. -The `maxBatchSize' property sets maximal number of events that will by buffered by Flink runtime before passing it to Http Sink for processing. +Prerequisites: -In order to change submission batch size use `gid.connector.http.sink.request.batch.size` property. For example: - -Streaming API: -```java -HttpSink.builder() - .setEndpointUrl("http://example.com/myendpoint") - .setElementConverter( - (s, _context) -> new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8))) - .setProperty("gid.connector.http.sink.request.batch.size", "50") - .build(); -``` -SQL: -```roomsql -CREATE TABLE http ( - id bigint, - some_field string -) WITH ( - 'connector' = 'http-sink', - 'url' = 'http://example.com/myendpoint', - 'format' = 'json', - 'gid.connector.http.sink.request.batch.size' = '50' -) -``` - -#### Single submission mode -In this mode every processed event is submitted as individual HTTP POST/PUT request. 
- -Streaming API: -```java -HttpSink.builder() - .setEndpointUrl("http://example.com/myendpoint") - .setElementConverter( - (s, _context) -> new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8))) - .setProperty("gid.connector.http.sink.writer.request.mode", "single") - .build(); -``` -SQL: -```roomsql -CREATE TABLE http ( - id bigint, - some_field string -) WITH ( - 'connector' = 'http-sink', - 'url' = 'http://example.com/myendpoint', - 'format' = 'json', - 'gid.connector.http.sink.writer.request.mode' = 'single' -) -``` - -#### Http headers -It is possible to set HTTP headers that will be added to HTTP request send by sink connector. -Headers are defined via property key `gid.connector.http.sink.header.HEADER_NAME = header value` for example: -`gid.connector.http.sink.header.X-Content-Type-Options = nosniff`. -Properties can be set via Sink builder or Property object: -```java -HttpSink.builder() - .setEndpointUrl("http://example.com/myendpoint") - .setElementConverter( - (s, _context) -> new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8))) - .setProperty("gid.connector.http.sink.header.X-Content-Type-Options", "nosniff") - .build(); -``` -or - -```java -Properties properties = Properties(); -properties.setProperty("gid.connector.http.sink.header.X-Content-Type-Options", "nosniff"); - -HttpSink.builder() - .setEndpointUrl("http://example.com/myendpoint") - .setElementConverter( - (s, _context) -> new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8))) - .setProperties(properties) - .build(); -``` - -In Table/SQL API, headers can be set using http sink table DDL. 
In example below, HTTP request done for `http` table will contain three headers: -- `Origin` -- `X-Content-Type-Options` -- `Content-Type` - -```roomsql -CREATE TABLE http ( - id bigint, - some_field string -) WITH ( - 'connector' = 'http-sink', - 'url' = 'http://example.com/myendpoint', - 'format' = 'json', - 'gid.connector.http.sink.header.Origin' = '*', - 'gid.connector.http.sink.header.X-Content-Type-Options' = 'nosniff', - 'gid.connector.http.sink.header.Content-Type' = 'application/json' -) -``` - -Note that when using OIDC, it adds an `Authentication` header with the bearer token; this will override -an existing `Authorization` header specified in configuration. - -#### Custom request/response callback - -- Http Sink processes responses that it gets from the HTTP endpoint along their respective requests. One can customize the -behaviour of the additional stage of processing done by Table API Sink by implementing -[HttpPostRequestCallback](src/main/java/com/getindata/connectors/http/HttpPostRequestCallback.java) and -[HttpPostRequestCallbackFactory](src/main/java/com/getindata/connectors/http/HttpPostRequestCallbackFactory.java) -interfaces. Custom implementations of `HttpPostRequestCallbackFactory` can be registered along other factories in -`resources/META-INF/services/org.apache.flink.table.factories.Factory` file and then referenced by their identifiers in -the HttpSink DDL property field `gid.connector.http.sink.request-callback`. - - For example, one can create a class `CustomHttpSinkPostRequestCallbackFactory` with a unique identifier, say `rest-sink-logger`, -that implements interface `HttpPostRequestCallbackFactory` to create a new instance of a custom callback -`CustomHttpSinkPostRequestCallback`. 
This factory can be registered along other factories by appending the fully-qualified name -of class `CustomHttpSinkPostRequestCallbackFactory` in `resources/META-INF/services/org.apache.flink.table.factories.Factory` file -and then reference identifier `rest-sink-logger` in the HttpSink DDL property field `gid.connector.http.sink.request-callback`. - - A default implementation that logs those pairs as *INFO* level logs using Slf4j -([Slf4jHttpPostRequestCallback](src/main/java/com/getindata/connectors/http/internal/table/sink/Slf4jHttpPostRequestCallback.java)) -is provided. - - -- Http Lookup Source processes responses that it gets from the HTTP endpoint along their respective requests. One can customize the -behaviour of the additional stage of processing done by Table Function API by implementing -[HttpPostRequestCallback](src/main/java/com/getindata/connectors/http/HttpPostRequestCallback.java) and -[HttpPostRequestCallbackFactory](src/main/java/com/getindata/connectors/http/HttpPostRequestCallbackFactory.java) -interfaces. - - For example, one can create a class `CustomHttpLookupPostRequestCallbackFactory` with a unique identifier, say `rest-lookup-logger`, -that implements interface `HttpPostRequestCallbackFactory` to create a new instance of a custom callback -`CustomHttpLookupPostRequestCallback`. This factory can be registered along other factories by appending the fully-qualified name -of class `CustomHttpLookupPostRequestCallbackFactory` in `resources/META-INF/services/org.apache.flink.table.factories.Factory` file -and then reference identifier `rest-lookup-logger` in the HTTP lookup DDL property field `gid.connector.http.source.lookup.request-callback`. - - A default implementation that logs those pairs as *INFO* level logs using Slf4j -([Slf4JHttpLookupPostRequestCallback](src/main/java/com/getindata/connectors/http/internal/table/lookup/Slf4JHttpLookupPostRequestCallback.java)) -is provided. 
- - -## HTTP status code handler -### Sink table -You can configure a list of HTTP status codes that should be treated as errors for HTTP sink table. -By default all 400 and 500 response codes will be interpreted as error code. - -This behavior can be changed by using below properties in table definition (DDL) or passing it via `setProperty' method from Sink's builder. The property name are: -- `gid.connector.http.sink.error.code` used to defined HTTP status code value that should be treated as error for example 404. -Many status codes can be defined in one value, where each code should be separated with comma, for example: -`401, 402, 403`. User can use this property also to define a type code mask. In that case, all codes from given HTTP response type will be treated as errors. -An example of such a mask would be `3XX, 4XX, 5XX`. In this case, all 300s, 400s and 500s status codes will be treated as errors. -- `gid.connector.http.sink.error.code.exclude` used to exclude a HTTP code from error list. - Many status codes can be defined in one value, where each code should be separated with comma, for example: - `401, 402, 403`. In this example, codes 401, 402 and 403 would not be interpreted as error codes. - -### Source table -The source table categorizes HTTP responses into three groups based on status codes: -- Retry codes (`gid.connector.http.source.lookup.retry-codes`): -Responses in this group indicate a temporary issue (it can be e.g., HTTP 503 Service Unavailable). When such a response is received, the request should be retried. -- Success codes (`gid.connector.http.source.lookup.success-codes`): -These are expected responses that should be processed by table function. -- Ignored responses (`gid.connector.http.source.lookup.ignored-response-codes`): -Successful response, but its content will be ignored. For example, an HTTP 404 Not Found response is valid and indicates that the requested item does not exist, so its content can be ignored. 
-- Error codes: -Any response code that is not classified as a retry or success code falls into this category. Receiving such a response will result in a job failure. - - -Above parameters support whitelisting and blacklisting. A sample configuration may look like this: -`2XX,404,!203` - meaning all codes from group 2XX (200-299), with 404 and without 203 ('!' character). Group blacklisting e.g. !2XX is not supported. - -The same format is used in parameter `gid.connector.http.source.lookup.retry-codes`. +* Unix-like environment (we use Linux, Mac OS X) +* Git +* Maven (we recommend version 3.8.6) +* Java 11 -Example with explanation: -```roomsql -CREATE TABLE [...] -WITH ( - [...], - 'gid.connector.http.source.lookup.success-codes' = '2XX', - 'gid.connector.http.source.lookup.retry-codes' = '5XX,!501,!505,!506', - 'gid.connector.http.source.lookup.ignored-response-codes' = '404' -) ``` -All 200s codes and 404 are considered as successful (`success-codes`, `ignored-response-codes`). These responses won't cause retry or job failure. 404 response is listed in `ignored-response-codes` parameter, what means content body will be ignored. Http with 404 code will produce just empty record. -When server returns response with 500s code except 501, 505 and 506 then connector will re-send request based on configuration in `gid.connector.http.source.lookup.retry-strategy` parameters. By default it's fixed-delay with 1 second delay, up to 3 times per request (parameter `lookup.max-retries`). After exceeding max-retries limit the job will fail. -A response with any other code than specified in params `success-codes` and `retry-codes` e.g. 400, 505, 301 will cause job failure. - - -```roomsql -CREATE TABLE [...] 
-WITH ( - [...], - 'gid.connector.http.source.lookup.success-codes' = '2XX', - 'gid.connector.http.source.lookup.retry-codes' = '', - 'gid.connector.http.source.lookup.ignored-response-codes' = '1XX,3XX,4XX,5XX' -) +git clone https://github.com/apache/flink-connector-http.git +cd flink-connector-http +mvn clean package -DskipTests ``` -In this configuration, all HTTP responses are considered successful because the sets `success-codes` and `ignored-response-codes` together cover all possible status codes. As a result, no retries will be triggered based on HTTP response codes. However, only responses with status code 200 will be parsed and processed by the Flink operator. Responses with status codes in the 1xx, 3xx, 4xx, and 5xx ranges are classified under `ignored-response-codes`. -Note that retries remain enabled and will still occur on IOException. -To disable retries, set `'lookup.max-retries' = '0'`. - - - -## TLS (more secure replacement for SSL) and mTLS support - -Both Http Sink and Lookup Source connectors support HTTPS communication using TLS 1.2 and mTLS. -To enable Https communication simply use `https` protocol in endpoint's URL. - -To specify certificate(s) to be used by the server, use `gid.connector.http.security.cert.server` connector property; -the value is a comma separated list of paths to certificate(s), for example you can use your organization's CA -Root certificate, or a self-signed certificate. - -Note that if there are no security properties for a `https` url then, the JVMs default certificates are -used - allowing use of globally recognized CAs without the need for configuration. - -You can also configure the connector to use mTLS. For this simply use `gid.connector.http.security.cert.client` -and `gid.connector.http.security.key.client` connector properties to specify paths to the certificate and -private key. The key MUST be in `PCKS8` format. Both PEM and DER keys are -allowed. 
- -All properties can be set via Sink's builder `.setProperty(...)` method or through Sink and Source table DDL. - -For non production environments it is sometimes necessary to use Https connection and accept all certificates. -In this special case, you can configure connector to trust all certificates without adding them to keystore. -To enable this option use `gid.connector.http.security.cert.server.allowSelfSigned` property setting its value to `true`. - -## Basic Authentication -The connector supports Basic Authentication using a HTTP `Authorization` header. -The header value can be set via properties, similarly as for other headers. The connector converts the passed value to Base64 and uses it for the request. -If the used value starts with the prefix `Basic`, or `gid.connector.http.source.lookup.use-raw-authorization-header` -is set to `'true'`, it will be used as header value as is, without any extra modification. - -## OIDC Bearer Authentication -The connector supports Bearer Authentication using a HTTP `Authorization` header. The [OAuth 2.0 rcf](https://datatracker.ietf.org/doc/html/rfc6749) mentions [Obtaining Authorization](https://datatracker.ietf.org/doc/html/rfc6749#section-4) -and an authorization grant. OIDC makes use of this [authorisation grant](https://datatracker.ietf.org/doc/html/rfc6749#section-1.3) in a [Token Request](https://openid.net/specs/openid-connect-core-1_0.html#TokenRequest) by including a [OAuth grant type](https://oauth.net/2/grant-types/) and associated properties, the response is the [token response](https://openid.net/specs/openid-connect-core-1_0.html#TokenResponse). - -If you want to use this authorization then you should supply the `Token Request` body in `application/x-www-form-urlencoded` encoding -in configuration property `gid.connector.http.security.oidc.token.request`. See [grant extension](https://datatracker.ietf.org/doc/html/rfc6749#section-4.5) for -an example of a customised grant type token request. 
The supplied `token request` will be issued to the -[token end point](https://datatracker.ietf.org/doc/html/rfc6749#section-3.2), whose url should be supplied in configuration property -`gid.connector.http.security.oidc.token.endpoint.url`. The returned `access token` is then cached and used for subsequent requests; if the token has expired then - a new one is requested. There is a property `gid.connector.http.security.oidc.token.expiry.reduction`, that defaults to 1 second; new tokens will -be requested if the current time is later than the cached token expiry time minus `gid.connector.http.security.oidc.token.expiry.reduction`. -### Restrictions at this time -* No authentication is applied to the token request. -* The processing does not use the refresh token if it present. +The resulting jars can be found in the `target` directory of the respective module. -## Table API Connector Options -### HTTP TableLookup Source +## Developing Flink -| Option | Required | Description/Value | -|--------------------------------------------------------------------------------------|----------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| connector | required | The Value should be set to _rest-lookup_ | -| format | required | Flink's format name that should be used to decode REST response, Use `json` for a typical REST endpoint. | -| url | required | The base URL that should be use for GET requests. 
For example _http://localhost:8080/client_ | -| asyncPolling | optional | true/false - determines whether Async Polling should be used. Mechanism is based on Flink's Async I/O. | -| lookup-method | optional | GET/POST/PUT (and any other) - determines what REST method should be used for lookup REST query. If not specified, `GET` method will be used. | -| lookup.cache | optional | Enum possible values: `NONE`, `PARTIAL`. The cache strategy for the lookup table. Currently supports `NONE` (no caching) and `PARTIAL` (caching entries on lookup operation in external API). | -| lookup.partial-cache.max-rows | optional | The max number of rows of lookup cache, over this value, the oldest rows will be expired. `lookup.cache` must be set to `PARTIAL` to use this option. See the following Lookup Cache section for more details. | -| lookup.partial-cache.expire-after-write | optional | The max time to live for each rows in lookup cache after writing into the cache. Specify as a [Duration](https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/deployment/config/#duration). `lookup.cache` must be set to `PARTIAL` to use this option. See the following Lookup Cache section for more details. | -| lookup.partial-cache.expire-after-access | optional | The max time to live for each rows in lookup cache after accessing the entry in the cache. Specify as a [Duration](https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/deployment/config/#duration). `lookup.cache` must be set to `PARTIAL` to use this option. See the following Lookup Cache section for more details. | -| lookup.partial-cache.cache-missing-key | optional | This is a boolean that defaults to true. Whether to store an empty value into the cache if the lookup key doesn't match any rows in the table. `lookup.cache` must be set to `PARTIAL` to use this option. See the following Lookup Cache section for more details. | -| lookup.max-retries | optional | The max retry times if the lookup failed; default is 3. 
See the following Lookup Cache section for more detail. Set value 0 to disable retries. | -| gid.connector.http.lookup.error.code | optional | List of HTTP status codes that should be treated as errors by HTTP Source, separated with comma. | -| gid.connector.http.lookup.error.code.exclude | optional | List of HTTP status codes that should be excluded from the `gid.connector.http.lookup.error.code` list, separated with comma. | -| gid.connector.http.security.cert.server | optional | Comma separated paths to trusted HTTP server certificates that should be added to the connectors trust store. | -| gid.connector.http.security.cert.client | optional | Path to trusted certificate that should be used by connector's HTTP client for mTLS communication. | -| gid.connector.http.security.key.client | optional | Path to trusted private key that should be used by connector's HTTP client for mTLS communication. | -| gid.connector.http.security.cert.server.allowSelfSigned | optional | Accept untrusted certificates for TLS communication. | -| gid.connector.http.security.oidc.token.request | optional | OIDC `Token Request` body in `application/x-www-form-urlencoded` encoding | -| gid.connector.http.security.oidc.token.endpoint.url | optional | OIDC `Token Endpoint` url, to which the token request will be issued | -| gid.connector.http.security.oidc.token.expiry.reduction | optional | OIDC tokens will be requested if the current time is later than the cached token expiry time minus this value. | -| gid.connector.http.source.lookup.request.timeout | optional | Sets HTTP request timeout in seconds. If not specified, the default value of 30 seconds will be used. | -| gid.connector.http.source.lookup.request.thread-pool.size | optional | Sets the size of pool thread for HTTP lookup request processing. Increasing this value would mean that more concurrent requests can be processed in the same time. If not specified, the default value of 8 threads will be used. 
| -| gid.connector.http.source.lookup.response.thread-pool.size | optional | Sets the size of pool thread for HTTP lookup response processing. Increasing this value would mean that more concurrent requests can be processed in the same time. If not specified, the default value of 4 threads will be used. | -| gid.connector.http.source.lookup.use-raw-authorization-header | optional | If set to `'true'`, uses the raw value set for the `Authorization` header, without transformation for Basic Authentication (base64, addition of "Basic " prefix). If not specified, defaults to `'false'`. | -| gid.connector.http.source.lookup.request-callback | optional | Specify which `HttpLookupPostRequestCallback` implementation to use. By default, it is set to `slf4j-lookup-logger` corresponding to `Slf4jHttpLookupPostRequestCallback`. | -| gid.connector.http.source.lookup.connection.timeout | optional | Source table connection timeout. Default - no value. | -| gid.connector.http.source.lookup.success-codes | optional | Comma separated http codes considered as success response. Use [1-5]XX for groups and '!' character for excluding. | -| gid.connector.http.source.lookup.retry-codes | optional | Comma separated http codes considered as transient errors. Use [1-5]XX for groups and '!' character for excluding. | -| gid.connector.http.source.lookup.ignored-response-codes | optional | Comma separated http codes. Content for these responses will be ignored. Use [1-5]XX for groups and '!' character for excluding. Ignored responses togater with `gid.connector.http.source.lookup.success-codes` are considered as successful. | -| gid.connector.http.source.lookup.retry-strategy.type | optional | Auto retry strategy type: fixed-delay (default) or exponential-delay. | -| gid.connector.http.source.lookup.retry-strategy.fixed-delay.delay | optional | Fixed-delay interval between retries. Default 1 second. Use with`lookup.max-retries` parameter. 
| -| gid.connector.http.source.lookup.retry-strategy.exponential-delay.initial-backoff | optional | Exponential-delay initial delay. Default 1 second. | -| gid.connector.http.source.lookup.retry-strategy.exponential-delay.max-backoff | optional | Exponential-delay maximum delay. Default 1 minute. Use with `lookup.max-retries` parameter. | -| gid.connector.http.source.lookup.retry-strategy.exponential-delay.backoff-multiplier | optional | Exponential-delay multiplier. Default value 1.5 | -| gid.connector.http.source.lookup.proxy.host | optional | Specify the hostname of the proxy. | -| gid.connector.http.source.lookup.proxy.port | optional | Specify the port of the proxy. | -| gid.connector.http.source.lookup.proxy.username | optional | Specify the username used for proxy authentication. | -| gid.connector.http.source.lookup.proxy.password | optional | Specify the password used for proxy authentication. | -| gid.connector.http.request.query-param-fields | optional | Used for the `GenericJsonAndUrlQueryCreator` query creator. The names of the fields that will be mapped to query parameters. The parameters are separated by semicolons, such as `param1;param2`. | -| gid.connector.http.request.body-fields | optional | Used for the `GenericJsonAndUrlQueryCreator` query creator. The names of the fields that will be mapped to the body. The parameters are separated by semicolons, such as `param1;param2`. | | -| gid.connector.http.request.url-map | optional | Used for the `GenericJsonAndUrlQueryCreator` query creator. The map of insert names to column names used as url segments. Parses a string as a map of strings. For example if there are table columns called `customerId` and `orderId`, then specifying value `customerId:cid1,orderID:oid` and a url of https://myendpoint/customers/{cid}/orders/{oid} will mean that the url used for the lookup query will dynamically pickup the values for `customerId`, `orderId` and use them in the url. 
The expected format of the map is: `key1:value1,key2:value2`. | +The Flink committers use IntelliJ IDEA to develop the Flink codebase. +We recommend IntelliJ IDEA for developing projects that involve Scala code. -### HTTP Sink +Minimal requirements for an IDE are: +* Support for Java and Scala (also mixed projects) +* Support for Maven with Java and Scala -| Option | Required | Description/Value | -|---------------------------------------------------------|----------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| connector | required | Specify what connector to use. For HTTP Sink it should be set to _'http-sink'_. | -| format | required | Specify what format to use. | -| url | required | The base URL that should be use for HTTP requests. For example _http://localhost:8080/client_. | -| insert-method | optional | Specify which HTTP method to use in the request. The value should be set either to `POST` or `PUT`. | -| sink.batch.max-size | optional | Maximum number of elements that may be passed in a batch to be written downstream. | -| sink.requests.max-inflight | optional | The maximum number of in flight requests that may exist, if any more in flight requests need to be initiated once the maximum has been reached, then it will be blocked until some have completed. | -| sink.requests.max-buffered | optional | Maximum number of buffered records before applying backpressure. | -| sink.flush-buffer.size | optional | The maximum size of a batch of entries that may be sent to the HTTP endpoint measured in bytes. | -| sink.flush-buffer.timeout | optional | Threshold time in milliseconds for an element to be in a buffer before being flushed. | -| gid.connector.http.sink.request-callback | optional | Specify which `HttpPostRequestCallback` implementation to use. 
By default, it is set to `slf4j-logger` corresponding to `Slf4jHttpPostRequestCallback`. | -| gid.connector.http.sink.error.code | optional | List of HTTP status codes that should be treated as errors by HTTP Sink, separated with comma. | -| gid.connector.http.sink.error.code.exclude | optional | List of HTTP status codes that should be excluded from the `gid.connector.http.sink.error.code` list, separated with comma. | -| gid.connector.http.security.cert.server | optional | Path to trusted HTTP server certificate that should be add to connectors key store. More than one path can be specified using `,` as path delimiter. | -| gid.connector.http.security.cert.client | optional | Path to trusted certificate that should be used by connector's HTTP client for mTLS communication. | -| gid.connector.http.security.key.client | optional | Path to trusted private key that should be used by connector's HTTP client for mTLS communication. | -| gid.connector.http.security.cert.server.allowSelfSigned | optional | Accept untrusted certificates for TLS communication. | -| gid.connector.http.sink.request.timeout | optional | Sets HTTP request timeout in seconds. If not specified, the default value of 30 seconds will be used. | -| gid.connector.http.sink.writer.thread-pool.size | optional | Sets the size of pool thread for HTTP Sink request processing. Increasing this value would mean that more concurrent requests can be processed in the same time. If not specified, the default value of 1 thread will be used. | -| gid.connector.http.sink.writer.request.mode | optional | Sets Http Sink request submission mode. Two modes are available to select, `single` and `batch` which is the default mode if option is not specified. | -| gid.connector.http.sink.request.batch.size | optional | Applicable only for `gid.connector.http.sink.writer.request.mode = batch`. Sets number of individual events/requests that will be submitted as one HTTP request by HTTP sink. 
The default value is 500 which is same as HTTP Sink `maxBatchSize` | +### IntelliJ IDEA +The IntelliJ IDE supports Maven out of the box and offers a plugin for Scala development. -## Lookup Cache -The HTTP Client connector can be used in lookup join as a lookup source (also known as a dimension table). +* IntelliJ download: [https://www.jetbrains.com/idea/](https://www.jetbrains.com/idea/) +* IntelliJ Scala Plugin: [https://plugins.jetbrains.com/plugin/?id=1347](https://plugins.jetbrains.com/plugin/?id=1347) -By default, the lookup cache is not enabled. You can enable it by setting `lookup.cache` to `PARTIAL`. -The scope of the cache is per job, so long-running jobs can benefit from this caching. +Check out our [Setting up IntelliJ](https://nightlies.apache.org/flink/flink-docs-master/flinkDev/ide_setup.html#intellij-idea) guide for details. -The lookup cache is used to improve the performance of temporal joins. By default, the lookup cache is not enabled, -so all the API requests are sent on the network. When the lookup cache is enabled, Flink looks in the cache first, -and only sends requests on the network when there is no cached value, then the cache is updated with the returned rows. -The oldest rows in this cache are expired when the cache hits the max cached rows `lookup.partial-cache.max-rows` -or when the row exceeds the max time to live specified by `lookup.partial-cache.expire-after-write` -or `lookup.partial-cache.expire-after-access`. +## Support -By default, flink caches the empty query result for the primary key. You can toggle this behaviour by setting -`lookup.partial-cache.cache-missing-key` to false. +Don’t hesitate to ask! +Contact the developers and community on the [mailing lists](https://flink.apache.org/community.html#mailing-lists) if you need any help. -## Build and deployment -To build the project locally you need to have `maven 3` and Java 11+.
+[Open an issue](https://issues.apache.org/jira/browse/FLINK) if you found a bug in Flink. -Project build command: `mvn package`.
-Detailed test report can be found under `target/site/jacoco/index.xml`. - -## Demo application -**Note**: This demo works only for Flink-1.15x. - -You can test this connector using simple mock http server provided with this repository and Flink SQL-client. -The mock server can be started from IDE (currently only this way) by running `HttpStubApp::main` method. -It will start HTTP server listening on `http://localhost:8080/client` - -Steps to follow: -- Run Mock HTTP server from `HttpStubApp::main` method. -- Start your Flink cluster, for example as described under https://nightlies.apache.org/flink/flink-docs-release-1.16/docs/try-flink/local_installation/ -- Start Flink SQL Client [6] by calling: `./bin/sql-client.sh -j flink-http-connector-1.0-SNAPSHOT.jar` -- Execute SQL statements: -Create Data Stream source Table: -```roomsql -CREATE TABLE Orders (id STRING, id2 STRING, proc_time AS PROCTIME() -) WITH ( -'connector' = 'datagen', -'rows-per-second' = '1', -'fields.id.kind' = 'sequence', -'fields.id.start' = '1', -'fields.id.end' = '120', -'fields.id2.kind' = 'sequence', -'fields.id2.start' = '2', -'fields.id2.end' = '120' -); -``` - -Create Http Connector Lookup Table: -```roomsql -CREATE TABLE Customers ( - id STRING, - id2 STRING, - msg STRING, - uuid STRING, - details ROW< - isActive BOOLEAN, - nestedDetails ROW< - balance STRING - > - > -) WITH ( -'connector' = 'rest-lookup', -'format' = 'json', -'url' = 'http://localhost:8080/client', -'asyncPolling' = 'true' -); -``` - -Submit SQL Select query to join both tables: -```roomsql -SELECT o.id, o.id2, c.msg, c.uuid, c.isActive, c.balance FROM Orders AS o JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c ON o.id = c.id AND o.id2 = c.id2; -``` - -As a result, you should see a table with joined records like so: -![join-result](docs/JoinTable.PNG) - -The `msg` column shows parameters used with REST call for given JOIN record. 
- -## Implementation -### HTTP Source -Implementation of an HTTP source connector is based on Flink's `TableFunction` and `AsyncTableFunction` classes. -To be more specific we are using a `LookupTableSource`. Unfortunately Flink's new unified source interface [2] cannot be used for this type of source. -Issue was discussed on Flink's user mailing list - https://lists.apache.org/thread/tx2w1m15zt5qnvt924mmbvr7s8rlyjmw - -Implementation of an HTTP Sink is based on Flink's `AsyncSinkBase` introduced in Flink 1.15 [3, 4]. - -#### Http Response to Table schema mapping -The mapping from Http Json Response to SQL table schema is done via Flink's Json Format [5]. +## Documentation -## Breaking changes -- Version 0.10 - - Http Sink submission mode changed from single to batch. From now, body of HTTP POUT/POST request will contain a Json array. - - Changed API for public HttpSink builder. The `setHttpPostRequestCallback` expects a `PostRequestCallback` - of generic type [HttpRequest](src/main/java/com/getindata/connectors/http/internal/sink/httpclient/HttpRequest.java) - instead `HttpSinkRequestEntry`. -- Version 0.20 - - Http source table parameters: `gid.connector.http.source.lookup.error.code` and `gid.connector.http.source.lookup.error.code.exclude` were removed. These parameters described http status codes which was silently ignored by source lookup table (logged only). it's not recommended to ignore all error response but it's still possible. To do this set all codes as success: `'gid.connector.http.source.lookup.success-codes' = '2XX'` with ignore body from the others responses than 200s: `'gid.connector.http.source.lookup.ignored-response-codes' = '1XX,3XX,4XX,5XX'`. You can still exclude some error codes marking it as transition errors - `gid.connector.http.source.lookup.retry-codes`. Retry-codes have to be excluded from both `success-codes` and `ignored-response-codes`. 
- - Added dependency io.github.resilience4j:resilience4j-retry +The documentation of Apache Flink is located on the website: [https://flink.apache.org](https://flink.apache.org) +or in the `docs/` directory of the source code. -## TODO +## Fork and Contribute -### HTTP TableLookup Source -- Check other `//TODO`'s. +This is an active open-source project. We are always open to people who want to use the system or contribute to it. +Contact us if you are looking for implementation tasks that fit your skills. +This article describes [how to contribute to Apache Flink](https://flink.apache.org/contributing/how-to-contribute.html). -### HTTP Sink -- Make `HttpSink` retry the failed requests. Currently, it does not retry those at all, only adds their count to the `numRecordsSendErrors` metric. It should be thoroughly thought over how to do it efficiently and then implemented. +## About -### -[1] https://nightlies.apache.org/flink/flink-docs-release-1.15/docs/dev/table/sql/queries/joins/#lookup-join -
-[2] https://nightlies.apache.org/flink/flink-docs-release-1.15/docs/dev/datastream/sources/ -
-[3] https://cwiki.apache.org/confluence/display/FLINK/FLIP-171%3A+Async+Sink -
-[4] https://nightlies.apache.org/flink/flink-docs-release-1.15/api/java/org/apache/flink/connector/base/sink/AsyncSinkBase.html -
-[5] https://nightlies.apache.org/flink/flink-docs-master/docs/connectors/table/formats/json/ -
-[6] https://nightlies.apache.org/flink/flink-docs-master/docs/dev/table/sqlclient/ -
+Apache Flink is an open source project of The Apache Software Foundation (ASF). +The Apache Flink project originated from the [Stratosphere](http://stratosphere.eu) research project. diff --git a/dev/README.md b/dev/README.md deleted file mode 100644 index 96f7d691..00000000 --- a/dev/README.md +++ /dev/null @@ -1,36 +0,0 @@ -# Dev README -Below are some helpful IntelliJ configurations you can set to match our coding style and standards. - -## Checkstyle -This project uses checkstyle to format Java code. If developing locally, please setup checkstyle using the following steps. - -1. Add the CheckStyle-IDEA plugin to IntelliJ. -- `Settings > Plugins > Marketplace > CheckStyle-IDEA > INSTALL`. -- Restart your IDE if prompted. - -2. Configure IntelliJ to use the `checkstyle.xml` file provided in this directory. -- Go to `Settings > Tools > Checkstyle` (this tool location may differ based on your version of IntelliJ). -- Set the version to 8.29. -- Under the `Configuration File` heading, click the `+` symbol to add our specific configuration file. -- Give our file a useful description, such as `GID Java Checks`, and provide the `connectors/dev/checkstyle.xml` path. -- Click `Next` to add the checkstyle file -- Check `Active` next to it once it has been added -- In the top right, set the Scan Scope to `Only Java sources (including tests)` - -3. Now, on the bottom tab bar, there should be a `CheckStyle` tab that lets you run Java style checks against using the `Check Project` button. - -## Java Import Order -We use the following import order in our Java files. 
Please update this in `Settings > Editor > Code Style > Java > Imports > Import Layout`: - -``` -import java.* -import javax.* - -import scala.* - -import all other imports - -import com.getindata.connectors.* -import com.getindata.connectors.internal.* -``` - \ No newline at end of file diff --git a/dev/checkstyle-suppressions.xml b/dev/checkstyle-suppressions.xml deleted file mode 100644 index 642a5503..00000000 --- a/dev/checkstyle-suppressions.xml +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - - diff --git a/dev/checkstyle.xml b/dev/checkstyle.xml deleted file mode 100644 index ffd226b6..00000000 --- a/dev/checkstyle.xml +++ /dev/null @@ -1,218 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/docs/JoinTable.PNG b/docs/JoinTable.PNG deleted file mode 100644 index 82379d4c8431171488aef14394db81eb276c5544..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 64354 zcmeFYX;f25`}P~OZ3kNDRvSU4wjGgCkpMDj+k%RKf-;0jM2HXp1%XTix)o8GMCK`| zAczpggh4`}K|rQN8G-~zm_merfrJo32%H`0exB#Q)^pzTp0mz}^WkJIR@ks>SJl0D z-Sw-huKo0?wZ-mT2X=u#pxu`){%#8bZHIwCTP${b5B!A`T+ar+q=IZMegl<|m8OA% z@1Ww;$0wfQKs)iK%d<^Scc zh-Fn7nDBpj%0Jg&?F=_4X8vDYaO3QpW2-_&L-;xd=+r--_!bY%e|`snjHRLo|2~@k zPKmnZUx$r*?>S0Kj+Qe^+|Aa-0>;b)`(0FB)KHz7DG5_TtG6SCYvriU5wCQL_$>Cs z1dIJ{WUHhE{_m3dj8w|VSU-y?k}nBvb2B`FwxRi!E!V07s4QFHD5<^rB(mU zUM^4cj5&$*_s8lieWuge$K|91X&5(gtQ;I57lC`TOCXJ}EGC9e& zD$C$@h0NvD^F;p5;MP}Iw(E+Nv+)m_yzogfKDCYOIGzI^-Thh!g zf7h3DEDAymD4|7NZ5bPS)C^(NWkn^7{SvZ2HP9eKxphwV4%1K^u>hZ^*_$TP?UHJb zb$171gB!f$`UP(=>2R8$9 zwoK`C`(Iv|U#Mr!y2_RwRk9b-nT9b}JUY%(&#!iR%xQJEc#JLF1iyKspL0+}jWL>L z6Q042!eY7YH)J#(UW$4iG9vek8}NCi-uPlQ!;d-QwBKk&S5gN^aq~edOO(mtZgmz# zoKS|8MA^w26kBn6WWOv$g{Yx11}8!mR52*qYg3)Pm|LM=>y-9zWS6ph$TJts@cbkj 
zYuSbjgrl?4>vspD2nSW8xK6$PlP$)hRorj5XqQ|iDM>}!H#-yYtIBNqtEAvhH&n*E z4f=xWJA>rYWri|*?kf|&G4|7SxkHI3!v;N=B{7Gf84hZ$)1Z^ux;Dbe15q)$van3A zFgwFylN6_P?L*q$Ce8>BF8Y@iy&kZniqX>Xjb{P0ukIm%Syp2F=!_C+qY_J_o9d-b zQTMhDc+7Pl&{7&VY*O!Nru_&txHH0?xx)M77>7`!$jN@-nAvgF?9s?cSy-=>Z@M+b z11EN_x2G$5y-vSjEvaKZYI6*Y21swe!u%sC@MZ=k_oUHTyQZ!)#ayDL5iz#GBZGP5 z(awQ)(+et8t?iLaBczfHjCoe4Zt61c+-2tg;a$k~#Fc|S4W4Iil#>X}G*3k2sS%cC zyCuPuCh1XF`o?fIHYk?ly$GhhaS%-^St;ge^Q)o2<0(-)T^g*M?vT`tB9cT4WyJ;$ z61P?lYsHAl>}4iac)!L)5%etwS81c`z%(i43wRl~G2XwnZsw^6 zYT*y(S9rnITe~bsMSqFbtDjXdt3BfzcCY5&QIuRzCuTE4d`-a^*{(T#5vja4;ksI| z*N>fux*rt0%IhuFB-n7)OE>b~&A;=Bq$CIg-y#FscxpC|Mt5!oV&ec5acp$6rJIM? zh|Ov?kKlh6s8BKPrr~QMPDB5|4cSmLhFK~lJ}17kC{R{%PJ|HKASan}Fa_)ZShkP| zLC$9kM-g6~V@9`+^0>Ii!L=AM8jW6aAtj?^d0EX6r?lR4lGemrf~5z>E~!)H8L2sx z`1m&Eo2DyaiPWZIb>QFgDaArT9)deR_BETjO<-C~7}z$DQ|gT2@^6)>=TyI6_;8XL zWt+`8LD8^akOceQ#3csEy9yt<-ShyplG>&?^^uaA zvtAZ^rSMyaN_ADj;47noY&Yr3lJ9`5XH+@GaFBu3#gs^!cNm)T_(A>OG;f*mbaMIA zVGdZvknSN?^4QCe^^ymTuCXXh!K@B-Z@$NBnZ4zMs5-q*TJgrkyO~FJ^9&n4OD2`k z>Q4jve994+hZBY0@Rx2O=S5^(q>}@XYDc@Bc%0A8^-c3=kv02IQC4JZiS+jaKXz~J zd*D{1U5Bc-)a7c4)5TIHD&FC4Gdp|1>t!K^=j80it}EVo8?#)pDD%{kP3#U$_6SKP z{(Nh#h-@!)In8c;HRyEDy< z3dKnq!+y0uZb5L)Q!+!mhz?dZ=#mQaXTiuA{SSrxgN31mtyFW`A2Z)FX!fa<*V;>u zbs5@ylWJT?2I9Y3c)Y6j_vR7 z{=5B)dXZKBv{Lstw(?v$TJz{Y2D`LnIyezpKk+dYHFDY`EckeH1v=$ae^TaF%&~!u zrt3QLI5ktxb&t)>ny0US!(NgxQ<`=%`VJR-4(;G+HF=3LzQD4wH1HH(v!ZCaS73gs z3p~~Ft;U*GBHLi;)#uas!e1>hiC(6jUph%l%x%AJaCe?_j25Q z8ggR7aqxu)tFGg>GoKWpdVY3rqxe_p+CcO`4Ve}Bj&hN>Z8MnqT%1)0-(D#A0AXhz zjdo_ewpXIwrw8|rioiaeqw~atH3Yjmk$o8`HmU$T_WoyF}yp2ih<0%XC=}wpHkl)g)+tYGZ8&WG4 zAGJ@MjuCs$%`w}WE=l*@tIxY(PWYp>tN+9&Q3>%m;aQL3Lgxb&J9T$SgE7^9R?mCo z{`3g5d)ztjYt;Dxulnki{%WdGckNq0W@o+sjLYw;lhM4@_djun{Hej`sN>#|u(g2u#nSH(ihFte22Zk+L)ora#(Tmv zQ;u9^|J!fo$z^NZf(M7dur>)w!v6wHp0`@D4@Rm%#XmCoa5cB&aGAA zhUHvJu)qTk?;!lvMtk*MYrd=AWV$vggmw#41of{s7GCyvJm920;o7Odgb(`ikN6d@ zo!@%03%QXQFplawKbAzMs)p}!c5rhM!*@v zo=s$E76&g$Lm0F?s5lro(r0YUpS1MEWyZ9GN7XzY#(B)?L4q}hy_VkZ0a^)V49i-z 
zIh_D)ZV7=-;J-U|rTJ+yE%TI=zU@atT;naY{kbzTu!qz-yhzkBpje(ur$^rZbr6M9zE5R!rtN+TOQw*fQ|g%RdVeB zk^N`Q+E;^s-X6QDx|AGvLpj(`pjGepBusIz=^Y>U;mu=pYFAsx*B|W0Og8`mw0f%3 z`-4XU(S_rBa|4Ic@-j1kBM|msGh|hBlh+#E^H+60 zD)mWWEB8@)=rcr~#8 zQ8Sg3)5!{xg-H>$fyc>M8dy=}dw6a+TS~3@>oq`H#JxaW>`PI_W6!}jP4n=HN;v8htsZyFS_@lA=9(%9aEvT&@jI%)QF-Tx z0m*XQ&hOMK%~Kt_WkBJS+4~a{6KodJ`Ru*4EvqJM^Csj)k(ehA6AMpZiduNiT0Wi= zHR&Azt)PZzJ=u1aB38#x65|`9m!2}eP&$#fM4Cf$3Qj?jQK}2Ioom^2Mlqp>NGhl) zq1aV4C{m=-0_GY9(_Zrb`eYMocJta`-P<_NSa`^^6Lx@3$S*$?cd`mp8Y{W4s6P@) zQuqESxveVtUR#Eti;v2Or6YDz@FZ7N|!2_tqhjRkpO_Rm* zRD-6B9ci624;S1#WqwSGs-zvXKsfpuq8t^KWW1xxs9(md4n+h}O`TS!-WA9$k&kH1 z5Z_$15ti7mNP;36+lhMVWpc;wUIC>U`hP&RnD2^cIK+tPZhPMBfr~M!hEX5Sd3c^S z+M(6F1@2BY{(WSz_)&ZL(Jfe@+B%eSP{c1wy|bguLjh1FYRdh*236TaJM3kpw;+Kx zMv$y3^VHK~&<5t2e3INZUXIml!%+=dzic6z0*_W&*UnvvdqF=`w#MQMo6c#@ry^uw z1M{cWhQB2%s-4Q8>|jX|%_xuaLwoJYHfuD5$&@v?L8j#6mLvN)mN*06{a?w~Le}pV zno^|ZHN*(|O^+j+MQT*jDXgf zgAEI<@GYx;^-%aFgaH!{&WicM{MDJ)H5M@;H(}=dWd`i!&2F0i%ul0Tz%hQ!I zuG5n{z;Nw5sG{vo_n7zFgcW?}HMx$R;uXhvGZdf zNNDB#nDT9KKOprRJdYP7h$%ZGWy8=VX~v*sMei6zrg)F^uF(KsS?fDtvzF=a>>|?n zi6=KYpvKzkVSIc*4XdL&1qQVuR$j^-&DlC>cJ<)E??^C*GAQfv`%wKb@|OyljI)W4 z+@7ke$|ON35ap;(EAF+b*W$J{2R$`6X1nBmU1xgujViSLGYElX=WYo;+tK}L6=cvL zvymR>FK$jw%Y@=)+M*qEC7^pVZ5r>|lru1T;+%6+FD1edAg1N8%a&@&r9i@-i(5bTv2j$dNfi(A&?gdqDBc0sl>N+pOk)W3Dp( z3sBcs3;~nHn>b%zDqMcQwki??1c_~%x1C%wPqT394Ga=j@P;y{O4(v=YC)~NO=*%h z5$kI7zL2nv!pFwPQ*t^(P26ncM@LRBZ2`?!e;@?0kcuGeQy>QvEc)ASJ!?69HCV-; zSQH;$n!|odM94VYMz4|3?*vfcGI14c#H&yND^7p2;-z;BXae;&JQ0g4{H(!N@WJ*^ zG0HiM_?*fP)ewVT{f-*^B5Up~v7nH^nhOKgtytB0cyBYU0)4@Wtjq4RERt5}<>4m} zkI>OR9u_Mf&ye-T@6;E?uzpg^ff#a1IfM=iwmXii5Vk*p!A{`Gfmk0ns~=R~cOzJs zKcKNw*_HiDEKPp!6TN{g^Txmpl>aq4iPO2iAy(>UMxgEuxgQ(+&L3vN(AO3(N2%9m z+N*Kawiaj5#!^~%;%5uq(jqPgnj~pBw1sykp0=pb=?U8|jzX0eD~qeEN0Ne%Q`az- z0i2ozdK7_ABp2J2(%)b78E!SQA#DjzEK-{|-g_%j{Y`hXhRR?T(GyjeTv`2trPn$) ziSo4$wCWb0Q4Nw$B`u&3IsD@77RkJ)folohIK30;i7^+03{0@PY-y07*+VnJ-zV{C z*u4Awhq8^F1;sY`_TECq4C+960_yDMjwTvy14z$Ung 
zC&8Z)I~YOm-XT-mJPf5|V8F#cbhxXI3&c7Vm>_3yqG604T$-AN;34V`)d6M?SDLb3-j=*3Tv;qk7OEsbKyq?$CC zs0jKfZasO_kNd?;EmJP-^+ICV<=VV%riM)Jtnb+2wXzy3>DnV#LUvTtVQy+K_XLU1 zy9;XfRK0;b?ld13@DHpzbW-YjjElO>UUMA%_$qT2P4TPGA6Av0$lPCVMkw)Hal9q$ zo1uLMAI=`jRwKHCAxa3NvYDp6=6&@3A~4kn;mQf67D8S87Wyt|MNW4*eClo0poSi$ zy4#mxLt84;X@7?J)nizJOGzHjo0!wJPPC$GG?BAqUZhM4HySedsV&wZ?xVh10w-C| zEP4WZe@-VZdwFV0_+S1q<%@cAFKr#g_pi@X@`W}+YfjefM_~830V5V3q!J|=BVE{G z&{-|_i+2Q!1VXzeV#IG>)XAklC3h3g0L?44$DJ9P(dLMdP^(gD*iUIgEe<<}ta+G& zMSgb?gpR9VCd4lwBm+tiqL#Alok#iwF9vkyj?Gh}@{6PR+4~uFk&|O9gh}^F=8ufz zkxu0%!jQSCTOM>NQ*8|5cGSy|GxOVc!iqCHU-{@`7_k!;Yor}tEdJ1-&P*GybHdnX zBk9ckchSR;{6W+N-=37RNTsDk^_SY3U7lVdS&qM8tHp&4h21u+?zkuIDbThNez+GG zDz)rfyx>`%6RWt#+iXdTv3(BZjt&Z)NQkLy2?Bv(>(9b0rVN)thi2jr71z37TH&5? z2qj-KpA0&|MjBE1 z!;m{<{kh25Fv0t48O-{@xFj~c>RLXfF8xKDXMbct1FH3~dc2;AXZA(PZAEAD8D&Ij zezg|SkNv!q4EFv!bG_&AeE|_x@AhavCBV8z=om42AYbug@W&ET~j_{r%21F`~=by!Z}Vex9} z^kNqQWDM*ifUMIEYo#Kx?nbQTcav#HElHPp#}!aG9nrSCYHzxc_LjL&j}r@onVu_^ z3vt*}>gk+k(p0bWR&pKHIzqnd^3%;coI3WVr&{5KJI2PdJ(~4SG>% z!=bc-ELl(SoU;3vVxOqw%@r@>1n~2CZAmb$K8{;U#ula8tj#W6St!v66QXbx+veAb zPkPrg4c?YFxrWc5SjHZbjweHqXr(CO#6v|Rm zO}dFqo^tYAqldM11bkDOWBq`zsQoEn#-2g^e37BnNqyG#4rVj-X}k|?H?B+5d)nrk zN`qO(f6e?)V9wtK1Od7xI3v1jSeQ(cv`*Pkm5_!YfqW=EE})BCj)i2setZd>g@N^uH&|Kp;rkm6uHY zdHimlpsIP(dJ`sxKRL2LU#wM{q|b0X)*(Q#A56L)ad*ADARtz%yC35n@|bo-Qz%F(#;)CpZ|d9~b8O-FS_tJX@0 z3KIO6vveaQgBLj^=Ps_-qt@lAaAVi}Jcg=20%Em&Cg-UI*G{DoPN2=qF73UTscg-& zHD#acA3b>g8Pq86#UgCFhg(-SFo^A~s|MGk@W$>1Mq3m`pW?WBoZxVM3944e0n$2R z2I#DcmuvArssJ%mn62%u$gAk-M-D(Zbs^RGfnrVL_gO&@g;todxYId+228h_?6h04 z$Kvj*n;Fnr2g9;(k{deTEyLhLa$H3@pK~5A&Q-H^;0%91uy=;@kuaXP2p>-j9ccZD zlD2L1KCmUAb36MNr7sP7--Gn(x_7l01=xT5wLCd$BtH$Dr1r7}UuyS@$ZE8u*NB6@UCfeU50=oKR5AM-qyyfu_@-y2hMUJ8uIZM?A|Fw1%Y-TUJabJ5F}2 zU{zXQM%>?_qb$m&2N<)@mGjTnyAMoZW|U4G;@MNO4u5GWh}qVL?r(eDsOv2S2ey;| z+2xytW~1YGV|v9=Ck6vrB|1!FUE+DGxu|7ix&M~GIe9oiL(T%DyQ^lqaC%NbQC|tH^ zO|654uVp|08F;k*KdHERy!BA#Nzl&qTpv9yJ2Hiwx{F0zH)O~wc*X_gtz_M8&5~<$kzlc{|CtU8 
z>;>tF^q$vD6Pave-bvmE^Yy*quy=iRKFM6u0&CpXvI7)>`r~%FB_@M%oWZ$7c@jEp z1VC|;0PRl77bx3gYOxXZ_6pm{aeIFyL~3G#g9`6PA>@zPRCbgVbhnjhKQO3@tADj> z-Ded*GWOG8iOu0NiwS(DPZ1V#vIvLl**Y44=p$}xtT9mC$zWfhLse@5l{eQG$>z=f zMBtAW8)`<>N`_&Jy0Q{z+2pq8qQ2jgk8s44zMH#G*W5oWY;ngcJ>V0Co)z#0VLw8$ zWFS7|Lr+BGubr&pz$be3eGhE6j?>Y!U%`X11LuUpl^yrYRdakas}qBJ6H61gOUAm8 z0>L|Nb$at3KDo0{!i@kVkUoG=m1>eNq;MWAa{{}?v%%*ST6JYr-?V6Ycl$_n0Rm2h znzv0L=$GYK#b4EIeExVlVk3R!Ra8-IEw}f*x~EG?Wa4u3I9PKsMQ1SJ%3O)+7irSs zu&H~qPst;GPlqjSBLmG6|0DRis*#yT-#^4^@twd z=yL2cMo3qwlc%jl5VQJlL8`|QQX07f1wrh$cb+?_==O?U{N51Hh9I+;sr3HTDdH8J z>77VzPBOXA!IG|pgV8ZR;km(8oJkLgFCsak?vno zwMd{r-a^+V#?mX3IkVYOfRgYK3)~MDus7PcPuS*GCkr}+naF>ed`!WDhq`|<)KE*s zM7k4_7I>|Syadg z4Ywonl|kV)F#3!5YkQ8aV<8i^hGvT~*FZ~ELYuEfg%rmyy|3x(_~QF|Q>Iaf>nvkp z6hY-KaJP+RC8FaKRTNkobft{(MC>~8OoNQS;0Z8oWf{KdBY*Yk!K?Z&p0&8vChWt5 zaa(MV59y~_(@r_ALu72S>6|FJMDzARMIPdAJhZ1+trvLmEcgKRyY?Cvio0(25A4vK z!y?`FT`Es4=BX#6s%kxnmPI*wYvc8GD5tPXbUbn?k6>1_4QWY2^X361%=mNPTwOTysfm_Vu?ueVHN;wMR;PkXQUGD1Z zKcVEEY&ww2U8sb{Y$>bmt`so5WBXMt;HK`Qhfh-}LAA{prW_Y$5>wAuwe z4C*=>{giQv)CYD#f@@8rE!Dd)QqGrsDzW`NU!Bq}qjAX7y?2SZ1zo2BH9*rbb+?|= zaRxoXoidS@?yYH(`)x23!ZfV|I{e1(_b!|J@>7MO>?m+CxQd>N*XArPGM8OG%i%*p zp0JnVN_E_R7Fv4DopRwY@896&y8X$>vD#Fg!tXl@83zswHoRzbn z!3XQEF8%|vyx7B^Y4|;3gVgG#e`r>q?na?GLmKH#VFG0re!ftJ(TiHl*7f6D5v8{z zVr11vu*g(XM=Bxsc}gdH@qxSl-L5FN>|d2zQo zhx$0HpqSj3+BRK9(>1l3F^b}}*g{`rnRiMx!eFu8RD(KtX{=_a9z}lJORc4r9;Ca?B zY|H7Fi-pSUyBv}cALSfq)l@=x40~oAM&LL=E^^oh`(sHprioL0`41`f>Oz6-mk*>atK_ek6G@t)O_8@L|GMaN+B22*TUiVMDc>_x?s+CI+B6N1zWeV#gPd(eSBPrh=x3HF^Qx z(k!;&mJgd?mnTR1Yb!jvHsSqimGjl(j#okQ1T9nh2G6lyqCZfVYqX9!h=9Quo0+Sb zbu31~-)YfFdce^P;8v3#o#xnnovCg)*wd*D?A4S7LspaiJf=ZLSX=K|m%`OK)UDf4 zU=jwk6v5Zu{mrfMDsIZ17PF#Ai)Wo$Mgh*6Ls!$Qxg`4#-ZM^YRb-Mn zStbYH+*cfSf<%<>Xo5M)mM+h2*#8=yDEX$CT?hXeyf?yP!^brmce~tRMe&cuyWHL( zUGTbP z7H|cLc^na!CtP?%A3&pB!`H;3>RL?U*q`#@hq`i$eQIklgTgs0-T)sX?CfqaS29_gc*sXR2@5x*hk>ica2cFuktAZN4vK=7jg!2e*VB)=*F)l*VibSPl5OE?KL 
zxCO9m0y|CPqpg)sHKKlJq}uS;G*oj^a{{1l)H8ZLBbieP=l28n7kO#G*~{cm_W~p( zzj8a0(-(@QQJi!;2756}9&BDNy%}Svjy*MEkK*UE?kX0&v1n~u#b}jQ>(zIdbPhH{ zG&v?d*kpkdc`16(N2RbA1<*sp)TVZ8*dozx`CZ}0K{y3oPGH<1Bq}SLvNcU&)UwNa zdLisr5Xg;vbXwchI4|-uoaE-Sw>Gr*1T%~*7@vJ*`7*5G7ICnNv?@~Y%IIw9e2sXp zcYb7roC`LArdGb~4)>ii=ZeFGXZll37LpJn*mjwnMm<4Qv51~rwYE4%(x9kQlv1I& z__Z6Ap6`UnVY-QbmA}NhlTB|{4}(e(WfZERF^QmWhq{j~aZQJGIdeD2u0Uf^z@^zq zq0YKn!naAN;Capo&(B+KKE{{TXdW2L;)DoW3EVUCABS`tHs z7`PFfnVv~uv9phIY!Y0VU~}urgMCNCE&QBU4dY}D|9Q|b(2(!QVbyyQUx4%B?})M9@nx~ zEux)Qr^Irj*}#K*bC;YZ$UEV}1xLp>IVZ%f(NU+}X3WM8~A>6(!1BBZFC!~K)>DXz9CZrETyLKt8zIDBy8&rBa$0^I~{Xe>Gs-jkIzTcnM zr_9RH2NtS(#{8k?#=asq+EXy;bZ<~aEI%K*doLrIP%ck7iN1TRXw)rKb| z3Jex2%}FpmM+knJa60Fm-I01MabUc?^VWU`=1t{BPO!XPNEPQCz^VoZCs4n7Ht3Ok zgTALYZogTXS)>_em6;n&C03_G*X&4@(6wI6OjIj7I|u^ou}BzweEiRDY0%!uiS_^& zW5)^Rd!L+<=;+$c`wL&49H~u7%=druDy`~mB78j9^M5L}{EC_LU}nc^ig&@_326KV zv}KQAwgiO1v(;EGgBaUdZG^VkT=58I7V};j_Q9^|F9K1UIu)CZ9lfi_wzB!*kWWYF z;2r(K?Au=UMD08LbCx#r{?j%cbq;WMItiF4q${iHjI|HCc2xV+>p!0&oxusDZRpMm z#lnx=l6vVz#;w>JY_UOAUitZ^$%!v#s`gD>6#8*vf&p?32>qK}%T^|Ka8?Vl65%Yf zLmk1-Lh6-ZAmja#`H(4{5P0YZka<#cGNTIqyd}Nr&aBgOgj|Zfe65@RmmX-CI*` zX?#=8<+4Fwwk&wxKfG$jbg|3~NaE$J3jfw%H+WrcV`R#jX6^x*`$WG1di~a~Vlwlz zCN}cT&)y9Yns4w<6knf3jVnh;RDB7x0V-@|5~FnDkIP64OqgKx!|8$+)Oz)G%Vrai zb|_YazPI@wj_xD;h=sT3QmOq&vaipLLilEe_Ee?i+oEA?L8qBm#mip`Im8JJC_+0lsE@sN8 zhB~1+f-w5XzBWEEv#$|V6g#+pnGoHK)f74o=~AUWU2?Y)PZ?M|^N3jk_S)AcZ~NPl z2@qb6@m*Q9t%#fy1{Kh*$|ZOg3k}=?Nq3vp7PA1);^OloDSCNHhU0# z=n}9@`5kWGi6U<@>me@3!T2hLQiEF3 zF){<{%zbHd1-!C?ewogymxOfq$TO&|BHP*qD*D(-td@l!dH(eRIzqbzEytuy75Pq~OTVES%6Ej)Y{ScK}`|6!+MR=&XP_q+?k_s_fK zt%|t7xXw`QK86JlR#%@jce=o;2z_6D;PC9qEX{4nce=B|-REF^>2{w?+W5+qi~;uj z1T;~FW60%yCH49iQGCrb34x}E0?eY=fQ&w8(cnKFTVQ;9PyhQNLSavBdFN%DA{?`$ zdk$;r2t3Z}-GVMudj~Y8vW7ad-(=b#(Yw3cnt!Sb!TlCpP)Y)=?iLsnYnCDah6!Fk z)i1tUW+(?$y5WR2oCA4T>sdebR&_bso`FqukAY$U`+wuUQ$qL$Z}o_VT*BCbH8=KF z?EeYDZl{d39mv0y`Y(Vr>5u>{rm^yGfE8%b>wUv4xVg%vC}70?Bf+|MI&mx^7)!!Z 
z?@U20#MoYOU>w(C_l{70AOMLc-6?NEu8i(b=ip8=4skLFDwXFiObZb7BPImybNnn; zdSO2!ztTs`X`!~kJ(d`c-i}+@G9QYc(!>UM9tgKh3of2k0(d<#AUy4S-%9ZkPP<*H zDQie?$_!g;U-EMfxS%|VXB`Xun^)y4tbepe4@-3koaJOIF5dO4nU&f5S{yLm(c1l4 zj=Z<^hiZ+L;z*d1C^_<##);~hrQj;2WvQi2Mrfv!{ws}%AaZ2sJo+D8ZK7NZLNq+^ zepo^ct-`YfCcb{bA)=j!yQ!@gy1N3urLQ{*T{&~(KdoVCdnxa);g9oXV~>S;qy66; zyXaFf!AOn8lAR{Z+dEL|-G9XpQAy$xi`m8;6Y*=l-!AXGLA!#sd(p+#&Imzk?#RFl zkf3b=R3*hSmyOPtOMNj~vZ`k+cA=^M7ODWbV^mo9@|WNB~2v zH!Q!tzhe-FbI4UvaV6sx`)Kdbln{Zg;Oh{MWS5fD&v0GdM-T#K5;{YX@OTp zh6x3_9j8;nQ!fN@wuoItzJ+{#e3noXMLimzX@FNLaG zu56D1y+Zq+#@cGiY90!MhBUm5Rn#oB?tPqoLndB>r90g|ifL>2~aOSI7?~GtNglXQlUUsgZa{e@K)KPg`A$+$skW2#(3XCXU97 z6DxfFP&;6@vq)pqR>Awed^8ba@YF1Z<8!e3QM-(E$bjtZnJbhzo?h0}+{N*G;iU$S>ZODx$BExHqQ+|>C@NQN z47YG3a^kmg-h0DE} zMlriDWIXQ4O@i4NFRWzbm!korTvWNwG10fj3)*pqPK6vhwdOeCrv&W?_)VyMz4Z<# zbHv4O8YZl6xSqKlGv5{75xUeRMEUS?;3I|w+vedC={_ak&io(Sb(%fwesgjm8o_ko zBhM+{T3UvgHSk3Z4VXvANmN+1#sTKw2*K~#v4s!p)?an1qf7TxWlxEJvmg4x1+wIA zyOQ8fx<=jhhSkO!90$w_aCUR&Oq3>$*w&zpc%h>S9L%sVacWHFFm?t z{cz{Xf!G2(O!0-cT5qv?*Hh`9%{UWY*{!hb>7DqsZ%+#ftff|Wvuo~#nN1jA6!j}~gw;a_0y+a+gxQ6$W1ts) zYkOC$XLJd*c7AKUokr_wEHE|gb@xUrxbMM0EH>Odw${g&8@JX(;V-6#p5@)GFzA(w zHIY6=Y3kW^XrS}djsL2~*1x$>i78cLl**SHt4j|q4=JKtLy$|!-%md z$-W49StS#yLC3r=)i*(F#@Ii$wMTrhM_Gs4Ai9U{6%EVI-P6Io=m+B$SZIb5xQfSp zE^rwCjPEd9$AF}_72NYCysdy@oaI|cU+m+K@iLlg(RGh#y*lbl|* zxMLc67@Nv$6KjAuD37_7(K{YS{(e29#Aq?wzSi066bhw1U|`IW{1ydD#?ebbNs-kn(LEgUk}^BWvx8*(VyYtRA!OxpcOd+% zA*V!fY{JCCxw}QwQPX?5^7(Qow}NxU=BjC~Yw!5effK?%%lk$!h597?8%? 
zjHPoze~Hcwyr070{Tp|k{3hNLe2z3#Lg+z~K-`%wTtGLB|AC~^d&JJP2L3i)@#%tx zt04;f8ViPD?-CiGgZiqO(~EGYr60S3F-c8*RbXN%Oj+=yd~a_w8{2k^X<{QNCLQt| z8AAqSB+ny|6AWd)L(r7wt(}O_kzD~U)wG$xwc?gK7%zLey+y||T=2m&vRz^|5clV7 zohLyivh>I$0@ws4FjjRLME-|+yPcI7NLk8iK`1UYekXKx%B%xjp$F{9Y5Nl3LM2=5~5-v;(b_=tzrzT!m4uYxBiO{dd^C&#= z`wx+eB$Y`+zx!cFyV?Aji>7Yz0wv*zoJ*9E(oOl+rZf#3*GzkvE%2=0?(^^{Kinp5 z_5E{JUU8l)?YA@h_s3<^w>TI*_+U5j)Iwdg$V9fF);8*hyJ6?@c=W*Kp+lX94gLiM zrMRV}bM5aYVrByo{k-z<}_6(}0i&7!U$h04Y}>-js6DufXp8AC0|Q zSB~eb=RKh6u%xgOC$b%0?g&U{T;DILm3Uq5PK7Wr>Z$a{&IJWZQ-#EZ0NWn60=N*0 zcP|b97Yrsn1$5l_X|v2JdP|+dl%er_Oig zY;(KYI*6BD5cCaLh^B(^aBqJ0K5rv*Vhy<}cVECYa%@#XZ-+;{pfS*mqcH7MYD9bY z!OXbiV_gN@tN6tOe%`Ur<91MElVHHk>e#l+Yc)=g%sH%yIe;FO>iR`-uR81XRDb)g<%F?{4J~)ynWXqV z8+4phO}G~ueg2I#b@yEV8mf$Gxo!>bAr5uD-wAjSj0Vl|oPmR8)uDF>Y`-Y@XmL`J z;IA^%g-M-qA**$+u%gN#n_qlXOm;x@!6!KLzI;1^TQnNgG|PUlE)%V%n%I3Hp&Ew` zag1(bf3i=8cbtwCE!4h=LH4+i0`R+;U4j1iSWlyU7Zj0K4N*N4K1I$h6XFxc+y1=j zfT(ZNiCcuP)akq9&TN|(^i63}1qU7C%xI4L?@ZKyt)(u;h1qfR=|ab{O-q6qT{uH3 z9pc5fqB|T%)7Jqv&1N6~>?QYXiMjBjTQhR`s?h4RM1qCu1Gvm>_CEsXvMLhiDk#F} zMay^$V`_`{^Bg+1{ErR0LFuo`Wn^Kh&cCTT!tAD~;ICzV4MU)3w7988H6hWuI2hIod}noZkR^95GoJ(eP|R0eBnZP)WAY0PqsR6$u0c8tB~Y zm2Lc;DbE{fP1&^OqQf>`C=Kwk8@!%!KK*|pbNvP~e{6lv5+ygG|BcKU2`O%X2nmR~ z8qNy9OAz1d2XMi!M+7R*s>XK~>zs<4e2B7B|$NY{U@3o-bJ6%WG*|?1yv{@N#Tt>>054I$+EOseKq7l8KzWHc_qreNS0ZA zTBX!ii7Iz+I+wg%xe@Jil2_N3TIum@KBUOTR>c3ylB>0@6+M-( z4H?7k2J~HhlL1mtpyzAq(GKKWf)HZdwD%~qzEoc&K3!aUJtfx*ustr4iZpWYKcy0O z4}R@zi+E!Lm=;vPossnxY-_e4Y^%9ySpd zq-lfpN~06b$Ny&wx%*|VgN(6NFIW6}{wF5)EN8WNXmBOw+^hRHFd5t&3Sctl{688m z#F(Ky_(Vi>@V9Q1PCL{y>mknCe^&x+MQ}PnW51+yMzKOqp?j&4j=EE9l z?}sFCeP?5ZM;L?kHEDH9`{S{ZUjJ#%^>gmKWW4acd`H*Zc+n%j0w=@ew@H7rio`Fh zYN-cXW=}gaU3eS95RfHDaq$^MPI6e|d6<#<(}9mRm&1`c>M9m##%nh<1w^;31^f&? 
zufDTr1k)2{gYxh9LHLD*vTA->#H$ve^TIyz`?MCV>h)a#Pb?m)(?` zJhhKw?*uXCe_6x+tz;>r53utZ?P?&g2BWEC^I`3|#fG%yi z#e=^MJldVH_-$6FDWTLtAh9qoOaKeRRT68q_0f%Z2%R}X6G7$<&Oh1-@p6hTfUfQ& z_c_;zmuW#o*35L8W2$@Uw4WJ)>%_C2{QLv!EvmF)H@@?pwQE*FfniH#Yuh4uwL!ql zZhDO0E-D}LRrC_VX_wLY@Ti~7A_NYKj0v&V>|m^g5bhN-S65D!KW|QQZ*;Hhc3s+3 zh4HQ5Crs^bGkh32_sKSM{e$Sj87O%Wlm#d*5o-9I--8y*WKQwA2Uk~&-uYD|Wq#26 zZ>n%BA>DgI*~H7+iJ=x%;e>MD-%%avICsw*pym4ThwGoi3T}L>9HKZukYxWWeI28p z+Wh_4>Wg7saeXH`8}}Z4GIoMj1~I0G?qIvZoLE9_+D+g!+NIe-JBnj!Q-7~<52lOj zM5|j|kk=B4w2Ha(W?-}Jw{eEkI9si`GWe&`?u(T~K}EfpTR_YquUb?ai_3XqoP)@+ zua$y_cQ2SLtOKxIZlW4MWN3rH(?x|0C6?mJn9r&fA52}+qqd)FQ)7iinTAKVL5vm+ zy27)9z#xhw{rVUu?gf>7{FPY-Pv~~?`h#zM`+i@YTIs?+mIgvBd&8gWi=d?Yfa$N^ zK!0BoKifVtSgk>5_#eRRqcRvu#@Fit9$lOg5QxPp6LsP-6&raQL&ySr=5%O289i4p zf;9=AQD7S*hvILT_G4as=y%pWqDd$2UQ8Mrd_V7-k5C&r6_fJVHU_HDj5|ZC@4U7K z2*eqrl^_t^fvx!E!1_DJpTVo?Jn@#bihtR1jg1EO;O-y%>dIz%HYwBzDIW|KBSae> zrHYp~J-0L-hm~I!`Spdqkh{O+9q4k=kT>n*2=O2r7ik_aiMG==>TcWmkXu)2s#v1d zWkvW^1y}#({pbAy&>o^&eDvf5A5KZoR^Kg{0jx^CZ&>uXwqN;Bw)P=SZMniw&3G() z*yDikZOa70ZShe*9xK4J0AIt7g0|jHL5Zfe{Tlw{!HdE(7{%hi`LfoGv??Yo{nTt} zbcpjpUM&rrq?+H}0>52pTPQszRtq34Ze3RVcHhz2%}94`>&5Zmo73lo#3Qo0MWuu( zX#xRjK6O}^Y9U-b_NMZ&q3rx!U_wg!v-<~-Zj#wtq2F6lk%6`Sf1%{Ry}Fx}ypa9& zjiLJ;|Al@A`x>SJulFhisVXy zO1OS6tY+qOfA91Ao%{TLf871onK>P}1h41ydOx0z*9_}vlF*Bz|E}UYj@ZDpWC23= za2VguAQzjMrGH7qcyaFAahrpG!@0l7$!hanZvjDD=L&1~t>|4$?jO7~fKsqq=F}Wb zXgF!^tY9>BB;8$_`ON%x+H&YI-1)%83URmmUwC7_DKY3*2#t4?Y7-ZZ6=0-^o(|Z| z1HYRIfFx;9%3klG?(v0~NoDo2nj@Ho+C|FpkHYb!e^W zdRVxqwtdW$d{*O#- z%L@d}0zGO;+;xyPkozTK$JG0)0#=kHgMKU@J53Zx!A#0>K;WIgQ5XmmejvVyf!@C< zX}?W_mCu$9|BKi0qZjTA^}V0|P4a?b8yB6g&e0oD(D^W2~k3}YGFlJut`WmVn7VvB@jq{!G5E2?x4hxIH^{j+8z}5uD%*lzg?gS2h zm_-N9is;;B{`{ZtKBji6aDdFGy~`P_1>VVn-Ty)!a?h%&JWbwwp|`o2NjFh;K1oGH zC6n)rw-A5SS04UP*a3Ybd6sRQXR-ZpG_|`yQZu$Amca`wyo|a><~k*iZtfX|NWHZv z^sdcWK6W|lJRe{Qr2L0%E{!PE&-USKO@Ybv(Mkk!m-7H8X~rPYNNi*@l%LyHT|4=U z@)Wtde!j{g{h_{9A!H8p$z9b1z3GxyZtba!TnuhC;_37H)qNcKV2pHZP|)@ZKmmT! 
zc86Uu){~&w(i+5a`B^-zneO_Dpw(56FdviDk5y$`!ix~@*OeaaELLz|C^{d zim?CHU~@aTT2-?0^5So-gmihI?STr4@HjC7XV$Q;Ng9f8yuB=u@c0y|; zM{?QU8|d)^eNLztjkx7VxLg``3RjVcOR)JlZ(}}8QY!-v`XBegl`+#J_ZjS;RWL`# z`<0M*3ncLQkp$q&teB%nD#*ez8FXdn`CYNEPkiXJeLlz-{qz@qkBN^DECoS~TZ#2Z z)}vFv94_(rg&=v@!9d`CcKzhQ9qqsgn zhkGh_l(>K!7JeEFZYAMO{j$z?3LKH`DlIszrsM|ojN%j9!uzi!cu%8;_YVCNI?@>D*?eh&`xa`%9`Q!@c8=yvQy^FUK$HQG&7bgaC z7j<<0ul|{7d55tw|AQ8>QMfv|TC0S5{>Y!vMtrZlRM}+NEq9E9ob8$_)=R7>ez6 zn{!h>4i^-5I1og=5hD|X3Xkn6q+f=v@%(R<-yZ6=?@}VPEMR+2K@$85B@`cX!{E+%OC)3!JWrCq} zqYprGJ;*V6cp^pCS^&(9<8Q-E@c{V{6P!BLLY#W=@;eOBo9p@j$&DO9SgR_$G1*ox z$+o{Pm3?gbV`wYvnBX!}+6W)Et6BiQFD;f{SEyR%6 zt23I9`%XCLJbf83vF&VZWi#n4N!BmUYs8V#y_$_bQnGDszRy*U22=SVo&Fj))(9cC z)+y0l|Oxqj#Qbn8^QyTE|Blx+g=yB5?hsTOQhNThXc zxza3&?{|#{`b~M&iAo@aeK~unC@=zL?LZM41aHDxSdU7$t%9vc-~1MUU%3iD7qg?s z4aDfq)iR@AW-x8^MJoVdh_c!t?2BW+a>m35S+=#$E`Oi@!oahTD$2{w%u z@9R~^50k7*J2FlpV&3AD7`!tJ;bYLb6OrJ#G@;`^IE04uciv7Kpa*d^-b=Gza%~8Z zYu&V)J^#(_nn~DQV}RX71MF_aKe0Afn&Tm+2>x9=lgxb0K9DyEUYpXd1aGH_%!0|= zpudg~_E}-?e>E@r#?IJ~`W=Op^sLUnS)7CA)}q~(zI6*i5QRQRyW zi<$sB(JvjN$n{z>ym-E&g&%YjJSzK^*Kf8EgUL<0 zH3|-yii@O{cjWihVQj~JmnlJX-|#stpFiPv2tf7xU-a&?qcQ3d;S$K}zDhuVO#vhR z{~h05qSlNw{jbM&Kl)cN@Uh7CKZbyJ_hHw+671#WPoI|RTEDG%RP&=ONVoo2)jN5; zrx#WB?9sQewvN@~)K--QvK=e{4PoChal@)}9}AJqk(ABqM4(s8K-p{sV18qa+ahhV zM@f&h^ww7=hZn8!AJVdTg5@w}8&6EH`S2K{A6cLycJ3m>!&@!D6O0zDzfRARfV z12?zGBo=R&w0R>J7M)OAnsagh*~h1%Dxd28r4jg)de7Jk-|rEo!mvKcFY0SYH+|S- zTjF>w!R+6OR;2e@Yc6Dap9LzV8KEDUeW>y|7X!?;7NL5uA3(_%J zst6b-qe9EauQUqE1>X~0D9GhoXhd{d4SKoz9*Sf=p)KoyegK}7EA*o|B7wCfE+$i( zXTw#|8|D-j=O!g|K0HYh3lp%ruZV$k`MGQs;mj~U8)QzkY~gJLTEp7qr(q)+u63XG zJWTUtkNc@b)^vl<*7AqKRY?=x~jDMT8qzj!Y8Y#xh#|pmHo;U=EIGq*Fc{$GePgd0b@}FD!8*cit{Qq138Op zmu-5ZG67uQLep4l%cRYh)FjXQ`%w|O>n-}ASa%PPpQtY$d*a}Tlx9`^p3UcsV|Kbg z8Vl#l<6pUr^}Rf?;vJkL7vhjv2BZ$z>8-G@Q(g(l`*q4Iu5-<{ljqStSh~13*exf2 z%(of)p*wP1>VOW(i3NoP^<|ixdL42IZ<*@b6-LHp?707fDaqZg-j(eNbeM9_ce3Z( z`{x7FoPnGuh^fiF>Y|*2j0-nmr@T}XCJV21wFj0GmyZ9?hDt28fq~c@Z^wZF+5>Zq 
zreM>=b31|29Yb{Jj!_edJ*W6-Dol8_umXX(Z0Hk{iRnnb9e$@X(W+fc^ikdV&I0)H zTpBz&U~Z8N0zpo{iMVHK8^feMu7Rk&d89>^Z)X0@^u|{$3|IN5!o!(0$M3)1RiPZ; zIHX58*&miILjA((pRA-fm&gvJB2nY?Q^fO~x=E5M+VfKWwhjG)EjxM@+O3-!$uC^s zxrkp=%_iFpXY3B5YUO5}fOJ}6>B2{-q4p0~*->e|A+z?*guKYU*5-4aR-us_7a-m{ zAnH??v z>MsuIQorfVB<$o@ zkQa}>C^cz&w(?nz%yuvb^mhR*ac11!Aunz5Gx4Ah7W?+c+!I8N1{}`WnX>53XNE&j zp-#;OfM($Ni>$(dCuT`e>NVh%@5#}1E-s5p z=Hf7}sK*GA@RpW+!NztWi4M9qqfGs*RCsB*enNO-G}rrLXGKXz-stKJ;A^=N2oS+l zX9C}BZu|}celvb8@UOM>9rQ0=1<62fQM&)^$()6Gj8 z$1T5#7w@b@sy4S9Hv>7(c5qq!3)M?3=*ZwdovgNl%>Gguwb?m(&(b7;5OdwIz@I3X z?i9p0vq?*b&4S1?*hFn-hch&G6t`6%D6(@yg@o%npG)_11+svfHve2(T^CITF;Eg! ze0X8EoA)3WTuk`Bh2JU2t);Yv=yUm%3{sNuE<|#7k1{ zBBsDZGP`}ov#*^W6Ry5{%ZnF#tJc_A$WLe4ZHkZq9`16xp=F>euA#yad7ZrnV!0za zRG#Od$d~Tk;G8#o&kv1aVx;~>Im}g!BmhI(m*%<`(1>-0d^@o_S4P0{Ehu3y$0$Ax z+Tlrym>bpNS2X)z1Ea&^NQ826Tk#o~uVMgw%o~hlwmuwsR%fTyjvcw>U65mtN7>a0~x#= z1UpIREcuBn?c>cY)vQKK;NFPq7Q(U`e=yMk2ACBBmA{tLjzqg)ix+FC_?MIMQ$`4{ ziss{jxkxha4KpA3Q4_37SKZ3y{X?zxk84{#MPvYF&lb#C7Z)7B=BJ2K+1WqLX8KL7 zHnc`T1R;#pr|GJtD&Q!t5rfuB7LB((YT7l1Dywyb&K&)Ct+G9+)E7KAz|9VKG*3|j zQG2XpkZs6@QKKvQpPXPzcTLoC`j}AR^+BlfxlUR7O@#Y6or3KZhi_Jp971?koz^n@ z3v}W2bGKHxZBrVdVLu4UC75qQ2{zMBLtI)g7gW=-K38Bz>kI@!9 zgcYzh`zN^=zBn`7gU9tP{c%d0ho1*N(5Wug4ctI(w5KM7m#2Q+Oic0NrY0=i2b-zy zNgJ)=dw$5#m-%JH_tLy>5Gd0n715C*dORRjzza+@B8BCsq3S_mh9(RjvRJ@V zNP5ee@)?$FtWhbLF$ zK(Q^Kr?oN9z7PzP9y;pChG7jzGW@(VRZ`o?X8r$-MYCIq+A&JbzhG!p`H4 z0$Tr>_O{6Wz=C?Sj{G=x0^ju{PekVm^C5spL8ZcXLJ=j;@0S1!hbGY2fw4%A=i1wA zE+V`^VHGMh_~wxY(&A`KeQng8i4)PkvHM>2xVW$Oz5>dbHRcsIu;Y>B1+N>dzJY7T zBGmv!_pWxO{7%7moBIuR6sZ*`AkN*yN?)-U@d()vDlp{nVp>AzQ1IHhyi)Z0BmknN zOCZ{Pi2aq(Z<)7R8xKeUDwsDHeH=V~PEh-f{3Nwv?s%8>YSDeq%u5P+aG(FjxXaf4MFK4d)`tQv* zg!Om9*qEM9!AWj6Fn7kJlzObxL{MR34t(9WA7{}s(vH(8p}ILWfFPD_)o0{B9G?2@ks_CuE0ndY&UBr9(!&9RaA?ES6b-L z?tVJ%6QAas{MkL$x>*mU2mguz>jd@u8ni8?@73(mpAGLEzF@#^@x0}O%E25aas{Sq zBRVO3GqM7SZVo6F`P=MVYnQbcJKNB|dO9EVwpHWiYwk9p7rHv}G8GM+e8qsV;n&Xy z21ihUzT$a3CH}CcCe+L5K z$>({jZ3FY?4Xz 
zNt-gQDU8#yZ`)PP1Uc8!HHo|Zw3?5X=0azhQ^L_=Hy=o1E#aj3E!b2sAxvT2`t*%- zFjb4K({Lv;W8RCJemVZpc6b4nDRr>ptk?0pz{27JQsv$8VNRNXDj@?LiQmCnbvLPl?j8J|B&s$i>RP(gkFR<*n zF;RN1HF@`d#SR8yjmtAu>G+!G=;h``)2drstz%9QjX0;vt_^JuVT-wAa~B&j%B(Xp z2K*zTC`aIy<*8P~ z0u#oj?^f+7oMkkT23!QF&sFnIvvgxeXftVi()VcFKOc?8w#~nq9_~*CU82PVhhp5T zqpm03`om|@v(FSFaLSgGu+f-9qxU6bux=v$ddX|%l5Hk%zTb7u?-6iARefF%KE@26 zb8^IFrK?IFibC>+WBlSL6b=C>mB%7#AN{I!`_HWJ7$GLzvj`hJ8P{efNT#BlMrW4% zIA}16l;GaxUP?BGD%NzDk$53KZRhz8l(xuPTKd9TfpPQHK)r4EIiybrT2r5Bn{720 zAxMM4_}adiJpw?&HqU7b?>WPiXc~6F`0&xcA~_1p0`%Z}%Q~Ot9T(K2O%{}+?_5}D z{;^V4S-AaShuL4p@~BNlK6_j{i%I|38iM+^eyfHBrEqiP`Hr+xsFeEfhaa|~ug?YU zcWo9Qq0l7mu`g^x7t8dC7`ib8w%L74y8kQPfQu29x}wDqZsLyN{MGi5@n}Jy1#@5z z`|>qK1l=&9TgD3bDu~#E%I+R*dVO(T*}tyaq}ar$Jus4MKaVU?8=@#MhW^Wcj)OB(7v2~gPR5{15h3g zF~B^S+@O*-Cuu%(|3#|mx(F`iz8L(5PViRxmzAv0WUW=?fDC87T+CeC;2dz(8=M3B zE9YQkq}r}f_7G8AC+fx9O2}Qd!C)IBmIMPu?Q%>eb!wY~0V9=E4jnb~s zXJ^!i!mIhA9@-;;9l#G+5}Lh$&}>?9sKw*;(jpQwFdW?+rOv!v0l#yQU!JIr2ZsKm zKytkBCg{w*+!R@W+M)ecg}O_zgdC_b^jF}rHGdKz_&9!#EPkIxv(g2uP+?1 zHOBs)sTobE!*I&D?&1s-MiTBn4mHpE7bXp97b4Vv*W=#KQd_P z#i&%oVb*+^vD!%8gi}&&Vp_EM>mNsF9D!P|GeySx_T_n?&m;zxN(Bhz=r8OnzJ>S{ znX(gOc3q*q`#c`*xjjn0{CPZ_*C0#;(lawylAbSIET2^b4tOM9MF3<-m%E_z*GDDB zmhx${eo5$kH{9~DXz^G;!#`~AEYO%+F_rPU{-4pgZ*51P>OV|GquSHeo(mYj=_C19 zYP|tdnIbS_lKgGs&lS!8Z}7u~DteoUPUW}JdChT=FyUP2s>HTFzEpTa`j3@FgQTh= z?)Z`~uhy&s#$4v=J7WlVFRhNo{OP4;CjxBLE zp!tPRc8FI!v}3IhvleB#9G^sGLf8kCk1C0*Hgy@sZ{ThRS*cn8ti!3?9LJRxLmu>~ zS47oZ!=!+Y zdWAP-bmxSl)xaqlpdUW_ddGK4e7&8OcB#T0^p#8F;KQyt>V4zYpXU_A)f^>U;j68n z3e7}i#KQ}T@{EON5m}cr!_fEQjHE7B7~>a#*FMyESiVM=jXoFSZ#LC(Ggo$9EU{rtsV+;-mW;$i*Y2s>)oeMsF{J2J^w+N451Z`>zS5uq9# zem6;(GFM!Y*q1QoiHk)TTY*kX&Q4Ils>Jq#FJm*NLg+(&shX4{?d{m#OHA!1fg0V0 z&+oE>bgFS(OGR_iY7SsTk5EOjsPB*P0nkQ`a(9M0V zCRdI~uDRAi<4Xt$!@mg$C4TLR!GXX1Wl>@8R|9|`O&9t5?2MV0+9%lZc#z};9cZ3P zG%Y}>LIP%reoCr^-E`BiU3SD~6SLH3j0%Q#RTT3dufFT+Q5hXzIy72g zD~*P(Q{EkBAzVv%XZ}fsN{PgGB#}A3?O1wOymdQ}3PQ(kpevt2nzJ2n>Wk-JC(8Mr 
zAq8FGDRUpH=rvz|2=8)}KBxDL5#ILKY5cG!OD=GyGkJF48~Uh(4#raL344?fkrbz@ z_l%>vH~5H#vU%`xph6=ochW&v_0Tq>??c9{?v3x}1p5?ZFnFwJgVog~l81b2fAUM^ zh1r|UJ^E)Oi98I}e%%2hz{e5h3t%AIP2;3(t`r*EgUM&P?(U`qE1eUp zm+M!1y{PGma)i_WXe0U?w_U39Qk8j}^IOwP!q9>5!M^+x=(k^32}{iG?qaUPvgrmb zQQ$Y@#HQl-KW|j(-rrP*iG_ZVszc$>8AxgTh3dqPDt-Q(_$1{=chyf&ln+NLV+T`b zhs!Wy7CQo2pUowb1~2JxCo-AoRh?Ejzxi^0Bt3si{x$nLV-d%?alBM?fhl9zyrX{4 zBFPZ2Xy7J?Taz@<>w;M*%qvf;+uLem#=KiGFU$vLO;Z{jeDi6`JO3Q#X=;bk##G&` zRs25tMvCxISGIZ@;OhnaW|I5LjcLuF&1!Eoxw?UO=llvfT_tyXiZFseWM~uHauG53mj-k~VK8`7Ayxq2q#_YW*yh>wj+-_F2zTrg-U;a( z$+Am4#IuLS{L8Jdc?(E~@yOr(^o+bTA#o63#k)=8aq@rSKl&*iHN6Q4d=1H@eL$g( zbnI56#GV1+q=NAg5AbTLuWLdj244lh;CuNlM=w9!B~rC3yv%rPwu{#JxCs7o{I>)+ z!iHj<`BKF+AYtjqRD%&dx6g$zl@8_)uHH+px$IOC$EJ*)plcTl1c>01#_*afo#BSj z8^wHHB&&O3u`Ka@+T20wb7NM{#A1CliWV|!k-G<&GFzR zD+SlK5(W9l5h#E4s;)>~NYB(C_{LCxFJ-tk+nz;(A5F_!5bo%>3&GriM0aGPZ?Y|B-hf;0(USq6#H3IcB-xj9rRU;{k=1R6z?mWH%mHadNG5n> zFZ@Q-RabTKn^&(xo*nXi=VlBb6;1xWyEc#t-US~3sTk@5kc#~hqyp7NaT+9{x&~{n z09N0zrzJ({csxZNtGXIgJ1Cqsm9wu`Jt54-m)&8C4oiWpT0>-^(FXQ(lvL9_Ro&Q` zNKw-+zIOE9;=bgAqbzhHmA#Va0zov(-T}Zf&aC{OA2iRfMotK}swC|=&dCQ!h zBI4_SgXprln`kfBSUZVn0#@mUDaifXG_gNEZfpfz2>P_jxZd<4W;;aQINp_0aChqk z?fX-|29xNb7>~ZsX?p8@(xJqKe4V9>FP=4>S2m?Zj<3HI z>s0*;>d3H31e3VjnNFR%jwM*pgM9uh7#Mu!e`YG57`?Um`3HXyP=qz`Ws?`!x8boc zzWmE4`S0!)|0(E6&G`-Z%$LNwI$-X06Ka*ao}5cM#zt2gJ~Ro681!~lMk2Vw=lJgC z^|@`qnq*B%eV23oiJ9*jaZftjTlrUuR{RXsKp2$`y)Q<&a^cmQ_(pilGyc>fI6yw{ zh+G8E-r_4f4FZ|((=B&<_b$3jil7-=x?7TWy>9(i#NxtTop0=7rnW`M{TH!sUF^iM zeGbM!;OL zv3IaQPgaehpBG>bdV`f|jFfFr{jBwMV19Ui9i9&n#PAIlzRaVKvOjOUIMzYr6kT55 z*mzNvq$s}4teVfY_#~K@cFuI|?vbHG+x&u)!dn%RshSy7BEvLNrm!^%k#iPr@C)0f z755@x(PObo5X~m5jh<*VCef~WxkmwuF$%XgEX+C)Ybj7Lip|nzTyj=%_Y+BjJ}kTe zrqP%Yj|YNP8>8AR6P`TKnRG(l@$8m_QAx!|stP2gL}e-O7o5r1_Ij~BT2>xJ)@SyQ zd?$27Oe@3M=L3y7?tH&-`EC$0FV2vxUJcwqgbE&j)4Q?^VBj6acsH;znQ7)>-oT^z`9Z~?=| zOObUZ>^=T`H7DYEn4EKtC=PqSvMJ>T#P>Upk__@srW_C3CbC`$uM4B{hWsw#M}Sw_ 
zJtAlI#V7`BDEj5C+@T}eGUs|=z>>h^Mqns+P{Yl&$UU-Wii~;L8RKVFRG!^~B+AmhY&#JU3BDNLnB%8y1zNlFDx#8@I2D|mm4@>Gs=^WR=I;3@=b zY3`l+=G`ObZ$Nu_klh_#<{b}E^WRR5$A;+_t%o=+50EqBOBRM5`WGZR3PTNvj$%aj ztBzvx@2tCmDt4a`b#F&4;{|6r3KIJMyhGB*YZj$_<4ogg1`bC%t6w#`!!G5PZA(1GU=Vt~c6sS$ukU+Tn?s$!b@Nn&-mu0?Ud~yx|LV zXZV23NtcG&Zk1Q&lhwdwu>zJBBJrs;s)DrIt6epf49tLPb!8#LRK@qh35?|Gs3NC5 zfYBrQ+CX9h%?;YQuZ^Ez%GlJ;8sGMiUtsOCu#u-3&#El07UiPnJDVd@F@ZCGFTThu zyXCNyS-d03xbn?T`K__ z-?8mhF+)l?^`}{^AL~%-X&h6j=AN2!=7YW)7~gE%Sk8d23P7tAE)R?|UyB38j!2)j zu)1D_HP^N!Ojm5G77)+wUZm&DX=KdaKhE+apOD8df71j&tTExSl*n?pfg z4b8SJ{NNlDEzf%$Rd`)o*MZZvvT)8MLKonL-qm|LFMY59?gskDLJd@fX@MDx-Fm5_U#_sQ)lh#-=U_73&c{ZvXy)3Bp6Lq-sGkQ>Pl z-qARk#<}}yl_q+j)GV0uyGYq|!FefQ(rj5wQz|m7o%nJ2>5}#-DrF`H(*ZMne{Q>g=j_cKkS zbm4&V4uYy=;vSs#DYihFa^t%fG~+U!P7&W{KXQbj#bcbJYI_$`9-0qWz~BOsSCaHQ zppu6^`{lbpPP>7teWj|d{IJ27rK9Q0IyB3jfgt0MnZE4;;6xG`np#>YPWZ$&fz6P4 z%;N)6cB5K_k!4-9-F&l$ju=?WSVt0(phtuqDdEZx!iZ8G2Jb*4{wq3(X|5B9SJ;C{eTS`z}xK#Q?O^LIblvArMw8%hoF3a(hkCQ*%`>oi>Pp2 zItf+t%6{cf7rQ^gaB^`mQ*Y`uNK?@^L5bV)&yrg+y9R1njYW)%3{6jh0kmPd96pg^ zSX4`}R|*SN8>vXp^}Qe23Ow1n0RDR`0ycgj`i%tf$Rq)X2M6>(nzLX~40Q8QUT5x! 
z|Fp0P=*TvAC;Jw8n#a?z4;wUEg2-ixR))q^gEM(U)f1}wi71f*8fqWt+5)o|VN{knLAmu30& zHSl^0hHEL~od6#8p#-1;`zEabl%$vvu*mObku}>ZDZepz@zyF!hy>i;VRcT=!36UZBAi3;)&3^U3(fR?&D@?v#$#$NI83Sj-RPQ)DB~{}&0d#rjTw#|9<6-dJ1uQ7UlVSYbUuQ}U1a zO9tKFZjdCBzXE^$CmYQFyV9Q9|G-sYZ5E5A1cZu%#G6lUnBCBWh0LJ9XC;X^;FUz6 zgYUEJYNW2ffsy2u7Gm;n{lE?oAy{4Ywr-cn`dF3Opg6;5N2`v!|?(B zsa9ZgE+IYCj07e`LXBXpEGF32*)sRx`5RnWmm#8B=aUL3SOjb}Z=F~YrlEq;xSF=Y z@d>_{OSVYV(|?*h#no3^jYd+A`|hett4PO(lTyVjQqwVZk~g{a+3adV*&IXb3U#y# zqYP}#0Q--~^2aaoFW5NukNI4Kd!Ftb@N0hv6{hR1^n-UbMgj1x`Q5E=MBKKaZ|1K7 z%bnmuH>Irig>-u!B2#25DQCC%z*9DlOe4U_Gfq#<(sproL|_-%(iCZtD86 zOu7~8qS*X+kj$M1UVXG=+^&flD@YPA&FN|A;SE4{mG_vbn$X_hIs?C>c1QHrCmUl% zOFZt@w6DyL#qFBN!Y*{RGVNEXkyP`kACx{n#m@@8)DDT*;e8R4(Wh8%dsZIDOrDWkZ-DN5M{2^p zI-r2{o(RFK$vbMbllj12kLqA8&K|zq$S!V8X`sy>JC;fBf2!_ zFOjNSo!^!xtn^9%=NDOWt`3=jyKv#Fjb7IZd5dDDQ<#H+@x_!J-1B;av?qaHz`wP$FE%mqabE^J@JC-TkhPz?<%L$` zv{FvXYOPSptC8^D)`RT4yM8~m4MZemh&+Eqh8ylVzUt46 zIcK%`Yjf3=x#_mREo1|Brka<(4tAkc{6^a`#gcF4jX?PdTUSEs#XSJ*!I@@wRJ%?R z?+eA|wdU5lnsfi?WVB`1wr9dOKa&N%+*vwes2&Z>HqpJ1g-mCUG_eUMxdvi56FBweD1JG=I% zJ>x8+0j1v5HCs$D7HwP{mtsl1Z%9K;_MoFuY)^zw^zz83d!EXQ`B$hxIm&UAlqE-$ z_1T>GMd)Zi`>s}tTP8(gqqZ>(aVF?2L;5!xEW80$L(fRl477FwAFQr-AT@D1>*9+7 z-LXiy6L6Yi9Ugr^YI6Kk<-5aX;eRn~&%PtXeO}1{NZ`&D(Nl_`nY~_eRiH%Q;J{-* zg-eSyX(iBW4e#g;5F(Fil3ry2%vR11C1k;(fp#?T1{4Oa7_!{X$PYd{qwM~Y(^mB{ zaHzoiPB1QOAvDgocJgu4a`|Pe-X2PMnD+sWVZzSlpFQ26{O&PdlB&!_I&>jj8KJC| z=d;>-j=CHr9(3Ezdfd~iFA|k~+C-)@G7tFJ#>AQxDf)c)ps@7`5C z_Ivu-!7?}rfB@om!b+pQwEenEj(`K_i$8Cg6K8k1SYc)T$fZT+V_lAtc;J~!MSiSj z)2x^NO9e7v2}g*NrRO^j>Z-cJ6ja>7%4Sm-@%TLtRs*kpJU2NA5TV>Qy3;pn zJT#@a=dc-w@2s2>CQ{}|p>kzMGDAav%_PYit2yA6OQSdY=#uK}Q zkk>sdQ!qDoDiI!jWlYgMHRNN^&}*`6(1EN8>alFXQ9&kPx1gvmgpPPx(Ce$I70h&A zfu0P?`2<{xULQssn*qK7xR()wnFs02_w~ea?>wL@W`_*wQ6iR(3a=S{|MSkj6SYh1 zG#4VAFCT5K>v$Z+e$>H%evuzvX)y}UGO61HD0hKX#TO#lpWTylS-gSq7&4dFhtt}P z=mF9e^uM~=P6>l|!OBkn58#)a&J+QKtb(mit1M!1Ni~P8Qz{lB3kjW_%lS*kOFP^r z$l;drPp5vTdZ|`dyUqOEfy<{rxjG)GvU?RS_1_od_0M01a^Ls4!|K?^$j~tpsPz;P 
z>8aA3zBkcx!t(OCIP|t5sC<9%Hs41VFU#%INCS6<rj6GM3|JA-F~CQ98)r=-xELUiaQWGct8g6MT~6jX0s;6vQ5<74jO}w(t?P;8sdn*$^{W3m|NKaHk~bg ztzZ4fugjV)>bUw=0>`9;#rUVXu%(gql<}%giRL0V{*2*NJ?4#2SF(hTt|-nPaDj{B zbz?{4XQ4TH!!lb=U}ioR(|UmI7{rTR=bRt4JKGEWS(nm;vum~^U4z7QecfCa*srW- z7O&4>DKvMOQMXRnqQyxZ(-78N{di%}p7}u|8`w6vsMGrV(DGzi&5zYfC7K=i;@edQ z_|M)qQ@mllN}gTw-H>mn~FLM#YHf$*OV$bHEiwFBd%s;J&Rx#qJ_I|}fY55QNHG!rCMLs-Nlk(IToOxiyqHKL*F(&Gj{E7Tb{OJZsn2-Ol&+}Ic6$n z1R!aHWSK_cMuJ|w?6#CC3Je?a@%g5aZ)japkmuGQx^c5$q8&}9besQ2iZ0lSfCiS^ z%z#U*p<<o| zc;0(_l(?$xC+Tl6Nu9XTJ&|sRJF7^GLkkP{#aRAo`KC_|z;Ffd!TIT9?g1{0rI^b> z)9-Kb&Czwdp-&36AHFglnUNab5j8D9F%{*N++URUPgo(kiiVxzt$|H-T!)yy!hFXX zh^rlEx}c73OxF-P?OK59By6V#?}r7$PRq` zex~=_EV27ssk8%dV-#II*ih%fwgMX0iAaH03l&(8*opgrMGls2#-4g0dT z&I3QtyO{d$sX#9WLEv{yN8MkQSsYu%uRa21)R%7hdt}i6wnp? zU8cF?9yI%0WRU2HdbCCD4eddbi^2o%p z%;wl(Q>pKwH5al5qh4tli0m$@{_TGTp^?b+?mxqlh8Px`H! zTyxlE4OsROE241iaZK0OybSeKh!_H=)>qbZ^)tE4fj20L_I`fl{$3W))(XBjt0PaxNpSBOTI zlR}46VnO_e!2UXs4V9|ga}l@%sZqI=w7bcC`H#%9zvt0iO3J@wo2l@&lj@w-q8v~= zb^;^m`Sz*L{GcQIR(#;ur~5WF=EKh%h`)uqqYtFz!EaG5Df+-NyEBll%j|T3Wp-62 zpO@L4!~;h!{LbKfo2HbFRd)KiRt;pV?CgNPWE6c4*9rU<6{LDQTV&X>$S$xr>vdoU z>GwWof*Vs`hE?y_hTD(NRO5+0cIew!N|ZJ|-!UDnhy z*nczypLqv!sn>$a(11++`Q<2Yr$1Pcw-0k>;!STX6!q+4e?KZo%zdWG?#d_jgIP*c$)m&D?oohkpQ_(g z0iQXb3r4@EEL@*0i|SaCi4@@L?Wi9MUVM)!+S3`Lv8oCmBo|Z4BK$}@T*V4l@VOvW zEE^~Lyv*9aurB=E;p-f8I4R5sCyj<=!+f*MO+La~B?UT{H$2k(DyQ46AIV(k! 
z8@l>&7$STSC-rd+>VrWhRMQ@>n$u*XK=8i->B2rW9PrI=K-wZ_yR>P<_Vd*i=aE2y za+7@dNg8?+`PT5wo8Iw1c<$y(Z=YFHM0oq6H#dh{%Io8!AR98cF|f2PAcI4wYr!s+ z+x$en=9#yHx!ZRM%00b2D+$eWG13*fx9MZR;_wsnX>!JFpFR}DKFMofJ&Y^fj%%}E z_L!a|EZvK!u9`f7(Y3-y1T1g@l=EB3;ap~qR6LHm$=v~Xwsg@j>cJF|t@u@d(vTlQ z(Bxa7sc#Y)z^cai@nOuKMh+(F)u15TqmIb~U^ZsK=zxCONH?Ba9@HZFJ*o_*Y zX)Dtp@i+vi5ssjN8X-IT|4{edVNIWL*e`X|S{${s2qI8LMP&)dk_5C?P=?gS9wI_m z0U<&N1h93WvPAX@DobTU5NzUMvHS$~|1+ogU0;bU^2`mK942i!YAudxM zcxT-PAG6g#m(Ft<%wU8f#m4lU5jR0+Sy^7KY(KH^75heXT>SoDy`EH6gnE09$K_0A!37Hn6n277MW6W#*PRDnp~#x?y|vi zJBs}%_l(PEE@@#j& ziN}{ud~>{Wm(%nSv6JDOS7mx$VPysqdd|fV4mLgF_|Zf_@PzD zgzp@8-+_~R5)(N5vAhu7@rKzh_%s$(#SLr}d|)k&0p~rd%=%thmCjF2a@)bV@dqYR zal~h`0HsWeKfj6HSbS|Jv~$;XCjv}olgt7fH+r#TGb>--gc#lGN^Tv4Ir6?xtS=;c8>>yWU~E+k6-A>^+8|h z$o?LplMw-pv9zp;(y0wIRR=*v`3pAVhUp=V`}P^1kPh2Bt2uHK#M=$h@ojoI<-4OT ztna_p*{$q$Xo_Dspgj3}NVNv$e^w%vn~Xu5$nxq!>>b?CU(4~N!Vo>Ci+V7Rwbr|p zy+B6)zuS5z`+qCbapRN2_~hWAh%dBOp7V1hu#tny*5gi~5@jW=?ySJM=WC?nV$U0s z-Dm%*zI(@m?&~3Ej?$RxW$l%f-AtK1i)IE9&}`O2@qbatMqjcW%>^}_+@B7(1a?v4 z&@FxrSslB7YV;);<`*k=ygLlk&VG;Il*}~~1Fd6&wa6jfwD3B*_v-UP0Ep_23H4mT zMSVQnti)a9y~fSF#NhkkF>GyjS*Y7ec%Gh8_~4mgMW^WL4|ZEjtCoRdo`?Yk?oR%B zndmSBgNO+O54KZ>ougq&qm3Lxn9oVF)M8D#>0>p+F1w7ZvS#}32|8qlB(=?=ODit24Fu^?iSV4pH=o1k3KTCre|O3^m#6)dhV#$8W#g|m=}Tp zJ~t)k)a%ORXeTp2m~PMZ8)EU(`mHyDhIIa3I_PtbAh$;V>pZJ|>Z0apC$A_ZBNXAM z6&!6Fi52RdU#(zkb%ejZ!wXx?>~w1j!-hU1sKH-NGyCmVeT$2)w=*MKEnlNGHU;MU zFyhi`3X8WVq>Ah#OxjwR*XnI7A~Y~;;=3mVztF{RCRf&UbB?k)XF{TzS1?~N%9$ee zL)qG}+K~N|c0lR_xpd<4`*lqvdiXa__NtdE;m&1|E3w|46UC?UBr%UvVN#1bm+&Ky z&fJF*6MD=bR)X0G{=0J|tp?28d@amFSD8C@I zQWLe)UPHV*hssXc7(-H!p`2_CeUF<1YMgC%?_~aLPa(W*F`SM*nD@jVYyILJA+l2a zq;;8Mhj%^>zsGleSDDynx#UNi-T8;7YFTDrN0bQ$Dp3E&hNt#xwq;5yz-)9uu_)w!Mf1SGz|e2&nmF%vISHqR&c$x> z+v2o+M|Fhua=Hf#5X>~JGMRs=X8i3m31>sP=Ie-IVs;Ns=jjws&LdX(osJ4)H6qWh z+jeS##F3q4`lKS=TMqQ_BE@gn$NYwUOktyH_MG(e)Fgfi<%+IYRulR0bP%@rhXb^e znOcG2!M;RcczWnTc*{BVllIOmVq01VQKvLMb6CKv;J$bx?8-0R?(6yV8Lk;KTk3<` 
zdajM>O<;l_$Eg+up0PQkHfq~*a2Rd^)h$_;np1Q{WTSH)9+?-n?drh^B4~iUH7%1_ zdyA;T30lmK3@uOXe$N$=k4A${Ox}Xs24D zJ;Yz3nu45Ge+Y?%s>L~L8Q{C9xmR9Z3YgeX7GKbm%zgxh!onFaW?+kJCeb#_k&`iFoVL&U|MQjO5*wm)h|@)G)br zx&3LyFXRimuWXE2K}*W?mr*cO9hL{OdHE}zmE-TST1)%8GRP|@kR*)lq$NO==UjX8> zsHM?hd(X+%<7|HB;x}QEdDM(>RZYy>%~JLrvH1B2LCej8Q$1TI6xT!hsg1_^p=B@; zRq;LR^pE`xek#tA*~`a;l#<$Zk>D)kI*oYZ*?6x|T&8s$##K|Ct!=Anv7)VCyj{Fh zc0}SvW2S9NDmofb3#Zr^!uXLDg17Vp@{oI}n9);A(y&N*;0aeZdvGY?rgFhPhT zeDP5+Jd6d(OPE;J9aT5Nj7do}YCyZEG9NxPrq2m5)y|zq!r+{6mJ~RGkaJr!dA*o` zx(J>UpALR$kj|lN(K>LiOY7Y_viQ^Yr`{~=&2E0P{8xbb@-&{2 zeX>Y*PfM#sbea^t6}y5fVGY;uK%_257(1$LP|eK(+S+CGnxM{ztr_FI(sxGsQ>4q8 zY`SdATEZjN2}-CDu6&m4D!Z_7wma(`!%NW) zo=!@(xH$5KqWVkB6EiM~%Lh`=P)*8hdnWNGZIm>?%eO6^MD&``3WT6sg9%)(E@tU0 zAkx)Ksz}D$=eGZHk24Aa)SL;2NI0X{<^ykpaD|m-lpQ#MT*;LpuLVSPgOc7-;@)n! z!6}ROujf2@$r}5)dJiZg0Vi;IQ$r=7i)61_wkcWCMH0Ut=^`P5O1@ZugPzfR;j|zsXJ!vM7(bfz#-Y@5^T&@oz?)`yol|ShoGHZV#TxiO zuZ_&bv--0*WACg+IIS=ZXjgC%}`FARa0yQdNSt@4(=9%#HX(f%hK~JpF=qcMWYaA2oD(J*-HP`uqyA z2VLP8$?s;WuXnqbuRIyPLtd_3wOnCkwGN z1nBJgOf_kU3ZXhP7<(I>U^Ht;Yx`--w_S25efw)*te;x~NOcHj3Q)B#ugm;TGJ8^2 zc!{kw<6lah*y*$~y{LrKH*HY-_95^6`Ukf19idZ^z`b zWhDRB=0k)${t^!r5jFkN)kYpK#4+lF+z?)I-{X(^4ARnMHQYehai$2= zGqOYL?gV~l(e9ml(InD1Es!6jJn=VW-3iDPL`0LmX~dh;-~eF9nZIY+m_3?lj$*{A zAXw)CgoB*3waNLPs7KZN8vk#n{@C>Hle`oC&we9N?1wa6Xv!}_svP1j_ZHR48GV=e zer{4T>lKA&{B+P|>7`5mN_{9g7>A{bo(6BT z>(F2vS&v|)u^^%`@Ik(&J}wp{*2D5)eV^|pZ#J2fx-QY*R!XL6r?Wczo5IQM=EMJ1 z-a;x8KDZpWmaPVf6^F!+@5bP!zp^OZSZXXkBh-eFh>vfCCSvsZD6x}$$ylwBdBiJ z#P`jh@{iY^gD=#lL3Cm3ubF-^ZoU)tZ%@uc_MQHAG7MM6b?`XD3Ihh!c$DMxW~j5V zo5OBKo#SSZv(`4yC{H`mT{Hz~A1>v?^^wFooz9zkf@d346uiICK4{`rJGF6*3dUc? 
zbsIIs6&iBHucqrh|Mzd>j}{(dELl%UPAiq6z$nD%k)kpG zL6_aD1BQDCPwn-%r$_YhVD&rwE-#Yu=L{R}bXafF zeo0*G{2L@km2L5L^!z+O@y5$>^qPApu@tl|6!Sy15EWm1+mjjl%U~?`Ph~Y}wu^5a zB8lm;Hcu-cT4ngEBBy(*6{26XEboaIa33;%AFS#Tm7h(AF+(4udfulf<(s8?etw(Y zg+K4qO8@vKA`z17>EMmUpQFEOKV4IMwhJZ<4#ihJszv*j5_1Nq^rW`VNNMKCP5Pao z`VXP_V69PiIASNK?%PjDUn*P}WbtZXRizJoUMHYEgmU(b)p6%gwAgtDWx z3)N)Z6@TW*UF!(&4m5V2aVJ`pXofZFL(zjIcCOK+;lhuWlmTrd71nXfJ{M^(k?EJW36kr8lMs@&YTR z5??U$MeDUQJ>AVesVVu$7@NJgVsGw}HlsOc=0J3Gdy0dV^oC9ulApRbH!MTZ%;Y5Qj)|!*+9Xv6ktE_2^js*H;am$yDJn1-5=pj+9@E?Y}BYmYRmQ2 zRu)q$6E2IBp52ynIL&>D>5-Y-kQ6=w*>@16MrgZ=nTxG+wGDxBf%6%>W!}0<$&jNL zn%G}R&9~q>pTzR8sEqP^K;piJ{qSHnS}I@;`vW9!BXQMBx{ zHem?o5dSep8ZX=U2`*{hzj1mx9PZr2>yzTr+=9iw0wWeN%j3cRRdkQ~e++P8sgC;K zy=X1oZ{U|s@Ju0DlH;52*f886(SCfVdOdp3wjoah?i&e3dJo^>$lB!NK>7^MM1i!`oO1II3#C-P0W;6i zSxPW!f$989wD)|8W$TH@!0z6yEWa9k0Dm$OoSR(9QrP69go4=id2?%Ak%`)7tA_`u zt2il}h;nibucX31V!W*~w*vGGqR&K#hWb>x&c){zLEKo&m-EC%KC>TuqWXMeFfR-* zn8?~h#0oA=IB`>Bn5{;q=N-2_Hg5X?91yNK=CCare&0!AS}M2lHBcFnX-M;XtgLe? 
zq7-?*rx|!pb}(S}-;nremq#+|CIKXuZaV{<22RP_5PjY0d#kJG zJtQoVntIY5pOnDA9rIl_^7n-eDj}(X#6W4fhZ2off_&dc2LM4W0(&2gyXTgjBxDjN z&?rZ-+<$>bAEiDaE$47H92#`!e4xlH9j{bZE~GNNXE1Eia?`z(`Nzr^Sv9r+EU)W+ zak~n;ZmYT(m8d(RlvC+UIpb7xD)tCKW|eg=y>P(ihU^Gy1u2bm52Hh3Xymo6$%R20 zqhs~eQKdQ?CXuBcx3x`BWYW?nZ-vC!AoeJ@9DeTaQI;^LM=^oD7(wpMa&db7Z(m9V zbBc;`DBc~Q7^z)C%9p77%d4~U%3)F(n5t9y2W&e|QNMGj!|9)3NU|ofO7zlOYx>^v zvc>_A`or9P5)*w{j=;54Zh-o*;Ya#zRmagn7(Jzejy<&6>uY6$K;eM0L53Gdn~cpF z)H8Un+UDbQXXB-3Qz`x0?e_Cw1BXPr!7z?;(m-`*f08 zpe)vwI4hp1y26mVL?V~`$ z=uU;)ns3(9mr_l6DYB>3tw4%Kl@|4T0pCEiQoBX3fijJ&V%7F#-aYmIjL#+ISW#J8&tUAj{>vQKc56&VQ8 zfKKI<0i1FK)z=gk=2h4Y(n`}N4QxyCu}XE(xOK8B)HVYbmjj5#0InbNRpHM`6n-+{ z=5nKFa^jAliOan*P=tr20P93fe_dOS=~fsw~hoqOfyR& zjxA8)zz@tg|En~RMnS@)gkCH-@H&H~gJlG7$ne|du!VH)In<`ftiRMgB6rPg zZ{Pi>1*m;5#%`wz+2&MWC1yw_SBqF1{s4dE`vTYOQTQ$F)I(V~YZ?1lCcJJoBNOzs zG_JK4n)n3aZoSo?l(ZIV8~$IVAzPi~IZPb@q3 zU~BB8l1)T)7>oRPOZ6hltfIhL!Oan#4|&~I;o#w9l@~m>Gw%|+Qz5Ys@QNb477&3( zwK>+bT}40A+K<+<$a_j3+6KL`yNsjgSw+jIV}*B_io;Vy^+o}PgWpUxC=y4FyA74d zafThOA9LRrdc_;D4AFAYauGM4_wDu@_T%o}%gkCo4XA!5tA{v7ehFpT{BsEsbQ{?3 z-*~1U^j=?6Qd{@~cdH1}Zs)8~ z{jd_MpZ&kWDtP^6KbC++_bYVJ?`+jMMD0kww9^#;7{E{yJjxpOT}{D^mGPFw5XSkE z&R+z4&uEbl8xL%DO9vCr-F`+crCU=ioLn%e=)&rY1DXR?{pVLeh$YCopmF!q5EI;j z*pZS(Jl@48J(t@XtPg!63zny?#;fP9;6)!(o#x+<=l^*!J1|ZmUs(OnuYB6@)m`7! 
zk-GqFH>ue_-DV8^2@7UAX>HW27Uu>$zYi>iyYJarMGOvG;tfkgkx}qG017OR*mP$tI+I zOq~y6Joow?r>v|Hm1*Mf?j>j;m^x*l{dzwqwH~ME2uW;20VUh3D(zelO~Ga_tzhO)ibYiRY6u2IVPU3Am;}Tv_hd9>Xxc?^?}EPOe8R3v!sPMR({p^$kSQxnu>E z*$Myi8>wA>{#DzoMby!y1$A8fc0YvOpuDaViX&@V{}uAr^5iv>1&(0+8}p0XL6YSs zzmMF8~|G6s+Eu+jo26==)IxsQeZ5$)J4?= z#u+tKwM<#R&D(fw-x~0b=)MO0Q$vh8ul2v|^rOau0`P(@3{=WP&cBW5umh@6mjmK7 zV^f+3PU%;uDZI$vr@`Wd2;@F*=Cnq@J(%J8oZcN}@%wgQx`Y^Auqr1l6YT;s`J7xZ zR~dQpd_z4~;NDgEoT)zL(k3@yk*H$T zG3J&4+F){VE7M3&=E(S1$<`vzG%vH}8l6Sx3sV2$|CJh9`8j_V&WvL;7TU#`^UfvI zA6$ch-gpKZ*eiWsK6$egDZ5_V*STVuH|ih>x7?t##Ebec+BF6OBegjEU+(+%9c_Ux z7yRpr4tDYnZ8}G^^izGdGJR?)6J5p}9I&+!lr}ms1z(aaG>~jD*Mpy$2fb%?#ne&w ze!xe_on5;8$3x67xUCV+$$V`1`;qtOV~fq@6Dx?*I&WvvYCLVjJ6?B!!fm{JSA%S# zb6az9C<(QxvGJ5QD-PP=g7`)_y0$iPHh4W9#^6&4q}oyM3iTKz}2 zulk#UI;S$~MQ{O>`q;lJrKNbjz$y<5a8x%nx6zqK<WDI#+$o5B}$vzIT3jIPMzeh*zB_8Yi1v<(#S= z)(3I>dDA*S;&e+U7`EJ6BpJ4>?+ASLMYkXM5)uDLxA)FHY51z!AuJ}rGee;-V-h>B zL)+u#nIU&pKTy`QVQR<{9@`Qz3#Vu%H$A63QpIE?g_SO_D7eq+{2efQa?L-`XGyhZhO4+m_9#!ZMiF~)it039|D1};(d|}jK)F`rkm>(*}*@))3^%-m77mU%3 zzk}k{73o)_Y5~+_$!85}!aMhOBP1}U&pZB9b>yX>S}Efo3A#z5ok^LyeOui>%S*KO zS`*?YTGGZ|LX_W505$oqHNQ%leEk1;YhDUoLAOB;zO#nMf%AhV^#3qz>7sPLQhwv3 zmR(LJk}=Efu=6j(b9)8KkzeZ*(-^H8hfu3T^m8%nFGyg*;F?6|RSp4(-naO_FVUak z@~Zpp>iYByG_IiKvya1?!+Pv9Nst@^(1o)Y7)E=@%JE`h{6zCTc&^YSfd1 ziiIj~#ZMx5{Zbv^?r-`-{Wwv}tt)=wX#c1DZA}zodrqO0-reV@7Y+ zDOXIh_X02aW&abdE6Z^@Zd{IMRZl}@)IYiLn&k%UthgtlER1$%3ST8x{?NdN-TCc% zS+Bkut!%;O&gKgygjOAy8aDTyYSBwMBqYSqwTFe|{4styvJf^qc;5gDc!&LhwwdX$ z)@~K3K{0&YoZ{8?VMQA$$l0vqF)lbtZ%^%|L(avAB)PZqd;~yp@5wE zct*i!*n=3=NR9syY&p=49+uBk$K-_=Hg&Q$pFXk{fB)AkCT#mdi!yZlhgGoEs8OnV zJYR}xV=)d-b$qXaT|x=FzV}G&A)4ZvhnUcC;e_W*r-MNFx>NVtG_G$dBCcj|la1M^ zCE0qo26VN2cEb=MjwTF*Mh#`Flpt|!8q?^daD1C&8q=KVrX-ohsrJ*^?E36 zTiaEDoZ$vIeZ7>>8d6R0xxJIUACUeX>wq(B*{siHF z4wCD?BUUiyNRAjmY6T{=4E2vn4|N46pBKFl`g^YqsuVEqBW*Ks(oF40*Ip}*HW-c_ zu0XvCLkXk2&AkX-?kU$mOTiRbrtS6n#GBP;aVMovfq{9-&nkq=?CD!!PUDp^UFqg+ zl=~H3l2q|Ejlk`W3YIk7 
zqEDcalkZdki8JAs25X%gKzQcLq8u0BwJ_6zRc2E8+Dm=ON`b~>?lau?!YWo`MZ72K zg}kBxxCck;JmL+y>-ME&pmgj@(Rh?@LhE$5K=_YJH&psP=WY4{g0b;$87X2q0T6Ad z6~j3nK-fCX{eciw%1_g=VSP&}JyPhQppc#Dfa26TK*7+zh6-vsD0mq5p6Ps#Obs7+ zzwjsb_hN1Kv{(`>uN6nsj~pdlc;)ZYJ<-*PNDTR7X32X#h3oIrmCHb$?_HcLo82s7 zpD4&*5G^)LwaRgX67q>7;;5vCvGFRXVa&;`7?O~<+n4uk?PggF?8>UK<%LW|C3XCS z3ECL;vt-laAhUDyvI^(;+luwaI~oq&m(CnG{#nUaLAflxCr8z-EGWdup8K!)Oh4Uj z2}%#6qlRlOp6%-fg^bq%r|daJne+F>0koX%j9U7I`+^r9YY~~V<3?Z{{;kuUm%H%! z>bSYKtqBh97LE!Exrsig^cN53y2Pkgr+aHhvo$oJsPs7yCWkD4SS-)<*#=5GpK?`0 zkljr8Opull!F*>f9nPC7p!95m`ncv@YK6EW(`?Gq0_$rpJZsQ%vE;n%`J>nhBt%$Y zGGDRW9ARJ#%;kZp^rzDj=t-m~9x%BL7(X*|hTic5r_i^d_Y*?yo3&ofWgeP`$Ah@8 zWS@Tkk$*Mw^zH+4Ek#?_fFPiyQ622{WonUAyjD98=Q8dWiOv>E{{`rl1Y|Cm;MM{% z+w%qpJ-VEQUy-)NQqK7?&q7tCAgbTu<_?~faZXF}inA4ha^S8$?T~@wj&XpGwWHN( z5xBGIM^Uf92A19EH%0bqTZArSYjZRRQ$s$L86hu*O5&U>Pf{}%?zW*UHFs!+HCZ z<{M9CYvCk14Y;e4`{IhT&01;W2*9A!Jq>FLlgw;-1=}2h%#SGANl=4-8}5Ij23x0R z(ri}x-h*7wKRSIDu)4ET#JwsDr$!ZRj|uv=K|#c%^jKNa?A3-9XVu$DLL{zgz}3$r zbgY=7uUrr|vME@C5^T9}N4TnO%To*Va13un&$GMEB0(zVm|k;8mg4vLoM#moQQaR9 z1u(24!#-#>#fAWS%B(z~n?4EZBxxDyu2HGEmim|Um{Oy+rNlJe{nRM8%w+GYL~W0) zskBjm93=DLQoQ~zj>ehfcz6wk_0zf==l#y@%l;)%&L&Ht#?o<3t*_Y`|8d$MHv$|= zs|1G<{WlI}!h787YchSr<$CQ0x{Sbh@%9i3yNU%g6c7>vG!)YwzLRH$CHH#N$tSo& z3UPP9@cmCkfhM-RNR8S)T;W9P`g#kwdnCI|@r;W@BfVTyeV2KRqHN<5_CfMwm}Cym zYCJIMu5mYTbmKPQ{2as2XP`#^bab07EQ?cV?KA9dGY!nvgrp--cxk&6@YNej1ZuAU-TYl^vA@9{8*Sz8#n$OCpxG48 zrsY$D~w}e9XS|r{7Wd8JLYcnZ9VaRFQfGiW=KlXd{e3s)G-Fna%`9E+{ z6lVIZMVawqAf=uH(1?=Waucs1HP;Jt?N_af{lu`F@iCZC!;>XjZVF@6yb7qkB2ci678d&7Bn&j2Zq~6bp^BvqgL zjsmYuH$X)LzTi|V-rj!aiR~ZdtUW%?{mM;^@+d{B`0(bS)k#n=RK&^Ec+*D8JI5Lv2qKXzp+2^xSppRFIyT-ei} zspH%k9bwa*);jA+DDK)un8JpU<1sp=+(@jZMU7BX0x5|+a!2{InpD&#dCnJ8ovJ!3 zG#Sl=B^>5!avLA#=$5%3=!pI;OL}A~jCWc}2`FSKV;>dO$aaE)?+F`wspv$Qe^q(9 zLWlSlfzq<|qfe>im;boxBx96QX5^#pf1^{_v&s6zTvRI7k3hzcrbJqwW}+(j5mr~JTgfQobeAxrPN;%VLxU6S5qWDXL;hyx2%6pe^N_kq)Vurd_{{n zT~qZP*w#t^rB^-rSLF3speg1ZYblg~NpqIBaSgD|^(~$LFimWGegR(NtY6^kv+GRF 
zgSnH&7E}K4?u9!b@{aP8d`_h*_|LQdw;hxu|-&tr4&=ai;X>1Eq zzQKWusmI=sUmx*$t!+tu`5XE4XhTu_DNNP2CNJ^0+d@|8?}tiP3=hwAFZ33p`R(;? zXR`BKMmzpmiMPf<4$Zkar-AbX&KT4WUWJ@dP+6g}&#{U^D+3H+VlXdd!BX__(i61& zuho0xGFG1m0-bWD{VB(isaOzA_>ybagKn3ceE%ocUe)?L9l7i%JdiTmN%vJ4j+#yA z6`I({-E%5MsZA8ujRoZwwA7SbI5T)0qC^llHm8R;erRSfG()o$tZUwoWMYC1SmC}s zhjNM)!$DA*31AQ4V&+9Y(mV>Fp@0pW7n&6))ma}fT z!8SpIEm@yz*c~9>H3=2p+D7m0&xKjIrG+FSS9JI0Njv;AA3BsAdf*qY(X&^r zZuOOW&v8?tM7PW9N{VzKrRNQh_cx~FgU003p~G9lzsPn5_gtn4GAnMW1!=hiU@Gq7 z?D3bGXdh6cvp)7MibFw?z66a90H8{#jq6AsH1U+<3$xDMDKsU6_lfflVf)rrVKcLlF_*$8cz$eOJnrI+_RM&N zWuZY%q-49J;BYgAL^)c7j`3sENK)q;gs&YA`unLp{d}GMsIeqQXX%u0O z1{!wA7g3kfwBNKw({^ue8k7Wzz(Tx!YgRSHCLPnMNL zn!B|QTONWlY8-b#V^7_yxPkL1+p0 z0-3)%ySc`ZjgTyc_wZD3>fV3XFVgtmZacNpeZh2<>_LVR0y`|L`gh zqFB?ggv;sfBW^MANgvIosS4JC;5oU`MsAcq*z{{C#H?OE+W%yYT z1ZUul_r!jK2#WotN1A_!+sjrymgs42kc^K;(bvXD!~gZE_eagmSFZnjIe@63(Cy=n zh-$V=mbDG-g?a#{G*)D?Qu@VR4_l z+0WL^kE-kEXGVR~OZ%~tp;uwNdRA&s5`qDvELR_&57_X{T(mJVcz-QbQ|jm+#r};U zyYDw0I=ETu=evo*$yZfNn?LKlT_?Rk`(deFmnA_1;0P^E_^yu z1#V(@;F)a}!?{E+U_%!%`B3cx6GEUGCM^j@&C?s5o+sF~$$Od~%b>?wb~GW=a3@XxtVpN(7t1*of~>oG;hxuY>p zXS!Js-38)E*3Gu~1Z#0|afDDUd?BGh9;5vNUFYZu+rz#W4SF zfBOx&(%I+S`x&ea-Tv=7@E-0Tt-2$MM$cTZ62FP|(Kbr=D~fyds{mCo9=6LE#0s(jVF{WeCb_1pGi3c(|0V6>Uq5(TVy+|+^7l(b>! 
z0|h5D%CWhz*+^YZU^qWB8D?7AuJV1JC;v*)5j)oGjBp8tAhp|9((EGDv!>%5nij!B zR0?g;_TfMz}tIRW37`9Cj!IJn3sg7^D7($g~W}0AE;TwBF<)UQY0KVM{Gz1a7ydvU6uSPDc=w%1!a7al!LTOdw{0c6I^FD(VMKXvE zl8_EdxY8!ET5ce_W5YL4GfJA6^?6yy9i<6WG0d80io)5!9_4OYg^-&VXc~Q;=Lz)>_+TE`4%=pF z)?>E7b`S~&`L(9RqoafO(NyM>unuh3`^MGpH~peAKKXs2&X!1HP147tIma2jGto(d zRz?uP%rc*OwLH~C72Y|_KcR`iWJCzrYUveryBWk|yicc1HCh@WFnJoC7LFV6!NtFS zQMZ?Ow1_U3TNEq8)0p)c2WSx!ql$A7c0|DX-3GLqnoHcik8c*9z^GwYw`K0iUCroe zlE}%Ao24HpbBMAA-f8!KYpj~hcU{`QPHm`aVqH|K}99sy|jSrfIg8eK*e zcf5WW+8HX26QG)K1wIt|{dsSr^b}zT*L^&q1!Baq>C7--+QhkmECP${?UEB;UktUO zH{#R6KK^j))eP$90wA*7)Mr^YNpXuOZbXMzHy>mG& zBafTjb7b4FK}Z@}tJr||wi|EqjHmV@`dNILczCQ>D-rc5ZDpD@7zSOn^V^pjG4Q& zDPH{6u6?_QWnNi@79Ez*zQ;V3XUfx`3<8gk`v870N#C9v<2iLcFGb4A!oxCBcLpbY z7j<)Ajhv&w{d^j4j^s&hNoa8makiGV7_DjkRTGUwjMu;WUYuF6vsDLQ@3tRq%p3n0 z?OQ{fE$tAW{b&Vh;oWj8&bv$>ZV|E9rZNqSk0#m!(90P>OZ+f$6kpUfJ@4AG5T4Q^ zu1E~t@(^YXSMLaXwq>x&$G~jg_;n3IU{A0c{Z!R&dDJ{4dC7`n*P+*iZh@;8Y#!GM z_sUwGA!gRODj{H|@RppCl1q%a?XW)VMk{ALp$ zAU?|xobPWw#4{ZZ zK`6{gAQ-30?zJKI9-Ru_w2(qh>s_a+?&@kph8RSP#mWx*z^U~aHNo(KpiqRW!?nU} z<79>zy+D=y*1J6jGK%CM>$M#8YrzsYL{}axW-(^tXJ^@-=A^foXs$XfVcRav*2VDU zTnR*uKadIVYIcWH-AC@%wa(0Zw2E)7G$_Yz=`=(t)QV`9*|9w+wP;I4^X%VlYKwWr z1(_AsRtw^}H%)#44|CN$Y@#1Ty zfw!^g@y>us85XD^Hg{Lgqf?|Kljk`%jpn4%iY^RA#ssAkrz-@ zDqg}9kR`p$2-DWf`*`!=g*i;x0HmdAW~k)asv+im)WS$c`t_NpW4jQ;3k5%m9(tjB z?F9|^cyxU#i=3IkjIfnlEiR_fy()_N6~i|Zk=M<0nkp)erd8yr4!)icy{g-lnAb2B zk&h1ibZEM(kTb;pu5cynb#IIsqV!A#vOtuig1{>ce_5S2$?9a)UEC?>Dm>o&`N=_c zW1ZvO{m3`^02aS+J_{4y2iAhy$vXXgkpRC?RdCa8$m%DwP*agd;^{K8zC<=H=< zMpB-?J#lH?V)e)Ir{^L2VXgHAs&{-{_AxPUC-+}gCr96dGK|Nc2 z26U`CO)H=z8R0{=s<`jNQmHxVb3BS*^3m4AW&Us~JS6~|2VQFrt#h}LonJ(m(V+aJ zem&mIQ|M)@cIY??TSgv0mzk>4?hgu{iTfg8)Mk-Mlb}$t6g$b38L$*DB$7P>By5a!8&d)GV#y%hMLx zwAR2eCRzbvCZDF+8gce6|7Xc_#WINaLUHypK$)4iTnI3o9}QYm#}(IYIoYF;J#h(T zd;La=MIJ{6fhH8^Se64v$<)OpBBXt>`t0H`xH%*0e%)nrc9#tfewFnxGa~|{$FlW6 z_@7xkBaop)W7&UjdepZ0b@~-nZToCM+o}}#^r%hU^9l{b9Sc%LsXQVaKywDx-h^oI 
zCVWDF4fH(W4bwWYoxJsUDe~M;`X_@o!R_uDcp-k>0+juVMA;9Oqt^zo8%A$4%(O3@Htsm;h zAHVh^Yo(jWKHRKENda_0{M6F^2fuIYuOCI`8+Ed5)jU&sq_&M;gZ_XXs>0XTK-7?V z!R=MHMu`@p)P`tHH_*iy7Si58lZM0h@tpIHJ+B=vhUeF6{#m0f#`2T!yT&FW+?6+& zq=$@Y#{CQ({0)5#n_&JH>NL9UQ9yWw4dSL_-gu_+o%s_!nD<#8w~bDZ%U3a$!+B~a zj;+4cuKh!DYExN5TtW`SCwZUSas8~x4car9|7Hc5;TgN{q-z-m-gupa$}shL=ogry zE!PvPaM+BTHa*>yJGa>)*Pc@!b=~G$9BkT$8!jJin~%6DxLLwBZh(*?R~qp{qWfr1 zk9%?cr7nG17@tI~7k==Skx;|U+RSZEzTvHtnG<34R^fT6_?n}EKJ`|46u5dZHnTcl z9kgIvE52|#&bFNZMg-cJnmhv^VmLW(@k$4Wq{_RTK%gr*h7WY7B6z5Vqk$G4z1(&v&DqG<_Llg2$_Dw-spscf?7PIS`}w?;D> zyn2DO-yI^6_V098&tw7#2#DTNb17-3l5TG%rO!OlTZq*Lx}NsSZr`-1djDIvpC>UA zMV@!ozNs)eem&{MWOldR8ewM1B5gebv3lR)GJIpBswDD085^-uX|5ew}o(?Ppk(fS^f*{sElQlBpILKZ%;ls=u6 z+#C!uJXP-UhsOS1IRkn#O_{Uuu@H4z%Z<3Q#afcxMZT_)~TIm*_$Bi?M9;jwZIT;Z85JDo;L52C7#VE0GzBI13TC?YmO z;Zq$->3YAGfNaL2LH!77JpX(|0i$tSShQrLS}8R@HSQO55}xS~ar-Q&GFrdHqEmU5 zhIe=F9mes)rWbBL<8}kxt~{(oHZyowzPmYb_hIK5c4p^q(!y!JE*)KH$_M}Jn8gAn zyT&a?o3$-*#K*Ml+tVMSUVUqM08af`Jgle|J`x#Vs+EU~DfsDCcLfcqUfK%iIqO@b4<&nX+%^#{tU;yN+4 zqn|#%oW*-(dc_Y+-(Ba5%Y zN0w7XRV;9P)uC|4BilF<70C2CTMju7_3`Z(T8{G=9Kpf!RR#G$uKoVG4E5|nfgP=h z!mpGy!J{;VVMv?RymCQehi=3)6;bQsstuF&MHDpfu*Y*HTf(C!>}3zO4ILnwkj6~s z$l8-ZW3q=csfm?MVO%$=mBpZArTW63=ohRfhnKz`e_lEHyii)4C!5nYFp_j&7t(-< zY>lntM)aT+6Y3x0xnmKk*vJ2p?Om&P6X{wO@Rgs(j}(}S+I~!SRBWF=!#h~-r=cSD zN$vj$_H0YU2%*-koipw)e)%veTJS^jMLWx^DrzpKF`ZtVR~YP>Hge_y7-e$lp}p?A zu(RRl?d$vPYUA1Ac|A+VbBqA@$2Y`r%Q#k#~P4);7 z8F^tzM^dH4b+$xw>3Qrqq!@6{IoND-6zD`r2Ar8^jA50u z2Nm4R#rUK&%;J{ReiiCM?(ap4Gp>g@ST`&=?ZYm&pQ3+AOdXDlm>z;=6Myh4J(@rX zr>9_rCN8l(*UH_dKeRmb@?SdR(x>l>a*K*ugup6GBYG^V&B zDJqJAxq;+z1XLtco`dr|Gtc|q`~N%V&+`W#&UNnlKHvLyU)S$q(BC_Z()QFcR4sdp zkriAWFV4G{Q$o+mkM4GnMiBo#P2if1@Gx<{R;IjanL?f~1vxeF+~%umb*Q8%$mTIq zB$v2BW-v7)>r|5nI^3!?gI(fhX3{I3k8(`Jm*!PFBjL)wz?k7C7& zfR0DIFH0Wu^=~ng-Crg=w?!r~z!F!(l#an5vFh9dUC(NEtSJV{MWczbT5#Dg)wwi4OgTP67?3UrXCOzV_`7k^%Hty8N||54{7 z7g6|ICHj)a_7qQejIGNW+`F!hz5bU^2Wx9cnAZs_UtcA7AQGJ?^|>+KncZ_Yrf_t; 
zE(Y{`{2h#u0MV4C!9Far*l8|X^hVb{)T;8Rjcy-GTRI|t8JoeA6{WqmjA>Ktn4aC= zqoeNs3hvEpu@)`@5%_;8TI82CMD51K*7{E{JkMF|E1e7zjuN1v_IUJ)? zmeeId_S|&(WgctP*;TEB&s7$24#pkO8W~7wJ(E2pa6~a6K47|MU(R3;vqd3mSUl(e zT_2A21r@q9Xyorsl>9%yJ+pJUK`-#8&xShZ1nQ4nEsafg*G*KIeJLGv5A1NWJ4&uH zE3MYR_1D#i;dOJtD?$!I5fZY8OSv+2AvK!uNWc7E)nsHLYFA(^WPD3L@YH{8U0Dff zMVhoK;Db;pmL(qsb{G4q36|)kZaWn2;LSThQx(Pl;9wYafWUx2#y);w{&viShrfu~ zdX~PaX=p+l66hBf%}(1Yi`>nPky=}?Bs+B5x=cbkfugQAsig2Xjh928l-cxF^9Dd< zXRjhVl6mxs6@$Dxu4rzC{u1mtv^|6Bo!YjX%n6ruBAE6-IyT<>LgXdIwT`N zG~+WG71MU}F$Neb|1T}xqkKkt58HL9wahj?+>!8Wb$o=unTNsk2Gm~;RsVdnR-n`~ zc?W+Aj>2JCmEKQa&iQ9->$ai(0?$wZmu)Tf=$5vL-8hCUr4F)ZQ=i26VSS+mNU3L# zgD#~P0otuAjgqM9p^XZD6{To%L}50tymT!2^loJH{*T<=)mMFG*CftnQfWvNJ(6sR z$`O9gQNHOzVuaaOpRD@S1nJdH6hEHg4INC49+Vu&EjK=C#YgP92s?qZZRVy-^hRu502TRL zeVS+(`J8v>&?G$hORxIKh-hrmXmoI^Xoo^4kbQ_eq0L^>_r1;KE%k!i7(Zytnr;rZ z(jU_AXO%RUFvVFyhjXWL2*PI*D)!vdcGLKjy~B& zyPzGYi*GZE%*9QkJE0LXVgOcKMwbFEbkrL_uoRiOxt=Xq#-oGx56IGP?`wCs+f`o~wV8g=E~>UBr=|8Vn8g0pG`5UEy_N>Q#a{hBjdA>}$d ztKA+S1I7z0zvQu_Ni~ZWFIuu!q|IhV zBCZ185mAjJRxgmAkIVH_ldbY(70Az{XxK-X8t0?;ZasOiLBok?md|Z?F+TO>&ROLr zur97*j2x*ddG-TQ6A$d3u*?QY9#`i}+4{5(omL`nl*PUHBKoszS70*4Nu4#oxz+(;zuutkr=gfu!uT*54W+5R{oSOH%yr7RzuHl5)sQ!8h+K{#QN&Pg|T%LpDYcaLMo``-% zv~S%8pl31#qeZ;fsfYIFHzD9seeTWhIo_JeM{AS&)66+PGb^(u_}A&~QG#sW!_hz% zO1&BR5aq6&?>aT;LchV0d&VBeIezDumq9N16rU>xn_znb70(CGL7$u7unqJ%k0U8( z1_Mlm^hF$tFll$G(mz5%=}g{1k|s9)O0<#88=xQtW=TZrI`yAFwSQUAB{Ak=%Wb={ zsW{i01!e0PV1*qh!vj;zPC=hlx8ve(hj8*lSOu*Q)n`Ig^bgpZ&+34$p@4sFFXhLL zPwsfBv>>y}BJ(5VuxE2HCRMsfU8VUTtnk?B+i6GWLE?tx!Y@RXH8J@E`WIb>hW1%D z;DBIR_+gk7!?@ptL>BL6%?mZoHyvTbyn4f*e{-%t8fr-|PN7_3Af`AwIl4 z^D5g#A;e}>O=rDMYB#l|vg;xn0$}Bro65mC4@oQH7i!5a^hJ1Gm*FF14@SW}-?=_- zkK$POTV`ziK|&VKMGt8K3`alzCQ~$JcU4$L${G#Ben6Jt_f2s)#E#cA`^)}L1ZQ&EMo3`6Wc`S>+bI47H)vcByCNdH`rP$OMu+dR&uj=Z*+;V8dt5VK=YCuodi{NE}$ndNidY7}ke5 zB>bvY!Mrz_em)S|2bMX@!BbHimZda`g2HB7I$LTcEvlOB((kZM^ndd6vLL`3?wZ&| zq?kR9@v|Updm6E$J<94wL+Sv%br4Y%Xv0U4j1Bk~Dp)w%Y9BmCEX`oYfi?$i0VA%!?{ 
z|0ATiTp(cd+y^E2N&RiihD906vFHGfIeG#V;8TX*_QW^DqLd+>fvjMcxlT)teXGv9 zW0UE9u~QQSt*G7LMhYDzpfA@VJI&2+y*t{AGPG_@WNdcA|%O8T#YUbM=&hXr&oEcjN&%LCG4bKj2HtO1sW4RL9{DUN4J{ zh?`=7>-(Yk<*@@Lu0@3@6WM!)o*SU%VK)*;llKup^Z^M@By0IF^UVtz zV_MkLZ5|>7ZK;}R_AyR-0Y_NP3UZPWP)quFzlp7T)6>kTu(~X z9da1iWiIMkkUf-$jt^eXxLSG}ago;oSBElKPDbxPvle}i5vu-jVOPNNB`%ED{_f(pd zw&BNQBL!$lwFh@-@6S3ukP%VNPe^ey%{gI?9loI~YWGfzpG7w3 z8z53}?Jhd_~M z4X07M7$@{eO)8D5XT8CTl!XQj>vJiMnxNex+LJdF){RVQqh>!lWZ(%y90|=a#3@rV zWzu@l^SJA})as~w^|x|gF6^hT+=CApo68OIf~kF2nBtAp`>hCUa+D7w5EegP*H3$l zS*Y9abETLMVUdI4s*xd@!VM%E40YMcnwwKKZ#|RM+gw|>-bn#6`Tq1rkmhA!$ z9Bz1P?{=Jz-z$9pqI$~Z$#vu{9)G3JTzZh9^@MbC?A3dW09E@HMraE5B?Pl)xa#bg za0?e>FZ%yt^+Z~M9BQ~clFeA{a^#Dk7wLN*>7cGJ$stMy`jqq!F(gH{U+N%}VB|%n zNgq1!MD-wT~I zX_5>(zR2NVW9D$f`90VeS$GpTDopg9N31O6*yE(p^MKMX!0hgexS{!NgWUGXbk;I>rZEqFvH*TeJJt!*>Bb+=!L*KP87=vM7Ly&mx4c20&Box4Tx%h zjIuQX=^SbZ;{GNz%CBA_f7(ob{Hv4b-#68<`8yzgkC6YzvqQiKx4!XK>wdrV<2H$% QdMLOZ^ZaM|kt=uq1>O-npa1{> diff --git a/docs/content/docs/datastream/http.md b/docs/content/docs/datastream/http.md new file mode 100644 index 00000000..030cb262 --- /dev/null +++ b/docs/content/docs/datastream/http.md @@ -0,0 +1,179 @@ +--- +title: HTTP +weight: 3 +type: docs +aliases: + - /dev/connectors/http.html + - /apis/streaming/connectors/http.html +--- + + +# Apache HTTP Connector +The HTTP connector allows for pulling data from external system via HTTP methods and HTTP Sink that allows for sending data to external system via HTTP requests. + +Note this connector was donated to Flink in [FLIP-532](https://cwiki.apache.org/confluence/display/FLINK/FLIP-532%3A+Donate+GetInData+HTTP+Connector+to+Flink). +Existing java applications built using the original repository will need to be recompiled to pick up the new flink package names. + +The HTTP sink connector supports the Flink streaming API. 
+
+
+* [Apache HTTP Connector](#apache-http-connector)
+ * [Working with HTTP sink Flink streaming API](#working-with-http-sink-flink-streaming-api)
+ * [Sink Connector options](#sink-connector-options-)
+ * [Batch submission mode](#batch-submission-mode)
+ * [Single submission mode](#single-submission-mode)
+ * [Http headers](#http-headers)
+ * [TLS (more secure replacement for SSL) and mTLS support](#tls-more-secure-replacement-for-ssl-and-mtls-support)
+ * [Basic Authentication](#basic-authentication)
+ * [OIDC Bearer Authentication](#oidc-bearer-authentication)
+ * [Restrictions at this time](#restrictions-at-this-time)
+
+## Working with HTTP sink Flink streaming API
+
+In order to change submission batch size use `flink.connector.http.sink.request.batch.size` property. For example:
+
+### Sink Connector options
+These options are specified on the builder using the setProperty method.
+
+| Option | Required | Description/Value |
+|---------------------------------------------------------|----------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| connector | required | Specify what connector to use. For HTTP Sink it should be set to _'http-sink'_. |
+| format | required | Specify what format to use. |
+| url | required | The base URL that should be used for HTTP requests. For example _http://localhost:8080/client_. |
+| insert-method | optional | Specify which HTTP method to use in the request. The value should be set either to `POST` or `PUT`. |
+| sink.batch.max-size | optional | Maximum number of elements that may be passed in a batch to be written downstream.
| sink.requests.max-inflight | optional | The maximum number of in flight requests that may exist, if any more in flight requests need to be initiated once the maximum has been reached, then it will be blocked until some have completed. |
+| sink.requests.max-buffered | optional | Maximum number of buffered records before applying backpressure. |
+| sink.flush-buffer.size | optional | The maximum size of a batch of entries that may be sent to the HTTP endpoint measured in bytes. |
+| sink.flush-buffer.timeout | optional | Threshold time in milliseconds for an element to be in a buffer before being flushed. |
+| flink.connector.http.sink.request-callback | optional | Specify which `HttpPostRequestCallback` implementation to use. By default, it is set to `slf4j-logger` corresponding to `Slf4jHttpPostRequestCallback`. |
+| flink.connector.http.sink.error.code | optional | List of HTTP status codes that should be treated as errors by HTTP Sink, separated with comma. |
+| flink.connector.http.sink.error.code.exclude | optional | List of HTTP status codes that should be excluded from the `flink.connector.http.sink.error.code` list, separated with comma. |
+| flink.connector.http.security.cert.server | optional | Path to trusted HTTP server certificate that should be added to the connector's key store. More than one path can be specified using `,` as path delimiter. |
+| flink.connector.http.security.cert.client | optional | Path to trusted certificate that should be used by connector's HTTP client for mTLS communication. |
+| flink.connector.http.security.key.client | optional | Path to trusted private key that should be used by connector's HTTP client for mTLS communication. |
+| flink.connector.http.security.cert.server.allowSelfSigned | optional | Accept untrusted certificates for TLS communication. |
+| flink.connector.http.sink.request.timeout | optional | Sets HTTP request timeout in seconds. If not specified, the default value of 30 seconds will be used.
| flink.connector.http.sink.writer.thread-pool.size | optional | Sets the size of the thread pool for HTTP Sink request processing. Increasing this value would mean that more concurrent requests can be processed at the same time. If not specified, the default value of 1 thread will be used. |
+| flink.connector.http.sink.writer.request.mode | optional | Sets Http Sink request submission mode. Two modes are available to select, `single` and `batch` which is the default mode if option is not specified. |
+| flink.connector.http.sink.request.batch.size | optional | Applicable only for `flink.connector.http.sink.writer.request.mode = batch`. Sets number of individual events/requests that will be submitted as one HTTP request by HTTP sink. The default value is 500 which is the same as HTTP Sink `maxBatchSize` |
+
+
+
+### Batch submission mode
+
+By default, the batch size is set to 500, which is the same as the Http Sink's `maxBatchSize` property.
+The `maxBatchSize` property sets the maximal number of events that will be buffered by the Flink runtime before passing it to Http Sink for processing.
+
+Streaming API:
+```java
+HttpSink.builder()
+    .setEndpointUrl("http://example.com/myendpoint")
+    .setElementConverter(
+        (s, _context) -> new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8)))
+    .setProperty("http.sink.request.batch.size", "50")
+    .build();
+```
+
+### Single submission mode
+In this mode every processed event is submitted as an individual HTTP POST/PUT request.
+
+Streaming API:
+```java
+HttpSink.builder()
+    .setEndpointUrl("http://example.com/myendpoint")
+    .setElementConverter(
+        (s, _context) -> new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8)))
+    .setProperty("http.sink.writer.request.mode", "single")
+    .build();
+```
+### Http headers
+It is possible to set HTTP headers that will be added to the HTTP request sent by the sink connector.
+Headers are defined via property key `flink.connector.http.sink.header.HEADER_NAME = header value` for example:
+`flink.connector.http.sink.header.X-Content-Type-Options = nosniff`.
+Properties can be set via the Sink builder or a Properties object:
+```java
+HttpSink.builder()
+    .setEndpointUrl("http://example.com/myendpoint")
+    .setElementConverter(
+        (s, _context) -> new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8)))
+    .setProperty("http.sink.header.X-Content-Type-Options", "nosniff")
+    .build();
+```
+or
+
+```java
+Properties properties = new Properties();
+properties.setProperty("http.sink.header.X-Content-Type-Options", "nosniff");
+
+HttpSink.builder()
+    .setEndpointUrl("http://example.com/myendpoint")
+    .setElementConverter(
+        (s, _context) -> new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8)))
+    .setProperties(properties)
+    .build();
+```
+
+
+## TLS (more secure replacement for SSL) and mTLS support
+
+Both Http Sink and Lookup Source connectors support HTTPS communication using TLS 1.2 and mTLS.
+To enable HTTPS communication simply use the `https` protocol in the endpoint's URL.
+
+To specify certificate(s) to be used by the server, use `flink.connector.http.security.cert.server` connector property;
+the value is a comma separated list of paths to certificate(s), for example you can use your organization's CA
+Root certificate, or a self-signed certificate.
+
+Note that if there are no security properties for a `https` url then the JVM's default certificates are
+used - allowing use of globally recognized CAs without the need for configuration.
+
+You can also configure the connector to use mTLS. For this simply use `flink.connector.http.security.cert.client`
+and `flink.connector.http.security.key.client` connector properties to specify paths to the certificate and
+private key. The key MUST be in `PKCS8` format. Both PEM and DER keys are
+allowed.
+ +All properties can be set via Sink's builder `.setProperty(...)` method or through Sink and Source table DDL. + +For non production environments it is sometimes necessary to use Https connection and accept all certificates. +In this special case, you can configure connector to trust all certificates without adding them to keystore. +To enable this option use `flink.connector.http.security.cert.server.allowSelfSigned` property setting its value to `true`. + +## Basic Authentication +The connector supports Basic Authentication using a HTTP `Authorization` header. +The header value can be set via properties, similarly as for other headers. The connector converts the passed value to Base64 and uses it for the request. +If the used value starts with the prefix `Basic`, or `flink.connector.http.source.lookup.use-raw-authorization-header` +is set to `'true'`, it will be used as header value as is, without any extra modification. + +## OIDC Bearer Authentication +The connector supports Bearer Authentication using a HTTP `Authorization` header. The [OAuth 2.0 rcf](https://datatracker.ietf.org/doc/html/rfc6749) mentions [Obtaining Authorization](https://datatracker.ietf.org/doc/html/rfc6749#section-4) +and an authorization grant. OIDC makes use of this [authorisation grant](https://datatracker.ietf.org/doc/html/rfc6749#section-1.3) in a [Token Request](https://openid.net/specs/openid-connect-core-1_0.html#TokenRequest) by including a [OAuth grant type](https://oauth.net/2/grant-types/) and associated properties, the response is the [token response](https://openid.net/specs/openid-connect-core-1_0.html#TokenResponse). + +If you want to use this authorization then you should supply the `Token Request` body in `application/x-www-form-urlencoded` encoding +in configuration property `flink.connector.http.security.oidc.token.request`. See [grant extension](https://datatracker.ietf.org/doc/html/rfc6749#section-4.5) for +an example of a customised grant type token request. 
The supplied `token request` will be issued to the +[token end point](https://datatracker.ietf.org/doc/html/rfc6749#section-3.2), whose url should be supplied in configuration property +`flink.connector.http.security.oidc.token.endpoint.url`. The returned `access token` is then cached and used for subsequent requests; if the token has expired then +a new one is requested. There is a property `flink.connector.http.security.oidc.token.expiry.reduction`, that defaults to 1 second; new tokens will +be requested if the current time is later than the cached token expiry time minus `flink.connector.http.security.oidc.token.expiry.reduction`. + +### Restrictions at this time +* No authentication is applied to the token request. +* The processing does not use the refresh token if it is present. + {{< top >}} diff --git a/docs/content/docs/table/http.md b/docs/content/docs/table/http.md new file mode 100644 index 00000000..630c0f2d --- /dev/null +++ b/docs/content/docs/table/http.md @@ -0,0 +1,545 @@ + +--- +title: HTTP +weight: 3 +type: docs +aliases: +- /dev/table/connectors/http.html +--- + + +# HTTP Connector + +{{< label "Sink: Streaming Append Mode" >}} +{{< label "Lookup Source: Sync Mode" >}} +{{< label "Lookup Source: Async Mode" >}} +{{< label "Sink: Batch" >}} + + +The HTTP connector allows for pulling data from external system via HTTP methods and HTTP Sink that allows for sending data to external system via HTTP requests. + +The HTTP source connector supports [Lookup Joins](https://nightlies.apache.org/flink/flink-docs-master/docs/dev/table/sourcessinks/#lookup-table-source) in [Table API and SQL](https://nightlies.apache.org/flink/flink-docs-master/docs/dev/table/overview/).
+ + +* [HTTP Connector](#http-connector) + * [Dependencies](#dependencies) + * [Working with HTTP lookup source tables](#working-with-http-lookup-source-tables) + * [HTTP Lookup Table API and SQL Source example](#http-lookup-table-api-and-sql-source-example) + * [Using a HTTP Lookup Source in a lookup join](#using-a-http-lookup-source-in-a-lookup-join) + * [Lookup Source Connector Options](#lookup-source-connector-options) + * [Query Creators](#query-creators) + * [generic-json-url Query Creator](#generic-json-url-query-creator) + * [generic-json-url Query Creator](#generic-json-url-query-creator-1) + * [Http headers](#http-headers) + * [Timeouts](#timeouts) + * [Source table HTTP status code](#source-table-http-status-code) + * [Retries (Lookup source)](#retries-lookup-source) + * [Retry strategy](#retry-strategy) + * [Lookup multiple results](#lookup-multiple-results) + * [Working with HTTP sink tables](#working-with-http-sink-tables) + * [HTTP Sink](#http-sink) + * [Sink Connector Options](#sink-connector-options) + * [Sink table HTTP status codes](#sink-table-http-status-codes) + * [Request submission](#request-submission) + * [Batch submission mode](#batch-submission-mode) + * [Single submission mode](#single-submission-mode) + * [Available Metadata](#available-metadata) + * [HTTP status code handler](#http-status-code-handler) + * [Security considerations](#security-considerations) + * [TLS (more secure replacement for SSL) and mTLS support](#tls-more-secure-replacement-for-ssl-and-mtls-support) + * [Basic Authentication](#basic-authentication) + * [OIDC Bearer Authentication](#oidc-bearer-authentication) + * [Restrictions at this time](#restrictions-at-this-time) + +## Dependencies + +{{< sql_connector_download_table "http" >}} + +The HTTP connector is not part of the binary distribution. +See how to link with it for cluster execution [here]({{< ref "docs/dev/configuration/overview" >}}). 
+ +## Migration from GetInData HTTP connector + +The GetInData HTTP connector was donated to Flink in [FLIP-532](https://cwiki.apache.org/confluence/display/FLINK/FLIP-532%3A+Donate+GetInData+HTTP+Connector+to+Flink). The Flink connector has the same capabilities as the original connector. +The Flink connector does have some changes that you need to be aware of if you are migrating from using the original connector: + +* Existing java applications will need to be recompiled to pick up the new flink package names. +* Existing application and SQL need to be amended to use the new connector option names. The new option names do not have +the _com.getindata.http_ prefix, the prefix is now _http_ prefix. + +## Working with HTTP lookup source tables + +### HTTP Lookup Table API and SQL Source example +Here is an example Flink SQL Enrichment Lookup Table definition: + +```roomsql +CREATE TABLE Customers ( + id STRING, + id2 STRING, + msg STRING, + uuid STRING, + details ROW< + isActive BOOLEAN, + nestedDetails ROW< + balance STRING + > + > +) WITH ( +'connector' = 'rest-lookup', +'format' = 'json', +'url' = 'http://localhost:8080/client', +'asyncPolling' = 'true' +) +``` + +### Using a HTTP Lookup Source in a lookup join + +To easy see how the lookup enrichment works, we can define a data source using datagen: +```roomsql +CREATE TABLE Orders ( + id STRING, + id2 STRING, + proc_time AS PROCTIME() +) WITH ( +'connector' = 'datagen', +'rows-per-second' = '1', +'fields.id.kind' = 'sequence', +'fields.id.start' = '1', +'fields.id.end' = '120', +'fields.id2.kind' = 'sequence', +'fields.id2.start' = '2', +'fields.id2.end' = '120' +); +``` + +Then we can enrich the _Orders_ table with the _Customers_ HTTP table with the following SQL: + +```roomsql +SELECT o.id, o.id2, c.msg, c.uuid, c.isActive, c.balance FROM Orders AS o +JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c ON o.id = c.id AND o.id2 = c.id2 +``` + +The columns and their values used for JOIN `ON` condition will 
be used as HTTP GET parameters where the column name will be used as a request parameter name. + +For Example: +`` +http://localhost:8080/client/service?id=1&uuid=2 +`` + +Or for REST POST method they will be converted to Json and used as request body. In this case, json request body will look like this: +```json +{ + "id": "1", + "uuid": "2" +} +``` + +### Lookup Source Connector Options + +Note the options with the prefix _http_ are the HTTP connector specific options, the others are Flink options. + +| Option | Required | Description/Value | +|:-----------------------------------------------------------------------|----------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| connector | required | The Value should be set to _rest-lookup_ | +| format | required | Flink's format name that should be used to decode REST response, Use `json` for a typical REST endpoint. | +| url | required | The base URL that should be use for GET requests. For example _http://localhost:8080/client_ | +| asyncPolling | optional | true/false - determines whether Async Polling should be used. Mechanism is based on Flink's Async I/O. | +| lookup-method | optional | GET/POST/PUT (and any other) - determines what REST method should be used for lookup REST query. If not specified, `GET` method will be used. | +| lookup.cache | optional | Enum possible values: `NONE`, `PARTIAL`. The cache strategy for the lookup table. 
Currently supports `NONE` (no caching) and `PARTIAL` (caching entries on lookup operation in external API). | +| lookup.partial-cache.max-rows | optional | The max number of rows of lookup cache, over this value, the oldest rows will be expired. `lookup.cache` must be set to `PARTIAL` to use this option. See the following Lookup Cache section for more details. | +| lookup.partial-cache.expire-after-write | optional | The max time to live for each rows in lookup cache after writing into the cache. Specify as a [Duration](https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/deployment/config/#duration). `lookup.cache` must be set to `PARTIAL` to use this option. See the following Lookup Cache section for more details. | +| lookup.partial-cache.expire-after-access | optional | The max time to live for each rows in lookup cache after accessing the entry in the cache. Specify as a [Duration](https://nightlies.apache.org/flink/flink-docs-release-1.19/docs/deployment/config/#duration). `lookup.cache` must be set to `PARTIAL` to use this option. See the following Lookup Cache section for more details. | +| lookup.partial-cache.cache-missing-key | optional | This is a boolean that defaults to true. Whether to store an empty value into the cache if the lookup key doesn't match any rows in the table. `lookup.cache` must be set to `PARTIAL` to use this option. See the following Lookup Cache section for more details. | +| lookup.max-retries | optional | The max retry times if the lookup failed; default is 3. See the following Lookup Cache section for more detail. Set value 0 to disable retries. | +| lookup.error.code | optional | List of HTTP status codes that should be treated as errors by HTTP Source, separated with comma. | +| lookup.error.code.exclude | optional | List of HTTP status codes that should be excluded from the `http.lookup.error.code` list, separated with comma. 
| +| http.security.cert.server | optional | Comma separated paths to trusted HTTP server certificates that should be added to the connectors trust store. | +| http.security.cert.client | optional | Path to trusted certificate that should be used by connector's HTTP client for mTLS communication. | +| http.security.key.client | optional | Path to trusted private key that should be used by connector's HTTP client for mTLS communication. | +| http.security.cert.server.allowSelfSigned | optional | Accept untrusted certificates for TLS communication. | +| http.security.oidc.token.request | optional | OIDC `Token Request` body in `application/x-www-form-urlencoded` encoding | +| http.security.oidc.token.endpoint.url | optional | OIDC `Token Endpoint` url, to which the token request will be issued | +| http.security.oidc.token.expiry.reduction | optional | OIDC tokens will be requested if the current time is later than the cached token expiry time minus this value. | +| http.source.lookup.request.timeout | optional | Sets HTTP request timeout in seconds. If not specified, the default value of 30 seconds will be used. | +| http.source.lookup.request.thread-pool.size | optional | Sets the size of pool thread for HTTP lookup request processing. Increasing this value would mean that more concurrent requests can be processed in the same time. If not specified, the default value of 8 threads will be used. | +| http.source.lookup.response.thread-pool.size | optional | Sets the size of pool thread for HTTP lookup response processing. Increasing this value would mean that more concurrent requests can be processed in the same time. If not specified, the default value of 4 threads will be used. | +| http.source.lookup.use-raw-authorization-header | optional | If set to `'true'`, uses the raw value set for the `Authorization` header, without transformation for Basic Authentication (base64, addition of "Basic " prefix). If not specified, defaults to `'false'`. 
| +| http.source.lookup.request-callback | optional | Specify which `HttpLookupPostRequestCallback` implementation to use. By default, it is set to `slf4j-lookup-logger` corresponding to `Slf4jHttpLookupPostRequestCallback`. | +| http.source.lookup.connection.timeout | optional | Source table connection timeout. Default - no value. | +| http.source.lookup.success-codes | optional | Comma separated http codes considered as success response. Use [1-5]XX for groups and '!' character for excluding. | +| http.source.lookup.retry-codes | optional | Comma separated http codes considered as transient errors. Use [1-5]XX for groups and '!' character for excluding. | +| http.source.lookup.ignored-response-codes | optional | Comma separated http codes. Content for these responses will be ignored. Use [1-5]XX for groups and '!' character for excluding. Ignored responses togater with `http.source.lookup.success-codes` are considered as successful. | +| http.source.lookup.retry-strategy.type | optional | Auto retry strategy type: fixed-delay (default) or exponential-delay. | +| http.source.lookup.retry-strategy.fixed-delay.delay | optional | Fixed-delay interval between retries. Default 1 second. Use with`lookup.max-retries` parameter. | +| http.source.lookup.retry-strategy.exponential-delay.initial-backoff | optional | Exponential-delay initial delay. Default 1 second. | +| http.source.lookup.retry-strategy.exponential-delay.max-backoff | optional | Exponential-delay maximum delay. Default 1 minute. Use with `lookup.max-retries` parameter. | +| http.source.lookup.retry-strategy.exponential-delay.backoff-multiplier | optional | Exponential-delay multiplier. Default value 1.5 | +| http.source.lookup.proxy.host | optional | Specify the hostname of the proxy. | +| http.source.lookup.proxy.port | optional | Specify the port of the proxy. | +| http.source.lookup.proxy.username | optional | Specify the username used for proxy authentication. 
| +| http.source.lookup.proxy.password | optional | Specify the password used for proxy authentication. | +| http.request.query-param-fields | optional | Used for the `GenericJsonAndUrlQueryCreator` query creator. The names of the fields that will be mapped to query parameters. The parameters are separated by semicolons, such as `param1;param2`. | +| http.request.body-fields | optional | Used for the `GenericJsonAndUrlQueryCreator` query creator. The names of the fields that will be mapped to the body. The parameters are separated by semicolons, such as `param1;param2`. | | +| http.request.url-map | optional | Used for the `GenericJsonAndUrlQueryCreator` query creator. The map of insert names to column names used as url segments. Parses a string as a map of strings. For example if there are table columns called `customerId` and `orderId`, then specifying value `customerId:cid1,orderID:oid` and a url of https://myendpoint/customers/{cid}/orders/{oid} will mean that the url used for the lookup query will dynamically pickup the values for `customerId`, `orderId` and use them in the url. The expected format of the map is: `key1:value1,key2:value2`. | + +### Query Creators + +In the above example we see that HTTP GET operations and HTTP POST operations result in different mapping of the columns to the +HTTP request content. In reality, you will want to have move control over how the SQL columns are mapped to the HTTP content. +The HTTP connector supplies a number of Query Creators that you can use define these mappings. + + + + + + + + + + + + + + + + + + + + + + + + +
NameQuery param mappingURL path mappingBody mapping
generic-json-url
✓✓✓
generic-get-query
✓ for GETs</td> + ✓ for PUTs and POSTs
+ +### generic-json-url Query Creator + +The recommended Query creator for json is called _generic-json-url_, which allows column content to be mapped as URL, path, body and query parameter request values; it supports +POST, PUT and GET operations. This query creator allows you to issue json requests without needing to code +your own custom http connector. The mappings from columns to the json request are supplied in the query creator configuration +parameters `http.request.query-param-fields`, `http.request.body-fields` and `http.request.url-map`. + +### generic-json-url Query Creator + +The default Query Creator is called _generic-json-url_. For body based queries such as POST/PUT requests, the +([GenericGetQueryCreator](flink-connector-http/src/main/java/org/apache/flink/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreator.java))is provided as a default query creator. This implementation uses Flink's [json-format](https://nightlies.apache.org/flink/flink-docs-master/docs/connectors/table/formats/json/) to convert RowData object into Json String. +For GET requests can be used for query parameter based queries. + +The _generic-json-url_ allows for using custom formats that will perform serialization to Json. Thanks to this, users can create their own logic for converting RowData to Json Strings suitable for their HTTP endpoints and use this logic as custom format +with HTTP Lookup connector and SQL queries. +To create a custom format user has to implement Flink's `SerializationSchema` and `SerializationFormatFactory` interfaces and register custom format factory along other factories in +`resources/META-INF.services/org.apache.flink.table.factories.Factory` file. This is common Flink mechanism for providing custom implementations for various factories. + +### Http headers +It is possible to set HTTP headers that will be added to HTTP request send by lookup source connector. 
+Headers are defined via property key `http.source.lookup.header.HEADER_NAME = header value` for example: +`http.source.lookup.header.X-Content-Type-Options = nosniff`. + +Headers can be set using http lookup source table DDL. In example below, HTTP request done for `http-lookup` table will contain three headers: +- `Origin` +- `X-Content-Type-Options` +- `Content-Type` + +```roomsql +CREATE TABLE http-lookup ( + id bigint, + some_field string +) WITH ( + 'connector' = 'rest-lookup', + 'format' = 'json', + 'url' = 'http://localhost:8080/client', + 'asyncPolling' = 'true', + 'http.source.lookup.header.Origin' = '*', + 'http.source.lookup.header.X-Content-Type-Options' = 'nosniff', + 'http.source.lookup.header.Content-Type' = 'application/json' +) +``` + +Note that when using OIDC, it adds an `Authentication` header with the bearer token; this will override +an existing `Authorization` header specified in configuration. + +### Timeouts +Lookup Source is guarded by two timeout timers. First one is specified by Flink's AsyncIO operator that executes `AsyncTableFunction`. +The default value of this timer is set to 3 minutes and can be changed via `table.exec.async-lookup.timeout` [option](https://nightlies.apache.org/flink/flink-docs-master/docs/dev/table/config/#table-exec-async-lookup-timeout). + +The second one is set per individual HTTP requests by HTTP client. Its default value is set currently to 30 seconds and can be changed via `http.source.lookup.request.timeout` option. + +Flink's current implementation of `AsyncTableFunction` does not allow specifying custom logic for handling Flink AsyncIO timeouts as it is for Java API. +Because of that, if AsyncIO timer passes, Flink will throw TimeoutException which will cause job restart. 
+ +### Source table HTTP status code +The source table categorizes HTTP responses into three groups based on status codes: +- Retry codes (`http.source.lookup.retry-codes`): + Responses in this group indicate a temporary issue (it can be e.g., HTTP 503 Service Unavailable). When such a response is received, the request should be retried. +- Success codes (`http.source.lookup.success-codes`): + These are expected responses that should be processed by table function. +- Ignored responses (`http.source.lookup.ignored-response-codes`): + Successful response, but its content will be ignored. For example, an HTTP 404 Not Found response is valid and indicates that the requested item does not exist, so its content can be ignored. +- Error codes: + Any response code that is not classified as a retry or success code falls into this category. Receiving such a response will result in a job failure. + + +### Retries (Lookup source) +Lookup source handles auto-retries for two scenarios: +1. IOException occurs (e.g. temporary network outage) +2. The response contains a HTTP error code that indicates a retriable error. These codes are defined in the table configuration (see `http.source.lookup.retry-codes`). + Retries are executed silently, without restarting the job. After reaching max retries attempts (per request) operation will fail and restart job. + +Notice that HTTP codes are categorized into 3 groups: +- successful responses - response is returned immediately for further processing +- temporary errors - request will be retried up to the retry limit +- error responses - unexpected responses are not retried and will fail the job. Any HTTP error code which is not configured as successful or temporary error is treated as an unretriable error. + +##### Retry strategy +User can choose retry strategy type for source table: +- fixed-delay - http request will be re-sent after specified delay.
+- exponential-delay - request will be re-sent with exponential backoff strategy, limited by `lookup.max-retries` attempts. The delay for each retry is calculated as the previous attempt's delay multiplied by the backoff multiplier (parameter `http.source.lookup.retry-strategy.exponential-delay.backoff-multiplier`) up to `http.source.lookup.retry-strategy.exponential-delay.max-backoff`. The initial delay value is defined in the table configuration as `http.source.lookup.retry-strategy.exponential-delay.initial-backoff`. + + +#### Lookup multiple results + +Typically, join can return zero, one or more results. What is more, there are lots of possible REST API designs and +pagination methods. Currently, the connector supports only two simple approaches (`http.source.lookup.result-type`): + +- `single-value` - REST API returns single object. +- `array` - REST API returns array of objects. Pagination is not supported yet. + +## Working with HTTP sink tables + +### HTTP Sink +The following example shows the minimum Table API example to create a sink: + +```roomsql +CREATE TABLE http ( + id bigint, + some_field string +) WITH ( + 'connector' = 'http-sink', + 'url' = 'http://example.com/myendpoint', + 'format' = 'json' +) +``` + +Then use `INSERT` SQL statement to send data to your HTTP endpoint: + +```roomsql +INSERT INTO http VALUES (1, 'Ninette'), (2, 'Hedy') +``` + +When `'format' = 'json'` is specified on the table definition, the HTTP sink sends json payloads. It is possible to change the format of the payload by specifying +another format name. + +### Sink Connector Options + +| Option | Required | Description/Value | +|-------------------------------------------|----------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| connector | required | Specify what connector to use. 
For HTTP Sink it should be set to _'http-sink'_. | +| format | required | Specify what format to use. | +| url | required | The base URL that should be use for HTTP requests. For example _http://localhost:8080/client_. | +| insert-method | optional | Specify which HTTP method to use in the request. The value should be set either to `POST` or `PUT`. | +| sink.batch.max-size | optional | Maximum number of elements that may be passed in a batch to be written downstream. | +| sink.requests.max-inflight | optional | The maximum number of in flight requests that may exist, if any more in flight requests need to be initiated once the maximum has been reached, then it will be blocked until some have completed. | +| sink.requests.max-buffered | optional | Maximum number of buffered records before applying backpressure. | +| sink.flush-buffer.size | optional | The maximum size of a batch of entries that may be sent to the HTTP endpoint measured in bytes. | +| sink.flush-buffer.timeout | optional | Threshold time in milliseconds for an element to be in a buffer before being flushed. | +| http.sink.request-callback | optional | Specify which `HttpPostRequestCallback` implementation to use. By default, it is set to `slf4j-logger` corresponding to `Slf4jHttpPostRequestCallback`. | +| http.sink.error.code | optional | List of HTTP status codes that should be treated as errors by HTTP Sink, separated with comma. | +| http.sink.error.code.exclude | optional | List of HTTP status codes that should be excluded from the `http.sink.error.code` list, separated with comma. | +| http.security.cert.server | optional | Path to trusted HTTP server certificate that should be add to connectors key store. More than one path can be specified using `,` as path delimiter. | +| http.security.cert.client | optional | Path to trusted certificate that should be used by connector's HTTP client for mTLS communication. 
| +| http.security.key.client | optional | Path to trusted private key that should be used by connector's HTTP client for mTLS communication. | +| http.security.cert.server.allowSelfSigned | optional | Accept untrusted certificates for TLS communication. | +| http.sink.request.timeout | optional | Sets HTTP request timeout in seconds. If not specified, the default value of 30 seconds will be used. | +| http.sink.writer.thread-pool.size | optional | Sets the size of pool thread for HTTP Sink request processing. Increasing this value would mean that more concurrent requests can be processed in the same time. If not specified, the default value of 1 thread will be used. | +| http.sink.writer.request.mode | optional | Sets Http Sink request submission mode. Two modes are available to select, `single` and `batch` which is the default mode if option is not specified. | +| http.sink.request.batch.size | optional | Applicable only for `http.sink.writer.request.mode = batch`. Sets number of individual events/requests that will be submitted as one HTTP request by HTTP sink. The default value is 500 which is same as HTTP Sink `maxBatchSize` | + +### Sink table HTTP status codes +You can configure a list of HTTP status codes that should be treated as errors for HTTP sink table. +By default all 400 and 500 response codes will be interpreted as error code. + +This behavior can be changed by using below properties in table definition. The property name are: +- `http.sink.error.code` used to defined HTTP status code value that should be treated as error for example 404. + Many status codes can be defined in one value, where each code should be separated with comma, for example: + `401, 402, 403`. User can use this property also to define a type code mask. In that case, all codes from given HTTP response type will be treated as errors. + An example of such a mask would be `3XX, 4XX, 5XX`. In this case, all 300s, 400s and 500s status codes will be treated as errors. 
+- `http.sink.error.code.exclude` used to exclude a HTTP code from error list. + Many status codes can be defined in one value, where each code should be separated with comma, for example: + `401, 402, 403`. In this example, codes 401, 402 and 403 would not be interpreted as error codes. + + +### Request submission +HTTP Sink by default submits events in batch. The submission mode can be changed using `http.sink.writer.request.mode` property using `single` or `batch` as property value. + +### Batch submission mode +In batch mode, a number of events (processed elements) will be batched and submitted in one HTTP request. +In this mode, HTTP PUT/POST request's body contains a Json array, where every element of this array represents +individual event. + +An example of Http Sink batch request body containing data for three events: +```json +[ + { + "id": 1, + "first_name": "Ninette", + "last_name": "Clee", + "gender": "Female", + "stock": "CDZI", + "currency": "RUB", + "tx_date": "2021-08-24 15:22:59" + }, + { + "id": 2, + "first_name": "Rob", + "last_name": "Zombie", + "gender": "Male", + "stock": "DGICA", + "currency": "GBP", + "tx_date": "2021-10-25 20:53:54" + }, + { + "id": 3, + "first_name": "Adam", + "last_name": "Jones", + "gender": "Male", + "stock": "DGICA", + "currency": "PLN", + "tx_date": "2021-10-26 20:53:54" + } +] +``` + +By default, batch size is set to 500 which is the same as Http Sink's `maxBatchSize` property and has value of 500. +The `maxBatchSize' property sets maximal number of events that will by buffered by Flink runtime before passing it to Http Sink for processing. + +```roomsql +CREATE TABLE http ( + id bigint, + some_field string +) WITH ( + 'connector' = 'http-sink', + 'url' = 'http://example.com/myendpoint', + 'format' = 'json', + 'http.sink.request.batch.size' = '50' +) +``` + +### Single submission mode +In this mode every processed event is submitted as individual HTTP POST/PUT request. 
+ +SQL: +```roomsql +CREATE TABLE http ( + id bigint, + some_field string +) WITH ( + 'connector' = 'http-sink', + 'url' = 'http://example.com/myendpoint', + 'format' = 'json', + 'http.sink.writer.request.mode' = 'single' +) +``` + +## Available Metadata + +There is no available metadata for this connector. + +## HTTP status code handler + +Above parameters support include lists and exclude lists. A sample configuration may look like this: +`2XX,404,!203` - meaning all codes from group 2XX (200-299), with 404 and without 203 ('!' character). Group exclude listing e.g. !2XX is not supported. + +The same format is used in parameter `http.source.lookup.retry-codes`. + +Example with explanation: +```roomsql +CREATE TABLE [...] +WITH ( + [...], + 'http.source.lookup.success-codes' = '2XX', + 'http.source.lookup.retry-codes' = '5XX,!501,!505,!506', + 'http.source.lookup.ignored-response-codes' = '404' +) +``` +All 200s codes and 404 are considered as successful (`success-codes`, `ignored-response-codes`). These responses won't cause retry or job failure. 404 response is listed in `ignored-response-codes` parameter, which means the content body will be ignored. Http with 404 code will produce just empty record. +When server returns response with 500s code except 501, 505 and 506 then connector will re-send request based on configuration in `http.source.lookup.retry-strategy` parameters. By default it's fixed-delay with 1 second delay, up to 3 times per request (parameter `lookup.max-retries`). After exceeding max-retries limit the job will fail. +A response with any other code than specified in params `success-codes` and `retry-codes` e.g. 400, 505, 301 will cause job failure. + + +```roomsql +CREATE TABLE [...]
+WITH (
+  [...],
+  'http.source.lookup.success-codes' = '2XX',
+  'http.source.lookup.retry-codes' = '',
+  'http.source.lookup.ignored-response-codes' = '1XX,3XX,4XX,5XX'
+)
+```
+In this configuration, all HTTP responses are considered successful because the sets `success-codes` and `ignored-response-codes` together cover all possible status codes. As a result, no retries will be triggered based on HTTP response codes. However, only responses with status code 200 will be parsed and processed by the Flink operator. Responses with status codes in the 1xx, 3xx, 4xx, and 5xx ranges are classified under `ignored-response-codes`.
+Note that retries remain enabled and will still occur on IOException.
+To disable retries, set `'lookup.max-retries' = '0'`.
+
+## Security considerations
+
+### TLS (more secure replacement for SSL) and mTLS support
+
+Both Http Sink and Lookup Source connectors support HTTPS communication using TLS 1.2 and mTLS.
+To enable Https communication simply use the `https` protocol in the endpoint's URL.
+
+To specify certificate(s) to be used by the server, use the `http.security.cert.server` connector property;
+the value is a comma separated list of paths to certificate(s), for example you can use your organization's CA
+Root certificate, or a self-signed certificate.
+
+Note that if there are no security properties for a `https` url then the JVM's default certificates are
+used - allowing use of globally recognized CAs without the need for configuration.
+
+You can also configure the connector to use mTLS. For this simply use the `http.security.cert.client`
+and `http.security.key.client` connector properties to specify paths to the certificate and
+private key. The key MUST be in `PKCS8` format. Both PEM and DER keys are
+allowed.
+
+For non-production environments it is sometimes necessary to use an Https connection and accept all certificates.
+In this special case, you can configure the connector to trust all certificates without adding them to a keystore.
+To enable this option use the `http.security.cert.server.allowSelfSigned` property, setting its value to `true`.
+
+### Basic Authentication
+The connector supports Basic Authentication using an HTTP `Authorization` header.
+The header value can be set via properties, similarly to other headers. The connector converts the passed value to Base64 and uses it for the request.
+If the used value starts with the prefix `Basic`, or `http.source.lookup.use-raw-authorization-header`
+is set to `'true'`, it will be used as the header value as is, without any extra modification.
+
+### OIDC Bearer Authentication
+The connector supports Bearer Authentication using an HTTP `Authorization` header. The [OAuth 2.0 rfc](https://datatracker.ietf.org/doc/html/rfc6749) mentions [Obtaining Authorization](https://datatracker.ietf.org/doc/html/rfc6749#section-4)
+and an authorization grant. OIDC makes use of this [authorisation grant](https://datatracker.ietf.org/doc/html/rfc6749#section-1.3) in a [Token Request](https://openid.net/specs/openid-connect-core-1_0.html#TokenRequest) by including an [OAuth grant type](https://oauth.net/2/grant-types/) and associated properties; the response is the [token response](https://openid.net/specs/openid-connect-core-1_0.html#TokenResponse).
+
+If you want to use this authorization then you should supply the `Token Request` body in `application/x-www-form-urlencoded` encoding
+in configuration property `http.security.oidc.token.request`. See [grant extension](https://datatracker.ietf.org/doc/html/rfc6749#section-4.5) for
+an example of a customised grant type token request. The supplied `token request` will be issued to the
+[token end point](https://datatracker.ietf.org/doc/html/rfc6749#section-3.2), whose url should be supplied in configuration property
+`http.security.oidc.token.endpoint.url`. The returned `access token` is then cached and used for subsequent requests; if the token has expired then
+a new one is requested.
There is a property `http.security.oidc.token.expiry.reduction`, that defaults to 1 second; new tokens will +be requested if the current time is later than the cached token expiry time minus `http.security.oidc.token.expiry.reduction`. + +#### Restrictions at this time +* No authentication is applied to the token request. +* The processing does not use the refresh token if it present. + {{< top >}} diff --git a/flink-connector-http/pom.xml b/flink-connector-http/pom.xml new file mode 100644 index 00000000..3589cd9c --- /dev/null +++ b/flink-connector-http/pom.xml @@ -0,0 +1,487 @@ + + + + + 4.0.0 + + org.apache.flink + flink-connector-http-parent + 1.0-SNAPSHOT + + + flink-connector-http + Flink : Connectors : http + 1.0-SNAPSHOT + + jar + + + --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.locks=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED + --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED + + + + https://github.com/apache/flink-connector-http + + + + The Apache License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + + + + 2025 + + + https://github.com/apache/flink-connector-http + git@github.com:apache/flink-connector-http.git + + scm:git:https://gitbox.apache.org/repos/asf/flink-connector-http.git + + + + + + ossrh + https://s01.oss.sonatype.org/content/repositories/snapshots + + + ossrh + https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/ + + + + + + apache.snapshots + Apache Development Snapshot Repository + https://repository.apache.org/content/repositories/snapshots/ + + false + + + true + + + + + + + + + org.apache.flink + flink-java + provided + + + + org.apache.flink + flink-annotations + provided + + + + org.apache.flink + flink-metrics-core + provided + + + + org.apache.flink + flink-core + provided + + + org.apache.flink + flink-shaded-jackson + + + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + + org.apache.flink + flink-table-runtime + provided + + + + 
org.apache.flink + flink-json + provided + + + + org.apache.flink + flink-core-api + provided + + + + org.apache.flink + flink-shaded-jackson + ${flink.shaded.jackson.version}-${flink.shaded.version} + provided + + + + + + + org.apache.flink + flink-table-api-java-bridge + provided + true + + + + org.apache.flink + flink-table-api-java + provided + true + + + + org.apache.flink + flink-table-common + provided + true + + + + org.projectlombok + lombok + ${lombok.version} + provided + + + + com.google.code.findbugs + jsr305 + 1.3.9 + provided + + + + io.vavr + vavr + 0.10.2 + compile + + + + io.github.resilience4j + resilience4j-retry + ${resilence4j.version} + + + + io.github.resilience4j + resilience4j-core + ${resilence4j.version} + + + + + org.apache.flink + flink-connector-base + provided + + + + org.apache.flink + flink-connector-base + test-jar + test + + + + net.bytebuddy + byte-buddy + test + + + + org.mockito + mockito-junit-jupiter + ${mockito.version} + test + + + + org.mockito + mockito-core + ${mockito.version} + test + + + + org.mockito + mockito-inline + ${mockito-inline.version} + test + + + org.apache.flink + flink-clients + + + org.apache.flink + flink-shaded-jackson + + + test + + + + org.apache.flink + flink-table-common + test-jar + test + + + + + org.apache.flink + flink-test-utils-junit + test + + + + org.apache.flink + flink-streaming-java + provided + + + org.apache.flink + flink-shaded-jackson + + + + + + org.assertj + assertj-core + ${assertj.core.version} + test + + + + org.junit.jupiter + junit-jupiter-api + ${junit5.version} + test + + + + org.junit.jupiter + junit-jupiter-params + ${junit5.version} + test + + + + org.apache.flink + flink-table-planner_${scala.binary.version} + test + + + -org.scala-lang + scala-library + + + + + + net.minidev + json-smart + 2.5.2 + test + + + + com.github.tomakehurst + wiremock + ${wiremock.version} + test + + + + com.google.guava + guava + 32.0.1-jre + test + + + + + + + org.apache.maven.plugins + 
maven-surefire-plugin + 3.0.0-M5 + + + + false + + 0${surefire.forkNumber} + + $${surefire.forkNumber} + US + en + ${project.basedir} + true + + -Xms256m -Xmx2048m -XX:+UseG1GC + + + + + default-test + test + + test + + + + ${test.unit.pattern} + + + ${additionalExcludes} + + + 1 + ${flink.surefire.baseArgLine} -Xmx${flink.XmxUnitTest} + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + + io.github.zentol.japicmp + japicmp-maven-plugin + + + + org.apache.maven.plugins + maven-checkstyle-plugin + + + + com.diffplug.spotless + spotless-maven-plugin + + + + org.apache.maven.plugins + maven-compiler-plugin + + + + org.apache.maven.plugins + maven-shade-plugin + + + + org.apache.maven.plugins + maven-dependency-plugin + 3.7.1 + + + analyze-deps + + analyze + + verify + + true + true + + + org.mockito:mockito-inline:jar:5.2.0 + net.bytebuddy:byte-buddy:jar:1.14.17 + com.google.guava:guava:jar:32.0.1-jre + org.apache.flink:flink-clients:jar:1.20.0 + org.apache.flink:flink-table-planner_${scala.binary.version} + + org.apache.flink:flink-table-test-utils:jar:1.20.0 + + + + + + + + org.apache.maven.plugins + maven-enforcer-plugin + + + enforce-banned-deps + + enforce + + + + + + com.google.guava:guava + + org.powermock + + + com.google.guava:guava:*:*:test + + + + true + + + + + + + \ No newline at end of file diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpPostRequestCallback.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpPostRequestCallback.java new file mode 100644 index 00000000..fc73ec9a --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpPostRequestCallback.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http; + +import java.io.Serializable; +import java.net.http.HttpResponse; +import java.util.Map; + +/** + * An interface for post request callback action, processing a response and its respective request. + * + *

One can customize the behaviour of such a callback by implementing both {@link + * HttpPostRequestCallback} and {@link HttpPostRequestCallbackFactory}. + * + * @param type of the HTTP request wrapper + */ +public interface HttpPostRequestCallback extends Serializable { + /** + * Process HTTP request and the matching response. + * + * @param response HTTP response + * @param requestEntry request's payload + * @param endpointUrl the URL of the endpoint + * @param headerMap mapping of header names to header values + */ + void call( + HttpResponse response, + RequestT requestEntry, + String endpointUrl, + Map headerMap); +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpPostRequestCallbackFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpPostRequestCallbackFactory.java new file mode 100644 index 00000000..155053c6 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpPostRequestCallbackFactory.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http; + +import org.apache.flink.connector.http.table.lookup.HttpLookupTableSource; +import org.apache.flink.connector.http.table.sink.HttpDynamicSink; +import org.apache.flink.table.factories.Factory; + +/** + * The {@link Factory} that dynamically creates and injects {@link HttpPostRequestCallback} to + * {@link HttpDynamicSink} and {@link HttpLookupTableSource}. + * + *

Custom implementations of {@link HttpPostRequestCallbackFactory} can be registered along other + * factories in + * + *

resources/META-INF/services/org.apache.flink.table.factories.Factory
+ * + *

file and then referenced by their identifiers in: + * + *

    + *
  • The HttpSink DDL property field http.sink.request-callback for HTTP sink. + *
  • The Http lookup DDL property field http.source.lookup.request-callback for HTTP + * lookup. + *
+ * + *
+ * + *

The following example shows the minimum Table API example to create a {@link HttpDynamicSink} + * that uses a custom callback created by a factory that returns my-callback as its + * identifier. + * + *

{@code
+ * CREATE TABLE http (
+ *   id bigint,
+ *   some_field string
+ * ) with (
+ *   'connector' = 'http-sink'
+ *   'url' = 'http://example.com/myendpoint'
+ *   'format' = 'json',
+ *   'http.sink.request-callback' = 'my-callback'
+ * )
+ * }
+ * + *

The following example shows the minimum Table API example to create a {@link + * HttpLookupTableSource} that uses a custom callback created by a factory that returns + * my-callback as its identifier. + * + *

{@code
+ * CREATE TABLE httplookup (
+ *   id bigint
+ * ) with (
+ *   'connector' = 'rest-lookup',
+ *   'url' = 'http://example.com/myendpoint',
+ *   'format' = 'json',
+ *   'http.source.lookup.request-callback' = 'my-callback'
+ * )
+ * }
+ * + * @param type of the HTTP request wrapper + */ +public interface HttpPostRequestCallbackFactory extends Factory { + /** @return {@link HttpPostRequestCallback} custom request callback instance */ + HttpPostRequestCallback createHttpPostRequestCallback(); +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpSink.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpSink.java new file mode 100644 index 00000000..3afbee45 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpSink.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http; + +import org.apache.flink.annotation.PublicEvolving; +import org.apache.flink.connector.base.sink.writer.ElementConverter; +import org.apache.flink.connector.http.clients.SinkHttpClientBuilder; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.sink.HttpSinkInternal; +import org.apache.flink.connector.http.sink.HttpSinkRequestEntry; +import org.apache.flink.connector.http.sink.httpclient.HttpRequest; + +import java.util.Properties; + +/** + * A public implementation for {@code HttpSink} that performs async requests against a specified + * HTTP endpoint using the buffering protocol specified in {@link + * org.apache.flink.connector.base.sink.AsyncSinkBase}. + * + *

To create a new instance of this class use {@link HttpSinkBuilder}. An example would be: + * + *

{@code
+ * HttpSink httpSink =
+ *     HttpSink.builder()
+ *             .setEndpointUrl("http://example.com/myendpoint")
+ *             .setElementConverter(
+ *                 (s, _context) -> new HttpSinkRequestEntry("POST", "text/plain",
+ *                 s.getBytes(StandardCharsets.UTF_8)))
+ *             .build();
+ * }
+ * + * @param type of the elements that should be sent through HTTP request. + */ +@PublicEvolving +public class HttpSink extends HttpSinkInternal { + + HttpSink( + ElementConverter elementConverter, + int maxBatchSize, + int maxInFlightRequests, + int maxBufferedRequests, + long maxBatchSizeInBytes, + long maxTimeInBufferMS, + long maxRecordSizeInBytes, + String endpointUrl, + HttpPostRequestCallback httpPostRequestCallback, + HeaderPreprocessor headerPreprocessor, + SinkHttpClientBuilder sinkHttpClientBuilder, + Properties properties) { + + super( + elementConverter, + maxBatchSize, + maxInFlightRequests, + maxBufferedRequests, + maxBatchSizeInBytes, + maxTimeInBufferMS, + maxRecordSizeInBytes, + endpointUrl, + httpPostRequestCallback, + headerPreprocessor, + sinkHttpClientBuilder, + properties); + } + + /** + * Create a {@link HttpSinkBuilder} constructing a new {@link HttpSink}. + * + * @param type of the elements that should be sent through HTTP request + * @return {@link HttpSinkBuilder} + */ + public static HttpSinkBuilder builder() { + return new HttpSinkBuilder<>(); + } +} diff --git a/src/main/java/com/getindata/connectors/http/HttpSinkBuilder.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpSinkBuilder.java similarity index 60% rename from src/main/java/com/getindata/connectors/http/HttpSinkBuilder.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpSinkBuilder.java index 208bcf01..16b6e8cd 100644 --- a/src/main/java/com/getindata/connectors/http/HttpSinkBuilder.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpSinkBuilder.java @@ -1,20 +1,36 @@ -package com.getindata.connectors.http; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ -import java.util.Optional; -import java.util.Properties; +package org.apache.flink.connector.http; import org.apache.flink.annotation.PublicEvolving; import org.apache.flink.connector.base.sink.AsyncSinkBaseBuilder; import org.apache.flink.connector.base.sink.writer.ElementConverter; +import org.apache.flink.connector.http.clients.SinkHttpClient; +import org.apache.flink.connector.http.clients.SinkHttpClientBuilder; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.sink.HttpSinkRequestEntry; +import org.apache.flink.connector.http.sink.httpclient.HttpRequest; +import org.apache.flink.connector.http.sink.httpclient.JavaNetSinkHttpClient; +import org.apache.flink.connector.http.table.sink.Slf4jHttpPostRequestCallback; +import org.apache.flink.connector.http.utils.HttpHeaderUtils; -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.SinkHttpClient; -import com.getindata.connectors.http.internal.SinkHttpClientBuilder; -import com.getindata.connectors.http.internal.sink.HttpSinkRequestEntry; -import com.getindata.connectors.http.internal.sink.httpclient.HttpRequest; -import com.getindata.connectors.http.internal.sink.httpclient.JavaNetSinkHttpClient; -import com.getindata.connectors.http.internal.table.sink.Slf4jHttpPostRequestCallback; -import 
com.getindata.connectors.http.internal.utils.HttpHeaderUtils; +import java.util.Optional; +import java.util.Properties; /** * Builder to construct {@link HttpSink}. @@ -34,20 +50,22 @@ * } * *

If the following parameters are not set in this builder, the following defaults will be used: + * *

    - *
  • {@code maxBatchSize} will be 500,
  • - *
  • {@code maxInFlightRequests} will be 50,
  • - *
  • {@code maxBufferedRequests} will be 10000,
  • - *
  • {@code maxBatchSizeInBytes} will be 5 MB i.e. {@code 5 * 1024 * 1024},
  • - *
  • {@code maxTimeInBufferMS} will be 5000ms,
  • - *
  • {@code maxRecordSizeInBytes} will be 1 MB i.e. {@code 1024 * 1024}.
  • + *
  • {@code maxBatchSize} will be 500, + *
  • {@code maxInFlightRequests} will be 50, + *
  • {@code maxBufferedRequests} will be 10000, + *
  • {@code maxBatchSizeInBytes} will be 5 MB i.e. {@code 5 * 1024 * 1024}, + *
  • {@code maxTimeInBufferMS} will be 5000ms, + *
  • {@code maxRecordSizeInBytes} will be 1 MB i.e. {@code 1024 * 1024}. *
+ * * {@code endpointUrl} and {@code elementConverter} must be set by the user. * * @param type of the elements that should be sent through HTTP request. */ -public class HttpSinkBuilder extends - AsyncSinkBaseBuilder> { +public class HttpSinkBuilder + extends AsyncSinkBaseBuilder> { private static final int DEFAULT_MAX_BATCH_SIZE = 500; @@ -63,11 +81,11 @@ public class HttpSinkBuilder extends private static final SinkHttpClientBuilder DEFAULT_CLIENT_BUILDER = JavaNetSinkHttpClient::new; - private static final HttpPostRequestCallback - DEFAULT_POST_REQUEST_CALLBACK = new Slf4jHttpPostRequestCallback(); + private static final HttpPostRequestCallback DEFAULT_POST_REQUEST_CALLBACK = + new Slf4jHttpPostRequestCallback(); private static final HeaderPreprocessor DEFAULT_HEADER_PREPROCESSOR = - HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor(); + HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor(); private final Properties properties = new Properties(); @@ -103,11 +121,11 @@ public HttpSinkBuilder setEndpointUrl(String endpointUrl) { /** * @param sinkHttpClientBuilder builder for an implementation of {@link SinkHttpClient} that - * will be used by {@link HttpSink} + * will be used by {@link HttpSink} * @return {@link HttpSinkBuilder} itself */ public HttpSinkBuilder setSinkHttpClientBuilder( - SinkHttpClientBuilder sinkHttpClientBuilder) { + SinkHttpClientBuilder sinkHttpClientBuilder) { this.sinkHttpClientBuilder = sinkHttpClientBuilder; return this; } @@ -116,30 +134,30 @@ public HttpSinkBuilder setSinkHttpClientBuilder( * @param elementConverter the {@link ElementConverter} to be used for the sink * @return {@link HttpSinkBuilder} itself * @deprecated Converters set by this method might not work properly for Flink 1.16+. Use {@link - * #setElementConverter(SchemaLifecycleAwareElementConverter)} instead. + * #setElementConverter(SchemaLifecycleAwareElementConverter)} instead. 
*/ @Deprecated @PublicEvolving public HttpSinkBuilder setElementConverter( - ElementConverter elementConverter) { + ElementConverter elementConverter) { this.elementConverter = elementConverter; return this; } /** * @param elementConverter the {@link SchemaLifecycleAwareElementConverter} to be used for the - * sink + * sink * @return {@link HttpSinkBuilder} itself */ @PublicEvolving public HttpSinkBuilder setElementConverter( - SchemaLifecycleAwareElementConverter elementConverter) { + SchemaLifecycleAwareElementConverter elementConverter) { this.elementConverter = elementConverter; return this; } public HttpSinkBuilder setHttpPostRequestCallback( - HttpPostRequestCallback httpPostRequestCallback) { + HttpPostRequestCallback httpPostRequestCallback) { this.httpPostRequestCallback = httpPostRequestCallback; return this; } @@ -152,6 +170,7 @@ public HttpSinkBuilder setHttpHeaderPreprocessor( /** * Set property for Http Sink. + * * @param propertyName property name * @param propertyValue property value * @return {@link HttpSinkBuilder} itself @@ -162,7 +181,8 @@ public HttpSinkBuilder setProperty(String propertyName, String propertyV } /** - * Add properties to Http Sink configuration + * Add properties to Http Sink configuration. 
+ * * @param properties properties to add * @return {@link HttpSinkBuilder} itself */ @@ -174,18 +194,18 @@ public HttpSinkBuilder setProperties(Properties properties) { @Override public HttpSink build() { return new HttpSink<>( - elementConverter, - Optional.ofNullable(getMaxBatchSize()).orElse(DEFAULT_MAX_BATCH_SIZE), - Optional.ofNullable(getMaxInFlightRequests()).orElse(DEFAULT_MAX_IN_FLIGHT_REQUESTS), - Optional.ofNullable(getMaxBufferedRequests()).orElse(DEFAULT_MAX_BUFFERED_REQUESTS), - Optional.ofNullable(getMaxBatchSizeInBytes()).orElse(DEFAULT_MAX_BATCH_SIZE_IN_B), - Optional.ofNullable(getMaxTimeInBufferMS()).orElse(DEFAULT_MAX_TIME_IN_BUFFER_MS), - Optional.ofNullable(getMaxRecordSizeInBytes()).orElse(DEFAULT_MAX_RECORD_SIZE_IN_B), - endpointUrl, - httpPostRequestCallback, - headerPreprocessor, - sinkHttpClientBuilder, - properties - ); + elementConverter, + Optional.ofNullable(getMaxBatchSize()).orElse(DEFAULT_MAX_BATCH_SIZE), + Optional.ofNullable(getMaxInFlightRequests()) + .orElse(DEFAULT_MAX_IN_FLIGHT_REQUESTS), + Optional.ofNullable(getMaxBufferedRequests()).orElse(DEFAULT_MAX_BUFFERED_REQUESTS), + Optional.ofNullable(getMaxBatchSizeInBytes()).orElse(DEFAULT_MAX_BATCH_SIZE_IN_B), + Optional.ofNullable(getMaxTimeInBufferMS()).orElse(DEFAULT_MAX_TIME_IN_BUFFER_MS), + Optional.ofNullable(getMaxRecordSizeInBytes()).orElse(DEFAULT_MAX_RECORD_SIZE_IN_B), + endpointUrl, + httpPostRequestCallback, + headerPreprocessor, + sinkHttpClientBuilder, + properties); } } diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpStatusCodeValidationFailedException.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpStatusCodeValidationFailedException.java new file mode 100644 index 00000000..b1acd45f --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/HttpStatusCodeValidationFailedException.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one 
or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http; + +import lombok.Getter; + +import java.net.http.HttpResponse; + +/** Exception passing back a message and http response for error processing. */ +@Getter +public class HttpStatusCodeValidationFailedException extends Exception { + private final HttpResponse response; + + public HttpStatusCodeValidationFailedException(String message, HttpResponse response) { + super(message); + this.response = response; + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/LookupArg.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/LookupArg.java new file mode 100644 index 00000000..b5a584d5 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/LookupArg.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http; + +import lombok.Data; +import lombok.RequiredArgsConstructor; + +/** Transfer object that contains single lookup argument (column name) and its value. */ +@Data +@RequiredArgsConstructor +public class LookupArg { + + /** Lookup argument name. */ + private final String argName; + + /** Lookup argument value. */ + private final String argValue; +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/LookupQueryCreator.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/LookupQueryCreator.java new file mode 100644 index 00000000..e31fe13e --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/LookupQueryCreator.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http; + +import org.apache.flink.connector.http.table.lookup.LookupQueryInfo; +import org.apache.flink.table.data.RowData; + +import java.io.Serializable; + +/** + * An interface for a creator of a lookup query in the Http Lookup Source (e.g., the query that gets + * appended as query parameters to the URI in GET request or supplied as the payload of a body-based + * request along with optional query parameters). + * + *

One can customize how those queries are built by implementing {@link LookupQueryCreator} and + * {@link LookupQueryCreatorFactory}. + */ +public interface LookupQueryCreator extends Serializable { + + /** + * Create a lookup query (like the query appended to path in GET request) out of the provided + * arguments. + * + * @param lookupDataRow a {@link RowData} containing request parameters. + * @return a lookup query. + */ + LookupQueryInfo createLookupQuery(RowData lookupDataRow); +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/LookupQueryCreatorFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/LookupQueryCreatorFactory.java new file mode 100644 index 00000000..d3a6322b --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/LookupQueryCreatorFactory.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http; + +import org.apache.flink.configuration.ReadableConfig; +import org.apache.flink.connector.http.table.lookup.HttpLookupTableSource; +import org.apache.flink.connector.http.table.lookup.LookupRow; +import org.apache.flink.table.factories.DynamicTableFactory; +import org.apache.flink.table.factories.Factory; + +import java.io.Serializable; + +/** + * The {@link Factory} that dynamically creates and injects {@link LookupQueryCreator} to {@link + * HttpLookupTableSource}. + * + *

Custom implementations of {@link LookupQueryCreatorFactory} can be registered along other + * factories in + * + *

resources/META-INF/services/org.apache.flink.table.factories.Factory
+ * + *

file and then referenced by their identifiers in the HttpLookupSource DDL property field + * http.source.lookup.query-creator. + * + *

The following example shows the minimum Table API example to create a {@link + * HttpLookupTableSource} that uses a custom query creator created by a factory that returns + * my-query-creator as its identifier. + * + *

{@code
+ * CREATE TABLE http (
+ *   id bigint,
+ *   some_field string
+ * ) WITH (
+ *   'connector' = 'rest-lookup',
+ *   'format' = 'json',
+ *   'url' = 'http://example.com/myendpoint',
+ *   'http.source.lookup.query-creator' = 'my-query-creator'
+ * )
+ * }
+ */ +public interface LookupQueryCreatorFactory extends Factory, Serializable { + + /** + * @param readableConfig readable config + * @param lookupRow lookup row + * @param dynamicTableFactoryContext context + * @return {@link LookupQueryCreator} custom lookup query creator instance + */ + LookupQueryCreator createLookupQueryCreator( + ReadableConfig readableConfig, + LookupRow lookupRow, + DynamicTableFactory.Context dynamicTableFactoryContext); +} diff --git a/src/main/java/com/getindata/connectors/http/SchemaLifecycleAwareElementConverter.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/SchemaLifecycleAwareElementConverter.java similarity index 52% rename from src/main/java/com/getindata/connectors/http/SchemaLifecycleAwareElementConverter.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/SchemaLifecycleAwareElementConverter.java index c4986bac..9849a0e4 100644 --- a/src/main/java/com/getindata/connectors/http/SchemaLifecycleAwareElementConverter.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/SchemaLifecycleAwareElementConverter.java @@ -1,4 +1,21 @@ -package com.getindata.connectors.http; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http; import org.apache.flink.api.common.serialization.SerializationSchema.InitializationContext; import org.apache.flink.api.connector.sink2.Sink.InitContext; @@ -9,9 +26,9 @@ * that will be called by HTTP connect code to ensure that element converter is initialized * properly. This is required for cases when Flink's SerializationSchema and DeserializationSchema * objects like JsonRowDataSerializationSchema are used. - *

- * This interface specifies the mapping between elements of a stream to request entries that can be - * sent to the destination. The mapping is provided by the end-user of a sink, not the sink + * + *

This interface specifies the mapping between elements of a stream to request entries that can + * be sent to the destination. The mapping is provided by the end-user of a sink, not the sink * creator. * *

The request entries contain all relevant information required to create and sent the actual @@ -19,7 +36,7 @@ * key. */ public interface SchemaLifecycleAwareElementConverter - extends ElementConverter { + extends ElementConverter { /** * Initialization element converter for the schema. @@ -30,5 +47,4 @@ public interface SchemaLifecycleAwareElementConverter * @param context Contextual information that can be used during initialization. */ void open(InitContext context); - } diff --git a/src/main/java/com/getindata/connectors/http/internal/auth/OidcAccessTokenManager.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/auth/OidcAccessTokenManager.java similarity index 66% rename from src/main/java/com/getindata/connectors/http/internal/auth/OidcAccessTokenManager.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/auth/OidcAccessTokenManager.java index 9334870c..71458034 100644 --- a/src/main/java/com/getindata/connectors/http/internal/auth/OidcAccessTokenManager.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/auth/OidcAccessTokenManager.java @@ -1,12 +1,12 @@ - /* - * Copyright 2020 Red Hat - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,13 @@ * limitations under the License. */ -package com.getindata.connectors.http.internal.auth; +package org.apache.flink.connector.http.auth; + +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; + +import lombok.extern.slf4j.Slf4j; import java.io.IOException; import java.net.URI; @@ -25,20 +31,14 @@ import java.time.Duration; import java.time.Instant; -import lombok.extern.slf4j.Slf4j; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; - /** - * This class is inspired by - * https://github.com/Apicurio/apicurio-common-rest-client/blob/ + * This class is inspired by https://github.com/Apicurio/apicurio-common-rest-client/blob/ * 944ac9eb527c291a6083bd10ee012388e1684d20/rest-client-common/src/main/java/io/ * apicurio/rest/client/auth/OidcAuth.java. * - * The OIDC access token manager encapsulates the caching of an OIDC access token, - * which can be short lived, for example an hour. The authenticate method will return an - * un-expired access token, either from the cache or by requesting a new access token. + *

The OIDC access token manager encapsulates the caching of an OIDC access token, which can be + * short lived, for example an hour. The authenticate method will return an un-expired access token, + * either from the cache or by requesting a new access token. */ @Slf4j public class OidcAccessTokenManager { @@ -54,7 +54,8 @@ public class OidcAccessTokenManager { private Instant cachedAccessTokenExp; /** - * Construct an Oidc access token manager with the default token expiration reduction + * Construct an Oidc access token manager with the default token expiration reduction. + * * @param httpClient httpClient to use to call the token endpoint. * @param tokenRequest token request * @param url token endpoint url @@ -62,19 +63,22 @@ public class OidcAccessTokenManager { public OidcAccessTokenManager(HttpClient httpClient, String tokenRequest, String url) { this(httpClient, tokenRequest, url, DEFAULT_TOKEN_EXPIRATION_REDUCTION); } + /** - * Construct an Oidc access token manager with the supplied token expiration reduction + * Construct an Oidc access token manager with the supplied token expiration reduction. + * * @param httpClient httpClient to use to call the token endpoint. * @param tokenRequest token request this need to be form urlencoded * @param url token endpoint url - * @param tokenExpirationReduction token expiry reduction, request a new token if the - * current time is later than the cached access token - * expiry time reduced by this value. This means that - * we will not use the cached token if it is about - * to expire. + * @param tokenExpirationReduction token expiry reduction, request a new token if the current + * time is later than the cached access token expiry time reduced by this value. This means + * that we will not use the cached token if it is about to expire. 
*/ - public OidcAccessTokenManager(HttpClient httpClient, String tokenRequest, String url, - Duration tokenExpirationReduction) { + public OidcAccessTokenManager( + HttpClient httpClient, + String tokenRequest, + String url, + Duration tokenExpirationReduction) { this.tokenRequest = tokenRequest; this.httpClient = httpClient; this.url = url; @@ -85,21 +89,19 @@ public OidcAccessTokenManager(HttpClient httpClient, String tokenRequest, String } } - /** - * Request an access token from the token endpoint - */ + /** Request an access token from the token endpoint. */ private void requestAccessToken() { try { HttpRequest httpRequest = HttpRequest.newBuilder() - .uri(URI.create(url)) - .header("Content-Type", "application/x-www-form-urlencoded") - .method("POST", HttpRequest.BodyPublishers.ofString(tokenRequest)) - .build(); + .uri(URI.create(url)) + .header("Content-Type", "application/x-www-form-urlencoded") + .method("POST", HttpRequest.BodyPublishers.ofString(tokenRequest)) + .build(); - HttpResponse response = httpClient.send(httpRequest, - HttpResponse.BodyHandlers.ofByteArray()); - //create ObjectMapper instance + HttpResponse response = + httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofByteArray()); + // create ObjectMapper instance final ObjectMapper objectMapper = new ObjectMapper(); if (200 == response.statusCode()) { byte[] bytes = response.body(); @@ -112,18 +114,20 @@ private void requestAccessToken() { */ Duration expiresIn = Duration.ofSeconds(expiresInNode.asInt()); if (expiresIn.compareTo(this.tokenExpirationReduction) > 0) { - //expiresIn is greater than tokenExpirationReduction + // expiresIn is greater than tokenExpirationReduction expiresIn = expiresIn.minus(this.tokenExpirationReduction); } this.cachedAccessTokenExp = Instant.now().plus(expiresIn); } else { - throw new IllegalStateException("Attempted to get an access token but got http" + - " status code " + response.statusCode()); + throw new IllegalStateException( + "Attempted to get an 
access token but got http" + + " status code " + + response.statusCode()); } } catch (JsonProcessingException e) { throw new IllegalStateException("Error found while trying to request a new token"); } catch (IOException e) { - throw new IllegalStateException("IO Exception occurred", e); + throw new IllegalStateException("IO Exception occurred", e); } catch (InterruptedException e) { throw new IllegalStateException("Interrupted Exception occurred", e); } @@ -131,6 +135,7 @@ private void requestAccessToken() { /** * Get a valid unexpired access token. + * * @return access token. */ public String authenticate() { diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/PollingClient.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/PollingClient.java new file mode 100644 index 00000000..6aa97038 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/PollingClient.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.clients; + +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.functions.FunctionContext; + +import java.util.Collection; + +/** A client that is used to get enrichment data from external component. */ +public interface PollingClient { + + /** + * Gets enrichment data from external component using provided lookup arguments. + * + * @param lookupRow A {@link RowData} containing request parameters. + * @return an optional result of data lookup. + */ + Collection pull(RowData lookupRow); + + /** + * Initialize the client. + * + * @param ctx function context + */ + void open(FunctionContext ctx); +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/PollingClientFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/PollingClientFactory.java new file mode 100644 index 00000000..4874e604 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/PollingClientFactory.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.clients; + +import org.apache.flink.api.common.serialization.DeserializationSchema; +import org.apache.flink.connector.http.table.lookup.HttpLookupConfig; +import org.apache.flink.util.ConfigurationException; + +import java.io.Serializable; + +/** + * Polling client factory. + * + * @param polling client + */ +public interface PollingClientFactory extends Serializable { + + PollingClient createPollClient( + HttpLookupConfig options, DeserializationSchema schemaDecoder) + throws ConfigurationException; +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/SinkHttpClient.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/SinkHttpClient.java new file mode 100644 index 00000000..ce9fc5b8 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/SinkHttpClient.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.clients; + +import org.apache.flink.connector.http.sink.HttpSinkInternal; +import org.apache.flink.connector.http.sink.HttpSinkRequestEntry; +import org.apache.flink.connector.http.sink.HttpSinkWriter; + +import java.util.List; +import java.util.concurrent.CompletableFuture; + +/** + * An HTTP client that is used by {@link HttpSinkWriter} to send HTTP requests processed by {@link + * HttpSinkInternal}. + */ +public interface SinkHttpClient { + + /** + * Sends HTTP requests to an external web service. + * + * @param requestEntries a set of request entries that should be sent to the destination + * @param endpointUrl the URL of the endpoint + * @return the new {@link CompletableFuture} wrapping {@link SinkHttpClientResponse} that + * completes when all requests have been sent and returned their statuses + */ + CompletableFuture putRequests( + List requestEntries, String endpointUrl); +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/SinkHttpClientBuilder.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/SinkHttpClientBuilder.java new file mode 100644 index 00000000..26352c33 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/SinkHttpClientBuilder.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.clients; + +import org.apache.flink.annotation.PublicEvolving; +import org.apache.flink.connector.http.HttpPostRequestCallback; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.sink.httpclient.HttpRequest; +import org.apache.flink.connector.http.sink.httpclient.RequestSubmitterFactory; + +import java.io.Serializable; +import java.util.Properties; + +/** Builder building {@link SinkHttpClient}. */ +@PublicEvolving +public interface SinkHttpClientBuilder extends Serializable { + + // TODO Consider moving HttpPostRequestCallback and HeaderPreprocessor, RequestSubmitter to be a + // SinkHttpClientBuilder fields. This method is getting more and more arguments. + SinkHttpClient build( + Properties properties, + HttpPostRequestCallback httpPostRequestCallback, + HeaderPreprocessor headerPreprocessor, + RequestSubmitterFactory requestSubmitterFactory); +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/SinkHttpClientResponse.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/SinkHttpClientResponse.java new file mode 100644 index 00000000..9fbf1695 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/clients/SinkHttpClientResponse.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.clients; + +import org.apache.flink.connector.http.sink.HttpSinkRequestEntry; +import org.apache.flink.connector.http.sink.httpclient.HttpRequest; + +import lombok.Data; +import lombok.NonNull; +import lombok.ToString; + +import java.util.List; + +/** + * Data class holding {@link HttpSinkRequestEntry} instances that {@link SinkHttpClient} attempted + * to write, divided into two lists — successful and failed ones. + */ +@Data +@ToString +public class SinkHttpClientResponse { + + /** A list of successfully written requests. */ + @NonNull private final List successfulRequests; + + /** A list of requests that {@link SinkHttpClient} failed to write. */ + @NonNull private final List failedRequests; +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/config/ConfigException.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/config/ConfigException.java new file mode 100644 index 00000000..cd296a78 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/config/ConfigException.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.config; + +/** + * A Runtime exception throw when there is any issue with configuration properties for Http + * Connector. + */ +public class ConfigException extends RuntimeException { + + private static final long serialVersionUID = 1L; + + public ConfigException(String message) { + super(message); + } + + public ConfigException(String message, Throwable t) { + super(message, t); + } + + /** + * Creates an exception object using predefined exception message template: {@code Invalid value + * + (value) + for configuration + (property name) + (additional message) }. + * + * @param name configuration property name. + * @param value configuration property value. + * @param message custom message appended to the end of exception message. + */ + public ConfigException(String name, Object value, String message) { + super( + "Invalid value " + + value + + " for configuration " + + name + + (message == null ? 
"" : ": " + message)); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/config/HttpConnectorConfigConstants.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/config/HttpConnectorConfigConstants.java new file mode 100644 index 00000000..9f7721ab --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/config/HttpConnectorConfigConstants.java @@ -0,0 +1,159 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.config; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import lombok.experimental.UtilityClass; + +/** A dictionary class containing properties or properties prefixes for Http connector. */ +@UtilityClass +@NoArgsConstructor(access = AccessLevel.NONE) +// TODO Change this name to HttpConnectorConfigProperties +public final class HttpConnectorConfigConstants { + + public static final String PROP_DELIM = ","; + + /** A property prefix for http connector. 
*/ + public static final String FLINK_CONNECTOR_HTTP = "http."; + + private static final String SOURCE_LOOKUP_PREFIX = FLINK_CONNECTOR_HTTP + "source.lookup."; + + /** A property prefix for http connector header properties. */ + public static final String SINK_HEADER_PREFIX = FLINK_CONNECTOR_HTTP + "sink.header."; + + public static final String LOOKUP_SOURCE_HEADER_PREFIX = SOURCE_LOOKUP_PREFIX + "header."; + public static final String OIDC_AUTH_TOKEN_REQUEST = + FLINK_CONNECTOR_HTTP + "security.oidc.token.request"; + + public static final String OIDC_AUTH_TOKEN_ENDPOINT_URL = + FLINK_CONNECTOR_HTTP + "security.oidc.token.endpoint.url"; + + public static final String OIDC_AUTH_TOKEN_EXPIRY_REDUCTION = + FLINK_CONNECTOR_HTTP + "security.oidc.token.expiry.reduction"; + /** + * Whether to use the raw value of the Authorization header. If set, it prevents the special + * treatment of the header for Basic Authentication, thus preserving the passed raw value. + * Defaults to false. + */ + public static final String LOOKUP_SOURCE_HEADER_USE_RAW = + SOURCE_LOOKUP_PREFIX + "use-raw-authorization-header"; + + public static final String RESULT_TYPE = SOURCE_LOOKUP_PREFIX + "result-type"; + + // --------- Error code handling configuration --------- + + // TODO copied from + // https://github.com/getindata/flink-http-connector/blob/e00d57607f7d1a0d72c6ca48abe[…]nnectors/http/internal/config/HttpConnectorConfigConstants.java + // Changing label name to INCLUDE, but the value is exclude. Needs investigating. 
+ public static final String HTTP_ERROR_SINK_CODE_INCLUDE_LIST = + FLINK_CONNECTOR_HTTP + "sink.error.code.exclude"; + + public static final String HTTP_ERROR_SINK_CODES_LIST = + FLINK_CONNECTOR_HTTP + "sink.error.code"; + // ----------------------------------------------------- + + public static final String SOURCE_LOOKUP_REQUEST_CALLBACK_IDENTIFIER = + SOURCE_LOOKUP_PREFIX + "request-callback"; + + public static final String SINK_REQUEST_CALLBACK_IDENTIFIER = + FLINK_CONNECTOR_HTTP + "sink.request-callback"; + + public static final String SOURCE_LOOKUP_QUERY_CREATOR_IDENTIFIER = + SOURCE_LOOKUP_PREFIX + "query-creator"; + + // -------------- HTTPS security settings -------------- + public static final String ALLOW_SELF_SIGNED = + FLINK_CONNECTOR_HTTP + "security.cert.server.allowSelfSigned"; + + public static final String SERVER_TRUSTED_CERT = FLINK_CONNECTOR_HTTP + "security.cert.server"; + + public static final String CLIENT_CERT = FLINK_CONNECTOR_HTTP + "security.cert.client"; + + public static final String CLIENT_PRIVATE_KEY = FLINK_CONNECTOR_HTTP + "security.key.client"; + + public static final String KEY_STORE_PATH = FLINK_CONNECTOR_HTTP + "security.keystore.path"; + + public static final String KEY_STORE_PASSWORD = + FLINK_CONNECTOR_HTTP + "security.keystore.password"; + + public static final String KEY_STORE_TYPE = FLINK_CONNECTOR_HTTP + "security.keystore.type"; + + // ----------------------------------------------------- + + // ------ HTTPS timeouts and thread pool settings ------ + + public static final String LOOKUP_HTTP_TIMEOUT_SECONDS = + SOURCE_LOOKUP_PREFIX + "request.timeout"; + + public static final String SOURCE_CONNECTION_TIMEOUT = + SOURCE_LOOKUP_PREFIX + "connection.timeout"; + + public static final String SOURCE_PROXY_HOST = SOURCE_LOOKUP_PREFIX + "proxy.host"; + + public static final String SOURCE_PROXY_PORT = SOURCE_LOOKUP_PREFIX + "proxy.port"; + + public static final String SOURCE_PROXY_USERNAME = SOURCE_LOOKUP_PREFIX + 
"proxy.username"; + + public static final String SOURCE_PROXY_PASSWORD = SOURCE_LOOKUP_PREFIX + "proxy.password"; + + public static final String SINK_HTTP_TIMEOUT_SECONDS = + FLINK_CONNECTOR_HTTP + "sink.request.timeout"; + + public static final String LOOKUP_HTTP_PULING_THREAD_POOL_SIZE = + SOURCE_LOOKUP_PREFIX + "request.thread-pool.size"; + + public static final String LOOKUP_HTTP_RESPONSE_THREAD_POOL_SIZE = + SOURCE_LOOKUP_PREFIX + "response.thread-pool.size"; + + public static final String SINK_HTTP_WRITER_THREAD_POOL_SIZE = + FLINK_CONNECTOR_HTTP + "sink.writer.thread-pool.size"; + + // ----------------------------------------------------- + + // ------ Sink request submitter settings ------ + public static final String SINK_HTTP_REQUEST_MODE = + FLINK_CONNECTOR_HTTP + "sink.writer.request.mode"; + + public static final String SINK_HTTP_BATCH_REQUEST_SIZE = + FLINK_CONNECTOR_HTTP + "sink.request.batch.size"; + + // --------------------------------------------- + public static final String SOURCE_RETRY_SUCCESS_CODES = SOURCE_LOOKUP_PREFIX + "success-codes"; + public static final String SOURCE_RETRY_RETRY_CODES = SOURCE_LOOKUP_PREFIX + "retry-codes"; + public static final String SOURCE_IGNORE_RESPONSE_CODES = + SOURCE_LOOKUP_PREFIX + "ignored-response-codes"; + + public static final String SOURCE_RETRY_STRATEGY_PREFIX = + SOURCE_LOOKUP_PREFIX + "retry-strategy."; + public static final String SOURCE_RETRY_STRATEGY_TYPE = SOURCE_RETRY_STRATEGY_PREFIX + "type"; + + private static final String SOURCE_RETRY_FIXED_DELAY_PREFIX = + SOURCE_RETRY_STRATEGY_PREFIX + "fixed-delay."; + public static final String SOURCE_RETRY_FIXED_DELAY_DELAY = + SOURCE_RETRY_FIXED_DELAY_PREFIX + "delay"; + + private static final String SOURCE_RETRY_EXP_DELAY_PREFIX = + SOURCE_RETRY_STRATEGY_PREFIX + "exponential-delay."; + public static final String SOURCE_RETRY_EXP_DELAY_INITIAL_BACKOFF = + SOURCE_RETRY_EXP_DELAY_PREFIX + "initial-backoff"; + public static final String 
SOURCE_RETRY_EXP_DELAY_MAX_BACKOFF = + SOURCE_RETRY_EXP_DELAY_PREFIX + "max-backoff"; + public static final String SOURCE_RETRY_EXP_DELAY_MULTIPLIER = + SOURCE_RETRY_EXP_DELAY_PREFIX + "backoff-multiplier"; +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/config/SinkRequestSubmitMode.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/config/SinkRequestSubmitMode.java new file mode 100644 index 00000000..71e516df --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/config/SinkRequestSubmitMode.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.config; + +/** Sink request submit mode. 
/** Supported modes for submitting HTTP sink requests. */
public enum SinkRequestSubmitMode {

    /** Every request entry is submitted as its own HTTP request. */
    SINGLE("single"),

    /** Request entries are grouped together and submitted as batches. */
    BATCH("batch");

    /** The connector option value that identifies this mode in configuration. */
    private final String mode;

    SinkRequestSubmitMode(String mode) {
        this.mode = mode;
    }

    /**
     * Returns the connector option value for this mode.
     *
     * @return configuration string for this submit mode.
     */
    public String getMode() {
        return mode;
    }
}
+ */ + +package org.apache.flink.connector.http.preprocessor; import java.util.Base64; import java.util.Objects; /** - * Header processor for HTTP Basic Authentication mechanism. - * Only "Basic" authentication is supported currently. + * Header processor for HTTP Basic Authentication mechanism. Only "Basic" authentication is + * supported currently. */ public class BasicAuthHeaderValuePreprocessor implements HeaderValuePreprocessor { @@ -14,8 +31,8 @@ public class BasicAuthHeaderValuePreprocessor implements HeaderValuePreprocessor private boolean useRawAuthHeader = false; /** - * Creates a new instance of BasicAuthHeaderValuePreprocessor that uses - * the default processing of the Authorization header. + * Creates a new instance of BasicAuthHeaderValuePreprocessor that uses the default processing + * of the Authorization header. */ public BasicAuthHeaderValuePreprocessor() { this(false); @@ -25,8 +42,7 @@ public BasicAuthHeaderValuePreprocessor() { * Creates a new instance of BasicAuthHeaderValuePreprocessor. * * @param useRawAuthHeader If set to true, the Authorization header is kept as-is, - * untransformed. Otherwise, uses the default processing of the - * Authorization header. + * untransformed. Otherwise, uses the default processing of the Authorization header. */ public BasicAuthHeaderValuePreprocessor(boolean useRawAuthHeader) { this.useRawAuthHeader = useRawAuthHeader; @@ -35,14 +51,14 @@ public BasicAuthHeaderValuePreprocessor(boolean useRawAuthHeader) { /** * Calculates {@link Base64} value of provided header value. For Basic authentication mechanism, * the raw value is expected to match user:password pattern. - *

- * If rawValue starts with "Basic " prefix, or useRawAuthHeader has been set to true, it is + * + *

If rawValue starts with "Basic " prefix, or useRawAuthHeader has been set to true, it is * assumed that this value is already converted to the expected "Authorization" header value. * * @param rawValue header original value to modify. * @return value of "Authorization" header with format "Basic " + Base64 from rawValue or - * rawValue without any changes if it starts with "Basic " prefix or useRawAuthHeader is - * set to true. + * rawValue without any changes if it starts with "Basic " prefix or useRawAuthHeader is set + * to true. */ @Override public String preprocessHeaderValue(String rawValue) { diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/preprocessor/ComposeHeaderPreprocessor.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/preprocessor/ComposeHeaderPreprocessor.java new file mode 100644 index 00000000..142e27fc --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/preprocessor/ComposeHeaderPreprocessor.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.preprocessor; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +/** + * This implementation of {@link HeaderPreprocessor} acts as a registry for all {@link + * HeaderValuePreprocessor} that should be applied on HTTP request. + */ +public class ComposeHeaderPreprocessor implements HeaderPreprocessor { + + /** + * Default, pass through header value preprocessor used whenever dedicated preprocessor for a + * given header does not exist. + */ + private static final HeaderValuePreprocessor DEFAULT_VALUE_PREPROCESSOR = rawValue -> rawValue; + + /** Map with {@link HeaderValuePreprocessor} to apply. */ + private final Map valuePreprocessors; + + /** + * Creates a new instance of ComposeHeaderPreprocessor for provided {@link + * HeaderValuePreprocessor} map. + * + * @param valuePreprocessors map of {@link HeaderValuePreprocessor} that should be used for this + * processor. If null, then default, pass through header value processor will be used for + * every header. + */ + public ComposeHeaderPreprocessor(Map valuePreprocessors) { + this.valuePreprocessors = + (valuePreprocessors == null) + ? 
Collections.emptyMap() + : new HashMap<>(valuePreprocessors); + } + + @Override + public String preprocessValueForHeader(String headerName, String headerRawValue) { + return valuePreprocessors + .getOrDefault(headerName, DEFAULT_VALUE_PREPROCESSOR) + .preprocessHeaderValue(headerRawValue); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/preprocessor/HeaderPreprocessor.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/preprocessor/HeaderPreprocessor.java new file mode 100644 index 00000000..2f5f82a0 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/preprocessor/HeaderPreprocessor.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.preprocessor; + +import java.io.Serializable; + +/** Interface for header preprocessing. */ +public interface HeaderPreprocessor extends Serializable { + + /** + * Preprocess value of a header.Preprocessing can change or validate header value. + * + * @param headerName header name which value should be preprocessed. + * @param headerRawValue header value to process. + * @return preprocessed header value. 
import java.io.Serializable;

/** Interface for header preprocessing. */
public interface HeaderPreprocessor extends Serializable {

    /**
     * Preprocess the value of a header. Preprocessing can change or validate the header value.
     *
     * @param headerName header name whose value should be preprocessed.
     * @param headerRawValue header value to process.
     * @return preprocessed header value.
     */
    String preprocessValueForHeader(String headerName, String headerRawValue);
}
import java.io.Serializable;

/**
 * Processor interface that modifies a header value based on the implemented logic. An example would
 * be the calculation of the value of an Authorization header.
 */
public interface HeaderValuePreprocessor extends Serializable {

    /**
     * Modifies the header {@code rawValue} according to the implemented logic.
     *
     * @param rawValue original header value to modify.
     * @return modified header value.
     */
    String preprocessHeaderValue(String rawValue);
}
+ * + * @param oidcAuthURL OIDC token endpoint + * @param oidcTokenRequest OIDC Token Request + * @param oidcExpiryReduction OIDC token expiry reduction + */ + public OIDCAuthHeaderValuePreprocessor( + String oidcAuthURL, String oidcTokenRequest, Optional oidcExpiryReduction) { + this.oidcAuthURL = oidcAuthURL; + this.oidcTokenRequest = oidcTokenRequest; + if (oidcExpiryReduction.isPresent()) { + this.oidcExpiryReduction = oidcExpiryReduction.get(); + } + } + + @Override + public String preprocessHeaderValue(String rawValue) { + OidcAccessTokenManager auth = + new OidcAccessTokenManager( + HttpClient.newBuilder().build(), + oidcTokenRequest, + oidcAuthURL, + oidcExpiryReduction); + // apply the OIDC authentication by adding the dynamically calculated header value. + return "BEARER " + auth.authenticate(); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/retry/HttpClientWithRetry.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/retry/HttpClientWithRetry.java new file mode 100644 index 00000000..e3c30c5e --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/retry/HttpClientWithRetry.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.retry; + +import org.apache.flink.connector.http.HttpStatusCodeValidationFailedException; +import org.apache.flink.connector.http.status.HttpResponseChecker; +import org.apache.flink.metrics.MetricGroup; + +import io.github.resilience4j.retry.Retry; +import io.github.resilience4j.retry.RetryConfig; +import lombok.Builder; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +import java.io.IOException; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.util.function.Supplier; + +/** {@link HttpClient} with retry. */ +@Slf4j +public class HttpClientWithRetry { + + private final HttpClient httpClient; + @Getter private final HttpResponseChecker responseChecker; + private final Retry retry; + + @Builder + HttpClientWithRetry( + HttpClient httpClient, RetryConfig retryConfig, HttpResponseChecker responseChecker) { + this.httpClient = httpClient; + this.responseChecker = responseChecker; + var adjustedRetryConfig = + RetryConfig.from(retryConfig) + .retryExceptions(IOException.class) + .retryOnResult(this::isTemporalError) + .build(); + this.retry = Retry.of("http-lookup-connector", adjustedRetryConfig); + } + + public void registerMetrics(MetricGroup metrics) { + var group = metrics.addGroup("http_lookup_connector"); + group.gauge( + "successfulCallsWithRetryAttempt", + () -> retry.getMetrics().getNumberOfSuccessfulCallsWithRetryAttempt()); + group.gauge( + "successfulCallsWithoutRetryAttempt", + () -> retry.getMetrics().getNumberOfSuccessfulCallsWithoutRetryAttempt()); + } + + public HttpResponse send( + Supplier requestSupplier, HttpResponse.BodyHandler responseBodyHandler) + throws IOException, InterruptedException, HttpStatusCodeValidationFailedException { + try { + var response = + Retry.decorateCheckedSupplier( + retry, + () -> + 
httpClient.send( + requestSupplier.get(), responseBodyHandler)) + .apply(); + if (!responseChecker.isSuccessful(response)) { + throw new HttpStatusCodeValidationFailedException( + "Incorrect response code: " + response.statusCode(), response); + } + return response; + } catch (IOException | InterruptedException | HttpStatusCodeValidationFailedException e) { + throw e; // re-throw without wrapping + } catch (Throwable t) { + throw new RuntimeException("Unexpected exception", t); + } + } + + private boolean isTemporalError(Object response) { + return responseChecker.isTemporalError((HttpResponse) response); + } +} diff --git a/src/main/java/com/getindata/connectors/http/internal/retry/RetryConfigProvider.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/retry/RetryConfigProvider.java similarity index 50% rename from src/main/java/com/getindata/connectors/http/internal/retry/RetryConfigProvider.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/retry/RetryConfigProvider.java index 0a09d7d9..424113bb 100644 --- a/src/main/java/com/getindata/connectors/http/internal/retry/RetryConfigProvider.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/retry/RetryConfigProvider.java @@ -1,19 +1,37 @@ -package com.getindata.connectors.http.internal.retry; +/* Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.retry; + +import org.apache.flink.configuration.ReadableConfig; +import org.apache.flink.table.connector.source.lookup.LookupOptions; import io.github.resilience4j.core.IntervalFunction; import io.github.resilience4j.retry.RetryConfig; import lombok.AccessLevel; import lombok.RequiredArgsConstructor; -import org.apache.flink.configuration.ReadableConfig; -import org.apache.flink.table.connector.source.lookup.LookupOptions; -import static io.github.resilience4j.core.IntervalFunction.ofExponentialBackoff; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_INITIAL_BACKOFF; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MAX_BACKOFF; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MULTIPLIER; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_FIXED_DELAY_DELAY; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_STRATEGY; +import static io.github.resilience4j.core.IntervalFunction.ofExponentialBackoff; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_INITIAL_BACKOFF; +import static 
org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MAX_BACKOFF; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MULTIPLIER; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_FIXED_DELAY_DELAY; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_STRATEGY; +/** Configuration for Retry. */ @RequiredArgsConstructor(access = AccessLevel.PRIVATE) public class RetryConfigProvider { @@ -24,9 +42,7 @@ public static RetryConfig create(ReadableConfig config) { } private RetryConfig create() { - return createBuilder() - .maxAttempts(config.get(LookupOptions.MAX_RETRIES) + 1) - .build(); + return createBuilder().maxAttempts(config.get(LookupOptions.MAX_RETRIES) + 1).build(); } private RetryConfig.Builder createBuilder() { @@ -45,7 +61,8 @@ private RetryStrategyType getRetryStrategy() { private RetryConfig.Builder configureFixedDelay() { return RetryConfig.custom() - .intervalFunction(IntervalFunction.of(config.get(SOURCE_LOOKUP_RETRY_FIXED_DELAY_DELAY))); + .intervalFunction( + IntervalFunction.of(config.get(SOURCE_LOOKUP_RETRY_FIXED_DELAY_DELAY))); } private RetryConfig.Builder configureExponentialDelay() { diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/retry/RetryStrategyType.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/retry/RetryStrategyType.java new file mode 100644 index 00000000..d3d21d9f --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/retry/RetryStrategyType.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
/** Retry strategy type enum. */
public enum RetryStrategyType {
    FIXED_DELAY("fixed-delay"),
    EXPONENTIAL_DELAY("exponential-delay");

    /** Configuration string identifying this strategy. */
    private final String code;

    RetryStrategyType(String code) {
        this.code = code;
    }

    /**
     * Returns the configuration string identifying this strategy.
     *
     * @return configuration code for this strategy.
     */
    public String getCode() {
        return code;
    }

    /**
     * Resolves a strategy from its configuration string, ignoring case.
     *
     * @param code configuration string, e.g. "fixed-delay".
     * @return matching strategy.
     * @throws NullPointerException when {@code code} is null.
     * @throws IllegalArgumentException when no strategy matches {@code code}.
     */
    public static RetryStrategyType fromCode(String code) {
        if (code == null) {
            throw new NullPointerException("Code is null");
        }
        for (RetryStrategyType strategy : values()) {
            if (strategy.getCode().equalsIgnoreCase(code)) {
                return strategy;
            }
        }
        throw new IllegalArgumentException("No enum constant for " + code);
    }
}
com.getindata.connectors.http.internal.security; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.security; + +import org.apache.flink.connector.http.utils.ConfigUtils; + +import lombok.extern.slf4j.Slf4j; + +import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.TrustManagerFactory; import java.io.ByteArrayInputStream; import java.io.File; @@ -17,14 +43,6 @@ import java.util.Base64; import java.util.Collection; import java.util.UUID; -import javax.net.ssl.KeyManagerFactory; -import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManager; -import javax.net.ssl.TrustManagerFactory; - -import lombok.extern.slf4j.Slf4j; - -import com.getindata.connectors.http.internal.utils.ConfigUtils; /** * This class represents a security context for given Http connector instance. The Security context @@ -48,16 +66,15 @@ public class SecurityContext { private final KeyStore keystore; - /** - * Creates instance of {@link SecurityContext} and initialize {@link KeyStore} instance. - */ + /** Creates instance of {@link SecurityContext} and initialize {@link KeyStore} instance. 
*/ private SecurityContext(KeyStore keystore, char[] storePassword) { this.keystore = keystore; this.storePassword = storePassword; } /** - * Creates a {@link SecurityContext} with empty {@link KeyStore} + * Creates a {@link SecurityContext} with empty {@link KeyStore}. + * * @return new instance of {@link SecurityContext} */ public static SecurityContext create() { @@ -71,14 +88,13 @@ public static SecurityContext create() { return new SecurityContext(keystore, storePasswordCharArr); } catch (Exception e) { throw new RuntimeException( - "Unable to create KeyStore for Http Connector Security Context.", - e - ); + "Unable to create KeyStore for Http Connector Security Context.", e); } } /** * Creates a {@link SecurityContext} with {@link KeyStore} loaded from provided path. + * * @param keyStorePath Path to keystore. * @param storePassword password for keystore. * @return new instance of {@link SecurityContext} @@ -89,13 +105,11 @@ public static SecurityContext createFromKeyStore(String keyStorePath, char[] sto File file = new File(keyStorePath); InputStream stream = new FileInputStream(file); KeyStore keystore = KeyStore.getInstance(JKS_STORE_TYPE); - keystore.load(stream,storePassword); + keystore.load(stream, storePassword); return new SecurityContext(keystore, storePassword); } catch (Exception e) { throw new RuntimeException( - "Unable to create KeyStore for Http Connector Security Context.", - e - ); + "Unable to create KeyStore for Http Connector Security Context.", e); } } @@ -122,10 +136,11 @@ public SSLContext getSslContext(TrustManager[] trustManagers) { } /** - * Creates TrustManagers for given {@link KeyStore} managed by this instance of - * {@link SSLContext}. It is important that all keys and certificates should be added - * before calling this method. Any key/certificate added after calling this method - * will not be visible by previously created TrustManager objects. 
+ * Creates TrustManagers for given {@link KeyStore} managed by this instance of {@link + * SSLContext}. It is important that all keys and certificates should be added before calling + * this method. Any key/certificate added after calling this method will not be visible by + * previously created TrustManager objects. + * * @return an array of {@link TrustManager} */ public TrustManager[] getTrustManagers() { @@ -138,39 +153,38 @@ public TrustManager[] getTrustManagers() { return trustManagerFactory.getTrustManagers(); } catch (GeneralSecurityException e) { throw new RuntimeException( - "Unable to created Trust Managers for Http Connector security context.", - e - ); + "Unable to created Trust Managers for Http Connector security context.", e); } } /** * Adds certificate to as trusted. Certificate is added only to this Context's {@link KeyStore} * and not for entire JVM. + * * @param certPath path to certificate that should be added as trusted. */ public void addCertToTrustStore(String certPath) { log.info("Trying to add certificate to Security Context - " + certPath); try (FileInputStream certInputStream = new FileInputStream(certPath)) { - CertificateFactory certificateFactory = CertificateFactory.getInstance( - X_509_CERTIFICATE_TYPE); + CertificateFactory certificateFactory = + CertificateFactory.getInstance(X_509_CERTIFICATE_TYPE); Certificate certificate = certificateFactory.generateCertificate(certInputStream); this.keystore.setCertificateEntry(UUID.randomUUID().toString(), certificate); log.info("Certificated added to keyStore ass trusted - " + certPath); } catch (Exception e) { throw new RuntimeException( - "Unable to add certificate as trusted to Http Connector security context - " - + certPath, - e - ); + "Unable to add certificate as trusted to Http Connector security context - " + + certPath, + e); } } /** * Add certificate and private key that should be used by anny Http Connector instance that uses - * this {@link SSLContext} instance. 
Certificate and key are added only to this Context's - * {@link KeyStore} and not for entire JVM. + * this {@link SSLContext} instance. Certificate and key are added only to this Context's {@link + * KeyStore} and not for entire JVM. + * * @param publicKeyPath path to public key/certificate used for mTLS. * @param privateKeyPath path to private key used for mTLS. */ @@ -182,31 +196,30 @@ public void addMTlsCerts(String publicKeyPath, String privateKeyPath) { byte[] decodedPrivateData = decodePrivateData(privateKeyPath, privateData); CertificateFactory certificateFactory = - CertificateFactory.getInstance(X_509_CERTIFICATE_TYPE); - Collection chain = certificateFactory.generateCertificates( - new ByteArrayInputStream(publicData)); + CertificateFactory.getInstance(X_509_CERTIFICATE_TYPE); + Collection chain = + certificateFactory.generateCertificates(new ByteArrayInputStream(publicData)); - Key key = KeyFactory - .getInstance(KEY_ALGORITHM) - .generatePrivate(new PKCS8EncodedKeySpec(decodedPrivateData)); + Key key = + KeyFactory.getInstance(KEY_ALGORITHM) + .generatePrivate(new PKCS8EncodedKeySpec(decodedPrivateData)); this.keystore.setKeyEntry( - UUID.randomUUID().toString(), - key, - this.storePassword, - chain.toArray(new Certificate[0]) - ); + UUID.randomUUID().toString(), + key, + this.storePassword, + chain.toArray(new Certificate[0])); } catch (Exception e) { throw new RuntimeException( - "Unable to add client private key/public certificate to Http Connector KeyStore. " - + String.join(",", privateKeyPath, publicKeyPath), - e - ); + "Unable to add client private key/public certificate to Http Connector KeyStore. " + + String.join(",", privateKeyPath, publicKeyPath), + e); } } /** * Reads private key data. Key can be in PEM and DER coding and in PKCS8 format. + * * @param privateKeyPath path to private key. * @param privateData read bytes from private key, * @return decoded key data. 
@@ -217,10 +230,11 @@ private byte[] decodePrivateData(String privateKeyPath, byte[] privateData) { // openssl pkcs8 -topk8 -inform PEM -outform PEM -in client.pem // -out clientPrivateKey.pem -nocrypt if (privateKeyPath.endsWith(".pem")) { - String privateString = new String(privateData, Charset.defaultCharset()) - .replace(PRIVATE_KEY_HEADER, "") - .replaceAll(ConfigUtils.UNIVERSAL_NEW_LINE_REGEXP, "") - .replace(PRIVATE_KEY_FOOTER, ""); + String privateString = + new String(privateData, Charset.defaultCharset()) + .replace(PRIVATE_KEY_HEADER, "") + .replaceAll(ConfigUtils.UNIVERSAL_NEW_LINE_REGEXP, "") + .replace(PRIVATE_KEY_FOOTER, ""); return Base64.getDecoder().decode(privateString); } else { diff --git a/src/main/java/com/getindata/connectors/http/internal/security/SelfSignedTrustManager.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/security/SelfSignedTrustManager.java similarity index 59% rename from src/main/java/com/getindata/connectors/http/internal/security/SelfSignedTrustManager.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/security/SelfSignedTrustManager.java index 56c9a4bc..51684eb0 100644 --- a/src/main/java/com/getindata/connectors/http/internal/security/SelfSignedTrustManager.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/security/SelfSignedTrustManager.java @@ -1,14 +1,33 @@ -package com.getindata.connectors.http.internal.security; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.security; + +import lombok.extern.slf4j.Slf4j; -import java.net.Socket; -import java.security.cert.CertificateException; -import java.security.cert.X509Certificate; import javax.net.ssl.SSLEngine; import javax.net.ssl.X509ExtendedTrustManager; import javax.net.ssl.X509TrustManager; -import lombok.extern.slf4j.Slf4j; +import java.net.Socket; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +/** Self signed trust manager. */ @Slf4j public class SelfSignedTrustManager extends X509ExtendedTrustManager { @@ -23,12 +42,12 @@ public void checkClientTrusted(X509Certificate[] chain, String s) throws Certifi } public void checkClientTrusted(X509Certificate[] chain, String s, Socket socket) - throws CertificateException { + throws CertificateException { this.delegate.checkClientTrusted(chain, s); } public void checkClientTrusted(X509Certificate[] chain, String s, SSLEngine sslEngine) - throws CertificateException { + throws CertificateException { this.delegate.checkClientTrusted(chain, s); } @@ -37,12 +56,12 @@ public void checkServerTrusted(X509Certificate[] chain, String s) throws Certifi } public void checkServerTrusted(X509Certificate[] chain, String s, Socket socket) - throws CertificateException { + throws CertificateException { log.info("Allowing self signed server certificates."); } public void checkServerTrusted(X509Certificate[] chain, String s, SSLEngine sslEngine) - throws CertificateException { + throws CertificateException { log.info("Allowing self signed 
server certificates."); } diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkInternal.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkInternal.java new file mode 100644 index 00000000..a597cb15 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkInternal.java @@ -0,0 +1,202 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.sink; + +import org.apache.flink.connector.base.sink.AsyncSinkBase; +import org.apache.flink.connector.base.sink.writer.BufferedRequestState; +import org.apache.flink.connector.base.sink.writer.ElementConverter; +import org.apache.flink.connector.http.HttpPostRequestCallback; +import org.apache.flink.connector.http.HttpSink; +import org.apache.flink.connector.http.HttpSinkBuilder; +import org.apache.flink.connector.http.SchemaLifecycleAwareElementConverter; +import org.apache.flink.connector.http.clients.SinkHttpClientBuilder; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.config.SinkRequestSubmitMode; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.sink.httpclient.BatchRequestSubmitterFactory; +import org.apache.flink.connector.http.sink.httpclient.HttpRequest; +import org.apache.flink.connector.http.sink.httpclient.PerRequestRequestSubmitterFactory; +import org.apache.flink.connector.http.sink.httpclient.RequestSubmitterFactory; +import org.apache.flink.core.io.SimpleVersionedSerializer; +import org.apache.flink.util.Preconditions; +import org.apache.flink.util.StringUtils; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.Properties; + +/** + * An internal implementation of HTTP Sink that performs async requests against a specified HTTP + * endpoint using the buffering protocol specified in {@link AsyncSinkBase}. + * + *
+ * <p>API of this class can change without any concerns as long as it does not have any influence on
+ * methods defined in {@link HttpSink} and {@link HttpSinkBuilder} classes.
+ *
+ * <p>The behaviour of the buffering may be specified by providing configuration during the sink
+ * build time.
+ *
+ * <ul>
+ *   <li>{@code maxBatchSize}: the maximum size of a batch of entries that may be sent to the HTTP
+ *       endpoint;
+ *   <li>{@code maxInFlightRequests}: the maximum number of in flight requests that may exist, if
+ *       any more in flight requests need to be initiated once the maximum has been reached, then it
+ *       will be blocked until some have completed;
+ *   <li>{@code maxBufferedRequests}: the maximum number of elements held in the buffer, requests to
+ *       add elements will be blocked while the number of elements in the buffer is at the maximum;
+ *   <li>{@code maxBatchSizeInBytes}: the maximum size of a batch of entries that may be sent to the
+ *       HTTP endpoint measured in bytes;
+ *   <li>{@code maxTimeInBufferMS}: the maximum amount of time an entry is allowed to live in the
+ *       buffer, if any element reaches this age, the entire buffer will be flushed immediately;
+ *   <li>{@code maxRecordSizeInBytes}: the maximum size of a record the sink will accept into the
+ *       buffer, a record of size larger than this will be rejected when passed to the sink.
+ *   <li>{@code httpPostRequestCallback}: the {@link HttpPostRequestCallback} implementation for
+ *       processing of requests and responses;
+ *   <li>{@code properties}: properties related to the Http Sink.
+ * </ul>
+ * + * @param type of the elements that should be sent through HTTP request. + */ +public class HttpSinkInternal extends AsyncSinkBase { + + private final String endpointUrl; + + // having Builder instead of an instance of `SinkHttpClient` + // makes it possible to serialize `HttpSink` + private final SinkHttpClientBuilder sinkHttpClientBuilder; + + private final HttpPostRequestCallback httpPostRequestCallback; + + private final HeaderPreprocessor headerPreprocessor; + + private final Properties properties; + + protected HttpSinkInternal( + ElementConverter elementConverter, + int maxBatchSize, + int maxInFlightRequests, + int maxBufferedRequests, + long maxBatchSizeInBytes, + long maxTimeInBufferMS, + long maxRecordSizeInBytes, + String endpointUrl, + HttpPostRequestCallback httpPostRequestCallback, + HeaderPreprocessor headerPreprocessor, + SinkHttpClientBuilder sinkHttpClientBuilder, + Properties properties) { + + super( + elementConverter, + maxBatchSize, + maxInFlightRequests, + maxBufferedRequests, + maxBatchSizeInBytes, + maxTimeInBufferMS, + maxRecordSizeInBytes); + + Preconditions.checkArgument( + !StringUtils.isNullOrWhitespaceOnly(endpointUrl), + "The endpoint URL must be set when initializing HTTP Sink."); + this.endpointUrl = endpointUrl; + this.httpPostRequestCallback = + Preconditions.checkNotNull( + httpPostRequestCallback, + "Post request callback must be set when initializing HTTP Sink."); + this.headerPreprocessor = + Preconditions.checkNotNull( + headerPreprocessor, + "Header Preprocessor must be set when initializing HTTP Sink."); + this.sinkHttpClientBuilder = + Preconditions.checkNotNull( + sinkHttpClientBuilder, + "The HTTP client builder must not be null when initializing HTTP Sink."); + this.properties = properties; + } + + @Override + public StatefulSinkWriter> createWriter( + InitContext context) throws IOException { + + ElementConverter elementConverter = getElementConverter(); + if (elementConverter instanceof 
SchemaLifecycleAwareElementConverter) { + // This cast is needed for Flink 1.15.3 build + ((SchemaLifecycleAwareElementConverter) elementConverter).open(context); + } + + return new HttpSinkWriter<>( + elementConverter, + context, + getMaxBatchSize(), + getMaxInFlightRequests(), + getMaxBufferedRequests(), + getMaxBatchSizeInBytes(), + getMaxTimeInBufferMS(), + getMaxRecordSizeInBytes(), + endpointUrl, + sinkHttpClientBuilder.build( + properties, + httpPostRequestCallback, + headerPreprocessor, + getRequestSubmitterFactory()), + Collections.emptyList(), + properties); + } + + @Override + public StatefulSinkWriter> restoreWriter( + InitContext context, + Collection> recoveredState) + throws IOException { + + return new HttpSinkWriter<>( + getElementConverter(), + context, + getMaxBatchSize(), + getMaxInFlightRequests(), + getMaxBufferedRequests(), + getMaxBatchSizeInBytes(), + getMaxTimeInBufferMS(), + getMaxRecordSizeInBytes(), + endpointUrl, + sinkHttpClientBuilder.build( + properties, + httpPostRequestCallback, + headerPreprocessor, + getRequestSubmitterFactory()), + recoveredState, + properties); + } + + @Override + public SimpleVersionedSerializer> + getWriterStateSerializer() { + return new HttpSinkWriterStateSerializer(); + } + + private RequestSubmitterFactory getRequestSubmitterFactory() { + + if (SinkRequestSubmitMode.SINGLE + .getMode() + .equalsIgnoreCase( + properties.getProperty( + HttpConnectorConfigConstants.SINK_HTTP_REQUEST_MODE))) { + return new PerRequestRequestSubmitterFactory(); + } + return new BatchRequestSubmitterFactory(getMaxBatchSize()); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkRequestEntry.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkRequestEntry.java new file mode 100644 index 00000000..0b61e17a --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkRequestEntry.java @@ -0,0 +1,48 @@ +/* + * 
Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink; + +import org.apache.flink.connector.http.HttpSink; + +import lombok.EqualsAndHashCode; +import lombok.NonNull; +import lombok.RequiredArgsConstructor; +import lombok.ToString; + +import java.io.Serializable; + +/** + * Represents a single {@link HttpSink} request. Contains the HTTP method name, Content-Type header + * value, and byte representation of the body of the request. + */ +@RequiredArgsConstructor +@EqualsAndHashCode +@ToString +public final class HttpSinkRequestEntry implements Serializable { + + /** HTTP method name to use when sending the request. */ + @NonNull public final String method; + + /** Body of the request, encoded as byte array. 
*/ + public final byte[] element; + + /** @return the size of the {@link HttpSinkRequestEntry#element} */ + public long getSizeInBytes() { + return element.length; + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkWriter.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkWriter.java new file mode 100644 index 00000000..5dbc6cd3 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkWriter.java @@ -0,0 +1,161 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.sink; + +import org.apache.flink.api.connector.sink2.Sink; +import org.apache.flink.connector.base.sink.writer.AsyncSinkWriter; +import org.apache.flink.connector.base.sink.writer.BufferedRequestState; +import org.apache.flink.connector.base.sink.writer.ElementConverter; +import org.apache.flink.connector.http.HttpSink; +import org.apache.flink.connector.http.clients.SinkHttpClient; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.utils.ThreadUtils; +import org.apache.flink.metrics.Counter; +import org.apache.flink.util.concurrent.ExecutorThreadFactory; + +import lombok.extern.slf4j.Slf4j; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Properties; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.function.Consumer; + +/** + * Sink writer created by {@link HttpSink} to write to an HTTP endpoint. + * + *

More details on the internals of this sink writer may be found in {@link AsyncSinkWriter} + * documentation. + * + * @param type of the elements that should be sent through HTTP request. + */ +@Slf4j +public class HttpSinkWriter extends AsyncSinkWriter { + + private static final String HTTP_SINK_WRITER_THREAD_POOL_SIZE = "4"; + + /** Thread pool to handle HTTP response from HTTP client. */ + private final ExecutorService sinkWriterThreadPool; + + private final String endpointUrl; + + private final SinkHttpClient sinkHttpClient; + + private final Counter numRecordsSendErrorsCounter; + + public HttpSinkWriter( + ElementConverter elementConverter, + Sink.InitContext context, + int maxBatchSize, + int maxInFlightRequests, + int maxBufferedRequests, + long maxBatchSizeInBytes, + long maxTimeInBufferMS, + long maxRecordSizeInBytes, + String endpointUrl, + SinkHttpClient sinkHttpClient, + Collection> bufferedRequestStates, + Properties properties) { + + super( + elementConverter, + context, + maxBatchSize, + maxInFlightRequests, + maxBufferedRequests, + maxBatchSizeInBytes, + maxTimeInBufferMS, + maxRecordSizeInBytes, + bufferedRequestStates); + this.endpointUrl = endpointUrl; + this.sinkHttpClient = sinkHttpClient; + + var metrics = context.metricGroup(); + this.numRecordsSendErrorsCounter = metrics.getNumRecordsSendErrorsCounter(); + + int sinkWriterThreadPollSize = + Integer.parseInt( + properties.getProperty( + HttpConnectorConfigConstants.SINK_HTTP_WRITER_THREAD_POOL_SIZE, + HTTP_SINK_WRITER_THREAD_POOL_SIZE)); + + this.sinkWriterThreadPool = + Executors.newFixedThreadPool( + sinkWriterThreadPollSize, + new ExecutorThreadFactory( + "http-sink-writer-worker", ThreadUtils.LOGGING_EXCEPTION_HANDLER)); + } + + // TODO: Reintroduce retries by adding backoff policy + @Override + protected void submitRequestEntries( + List requestEntries, + Consumer> requestResult) { + var future = sinkHttpClient.putRequests(requestEntries, endpointUrl); + future.whenCompleteAsync( + 
(response, err) -> { + if (err != null) { + int failedRequestsNumber = requestEntries.size(); + log.error( + "Http Sink fatally failed to write all {} requests", + failedRequestsNumber); + numRecordsSendErrorsCounter.inc(failedRequestsNumber); + + // TODO: Make `HttpSinkInternal` retry the failed requests. + // Currently, it does not retry those at all, only adds their count + // to the `numRecordsSendErrors` metric. It is due to the fact we do not + // have + // a clear image how we want to do it, so it would be both efficient and + // correct. + // requestResult.accept(requestEntries); + } else if (response.getFailedRequests().size() > 0) { + int failedRequestsNumber = response.getFailedRequests().size(); + log.error( + "Http Sink failed to write and will retry {} requests", + failedRequestsNumber); + numRecordsSendErrorsCounter.inc(failedRequestsNumber); + + // TODO: Make `HttpSinkInternal` retry the failed requests. Currently, + // it does not retry those at all, only adds their count to the + // `numRecordsSendErrors` metric. It is due to the fact we do not have + // a clear image how we want to do it, so it would be both efficient and + // correct. 
+ + // requestResult.accept(response.getFailedRequests()); + // } else { + // requestResult.accept(Collections.emptyList()); + // } + } + requestResult.accept(Collections.emptyList()); + }, + sinkWriterThreadPool); + } + + @Override + protected long getSizeInBytes(HttpSinkRequestEntry s) { + return s.getSizeInBytes(); + } + + @Override + public void close() { + sinkWriterThreadPool.shutdownNow(); + super.close(); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkWriterStateSerializer.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkWriterStateSerializer.java new file mode 100644 index 00000000..c41e5e94 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/HttpSinkWriterStateSerializer.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.sink; + +import org.apache.flink.connector.base.sink.writer.AsyncSinkWriterStateSerializer; + +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; + +/** + * An implementation of {@link AsyncSinkWriterStateSerializer} for {@link HttpSinkInternal} and its + * {@link HttpSinkWriter}. + */ +public class HttpSinkWriterStateSerializer + extends AsyncSinkWriterStateSerializer { + + @Override + protected void serializeRequestToStream(HttpSinkRequestEntry s, DataOutputStream out) + throws IOException { + out.writeUTF(s.method); + out.write(s.element); + } + + @Override + protected HttpSinkRequestEntry deserializeRequestFromStream( + long requestSize, DataInputStream in) throws IOException { + var method = in.readUTF(); + var bytes = new byte[(int) requestSize]; + in.read(bytes); + return new HttpSinkRequestEntry(method, bytes); + } + + @Override + public int getVersion() { + return 1; + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/AbstractRequestSubmitter.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/AbstractRequestSubmitter.java new file mode 100644 index 00000000..10f99060 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/AbstractRequestSubmitter.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink.httpclient; + +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.utils.ThreadUtils; +import org.apache.flink.util.concurrent.ExecutorThreadFactory; + +import java.net.http.HttpClient; +import java.util.Properties; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +/** Abstract request submitter. */ +public abstract class AbstractRequestSubmitter implements RequestSubmitter { + + protected static final int HTTP_CLIENT_PUBLISHING_THREAD_POOL_SIZE = 1; + + protected static final String DEFAULT_REQUEST_TIMEOUT_SECONDS = "30"; + + /** Thread pool to handle HTTP response from HTTP client. 
*/ + protected final ExecutorService publishingThreadPool; + + protected final int httpRequestTimeOutSeconds; + + protected final String[] headersAndValues; + + protected final HttpClient httpClient; + + public AbstractRequestSubmitter( + Properties properties, String[] headersAndValues, HttpClient httpClient) { + + this.headersAndValues = headersAndValues; + this.publishingThreadPool = + Executors.newFixedThreadPool( + HTTP_CLIENT_PUBLISHING_THREAD_POOL_SIZE, + new ExecutorThreadFactory( + "http-sink-client-response-worker", + ThreadUtils.LOGGING_EXCEPTION_HANDLER)); + + this.httpRequestTimeOutSeconds = + Integer.parseInt( + properties.getProperty( + HttpConnectorConfigConstants.SINK_HTTP_TIMEOUT_SECONDS, + DEFAULT_REQUEST_TIMEOUT_SECONDS)); + + this.httpClient = httpClient; + } +} diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitter.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitter.java similarity index 60% rename from src/main/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitter.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitter.java index 51652331..19b45d65 100644 --- a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitter.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitter.java @@ -1,4 +1,27 @@ -package com.getindata.connectors.http.internal.sink.httpclient; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink.httpclient; + +import org.apache.flink.annotation.VisibleForTesting; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.sink.HttpSinkRequestEntry; + +import lombok.extern.slf4j.Slf4j; import java.net.URI; import java.net.http.HttpClient; @@ -15,12 +38,6 @@ import java.util.Properties; import java.util.concurrent.CompletableFuture; -import lombok.extern.slf4j.Slf4j; -import org.apache.flink.annotation.VisibleForTesting; - -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.sink.HttpSinkRequestEntry; - /** * This implementation groups received events in batches and submits each batch as individual HTTP * requests. Batch is created based on batch size or based on HTTP method type. 
@@ -37,21 +54,19 @@ public class BatchRequestSubmitter extends AbstractRequestSubmitter { private final int httpRequestBatchSize; public BatchRequestSubmitter( - Properties properties, - String[] headersAndValue, - HttpClient httpClient) { + Properties properties, String[] headersAndValue, HttpClient httpClient) { super(properties, headersAndValue, httpClient); - this.httpRequestBatchSize = Integer.parseInt( - properties.getProperty(HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE) - ); + this.httpRequestBatchSize = + Integer.parseInt( + properties.getProperty( + HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE)); } @Override public List> submit( - String endpointUrl, - List requestsToSubmit) { + String endpointUrl, List requestsToSubmit) { if (requestsToSubmit.isEmpty()) { return Collections.emptyList(); @@ -63,7 +78,7 @@ public List> submit( for (var entry : requestsToSubmit) { if (requestBatch.size() == httpRequestBatchSize - || !previousReqeustMethod.equalsIgnoreCase(entry.method)) { + || !previousReqeustMethod.equalsIgnoreCase(entry.method)) { // break batch and submit responseFutures.add(sendBatch(endpointUrl, requestBatch)); requestBatch.clear(); @@ -83,23 +98,21 @@ int getBatchSize() { } private CompletableFuture sendBatch( - String endpointUrl, - List reqeustBatch) { + String endpointUrl, List reqeustBatch) { HttpRequest httpRequest = buildHttpRequest(reqeustBatch, URI.create(endpointUrl)); return httpClient - .sendAsync( - httpRequest.getHttpRequest(), - HttpResponse.BodyHandlers.ofString()) - .exceptionally(ex -> { - // TODO This will be executed on a ForkJoinPool Thread... refactor this someday. 
- log.error("Request fatally failed because of an exception", ex); - return null; - }) - .thenApplyAsync( - res -> new JavaNetHttpResponseWrapper(httpRequest, res), - publishingThreadPool - ); + .sendAsync(httpRequest.getHttpRequest(), HttpResponse.BodyHandlers.ofString()) + .exceptionally( + ex -> { + // TODO This will be executed on a ForkJoinPool Thread... refactor this + // someday. + log.error("Request fatally failed because of an exception", ex); + return null; + }) + .thenApplyAsync( + res -> new JavaNetHttpResponseWrapper(httpRequest, res), + publishingThreadPool); } private HttpRequest buildHttpRequest(List reqeustBatch, URI endpointUri) { @@ -121,12 +134,12 @@ private HttpRequest buildHttpRequest(List reqeustBatch, UR elements.set(elements.size() - 1, BATCH_END_BYTES); publisher = BodyPublishers.ofByteArrays(elements); - Builder requestBuilder = java.net.http.HttpRequest - .newBuilder() - .uri(endpointUri) - .version(Version.HTTP_1_1) - .timeout(Duration.ofSeconds(httpRequestTimeOutSeconds)) - .method(method, publisher); + Builder requestBuilder = + java.net.http.HttpRequest.newBuilder() + .uri(endpointUri) + .version(Version.HTTP_1_1) + .timeout(Duration.ofSeconds(httpRequestTimeOutSeconds)) + .method(method, publisher); if (headersAndValues.length != 0) { requestBuilder.headers(headersAndValues); diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitterFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitterFactory.java new file mode 100644 index 00000000..6e87fa19 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitterFactory.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink.httpclient; + +import org.apache.flink.connector.http.config.ConfigException; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.utils.JavaNetHttpClientFactory; +import org.apache.flink.connector.http.utils.ThreadUtils; +import org.apache.flink.util.StringUtils; +import org.apache.flink.util.concurrent.ExecutorThreadFactory; + +import java.util.Properties; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +/** Batch request submitter factory. */ +public class BatchRequestSubmitterFactory implements RequestSubmitterFactory { + + // TODO Add this property to config. Make sure to add note in README.md that will describe that + // any value greater than one will break order of messages. 
+ static final int HTTP_CLIENT_THREAD_POOL_SIZE = 1; + + private final String maxBatchSize; + + public BatchRequestSubmitterFactory(int maxBatchSize) { + if (maxBatchSize < 1) { + throw new IllegalArgumentException( + "Batch Request submitter batch size must be greater than zero."); + } + this.maxBatchSize = String.valueOf(maxBatchSize); + } + + @Override + public BatchRequestSubmitter createSubmitter(Properties properties, String[] headersAndValues) { + String batchRequestSize = + properties.getProperty(HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE); + if (StringUtils.isNullOrWhitespaceOnly(batchRequestSize)) { + properties.setProperty( + HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, maxBatchSize); + } else { + try { + // TODO Create property validator someday. + int batchSize = Integer.parseInt(batchRequestSize); + if (batchSize < 1) { + throw new ConfigException( + String.format( + "Property %s must be greater than 0 but was: %s", + HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, + batchRequestSize)); + } + } catch (NumberFormatException e) { + // TODO Create property validator someday. 
+ throw new ConfigException( + String.format( + "Property %s must be an integer but was: %s", + HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, + batchRequestSize), + e); + } + } + + ExecutorService httpClientExecutor = + Executors.newFixedThreadPool( + HTTP_CLIENT_THREAD_POOL_SIZE, + new ExecutorThreadFactory( + "http-sink-client-batch-request-worker", + ThreadUtils.LOGGING_EXCEPTION_HANDLER)); + + return new BatchRequestSubmitter( + properties, + headersAndValues, + JavaNetHttpClientFactory.createClient(properties, httpClientExecutor)); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/HttpRequest.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/HttpRequest.java new file mode 100644 index 00000000..c494e8fe --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/HttpRequest.java @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink.httpclient; + +import lombok.Data; + +import java.util.List; + +/** Http request. 
*/ +@Data +public class HttpRequest { + + public final java.net.http.HttpRequest httpRequest; + + public final List elements; + + public final String method; +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/JavaNetHttpResponseWrapper.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/JavaNetHttpResponseWrapper.java new file mode 100644 index 00000000..a7b8b5f2 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/JavaNetHttpResponseWrapper.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink.httpclient; + +import org.apache.flink.connector.http.HttpSink; +import org.apache.flink.connector.http.sink.HttpSinkRequestEntry; + +import lombok.Data; +import lombok.NonNull; + +import java.net.http.HttpResponse; +import java.util.Optional; + +/** + * A wrapper structure around an HTTP response, keeping a reference to a particular {@link + * HttpSinkRequestEntry}. Used internally by the {@code HttpSinkWriter} to pass {@code + * HttpSinkRequestEntry} along some other element that it is logically connected with. 
+ */ +@Data +final class JavaNetHttpResponseWrapper { + + /** A representation of a single {@link HttpSink} request. */ + @NonNull private final HttpRequest httpRequest; + + /** A response to an HTTP request based on {@link HttpSinkRequestEntry}. */ + private final HttpResponse response; + + public Optional> getResponse() { + return Optional.ofNullable(response); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/JavaNetSinkHttpClient.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/JavaNetSinkHttpClient.java new file mode 100644 index 00000000..62162881 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/JavaNetSinkHttpClient.java @@ -0,0 +1,137 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.sink.httpclient; + +import org.apache.flink.annotation.VisibleForTesting; +import org.apache.flink.connector.http.HttpPostRequestCallback; +import org.apache.flink.connector.http.clients.SinkHttpClient; +import org.apache.flink.connector.http.clients.SinkHttpClientResponse; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.sink.HttpSinkRequestEntry; +import org.apache.flink.connector.http.status.ComposeHttpStatusCodeChecker; +import org.apache.flink.connector.http.status.ComposeHttpStatusCodeChecker.ComposeHttpStatusCodeCheckerConfig; +import org.apache.flink.connector.http.status.HttpStatusCodeChecker; +import org.apache.flink.connector.http.utils.HttpHeaderUtils; + +import lombok.extern.slf4j.Slf4j; + +import java.net.http.HttpClient; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; + +/** + * An implementation of {@link SinkHttpClient} that uses Java 11's {@link HttpClient}. This + * implementation supports HTTP traffic only. 
+ */ +@Slf4j +public class JavaNetSinkHttpClient implements SinkHttpClient { + + private final String[] headersAndValues; + + private final Map headerMap; + + private final HttpStatusCodeChecker statusCodeChecker; + + private final HttpPostRequestCallback httpPostRequestCallback; + + private final RequestSubmitter requestSubmitter; + + public JavaNetSinkHttpClient( + Properties properties, + HttpPostRequestCallback httpPostRequestCallback, + HeaderPreprocessor headerPreprocessor, + RequestSubmitterFactory requestSubmitterFactory) { + + this.httpPostRequestCallback = httpPostRequestCallback; + this.headerMap = + HttpHeaderUtils.prepareHeaderMap( + HttpConnectorConfigConstants.SINK_HEADER_PREFIX, + properties, + headerPreprocessor); + + // TODO Inject this via constructor when implementing a response processor. + // Processor will be injected and it will wrap statusChecker implementation. + ComposeHttpStatusCodeCheckerConfig checkerConfig = + ComposeHttpStatusCodeCheckerConfig.builder() + .properties(properties) + .includeListPrefix( + HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODE_INCLUDE_LIST) + .errorCodePrefix(HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST) + .build(); + + this.statusCodeChecker = new ComposeHttpStatusCodeChecker(checkerConfig); + + this.headersAndValues = HttpHeaderUtils.toHeaderAndValueArray(this.headerMap); + this.requestSubmitter = + requestSubmitterFactory.createSubmitter(properties, headersAndValues); + } + + @Override + public CompletableFuture putRequests( + List requestEntries, String endpointUrl) { + return submitRequests(requestEntries, endpointUrl) + .thenApply(responses -> prepareSinkHttpClientResponse(responses, endpointUrl)); + } + + private CompletableFuture> submitRequests( + List requestEntries, String endpointUrl) { + + var responseFutures = requestSubmitter.submit(endpointUrl, requestEntries); + var allFutures = CompletableFuture.allOf(responseFutures.toArray(new CompletableFuture[0])); + return 
allFutures.thenApply( + _void -> + responseFutures.stream() + .map(CompletableFuture::join) + .collect(Collectors.toList())); + } + + private SinkHttpClientResponse prepareSinkHttpClientResponse( + List responses, String endpointUrl) { + var successfulResponses = new ArrayList(); + var failedResponses = new ArrayList(); + + for (var response : responses) { + var sinkRequestEntry = response.getHttpRequest(); + var optResponse = response.getResponse(); + + httpPostRequestCallback.call( + optResponse.orElse(null), sinkRequestEntry, endpointUrl, headerMap); + + // TODO Add response processor here and orchestrate it with statusCodeChecker. + if (optResponse.isEmpty() + || statusCodeChecker.isErrorCode(optResponse.get().statusCode())) { + failedResponses.add(sinkRequestEntry); + } else { + successfulResponses.add(sinkRequestEntry); + } + } + + return new SinkHttpClientResponse(successfulResponses, failedResponses); + } + + @VisibleForTesting + String[] getHeadersAndValues() { + return Arrays.copyOf(headersAndValues, headersAndValues.length); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/PerRequestRequestSubmitterFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/PerRequestRequestSubmitterFactory.java new file mode 100644 index 00000000..f63361b4 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/PerRequestRequestSubmitterFactory.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink.httpclient; + +import org.apache.flink.connector.http.utils.JavaNetHttpClientFactory; +import org.apache.flink.connector.http.utils.ThreadUtils; +import org.apache.flink.util.concurrent.ExecutorThreadFactory; + +import java.util.Properties; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +/** Per request submitter factory. */ +public class PerRequestRequestSubmitterFactory implements RequestSubmitterFactory { + + // TODO Add this property to config. Make sure to add note in README.md that will describe that + // any value greater than one will break order of messages. 
+ static final int HTTP_CLIENT_THREAD_POOL_SIZE = 1; + + @Override + public RequestSubmitter createSubmitter(Properties properties, String[] headersAndValues) { + + ExecutorService httpClientExecutor = + Executors.newFixedThreadPool( + HTTP_CLIENT_THREAD_POOL_SIZE, + new ExecutorThreadFactory( + "http-sink-client-per-request-worker", + ThreadUtils.LOGGING_EXCEPTION_HANDLER)); + + return new PerRequestSubmitter( + properties, + headersAndValues, + JavaNetHttpClientFactory.createClient(properties, httpClientExecutor)); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/PerRequestSubmitter.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/PerRequestSubmitter.java new file mode 100644 index 00000000..aee85c98 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/PerRequestSubmitter.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.sink.httpclient; + +import org.apache.flink.connector.http.sink.HttpSinkRequestEntry; + +import lombok.extern.slf4j.Slf4j; + +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpClient.Version; +import java.net.http.HttpRequest.BodyPublishers; +import java.net.http.HttpRequest.Builder; +import java.net.http.HttpResponse; +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; +import java.util.Properties; +import java.util.concurrent.CompletableFuture; + +/** This implementation creates HTTP requests for every processed event. */ +@Slf4j +public class PerRequestSubmitter extends AbstractRequestSubmitter { + + public PerRequestSubmitter( + Properties properties, String[] headersAndValues, HttpClient httpClient) { + + super(properties, headersAndValues, httpClient); + } + + @Override + public List> submit( + String endpointUrl, List requestToSubmit) { + + var endpointUri = URI.create(endpointUrl); + var responseFutures = new ArrayList>(); + + for (var entry : requestToSubmit) { + HttpRequest httpRequest = buildHttpRequest(entry, endpointUri); + var response = + httpClient + .sendAsync( + httpRequest.getHttpRequest(), + HttpResponse.BodyHandlers.ofString()) + .exceptionally( + ex -> { + // TODO This will be executed on a ForkJoinPool Thread... + // refactor this someday. 
+ log.error( + "Request fatally failed because of an exception", + ex); + return null; + }) + .thenApplyAsync( + res -> new JavaNetHttpResponseWrapper(httpRequest, res), + publishingThreadPool); + responseFutures.add(response); + } + return responseFutures; + } + + private HttpRequest buildHttpRequest(HttpSinkRequestEntry requestEntry, URI endpointUri) { + Builder requestBuilder = + java.net.http.HttpRequest.newBuilder() + .uri(endpointUri) + .version(Version.HTTP_1_1) + .timeout(Duration.ofSeconds(httpRequestTimeOutSeconds)) + .method( + requestEntry.method, + BodyPublishers.ofByteArray(requestEntry.element)); + + if (headersAndValues.length != 0) { + requestBuilder.headers(headersAndValues); + } + + return new HttpRequest( + requestBuilder.build(), List.of(requestEntry.element), requestEntry.method); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/RequestSubmitter.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/RequestSubmitter.java new file mode 100644 index 00000000..98352487 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/RequestSubmitter.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink.httpclient; + +import org.apache.flink.connector.http.sink.HttpSinkRequestEntry; + +import java.util.List; +import java.util.concurrent.CompletableFuture; + +/** Submits request via HTTP. */ +public interface RequestSubmitter { + + List> submit( + String endpointUrl, List requestToSubmit); +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/RequestSubmitterFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/RequestSubmitterFactory.java new file mode 100644 index 00000000..188298a4 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/sink/httpclient/RequestSubmitterFactory.java @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink.httpclient; + +import java.util.Properties; + +/** Request submitter factory. 
*/ +public interface RequestSubmitterFactory { + + RequestSubmitter createSubmitter(Properties properties, String[] headersAndValues); +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/ComposeHttpStatusCodeChecker.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/ComposeHttpStatusCodeChecker.java new file mode 100644 index 00000000..db9f4fe9 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/ComposeHttpStatusCodeChecker.java @@ -0,0 +1,176 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.status; + +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.util.Preconditions; +import org.apache.flink.util.StringUtils; + +import lombok.AccessLevel; +import lombok.Builder; +import lombok.Data; +import lombok.RequiredArgsConstructor; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Properties; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * An implementation of {@link HttpStatusCodeChecker} that checks Http Status code against include + * list, concrete value or {@link HttpResponseCodeType}. + */ +public class ComposeHttpStatusCodeChecker implements HttpStatusCodeChecker { + + private static final Set DEFAULT_ERROR_CODES = + Set.of( + new TypeStatusCodeChecker(HttpResponseCodeType.CLIENT_ERROR), + new TypeStatusCodeChecker(HttpResponseCodeType.SERVER_ERROR)); + + private static final int MIN_HTTP_STATUS_CODE = 100; + + /** Set of {@link HttpStatusCodeChecker} for include listed status codes. */ + private final Set includedCodes; + + /** + * Set of {@link HttpStatusCodeChecker} that check status code against value match or {@link + * HttpResponseCodeType} match. + */ + private final Set errorCodes; + + public ComposeHttpStatusCodeChecker(ComposeHttpStatusCodeCheckerConfig config) { + includedCodes = prepareIncludeList(config); + errorCodes = prepareErrorCodes(config); + } + + /** + * Checks whether given status code is considered as an error code. This implementation checks if + * status code matches any single value mask like "404" or http type mask such as "4XX". Code + * that matches one of those masks and is not on an include list will be considered as error + * code. + * + * @param statusCode http status code to assess. + * @return true if status code is considered as error or false if not. 
+ */ + public boolean isErrorCode(int statusCode) { + + Preconditions.checkArgument( + statusCode >= MIN_HTTP_STATUS_CODE, + String.format( + "Provided invalid Http status code %s," + + " status code should be equal or bigger than %d.", + statusCode, MIN_HTTP_STATUS_CODE)); + + boolean isOnIncludeList = + includedCodes.stream().anyMatch(check -> check.isOnIncludeList(statusCode)); + + return !isOnIncludeList + && errorCodes.stream() + .anyMatch( + httpStatusCodeChecker -> + httpStatusCodeChecker.isErrorCode(statusCode)); + } + + private Set prepareErrorCodes( + ComposeHttpStatusCodeCheckerConfig config) { + + Properties properties = config.getProperties(); + String errorCodePrefix = config.getErrorCodePrefix(); + + String errorCodes = properties.getProperty(errorCodePrefix, ""); + + if (StringUtils.isNullOrWhitespaceOnly(errorCodes)) { + return DEFAULT_ERROR_CODES; + } else { + String[] splitCodes = errorCodes.split(HttpConnectorConfigConstants.PROP_DELIM); + return prepareErrorCodes(splitCodes); + } + } + + /** + * Process given array of status codes and assign them to {@link + * SingleValueHttpStatusCodeChecker} for full codes such as 100, 404 etc. or to {@link + * TypeStatusCodeChecker} for codes that were constructed with "XX" mask + */ + private Set prepareErrorCodes(String[] statusCodes) { + + Set errorCodes = new HashSet<>(); + for (String sCode : statusCodes) { + if (!StringUtils.isNullOrWhitespaceOnly(sCode)) { + String trimCode = sCode.toUpperCase().trim(); + Preconditions.checkArgument( + trimCode.length() == 3, + "Status code should contain three characters. Provided [%s]", + trimCode); + + // at this point we have trim, upper case 3 character status code. 
+ if (isTypeCode(trimCode)) { + int code = Integer.parseInt(trimCode.replace("X", "")); + errorCodes.add(new TypeStatusCodeChecker(HttpResponseCodeType.getByCode(code))); + } else { + errorCodes.add( + new SingleValueHttpStatusCodeChecker(Integer.parseInt(trimCode))); + } + } + } + return (errorCodes.isEmpty()) ? DEFAULT_ERROR_CODES : errorCodes; + } + + private Set prepareIncludeList( + ComposeHttpStatusCodeCheckerConfig config) { + + Properties properties = config.getProperties(); + String includeListPrefix = config.getIncludeListPrefix(); + + return Arrays.stream( + properties + .getProperty(includeListPrefix, "") + .split(HttpConnectorConfigConstants.PROP_DELIM)) + .filter(sCode -> !StringUtils.isNullOrWhitespaceOnly(sCode)) + .map(String::trim) + .mapToInt(Integer::parseInt) + .mapToObj(IncludeListHttpStatusCodeChecker::new) + .collect(Collectors.toSet()); + } + + /** + * This method checks if "code" param matches "digit + XX" mask. This method expects that + * provided string will be 3 elements long, trim and upper case. + * + * @param code to check if it contains XX on second and third position. Parameter is expected to + * be 3 characters long, trim and uppercase. + * @return true if string matches "anything + XX" and false if not. + */ + private boolean isTypeCode(final String code) { + return code.charAt(1) == 'X' && code.charAt(2) == 'X'; + } + + /** config. 
*/ + @Data + @Builder + @RequiredArgsConstructor(access = AccessLevel.PRIVATE) + public static class ComposeHttpStatusCodeCheckerConfig { + + private final String includeListPrefix; + + private final String errorCodePrefix; + + private final Properties properties; + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpCodesParser.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpCodesParser.java new file mode 100644 index 00000000..dcb7dece --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpCodesParser.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.status; + +import org.apache.flink.util.ConfigurationException; + +import lombok.experimental.UtilityClass; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; +import java.util.regex.Pattern; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static java.lang.String.format; + +/** Utility class for parsing http codes. 
*/ +@UtilityClass +public class HttpCodesParser { + + private static final Pattern CODE_GROUP_EXPRESSION = Pattern.compile("[1-5][xX]{2}"); + private static final String DELIMITER = Pattern.quote(","); + private static final int HTTP_CODE_MIN = 100; + private static final int HTTP_CODE_MAX = 599; + + public Set parse(String codesExpression) throws ConfigurationException { + var includelist = new HashSet(); + var excludelist = new HashSet(); + for (var rawCode : codesExpression.split(DELIMITER)) { + var code = rawCode.trim(); + if (code.isEmpty()) { + continue; + } + if (code.startsWith("!")) { + try { + excludelist.add(parseHttpCode(code.substring(1))); + continue; + } catch (NumberFormatException e) { + throw new ConfigurationException("Can not parse code " + code); + } + } + try { + includelist.add(parseHttpCode(code)); + } catch (NumberFormatException e) { + if (CODE_GROUP_EXPRESSION.matcher(code).matches()) { + var firstGroupCode = Integer.parseInt(code.substring(0, 1)) * 100; + var groupCodes = + IntStream.range(firstGroupCode, firstGroupCode + 100) + .boxed() + .collect(Collectors.toList()); + includelist.addAll(groupCodes); + } else { + throw new ConfigurationException("Can not parse code " + code); + } + } + } + + includelist.removeAll(excludelist); + return Collections.unmodifiableSet(includelist); + } + + private Integer parseHttpCode(String str) throws ConfigurationException { + var parsed = Integer.parseInt(str); + if (parsed < HTTP_CODE_MIN || parsed > HTTP_CODE_MAX) { + throw new ConfigurationException(format("Http code out of the range [%s]", parsed)); + } + return parsed; + } +} diff --git a/src/main/java/com/getindata/connectors/http/internal/status/HttpResponseChecker.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpResponseChecker.java similarity index 56% rename from src/main/java/com/getindata/connectors/http/internal/status/HttpResponseChecker.java rename to 
flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpResponseChecker.java index 6b59c4bf..5239f6c5 100644 --- a/src/main/java/com/getindata/connectors/http/internal/status/HttpResponseChecker.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpResponseChecker.java @@ -1,13 +1,32 @@ -package com.getindata.connectors.http.internal.status; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ -import java.net.http.HttpResponse; -import java.util.HashSet; -import java.util.Set; +package org.apache.flink.connector.http.status; + +import org.apache.flink.util.ConfigurationException; import lombok.Getter; import lombok.NonNull; -import org.apache.flink.util.ConfigurationException; +import java.net.http.HttpResponse; +import java.util.HashSet; +import java.util.Set; + +/** Http response checker. 
*/ @Getter public class HttpResponseChecker { @@ -19,7 +38,8 @@ public class HttpResponseChecker { this(HttpCodesParser.parse(successCodeExpr), HttpCodesParser.parse(temporalErrorCodeExpr)); } - public HttpResponseChecker(@NonNull Set successCodes, @NonNull Set temporalErrorCodes) + public HttpResponseChecker( + @NonNull Set successCodes, @NonNull Set temporalErrorCodes) throws ConfigurationException { this.successCodes = successCodes; this.temporalErrorCodes = temporalErrorCodes; @@ -49,8 +69,10 @@ private void validate() throws ConfigurationException { var intersection = new HashSet<>(successCodes); intersection.retainAll(temporalErrorCodes); if (!intersection.isEmpty()) { - throw new ConfigurationException("Http codes " + intersection + - " can not be used as both success and retry codes"); + throw new ConfigurationException( + "Http codes " + + intersection + + " can not be used as both success and retry codes"); } } } diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpResponseCodeType.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpResponseCodeType.java new file mode 100644 index 00000000..27f719ef --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpResponseCodeType.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.status; + +import java.util.HashMap; +import java.util.Map; + +/** This enum represents HTTP response code types, grouped by "hundreds" digit. */ +public enum HttpResponseCodeType { + INFO(1), + SUCCESS(2), + REDIRECTION(3), + CLIENT_ERROR(4), + SERVER_ERROR(5); + + private static final Map map; + + static { + map = new HashMap<>(); + for (HttpResponseCodeType httpResponseCodeType : HttpResponseCodeType.values()) { + map.put(httpResponseCodeType.httpTypeCode, httpResponseCodeType); + } + } + + private final int httpTypeCode; + + HttpResponseCodeType(int httpTypeCode) { + this.httpTypeCode = httpTypeCode; + } + + /** + * @param statusCode Http status code to get the {@link HttpResponseCodeType} instance for. + * @return a {@link HttpResponseCodeType} instance based on http type code, for example {@code + * HttpResponseCodeType.getByCode(1)} will return {@link HttpResponseCodeType#INFO} type. + */ + public static HttpResponseCodeType getByCode(int statusCode) { + return map.get(statusCode); + } + + /** + * @return a "hundreds" digit that represents given {@link HttpResponseCodeType} instance. For + * example {@code HttpResponseCodeType.INFO.getHttpTypeCode()} will return 1 since HTTP + * information repossess have status codes in range 100 - 199. 
+ */ + public int getHttpTypeCode() { + return this.httpTypeCode; + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpStatusCodeChecker.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpStatusCodeChecker.java new file mode 100644 index 00000000..6569b253 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/HttpStatusCodeChecker.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.status; + +/** + * Base interface for all classes that would validate HTTP status code whether it is an error or + * not. + */ +public interface HttpStatusCodeChecker { + + /** + * Validates http status code wheter it is considered as error code. The logic for what status + * codes are considered as "errors" depends on the concreted implementation + * + * @param statusCode http status code to assess. + * @return true if statusCode is considered as Error and false if not. 
+ */ + boolean isErrorCode(int statusCode); +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/IncludeListHttpStatusCodeChecker.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/IncludeListHttpStatusCodeChecker.java new file mode 100644 index 00000000..4d26d08c --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/IncludeListHttpStatusCodeChecker.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.status; + +import lombok.EqualsAndHashCode; +import lombok.RequiredArgsConstructor; + +/** Class that implements logic of an include list against single constant value. */ +@RequiredArgsConstructor +@EqualsAndHashCode +public class IncludeListHttpStatusCodeChecker { + + private final int includeListCode; + + /** + * Checks if given statusCode is considered as included. + * + * @param statusCode status code to check. + * @return true if given statusCode is on the include list and false if not. 
+ */ + public boolean isOnIncludeList(int statusCode) { + return includeListCode == statusCode; + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/SingleValueHttpStatusCodeChecker.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/SingleValueHttpStatusCodeChecker.java new file mode 100644 index 00000000..193b9ae0 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/SingleValueHttpStatusCodeChecker.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.status; + +import lombok.EqualsAndHashCode; +import lombok.RequiredArgsConstructor; + +/** + * An implementation of {@link HttpStatusCodeChecker} that validates status code against constant + * value. + */ +@RequiredArgsConstructor +@EqualsAndHashCode +public class SingleValueHttpStatusCodeChecker implements HttpStatusCodeChecker { + + /** A reference http status code to compare with. */ + private final int errorCode; + + /** + * Validates given statusCode against constant value. + * + * @param statusCode http status code to assess. 
+ * @return true if status code is considered as error or false if not. + */ + @Override + public boolean isErrorCode(int statusCode) { + return errorCode == statusCode; + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/TypeStatusCodeChecker.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/TypeStatusCodeChecker.java new file mode 100644 index 00000000..4e5311d8 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/status/TypeStatusCodeChecker.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.status; + +import lombok.EqualsAndHashCode; + +/** + * Implementation of {@link HttpStatusCodeChecker} that verifies if given Http status code belongs + * to specific HTTP code type family. For example if it any of 100's 200's or 500's code. + */ +@EqualsAndHashCode +public class TypeStatusCodeChecker implements HttpStatusCodeChecker { + + /** + * First digit from HTTP status code that describes a type of code, for example 1 for all 100's, + * 5 for all 500's. 
+ */ + private final int httpTypeCode; + + /** + * Creates TypeStatusCodeChecker for given {@link HttpResponseCodeType}. + * + * @param httpResponseCodeType {@link HttpResponseCodeType} for this {@link + * TypeStatusCodeChecker} instance. + */ + public TypeStatusCodeChecker(HttpResponseCodeType httpResponseCodeType) { + this.httpTypeCode = httpResponseCodeType.getHttpTypeCode(); + } + + /** + * Checks whether given status code belongs to Http code status type. For example: + * + *

{@code
+     * TypeStatusCodeChecker checker = new TypeStatusCodeChecker(HttpResponseCodeType.SERVER_ERROR);
+     * checker.isErrorCode(505); <- will return true.
+     *
+     * }
+ * + * @param statusCode http status code to assess. + * @return true if status code is considered as error or false if not. + */ + @Override + public boolean isErrorCode(int statusCode) { + return statusCode / 100 == httpTypeCode; + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/SerializationSchemaElementConverter.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/SerializationSchemaElementConverter.java new file mode 100644 index 00000000..8d5e178e --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/SerializationSchemaElementConverter.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table; + +import org.apache.flink.api.common.serialization.SerializationSchema; +import org.apache.flink.api.connector.sink2.Sink.InitContext; +import org.apache.flink.api.connector.sink2.SinkWriter.Context; +import org.apache.flink.connector.http.SchemaLifecycleAwareElementConverter; +import org.apache.flink.connector.http.sink.HttpSinkRequestEntry; +import org.apache.flink.table.data.RowData; +import org.apache.flink.util.FlinkRuntimeException; + +/** Serialization Schema Element Converter. */ +public class SerializationSchemaElementConverter + implements SchemaLifecycleAwareElementConverter { + + private final String insertMethod; + + private final SerializationSchema serializationSchema; + + private boolean schemaOpened = false; + + public SerializationSchemaElementConverter( + String insertMethod, SerializationSchema serializationSchema) { + + this.insertMethod = insertMethod; + this.serializationSchema = serializationSchema; + } + + @Override + public void open(InitContext context) { + if (!schemaOpened) { + try { + serializationSchema.open(context.asSerializationSchemaInitializationContext()); + schemaOpened = true; + } catch (Exception e) { + throw new FlinkRuntimeException("Failed to initialize serialization schema.", e); + } + } + } + + @Override + public HttpSinkRequestEntry apply(RowData rowData, Context context) { + return new HttpSinkRequestEntry(insertMethod, serializationSchema.serialize(rowData)); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/AsyncHttpTableLookupFunction.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/AsyncHttpTableLookupFunction.java new file mode 100644 index 00000000..1b1061da --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/AsyncHttpTableLookupFunction.java @@ -0,0 +1,131 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under 
one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.utils.ThreadUtils; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.functions.AsyncLookupFunction; +import org.apache.flink.table.functions.FunctionContext; +import org.apache.flink.util.concurrent.ExecutorThreadFactory; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import java.util.Collection; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +/** Async Lookup function. */ +@Slf4j +@RequiredArgsConstructor +public class AsyncHttpTableLookupFunction extends AsyncLookupFunction { + + private static final String PULLING_THREAD_POOL_SIZE = "8"; + + private static final String PUBLISHING_THREAD_POOL_SIZE = "4"; + + /** + * The {@link org.apache.flink.table.functions.TableFunction} we want to decorate with async + * framework. + */ + private final HttpTableLookupFunction decorate; + + /** Thread pool for polling data from Http endpoint. 
*/ + private transient ExecutorService pullingThreadPool; + + /** Thread pool for publishing data to Flink. */ + private transient ExecutorService publishingThreadPool; + + @Override + public void open(FunctionContext context) throws Exception { + super.open(context); + decorate.open(context); + + int pullingThreadPoolSize = + Integer.parseInt( + decorate.getOptions() + .getProperties() + .getProperty( + HttpConnectorConfigConstants + .LOOKUP_HTTP_PULING_THREAD_POOL_SIZE, + PULLING_THREAD_POOL_SIZE)); + + int publishingThreadPoolSize = + Integer.parseInt( + decorate.getOptions() + .getProperties() + .getProperty( + HttpConnectorConfigConstants + .LOOKUP_HTTP_RESPONSE_THREAD_POOL_SIZE, + PUBLISHING_THREAD_POOL_SIZE)); + + pullingThreadPool = + Executors.newFixedThreadPool( + pullingThreadPoolSize, + new ExecutorThreadFactory( + "http-async-lookup-worker", ThreadUtils.LOGGING_EXCEPTION_HANDLER)); + + publishingThreadPool = + Executors.newFixedThreadPool( + publishingThreadPoolSize, + new ExecutorThreadFactory( + "http-async-publishing-worker", + ThreadUtils.LOGGING_EXCEPTION_HANDLER)); + } + + @Override + public CompletableFuture> asyncLookup(RowData keyRow) { + CompletableFuture> future = new CompletableFuture<>(); + future.completeAsync(() -> decorate.lookup(keyRow), pullingThreadPool); + + // We don't want to use ForkJoinPool at all. We are using a different thread pool + // for publishing here intentionally to avoid thread starvation. 
+ CompletableFuture> resultFuture = new CompletableFuture<>(); + future.whenCompleteAsync( + (result, throwable) -> { + if (throwable != null) { + log.error("Exception while processing Http Async request", throwable); + resultFuture.completeExceptionally( + new RuntimeException( + "Exception while processing Http Async request", + throwable)); + } else { + resultFuture.complete(result); + } + }, + publishingThreadPool); + return resultFuture; + } + + public LookupRow getLookupRow() { + return decorate.getLookupRow(); + } + + public HttpLookupConfig getOptions() { + return decorate.getOptions(); + } + + @Override + public void close() throws Exception { + this.publishingThreadPool.shutdownNow(); + this.pullingThreadPool.shutdownNow(); + super.close(); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/BodyBasedRequestFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/BodyBasedRequestFactory.java new file mode 100644 index 00000000..dab7a389 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/BodyBasedRequestFactory.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.connector.http.LookupQueryCreator; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.utils.uri.URIBuilder; + +import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; + +import java.net.URI; +import java.net.URISyntaxException; +import java.net.http.HttpRequest; +import java.net.http.HttpRequest.BodyPublishers; +import java.net.http.HttpRequest.Builder; +import java.time.Duration; + +/** + * Implementation of {@link HttpRequestFactory} for REST calls that sends their parameters using + * request body or in the path. + */ +@Slf4j +public class BodyBasedRequestFactory extends RequestFactoryBase { + + private final String methodName; + + public BodyBasedRequestFactory( + String methodName, + LookupQueryCreator lookupQueryCreator, + HeaderPreprocessor headerPreprocessor, + HttpLookupConfig options) { + + super(lookupQueryCreator, headerPreprocessor, options); + this.methodName = methodName.toUpperCase(); + } + + /** + * Method for preparing {@link Builder} for REST request that sends their parameters in request + * body, for example PUT or POST methods. + * + * @param lookupQueryInfo lookup query info used for request body. + * @return {@link Builder} for given lookupQuery. + */ + @Override + protected Builder setUpRequestMethod(LookupQueryInfo lookupQueryInfo) { + return HttpRequest.newBuilder() + .uri(constructUri(lookupQueryInfo)) + .method(methodName, BodyPublishers.ofString(lookupQueryInfo.getLookupQuery())) + .timeout(Duration.ofSeconds(this.httpRequestTimeOutSeconds)); + } + + @Override + protected Logger getLogger() { + return log; + } + + URI constructUri(LookupQueryInfo lookupQueryInfo) { + StringBuilder resolvedUrl = new StringBuilder(baseUrl); + if (lookupQueryInfo.hasBodyBasedUrlQueryParameters()) { + resolvedUrl + .append(baseUrl.contains("?") ? 
"&" : "?") + .append(lookupQueryInfo.getBodyBasedUrlQueryParameters()); + } + resolvedUrl = resolvePathParameters(lookupQueryInfo, resolvedUrl); + + try { + return new URIBuilder(resolvedUrl.toString()).build(); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/GetRequestFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/GetRequestFactory.java new file mode 100644 index 00000000..694f2f5b --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/GetRequestFactory.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.connector.http.LookupQueryCreator; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.utils.uri.URIBuilder; + +import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; + +import java.net.URI; +import java.net.URISyntaxException; +import java.net.http.HttpRequest; +import java.net.http.HttpRequest.Builder; +import java.time.Duration; + +/** Implementation of {@link HttpRequestFactory} for GET REST calls. */ +@Slf4j +public class GetRequestFactory extends RequestFactoryBase { + + public GetRequestFactory( + LookupQueryCreator lookupQueryCreator, + HeaderPreprocessor headerPreprocessor, + HttpLookupConfig options) { + + super(lookupQueryCreator, headerPreprocessor, options); + } + + @Override + protected Logger getLogger() { + return log; + } + + /** + * Method for preparing {@link Builder} for REST GET request. Where lookupQueryInfo is used as + * query parameters for GET requests. for example: + * + *
+     *     http://localhost:8080/service?id=1
+     * 
+ * + *

or as payload for body-based requests with optional parameters, for example: + * + *

+     *     http://localhost:8080/service?id=1
+     *     body payload: { "uid": 2 }
+     * 
+ * + * @param lookupQueryInfo lookup query info used for request query parameters. + * @return {@link Builder} for given GET lookupQuery + */ + @Override + protected Builder setUpRequestMethod(LookupQueryInfo lookupQueryInfo) { + return HttpRequest.newBuilder() + .uri(constructGetUri(lookupQueryInfo)) + .GET() + .timeout(Duration.ofSeconds(this.httpRequestTimeOutSeconds)); + } + + URI constructGetUri(LookupQueryInfo lookupQueryInfo) { + StringBuilder resolvedUrl = new StringBuilder(baseUrl); + if (lookupQueryInfo.hasLookupQuery()) { + resolvedUrl + .append(baseUrl.contains("?") ? "&" : "?") + .append(lookupQueryInfo.getLookupQuery()); + } + resolvedUrl = resolvePathParameters(lookupQueryInfo, resolvedUrl); + try { + return new URIBuilder(resolvedUrl.toString()).build(); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupConfig.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupConfig.java new file mode 100644 index 00000000..d6412784 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupConfig.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.configuration.Configuration; +import org.apache.flink.configuration.ReadableConfig; +import org.apache.flink.connector.http.HttpPostRequestCallback; + +import lombok.Builder; +import lombok.Data; +import lombok.RequiredArgsConstructor; + +import java.io.Serializable; +import java.util.Properties; + +/** Http lookup config. */ +@Builder +@Data +@RequiredArgsConstructor +public class HttpLookupConfig implements Serializable { + + private final String lookupMethod; + + private final String url; + + @Builder.Default private final boolean useAsync = false; + + @Builder.Default private final Properties properties = new Properties(); + + @Builder.Default private final ReadableConfig readableConfig = new Configuration(); + + private final HttpPostRequestCallback httpPostRequestCallback; +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupConnectorOptions.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupConnectorOptions.java new file mode 100644 index 00000000..849e3505 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupConnectorOptions.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.configuration.ConfigOptions; +import org.apache.flink.connector.http.retry.RetryStrategyType; + +import java.time.Duration; + +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_USE_RAW; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.OIDC_AUTH_TOKEN_ENDPOINT_URL; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.OIDC_AUTH_TOKEN_EXPIRY_REDUCTION; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.OIDC_AUTH_TOKEN_REQUEST; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_CONNECTION_TIMEOUT; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_IGNORE_RESPONSE_CODES; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_LOOKUP_QUERY_CREATOR_IDENTIFIER; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_LOOKUP_REQUEST_CALLBACK_IDENTIFIER; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_PROXY_HOST; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_PROXY_PASSWORD; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_PROXY_PORT; +import static 
org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_PROXY_USERNAME; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_RETRY_EXP_DELAY_INITIAL_BACKOFF; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_RETRY_EXP_DELAY_MAX_BACKOFF; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_RETRY_EXP_DELAY_MULTIPLIER; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_RETRY_FIXED_DELAY_DELAY; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_RETRY_RETRY_CODES; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_RETRY_STRATEGY_TYPE; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_RETRY_SUCCESS_CODES; + +/** http lookup connector options. */ +public class HttpLookupConnectorOptions { + + public static final ConfigOption URL = + ConfigOptions.key("url") + .stringType() + .noDefaultValue() + .withDescription("The HTTP endpoint URL."); + + public static final ConfigOption URL_ARGS = + ConfigOptions.key("url-args") + .stringType() + .noDefaultValue() + .withDescription("The arguments that should be used for HTTP GET Request."); + + public static final ConfigOption ASYNC_POLLING = + ConfigOptions.key("asyncPolling") + .booleanType() + .defaultValue(false) + .withDescription("Whether to use Sync and Async polling mechanism"); + + public static final ConfigOption LOOKUP_METHOD = + ConfigOptions.key("lookup-method") + .stringType() + .defaultValue("GET") + .withDescription("Method used for REST executed by lookup connector."); + + public static final ConfigOption LOOKUP_QUERY_CREATOR_IDENTIFIER = + ConfigOptions.key(SOURCE_LOOKUP_QUERY_CREATOR_IDENTIFIER).stringType().noDefaultValue(); + + public static final ConfigOption LOOKUP_REQUEST_FORMAT = + 
ConfigOptions.key("lookup-request.format").stringType().defaultValue("json"); + + public static final ConfigOption USE_RAW_AUTH_HEADER = + ConfigOptions.key(LOOKUP_SOURCE_HEADER_USE_RAW) + .booleanType() + .defaultValue(false) + .withDescription("Whether to use the raw value of Authorization header"); + + public static final ConfigOption REQUEST_CALLBACK_IDENTIFIER = + ConfigOptions.key(SOURCE_LOOKUP_REQUEST_CALLBACK_IDENTIFIER) + .stringType() + .defaultValue(Slf4jHttpLookupPostRequestCallbackFactory.IDENTIFIER); + + public static final ConfigOption SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL = + ConfigOptions.key(OIDC_AUTH_TOKEN_ENDPOINT_URL) + .stringType() + .noDefaultValue() + .withDescription("OIDC Token endpoint url."); + + public static final ConfigOption SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST = + ConfigOptions.key(OIDC_AUTH_TOKEN_REQUEST) + .stringType() + .noDefaultValue() + .withDescription("OIDC token request."); + + public static final ConfigOption SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION = + ConfigOptions.key(OIDC_AUTH_TOKEN_EXPIRY_REDUCTION) + .durationType() + .defaultValue(Duration.ofSeconds(1)) + .withDescription( + "OIDC authorization access token expiry" + + " reduction as a Duration." 
+ + " A new access token is obtained if the token" + + " is older than it's expiry time minus this value."); + + public static final ConfigOption SOURCE_LOOKUP_CONNECTION_TIMEOUT = + ConfigOptions.key(SOURCE_CONNECTION_TIMEOUT) + .durationType() + .noDefaultValue() + .withDescription("Http client connection timeout."); + + public static final ConfigOption SOURCE_LOOKUP_PROXY_HOST = + ConfigOptions.key(SOURCE_PROXY_HOST) + .stringType() + .noDefaultValue() + .withDescription("Http client proxy host."); + + public static final ConfigOption SOURCE_LOOKUP_PROXY_PORT = + ConfigOptions.key(SOURCE_PROXY_PORT) + .intType() + .noDefaultValue() + .withDescription("Http client proxy port."); + + public static final ConfigOption SOURCE_LOOKUP_PROXY_USERNAME = + ConfigOptions.key(SOURCE_PROXY_USERNAME) + .stringType() + .noDefaultValue() + .withDescription("Http client proxy username for authentication."); + + public static final ConfigOption SOURCE_LOOKUP_PROXY_PASSWORD = + ConfigOptions.key(SOURCE_PROXY_PASSWORD) + .stringType() + .noDefaultValue() + .withDescription("Http client proxy password for authentication."); + + public static final ConfigOption SOURCE_LOOKUP_RETRY_STRATEGY = + ConfigOptions.key(SOURCE_RETRY_STRATEGY_TYPE) + .stringType() + .defaultValue(RetryStrategyType.FIXED_DELAY.getCode()) + .withDescription( + "Auto retry strategy type: fixed-delay (default) or exponential-delay."); + + public static final ConfigOption SOURCE_LOOKUP_HTTP_SUCCESS_CODES = + ConfigOptions.key(SOURCE_RETRY_SUCCESS_CODES) + .stringType() + .defaultValue("2XX") + .withDescription( + "Comma separated http codes considered as success response. " + + "Use [1-5]XX for groups and '!' character for excluding."); + + public static final ConfigOption SOURCE_LOOKUP_HTTP_RETRY_CODES = + ConfigOptions.key(SOURCE_RETRY_RETRY_CODES) + .stringType() + .defaultValue("500,503,504") + .withDescription( + "Comma separated http codes considered as transient errors. " + + "Use [1-5]XX for groups and '!' 
character for excluding."); + + public static final ConfigOption SOURCE_LOOKUP_RETRY_FIXED_DELAY_DELAY = + ConfigOptions.key(SOURCE_RETRY_FIXED_DELAY_DELAY) + .durationType() + .defaultValue(Duration.ofSeconds(1)) + .withDescription("Fixed-delay interval between retries."); + + public static final ConfigOption + SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_INITIAL_BACKOFF = + ConfigOptions.key(SOURCE_RETRY_EXP_DELAY_INITIAL_BACKOFF) + .durationType() + .defaultValue(Duration.ofSeconds(1)) + .withDescription("Exponential-delay initial delay."); + + public static final ConfigOption SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MAX_BACKOFF = + ConfigOptions.key(SOURCE_RETRY_EXP_DELAY_MAX_BACKOFF) + .durationType() + .defaultValue(Duration.ofMinutes(1)) + .withDescription("Exponential-delay maximum delay."); + + public static final ConfigOption SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MULTIPLIER = + ConfigOptions.key(SOURCE_RETRY_EXP_DELAY_MULTIPLIER) + .doubleType() + .defaultValue(1.5) + .withDescription("Exponential-delay multiplier."); + + public static final ConfigOption SOURCE_LOOKUP_HTTP_IGNORED_RESPONSE_CODES = + ConfigOptions.key(SOURCE_IGNORE_RESPONSE_CODES) + .stringType() + .defaultValue("") + .withDescription( + "Comma separated http codes. Content for these responses will be ignored. " + + "Use [1-5]XX for groups and '!' character for excluding. 
" + + "Ignored responses togater with `" + + SOURCE_RETRY_SUCCESS_CODES + + "` are considered as successful."); +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupSourceRequestEntry.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupSourceRequestEntry.java new file mode 100644 index 00000000..98f9f9d3 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupSourceRequestEntry.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import lombok.Data; +import lombok.ToString; + +import java.net.http.HttpRequest; + +/** + * Wrapper class around {@link HttpRequest} that contains information about an actual lookup request + * body or request parameters. + */ +@Data +@ToString +public class HttpLookupSourceRequestEntry { + + /** Wrapped {@link HttpRequest} object. */ + private final HttpRequest httpRequest; + + /** + * This field represents lookup query. 
Depending on used REST request method, this field can + * represent a request body, for example a Json string when PUT/POST requests method was used, + * or it can represent a query parameters if GET method was used. + */ + private final LookupQueryInfo lookupQueryInfo; + + public HttpLookupSourceRequestEntry(HttpRequest httpRequest, LookupQueryInfo lookupQueryInfo) { + this.httpRequest = httpRequest; + this.lookupQueryInfo = lookupQueryInfo; + } +} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSource.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSource.java similarity index 59% rename from src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSource.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSource.java index 129837c3..810dd5d7 100644 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSource.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSource.java @@ -1,12 +1,31 @@ -package com.getindata.connectors.http.internal.table.lookup; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; -import java.util.ArrayList; -import java.util.List; -import javax.annotation.Nullable; - -import lombok.extern.slf4j.Slf4j; import org.apache.flink.api.common.serialization.DeserializationSchema; import org.apache.flink.configuration.ReadableConfig; +import org.apache.flink.connector.http.LookupQueryCreator; +import org.apache.flink.connector.http.LookupQueryCreatorFactory; +import org.apache.flink.connector.http.clients.PollingClientFactory; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.table.lookup.querycreators.GenericGetQueryCreatorFactory; +import org.apache.flink.connector.http.table.lookup.querycreators.GenericJsonQueryCreatorFactory; +import org.apache.flink.connector.http.utils.HttpHeaderUtils; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.api.DataTypes.Field; import org.apache.flink.table.connector.Projection; @@ -15,7 +34,7 @@ import org.apache.flink.table.connector.source.LookupTableSource; import org.apache.flink.table.connector.source.abilities.SupportsLimitPushDown; import org.apache.flink.table.connector.source.abilities.SupportsProjectionPushDown; -import org.apache.flink.table.connector.source.lookup.AsyncLookupFunctionProvider ; +import org.apache.flink.table.connector.source.lookup.AsyncLookupFunctionProvider; import org.apache.flink.table.connector.source.lookup.LookupFunctionProvider; import org.apache.flink.table.connector.source.lookup.PartialCachingAsyncLookupProvider; import org.apache.flink.table.connector.source.lookup.PartialCachingLookupProvider; @@ -30,19 +49,20 @@ import org.apache.flink.table.types.logical.RowType.RowField; import org.apache.flink.table.types.logical.utils.LogicalTypeChecks; -import com.getindata.connectors.http.LookupQueryCreator; -import 
com.getindata.connectors.http.LookupQueryCreatorFactory; -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.PollingClientFactory; -import com.getindata.connectors.http.internal.table.lookup.querycreators.GenericGetQueryCreatorFactory; -import com.getindata.connectors.http.internal.table.lookup.querycreators.GenericJsonQueryCreatorFactory; -import com.getindata.connectors.http.internal.utils.HttpHeaderUtils; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.*; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSourceFactory.row; +import lombok.extern.slf4j.Slf4j; + +import javax.annotation.Nullable; + +import java.util.ArrayList; +import java.util.List; + +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.LOOKUP_QUERY_CREATOR_IDENTIFIER; +import static org.apache.flink.connector.http.table.lookup.HttpLookupTableSourceFactory.row; +/** http lookup table source. 
*/ @Slf4j public class HttpLookupTableSource - implements LookupTableSource, SupportsProjectionPushDown, SupportsLimitPushDown { + implements LookupTableSource, SupportsProjectionPushDown, SupportsLimitPushDown { private DataType physicalRowDataType; @@ -51,8 +71,7 @@ public class HttpLookupTableSource private final DynamicTableFactory.Context dynamicTableFactoryContext; private final DecodingFormat> decodingFormat; - @Nullable - private final LookupCache cache; + @Nullable private final LookupCache cache; public HttpLookupTableSource( DataType physicalRowDataType, @@ -79,46 +98,38 @@ public LookupRuntimeProvider getLookupRuntimeProvider(LookupContext lookupContex LookupRow lookupRow = extractLookupRow(lookupContext.getKeys()); DeserializationSchema responseSchemaDecoder = - decodingFormat.createRuntimeDecoder(lookupContext, physicalRowDataType); + decodingFormat.createRuntimeDecoder(lookupContext, physicalRowDataType); LookupQueryCreatorFactory lookupQueryCreatorFactory = - FactoryUtil.discoverFactory( - this.dynamicTableFactoryContext.getClassLoader(), - LookupQueryCreatorFactory.class, - lookupConfig.getReadableConfig().getOptional(LOOKUP_QUERY_CREATOR_IDENTIFIER) - .orElse( - (lookupConfig.getLookupMethod().equalsIgnoreCase("GET") ? - GenericGetQueryCreatorFactory.IDENTIFIER : - GenericJsonQueryCreatorFactory.IDENTIFIER) - ) - ); + FactoryUtil.discoverFactory( + this.dynamicTableFactoryContext.getClassLoader(), + LookupQueryCreatorFactory.class, + lookupConfig + .getReadableConfig() + .getOptional(LOOKUP_QUERY_CREATOR_IDENTIFIER) + .orElse( + (lookupConfig.getLookupMethod().equalsIgnoreCase("GET") + ? 
GenericGetQueryCreatorFactory.IDENTIFIER + : GenericJsonQueryCreatorFactory.IDENTIFIER))); ReadableConfig readableConfig = lookupConfig.getReadableConfig(); LookupQueryCreator lookupQueryCreator = - lookupQueryCreatorFactory.createLookupQueryCreator( - readableConfig, - lookupRow, - dynamicTableFactoryContext - ); + lookupQueryCreatorFactory.createLookupQueryCreator( + readableConfig, lookupRow, dynamicTableFactoryContext); PollingClientFactory pollingClientFactory = - createPollingClientFactory(lookupQueryCreator, lookupConfig); + createPollingClientFactory(lookupQueryCreator, lookupConfig); return getLookupRuntimeProvider(lookupRow, responseSchemaDecoder, pollingClientFactory); } - protected LookupRuntimeProvider getLookupRuntimeProvider(LookupRow lookupRow, - DeserializationSchema - responseSchemaDecoder, - PollingClientFactory - pollingClientFactory) { + protected LookupRuntimeProvider getLookupRuntimeProvider( + LookupRow lookupRow, + DeserializationSchema responseSchemaDecoder, + PollingClientFactory pollingClientFactory) { HttpTableLookupFunction dataLookupFunction = new HttpTableLookupFunction( - pollingClientFactory, - responseSchemaDecoder, - lookupRow, - lookupConfig - ); + pollingClientFactory, responseSchemaDecoder, lookupRow, lookupConfig); if (lookupConfig.isUseAsync()) { AsyncLookupFunction asyncLookupFunction = new AsyncHttpTableLookupFunction(dataLookupFunction); @@ -143,12 +154,11 @@ protected LookupRuntimeProvider getLookupRuntimeProvider(LookupRow lookupRow, @Override public DynamicTableSource copy() { return new HttpLookupTableSource( - physicalRowDataType, - lookupConfig, - decodingFormat, - dynamicTableFactoryContext, - cache - ); + physicalRowDataType, + lookupConfig, + decodingFormat, + dynamicTableFactoryContext, + cache); } @Override @@ -157,8 +167,7 @@ public String asSummaryString() { } @Override - public void applyLimit(long limit) { - } + public void applyLimit(long limit) {} @Override public boolean supportsNestedProjection() { @@ 
-166,24 +175,18 @@ public boolean supportsNestedProjection() { } private PollingClientFactory createPollingClientFactory( - LookupQueryCreator lookupQueryCreator, - HttpLookupConfig lookupConfig) { + LookupQueryCreator lookupQueryCreator, HttpLookupConfig lookupConfig) { - HeaderPreprocessor headerPreprocessor = HttpHeaderUtils.createHeaderPreprocessor( - lookupConfig.getReadableConfig()); + HeaderPreprocessor headerPreprocessor = + HttpHeaderUtils.createHeaderPreprocessor(lookupConfig.getReadableConfig()); String lookupMethod = lookupConfig.getLookupMethod(); - HttpRequestFactory requestFactory = (lookupMethod.equalsIgnoreCase("GET")) ? - new GetRequestFactory( - lookupQueryCreator, - headerPreprocessor, - lookupConfig) : - new BodyBasedRequestFactory( - lookupMethod, - lookupQueryCreator, - headerPreprocessor, - lookupConfig - ); + HttpRequestFactory requestFactory = + (lookupMethod.equalsIgnoreCase("GET")) + ? new GetRequestFactory( + lookupQueryCreator, headerPreprocessor, lookupConfig) + : new BodyBasedRequestFactory( + lookupMethod, lookupQueryCreator, headerPreprocessor, lookupConfig); log.info("requestFactory is " + requestFactory); return new JavaNetHttpPollingClientFactory(requestFactory); } @@ -193,10 +196,10 @@ private LookupRow extractLookupRow(int[][] keys) { LookupRow lookupRow = new LookupRow(); List fieldNames = - TableSourceHelper.getFieldNames(physicalRowDataType.getLogicalType()); + TableSourceHelper.getFieldNames(physicalRowDataType.getLogicalType()); List fieldTypes = - LogicalTypeChecks.getFieldTypes(physicalRowDataType.getLogicalType()); + LogicalTypeChecks.getFieldTypes(physicalRowDataType.getLogicalType()); List lookupDataTypes = new ArrayList<>(); List physicalRowDataTypes = physicalRowDataType.getChildren(); @@ -216,15 +219,12 @@ private LookupRow extractLookupRow(int[][] keys) { } private LookupSchemaEntry extractKeyColumn( - String name, - int parentIndex, - LogicalType type) { + String name, int parentIndex, LogicalType type) { if 
(type instanceof RowType) { - RowTypeLookupSchemaEntry rowLookupEntry = new RowTypeLookupSchemaEntry( - name, - RowData.createFieldGetter(type, parentIndex) - ); + RowTypeLookupSchemaEntry rowLookupEntry = + new RowTypeLookupSchemaEntry( + name, RowData.createFieldGetter(type, parentIndex)); List fields = ((RowType) type).getFields(); int index = 0; for (RowField rowField : fields) { @@ -233,9 +233,7 @@ private LookupSchemaEntry extractKeyColumn( return rowLookupEntry; } else { return new RowDataSingleValueLookupSchemaEntry( - name, - RowData.createFieldGetter(type, parentIndex) - ); + name, RowData.createFieldGetter(type, parentIndex)); } } @@ -243,8 +241,9 @@ private LookupSchemaEntry processRow(RowField rowField, int parentIndex LogicalType type1 = rowField.getType(); String name = rowField.getName(); if (type1 instanceof RowType) { - RowTypeLookupSchemaEntry rowLookupEntry = new RowTypeLookupSchemaEntry(name, - RowData.createFieldGetter(type1, parentIndex)); + RowTypeLookupSchemaEntry rowLookupEntry = + new RowTypeLookupSchemaEntry( + name, RowData.createFieldGetter(type1, parentIndex)); int index = 0; List rowFields = ((RowType) type1).getFields(); for (RowField rowField1 : rowFields) { @@ -252,8 +251,8 @@ private LookupSchemaEntry processRow(RowField rowField, int parentIndex } return rowLookupEntry; } else { - return new RowDataSingleValueLookupSchemaEntry(name, - RowData.createFieldGetter(type1, parentIndex)); + return new RowDataSingleValueLookupSchemaEntry( + name, RowData.createFieldGetter(type1, parentIndex)); } } } diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceFactory.java new file mode 100644 index 00000000..f9b07ab5 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceFactory.java @@ -0,0 +1,232 @@ +/* 
+ * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.api.common.serialization.DeserializationSchema; +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.configuration.ReadableConfig; +import org.apache.flink.connector.http.HttpPostRequestCallbackFactory; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.utils.ConfigUtils; +import org.apache.flink.table.api.DataTypes; +import org.apache.flink.table.api.DataTypes.Field; +import org.apache.flink.table.catalog.Column; +import org.apache.flink.table.catalog.ResolvedSchema; +import org.apache.flink.table.connector.format.DecodingFormat; +import org.apache.flink.table.connector.source.DynamicTableSource; +import org.apache.flink.table.connector.source.lookup.LookupOptions; +import org.apache.flink.table.connector.source.lookup.cache.DefaultLookupCache; +import org.apache.flink.table.connector.source.lookup.cache.LookupCache; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.factories.DeserializationFormatFactory; +import org.apache.flink.table.factories.DynamicTableSourceFactory; 
+import org.apache.flink.table.factories.FactoryUtil; +import org.apache.flink.table.types.DataType; + +import javax.annotation.Nullable; + +import java.util.List; +import java.util.Properties; +import java.util.Set; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.ASYNC_POLLING; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.LOOKUP_METHOD; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.LOOKUP_REQUEST_FORMAT; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.REQUEST_CALLBACK_IDENTIFIER; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_CONNECTION_TIMEOUT; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_HTTP_IGNORED_RESPONSE_CODES; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_HTTP_RETRY_CODES; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_HTTP_SUCCESS_CODES; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_PROXY_HOST; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_PROXY_PASSWORD; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_PROXY_PORT; +import static 
org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_PROXY_USERNAME; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_INITIAL_BACKOFF; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MAX_BACKOFF; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MULTIPLIER; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_FIXED_DELAY_DELAY; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_RETRY_STRATEGY; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.URL; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.URL_ARGS; +import static org.apache.flink.table.api.DataTypes.FIELD; +import static org.apache.flink.table.types.utils.DataTypeUtils.removeTimeAttribute; + +/** Http Lookup Table Source Factory. 
*/ +public class HttpLookupTableSourceFactory implements DynamicTableSourceFactory { + + private static DataTypes.Field columnToField(Column column) { + return FIELD( + column.getName(), + // only a column in a schema should have a time attribute, + // a field should not propagate the attribute because it might be used in a + // completely different context + removeTimeAttribute(column.getDataType())); + } + + public static DataType row(List fields) { + return DataTypes.ROW(fields.toArray(new Field[0])); + } + + @Override + public DynamicTableSource createDynamicTableSource(Context dynamicTableContext) { + FactoryUtil.TableFactoryHelper helper = + FactoryUtil.createTableFactoryHelper(this, dynamicTableContext); + + ReadableConfig readable = helper.getOptions(); + helper.validateExcept( + // properties coming from org.apache.flink.table.api.config.ExecutionConfigOptions + "table.", + HttpConnectorConfigConstants.FLINK_CONNECTOR_HTTP, + LOOKUP_REQUEST_FORMAT.key()); + validateHttpLookupSourceOptions(readable); + + DecodingFormat> decodingFormat = + helper.discoverDecodingFormat( + DeserializationFormatFactory.class, FactoryUtil.FORMAT); + + HttpLookupConfig lookupConfig = getHttpLookupOptions(dynamicTableContext, readable); + + ResolvedSchema resolvedSchema = dynamicTableContext.getCatalogTable().getResolvedSchema(); + + DataType physicalRowDataType = + toRowDataType(resolvedSchema.getColumns(), Column::isPhysical); + + return new HttpLookupTableSource( + physicalRowDataType, + lookupConfig, + decodingFormat, + dynamicTableContext, + getLookupCache(readable)); + } + + protected void validateHttpLookupSourceOptions(ReadableConfig tableOptions) + throws IllegalArgumentException { + // ensure that there is an OIDC token request if we have an OIDC token endpoint + tableOptions + .getOptional(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL) + .ifPresent( + url -> { + if (tableOptions + .getOptional(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST) + .isEmpty()) { + throw new 
IllegalArgumentException( + "Config option " + + SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST.key() + + " is required, if " + + SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL.key() + + " is configured."); + } + }); + } + + @Override + public String factoryIdentifier() { + return "rest-lookup"; + } + + @Override + public Set> requiredOptions() { + return Set.of(URL, FactoryUtil.FORMAT); + } + + @Override + public Set> optionalOptions() { + return Set.of( + URL_ARGS, + ASYNC_POLLING, + LOOKUP_METHOD, + REQUEST_CALLBACK_IDENTIFIER, + LookupOptions.CACHE_TYPE, + LookupOptions.PARTIAL_CACHE_EXPIRE_AFTER_ACCESS, + LookupOptions.PARTIAL_CACHE_EXPIRE_AFTER_WRITE, + LookupOptions.PARTIAL_CACHE_MAX_ROWS, + LookupOptions.PARTIAL_CACHE_CACHE_MISSING_KEY, + SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION, + SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST, + SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL, + LookupOptions.MAX_RETRIES, + SOURCE_LOOKUP_RETRY_STRATEGY, + SOURCE_LOOKUP_RETRY_FIXED_DELAY_DELAY, + SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_INITIAL_BACKOFF, + SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MULTIPLIER, + SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MAX_BACKOFF, + SOURCE_LOOKUP_HTTP_SUCCESS_CODES, + SOURCE_LOOKUP_HTTP_RETRY_CODES, + SOURCE_LOOKUP_HTTP_IGNORED_RESPONSE_CODES, + SOURCE_LOOKUP_PROXY_HOST, + SOURCE_LOOKUP_PROXY_PORT, + SOURCE_LOOKUP_PROXY_USERNAME, + SOURCE_LOOKUP_PROXY_PASSWORD, + SOURCE_LOOKUP_CONNECTION_TIMEOUT // TODO: add request timeout from properties + ); + } + + private HttpLookupConfig getHttpLookupOptions(Context context, ReadableConfig readableConfig) { + + Properties httpConnectorProperties = + ConfigUtils.getHttpConnectorProperties(context.getCatalogTable().getOptions()); + + final HttpPostRequestCallbackFactory + postRequestCallbackFactory = + FactoryUtil.discoverFactory( + context.getClassLoader(), + HttpPostRequestCallbackFactory.class, + readableConfig.get(REQUEST_CALLBACK_IDENTIFIER)); + + return HttpLookupConfig.builder() + 
.lookupMethod(readableConfig.get(LOOKUP_METHOD)) + .url(readableConfig.get(URL)) + .useAsync(readableConfig.get(ASYNC_POLLING)) + .properties(httpConnectorProperties) + .readableConfig(readableConfig) + .httpPostRequestCallback(postRequestCallbackFactory.createHttpPostRequestCallback()) + .build(); + } + + @Nullable + private LookupCache getLookupCache(ReadableConfig tableOptions) { + LookupCache cache = null; + // Do not support legacy cache options + if (tableOptions + .get(LookupOptions.CACHE_TYPE) + .equals(LookupOptions.LookupCacheType.PARTIAL)) { + cache = DefaultLookupCache.fromConfig(tableOptions); + } + return cache; + } + + // TODO verify this since we are on 1.15 now. + // Backport from Flink 1.15-Master + private DataType toRowDataType(List columns, Predicate columnPredicate) { + return columns.stream() + .filter(columnPredicate) + .map(HttpLookupTableSourceFactory::columnToField) + .collect( + Collectors.collectingAndThen( + Collectors.toList(), HttpLookupTableSourceFactory::row)) + // the row should never be null + .notNull(); + } + + // Backport End +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpRequestFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpRequestFactory.java new file mode 100644 index 00000000..4f22245e --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpRequestFactory.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.table.data.RowData; + +import java.io.Serializable; +import java.net.http.HttpRequest; + +/** Factory for creating {@link HttpRequest} objects for Rest clients. */ +public interface HttpRequestFactory extends Serializable { + + /** + * Creates a {@link HttpRequest} from given {@link RowData}. + * + * @param lookupRow {@link RowData} object used for building http request. + * @return {@link HttpRequest} created from {@link RowData} + */ + HttpLookupSourceRequestEntry buildLookupRequest(RowData lookupRow); +} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpTableLookupFunction.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpTableLookupFunction.java similarity index 59% rename from src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpTableLookupFunction.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpTableLookupFunction.java index 9c87ff47..d88e1a6f 100644 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpTableLookupFunction.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/HttpTableLookupFunction.java @@ -1,21 +1,39 @@ -package com.getindata.connectors.http.internal.table.lookup; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; -import java.util.Collection; -import java.util.concurrent.atomic.AtomicInteger; - -import lombok.AccessLevel; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.serialization.DeserializationSchema; +import org.apache.flink.connector.http.clients.PollingClient; +import org.apache.flink.connector.http.clients.PollingClientFactory; +import org.apache.flink.connector.http.utils.SerializationSchemaUtils; import org.apache.flink.table.data.RowData; import org.apache.flink.table.functions.FunctionContext; import org.apache.flink.table.functions.LookupFunction; -import com.getindata.connectors.http.internal.PollingClient; -import com.getindata.connectors.http.internal.PollingClientFactory; -import com.getindata.connectors.http.internal.utils.SerializationSchemaUtils; +import lombok.AccessLevel; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +import java.util.Collection; +import java.util.concurrent.atomic.AtomicInteger; +/** lookup function. 
*/ @Slf4j public class HttpTableLookupFunction extends LookupFunction { @@ -50,12 +68,11 @@ public HttpTableLookupFunction( @Override public void open(FunctionContext context) throws Exception { this.responseSchemaDecoder.open( - SerializationSchemaUtils - .createDeserializationInitContext(HttpTableLookupFunction.class)); + SerializationSchemaUtils.createDeserializationInitContext( + HttpTableLookupFunction.class)); this.localHttpCallCounter = new AtomicInteger(0); - this.client = pollingClientFactory - .createPollClient(options, responseSchemaDecoder); + this.client = pollingClientFactory.createPollClient(options, responseSchemaDecoder); context.getMetricGroup() .gauge("http-table-lookup-call-counter", () -> localHttpCallCounter.intValue()); diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClient.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClient.java similarity index 58% rename from src/main/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClient.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClient.java index 956b2f02..4e904e89 100644 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClient.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClient.java @@ -1,4 +1,44 @@ -package com.getindata.connectors.http.internal.table.lookup; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.annotation.VisibleForTesting; +import org.apache.flink.api.common.serialization.DeserializationSchema; +import org.apache.flink.configuration.ReadableConfig; +import org.apache.flink.connector.http.HttpPostRequestCallback; +import org.apache.flink.connector.http.clients.PollingClient; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.retry.HttpClientWithRetry; +import org.apache.flink.connector.http.retry.RetryConfigProvider; +import org.apache.flink.connector.http.status.HttpCodesParser; +import org.apache.flink.connector.http.status.HttpResponseChecker; +import org.apache.flink.connector.http.utils.HttpHeaderUtils; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.functions.FunctionContext; +import org.apache.flink.util.ConfigurationException; +import org.apache.flink.util.StringUtils; + +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.type.TypeReference; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.NullNode; + +import lombok.extern.slf4j.Slf4j; import java.io.IOException; import java.net.http.HttpClient; @@ -15,36 +55,15 @@ import java.util.Optional; import java.util.Set; -import com.fasterxml.jackson.core.type.TypeReference; -import 
com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.NullNode; -import lombok.extern.slf4j.Slf4j; -import org.apache.flink.annotation.VisibleForTesting; -import org.apache.flink.api.common.serialization.DeserializationSchema; -import org.apache.flink.configuration.ReadableConfig; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.functions.FunctionContext; -import org.apache.flink.util.ConfigurationException; -import org.apache.flink.util.StringUtils; - -import com.getindata.connectors.http.HttpPostRequestCallback; -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.PollingClient; -import com.getindata.connectors.http.internal.retry.HttpClientWithRetry; -import com.getindata.connectors.http.internal.retry.RetryConfigProvider; -import com.getindata.connectors.http.internal.status.HttpCodesParser; -import com.getindata.connectors.http.internal.status.HttpResponseChecker; -import com.getindata.connectors.http.internal.utils.HttpHeaderUtils; -import static com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants.RESULT_TYPE; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_HTTP_IGNORED_RESPONSE_CODES; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_HTTP_RETRY_CODES; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_HTTP_SUCCESS_CODES; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.RESULT_TYPE; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_HTTP_IGNORED_RESPONSE_CODES; +import static 
org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_HTTP_RETRY_CODES; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_HTTP_SUCCESS_CODES; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST; /** - * An implementation of {@link PollingClient} that uses Java 11's {@link HttpClient}. - * This implementation supports HTTP traffic only. + * An implementation of {@link PollingClient} that uses Java 11's {@link HttpClient}. This + * implementation supports HTTP traffic only. */ @Slf4j public class JavaNetHttpPollingClient implements PollingClient { @@ -64,7 +83,8 @@ public JavaNetHttpPollingClient( HttpClient httpClient, DeserializationSchema responseBodyDecoder, HttpLookupConfig options, - HttpRequestFactory requestFactory) throws ConfigurationException { + HttpRequestFactory requestFactory) + throws ConfigurationException { this.responseBodyDecoder = responseBodyDecoder; this.requestFactory = requestFactory; @@ -74,24 +94,25 @@ public JavaNetHttpPollingClient( var config = options.getReadableConfig(); - this.ignoredErrorCodes = HttpCodesParser.parse(config.get(SOURCE_LOOKUP_HTTP_IGNORED_RESPONSE_CODES)); + this.ignoredErrorCodes = + HttpCodesParser.parse(config.get(SOURCE_LOOKUP_HTTP_IGNORED_RESPONSE_CODES)); var errorCodes = HttpCodesParser.parse(config.get(SOURCE_LOOKUP_HTTP_RETRY_CODES)); var successCodes = new HashSet(); successCodes.addAll(HttpCodesParser.parse(config.get(SOURCE_LOOKUP_HTTP_SUCCESS_CODES))); successCodes.addAll(ignoredErrorCodes); - this.httpClient = HttpClientWithRetry.builder() - .httpClient(httpClient) - .retryConfig(RetryConfigProvider.create(config)) - .responseChecker(new HttpResponseChecker(successCodes, errorCodes)) - .build(); + this.httpClient = + HttpClientWithRetry.builder() + .httpClient(httpClient) + .retryConfig(RetryConfigProvider.create(config)) + .responseChecker(new 
HttpResponseChecker(successCodes, errorCodes)) + .build(); } public void open(FunctionContext context) { httpClient.registerMetrics(context.getMetricGroup()); } - @Override public Collection pull(RowData lookupRow) { if (lookupRow == null) { @@ -108,28 +129,31 @@ public Collection pull(RowData lookupRow) { private Collection queryAndProcess(RowData lookupData) throws Exception { var request = requestFactory.buildLookupRequest(lookupData); - var oidcProcessor = HttpHeaderUtils.createOIDCHeaderPreprocessor(options.getReadableConfig()); - var response = httpClient.send( - () -> updateHttpRequestIfRequired(request, oidcProcessor), BodyHandlers.ofString()); + var oidcProcessor = + HttpHeaderUtils.createOIDCHeaderPreprocessor(options.getReadableConfig()); + var response = + httpClient.send( + () -> updateHttpRequestIfRequired(request, oidcProcessor), + BodyHandlers.ofString()); return processHttpResponse(response, request); } /** * If using OIDC, update the http request using the oidc header pre processor to supply the * authentication header, with a short lived bearer token. 
+ * @param request http request to amend + * @param oidcHeaderPreProcessor OIDC header preprocessor + * @return http request, which for OIDC will have the bearer token as the authentication header */ - protected HttpRequest updateHttpRequestIfRequired(HttpLookupSourceRequestEntry request, - HeaderPreprocessor oidcHeaderPreProcessor) { + protected HttpRequest updateHttpRequestIfRequired( + HttpLookupSourceRequestEntry request, HeaderPreprocessor oidcHeaderPreProcessor) { // We need to check the config and if required amend the value of the // authentication header to the short lived bearer token HttpRequest httpRequest = request.getHttpRequest(); ReadableConfig readableConfig = options.getReadableConfig(); if (oidcHeaderPreProcessor != null) { - HttpRequest.Builder builder = HttpRequest.newBuilder() - .uri(httpRequest.uri()); + HttpRequest.Builder builder = HttpRequest.newBuilder().uri(httpRequest.uri()); if (httpRequest.timeout().isPresent()) { builder.timeout(httpRequest.timeout().get()); } @@ -140,19 +164,20 @@ protected HttpRequest updateHttpRequestIfRequired(HttpLookupSourceRequestEntry r } Map headerMap = new HashMap<>(); if (httpRequest.headers() != null && !httpRequest.headers().map().isEmpty()) { - for (Map.Entry> header - :httpRequest.headers().map().entrySet()) { - List values = header.getValue(); + for (Map.Entry> header : + httpRequest.headers().map().entrySet()) { + List values = header.getValue(); if (values.size() == 1) { headerMap.put(header.getKey(), header.getValue().get(0)); } // the existing design does not handle multiple values for headers } } - Optional oidcTokenRequest = readableConfig - .getOptional(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST); - String bearerToken = oidcHeaderPreProcessor.preprocessValueForHeader( - HttpHeaderUtils.AUTHORIZATION, oidcTokenRequest.get()); + Optional oidcTokenRequest = + readableConfig.getOptional(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST); + String bearerToken = + oidcHeaderPreProcessor.preprocessValueForHeader( + 
HttpHeaderUtils.AUTHORIZATION, oidcTokenRequest.get()); headerMap.put(HttpHeaderUtils.AUTHORIZATION, bearerToken); String[] headerAndValueArray = HttpHeaderUtils.toHeaderAndValueArray(headerMap); builder.headers(headerAndValueArray); @@ -162,15 +187,17 @@ protected HttpRequest updateHttpRequestIfRequired(HttpLookupSourceRequestEntry r } private Collection processHttpResponse( - HttpResponse response, - HttpLookupSourceRequestEntry request) throws IOException { + HttpResponse response, HttpLookupSourceRequestEntry request) + throws IOException { this.httpPostRequestCallback.call(response, request, "endpoint", Collections.emptyMap()); var responseBody = response.body(); - log.debug("Received status code [{}] for RestTableSource request with Server response body [{}] ", - response.statusCode(), responseBody); + log.debug( + "Received status code [{}] for RestTableSource request with Server response body [{}] ", + response.statusCode(), + responseBody); if (StringUtils.isNullOrWhitespaceOnly(responseBody) || ignoreResponse(response)) { return Collections.emptyList(); @@ -186,32 +213,30 @@ HttpRequestFactory getRequestFactory() { private Collection deserialize(String responseBody) throws IOException { byte[] rawBytes = responseBody.getBytes(); String resultType = - options.getProperties().getProperty(RESULT_TYPE, RESULT_TYPE_SINGLE_VALUE); + options.getProperties().getProperty(RESULT_TYPE, RESULT_TYPE_SINGLE_VALUE); if (resultType.equals(RESULT_TYPE_SINGLE_VALUE)) { return deserializeSingleValue(rawBytes); } else if (resultType.equals(RESULT_TYPE_ARRAY)) { return deserializeArray(rawBytes); } else { throw new IllegalStateException( - String.format("Unknown lookup source result type '%s'.", resultType)); + String.format("Unknown lookup source result type '%s'.", resultType)); } } private List deserializeSingleValue(byte[] rawBytes) throws IOException { return Optional.ofNullable(responseBodyDecoder.deserialize(rawBytes)) - .map(Collections::singletonList) - 
.orElse(Collections.emptyList()); + .map(Collections::singletonList) + .orElse(Collections.emptyList()); } private List deserializeArray(byte[] rawBytes) throws IOException { - List rawObjects = - objectMapper.readValue(rawBytes, new TypeReference<>() { - }); + List rawObjects = objectMapper.readValue(rawBytes, new TypeReference<>() {}); List result = new ArrayList<>(); for (JsonNode rawObject : rawObjects) { if (!(rawObject instanceof NullNode)) { RowData deserialized = - responseBodyDecoder.deserialize(rawObject.toString().getBytes()); + responseBodyDecoder.deserialize(rawObject.toString().getBytes()); // deserialize() returns null if deserialization fails if (deserialized != null) { result.add(deserialized); diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientFactory.java new file mode 100644 index 00000000..e0025329 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientFactory.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.api.common.serialization.DeserializationSchema; +import org.apache.flink.connector.http.clients.PollingClientFactory; +import org.apache.flink.connector.http.utils.JavaNetHttpClientFactory; +import org.apache.flink.table.data.RowData; +import org.apache.flink.util.ConfigurationException; + +import java.net.http.HttpClient; + +/** JavaNetHttpPollingClientFactory. */ +public class JavaNetHttpPollingClientFactory implements PollingClientFactory { + + private final HttpRequestFactory requestFactory; + + public JavaNetHttpPollingClientFactory(HttpRequestFactory requestFactory) { + this.requestFactory = requestFactory; + } + + @Override + public JavaNetHttpPollingClient createPollClient( + HttpLookupConfig options, DeserializationSchema schemaDecoder) + throws ConfigurationException { + + HttpClient httpClient = JavaNetHttpClientFactory.createClient(options); + + return new JavaNetHttpPollingClient(httpClient, schemaDecoder, options, requestFactory); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/LookupQueryInfo.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/LookupQueryInfo.java new file mode 100644 index 00000000..c314e32a --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/LookupQueryInfo.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.connector.http.utils.uri.NameValuePair; +import org.apache.flink.connector.http.utils.uri.URLEncodedUtils; + +import lombok.Getter; +import lombok.ToString; + +import java.io.Serializable; +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Holds the lookup query for an HTTP request. The {@code lookupQuery} either contain the query + * parameters for a GET operation or the payload of a body-based request. The {@code + * bodyBasedUrlQueryParams} contains the optional query parameters of a body-based request in + * addition to its payload supplied with {@code lookupQuery}. + */ +@ToString +public class LookupQueryInfo implements Serializable { + @Getter private final String lookupQuery; + + private final Map bodyBasedUrlQueryParams; + + private final Map pathBasedUrlParams; + + public LookupQueryInfo(String lookupQuery) { + this(lookupQuery, null, null); + } + + public LookupQueryInfo( + String lookupQuery, + Map bodyBasedUrlQueryParams, + Map pathBasedUrlParams) { + this.lookupQuery = lookupQuery == null ? "" : lookupQuery; + this.bodyBasedUrlQueryParams = + bodyBasedUrlQueryParams == null ? Collections.emptyMap() : bodyBasedUrlQueryParams; + this.pathBasedUrlParams = + pathBasedUrlParams == null ? 
Collections.emptyMap() : pathBasedUrlParams; + } + + public String getBodyBasedUrlQueryParameters() { + return URLEncodedUtils.format( + bodyBasedUrlQueryParams.entrySet().stream() + // sort the map by key to ensure there is a reliable order for unit tests + .sorted(Map.Entry.comparingByKey()) + .map(entry -> new NameValuePair(entry.getKey(), entry.getValue())) + .collect(Collectors.toList()), + StandardCharsets.UTF_8); + } + + public Map getPathBasedUrlParameters() { + return pathBasedUrlParams; + } + + public boolean hasLookupQuery() { + return !lookupQuery.isBlank(); + } + + public boolean hasBodyBasedUrlQueryParameters() { + return !bodyBasedUrlQueryParams.isEmpty(); + } + + public boolean hasPathBasedUrlParameters() { + return !pathBasedUrlParams.isEmpty(); + } +} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/LookupRow.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/LookupRow.java similarity index 59% rename from src/main/java/com/getindata/connectors/http/internal/table/lookup/LookupRow.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/LookupRow.java index bdd3f041..c84e3eb9 100644 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/LookupRow.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/LookupRow.java @@ -1,27 +1,44 @@ -package com.getindata.connectors.http.internal.table.lookup; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ -import java.io.Serializable; -import java.util.Collection; -import java.util.LinkedList; -import java.util.List; +package org.apache.flink.connector.http.table.lookup; -import lombok.Getter; -import lombok.Setter; -import lombok.ToString; import org.apache.flink.annotation.VisibleForTesting; +import org.apache.flink.connector.http.LookupArg; import org.apache.flink.table.data.RowData; import org.apache.flink.table.types.DataType; -import com.getindata.connectors.http.LookupArg; +import lombok.Getter; +import lombok.Setter; +import lombok.ToString; + +import java.io.Serializable; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +/** lookup row. */ @ToString public class LookupRow implements Serializable { private final List> lookupEntries; - @Getter - @Setter - private DataType lookupPhysicalRowDataType; + @Getter @Setter private DataType lookupPhysicalRowDataType; public LookupRow() { this.lookupEntries = new LinkedList<>(); @@ -32,7 +49,7 @@ public LookupRow() { * {@link RowData} is converted to {@link LookupArg}. * * @param lookupDataRow A {@link RowData} to get the values from for {@code - * LookupArg#getArgValue()}. + * LookupArg#getArgValue()}. * @return Collection of {@link LookupArg} objects created from lookupDataRow. 
*/ public Collection convertToLookupArgs(RowData lookupDataRow) { diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/LookupSchemaEntry.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/LookupSchemaEntry.java new file mode 100644 index 00000000..37694906 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/LookupSchemaEntry.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.connector.http.LookupArg; + +import java.io.Serializable; +import java.util.List; + +/** + * Represents Lookup entry with its name and provides conversion method to collection of {@link + * LookupArg} elements. + * + * @param type of lookupKeyRow used for converting to {@link LookupArg}. + */ +public interface LookupSchemaEntry extends Serializable { + + /** @return lookup Field name. */ + String getFieldName(); + + /** + * Creates a collection of {@link LookupArg} elements from provided T lookupKeyRow. + * + * @param lookupKeyRow Element to get the values from for {@code LookupArg#getArgValue()}. 
+ * @return Collection of {@link LookupArg} objects created from lookupKeyRow. + */ + List convertToLookupArg(T lookupKeyRow); +} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/RequestFactoryBase.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/RequestFactoryBase.java similarity index 55% rename from src/main/java/com/getindata/connectors/http/internal/table/lookup/RequestFactoryBase.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/RequestFactoryBase.java index 7b822280..56ffbd1c 100644 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/RequestFactoryBase.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/RequestFactoryBase.java @@ -1,43 +1,55 @@ -package com.getindata.connectors.http.internal.table.lookup; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ -import java.net.http.HttpRequest; -import java.net.http.HttpRequest.Builder; -import java.util.Arrays; -import java.util.Map; -import java.util.stream.Collectors; +package org.apache.flink.connector.http.table.lookup; -import lombok.extern.slf4j.Slf4j; import org.apache.flink.annotation.VisibleForTesting; +import org.apache.flink.connector.http.LookupQueryCreator; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.utils.HttpHeaderUtils; import org.apache.flink.table.data.RowData; import org.apache.flink.util.FlinkRuntimeException; + +import lombok.extern.slf4j.Slf4j; import org.slf4j.Logger; -import com.getindata.connectors.http.LookupQueryCreator; -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.utils.HttpHeaderUtils; +import java.net.http.HttpRequest; +import java.net.http.HttpRequest.Builder; +import java.util.Arrays; +import java.util.Map; +import java.util.stream.Collectors; -/** - * Base class for {@link HttpRequest} factories. - */ +/** Base class for {@link HttpRequest} factories. */ @Slf4j public abstract class RequestFactoryBase implements HttpRequestFactory { public static final String DEFAULT_REQUEST_TIMEOUT_SECONDS = "30"; - /** - * Base url used for {@link HttpRequest} for example "http://localhost:8080" - */ + /** Base url used for {@link HttpRequest} for example "http://localhost:8080". */ protected final String baseUrl; protected final LookupQueryCreator lookupQueryCreator; protected final int httpRequestTimeOutSeconds; - /** - * HTTP headers that should be used for {@link HttpRequest} created by factory. - */ + /** HTTP headers that should be used for {@link HttpRequest} created by factory. 
*/ private final String[] headersAndValues; + private final HttpLookupConfig options; public RequestFactoryBase( @@ -52,25 +64,25 @@ public RequestFactoryBase( // issues a network call to the authentication server. This code is driven for // explain select. Explain should not issue network calls. // We setup the OIDC authentication header at lookup query time. - var headerMap = HttpHeaderUtils - .prepareHeaderMap( - HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_PREFIX, - options.getProperties(), - headerPreprocessor - ); + var headerMap = + HttpHeaderUtils.prepareHeaderMap( + HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_PREFIX, + options.getProperties(), + headerPreprocessor); this.headersAndValues = HttpHeaderUtils.toHeaderAndValueArray(headerMap); - log.debug("RequestFactoryBase headersAndValues: " + - Arrays.stream(headersAndValues) - .map(Object::toString) - .collect(Collectors.joining(","))); - this.httpRequestTimeOutSeconds = Integer.parseInt( - options.getProperties().getProperty( - HttpConnectorConfigConstants.LOOKUP_HTTP_TIMEOUT_SECONDS, - DEFAULT_REQUEST_TIMEOUT_SECONDS - ) - ); + log.debug( + "RequestFactoryBase headersAndValues: " + + Arrays.stream(headersAndValues) + .map(Object::toString) + .collect(Collectors.joining(","))); + this.httpRequestTimeOutSeconds = + Integer.parseInt( + options.getProperties() + .getProperty( + HttpConnectorConfigConstants.LOOKUP_HTTP_TIMEOUT_SECONDS, + DEFAULT_REQUEST_TIMEOUT_SECONDS)); } @Override @@ -90,14 +102,15 @@ public HttpLookupSourceRequestEntry buildLookupRequest(RowData lookupRow) { protected abstract Logger getLogger(); /** - * Method for preparing {@link HttpRequest.Builder} for concrete REST method. + * Method for preparing {@link Builder} for concrete REST method. + * * @param lookupQuery lookup query used for request query parameters or body. - * @return {@link HttpRequest.Builder} for given lookupQuery. + * @return {@link Builder} for given lookupQuery. 
*/ protected abstract Builder setUpRequestMethod(LookupQueryInfo lookupQuery); - protected static StringBuilder resolvePathParameters(LookupQueryInfo lookupQueryInfo, - StringBuilder resolvedUrl) { + protected static StringBuilder resolvePathParameters( + LookupQueryInfo lookupQueryInfo, StringBuilder resolvedUrl) { if (lookupQueryInfo.hasPathBasedUrlParameters()) { for (Map.Entry entry : lookupQueryInfo.getPathBasedUrlParameters().entrySet()) { diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/RowDataLookupSchemaEntryBase.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/RowDataLookupSchemaEntryBase.java new file mode 100644 index 00000000..03927c4f --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/RowDataLookupSchemaEntryBase.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.table.data.RowData; + +/** Base implementation of {@link LookupSchemaEntry} for {@link RowData} type. 
*/ +public abstract class RowDataLookupSchemaEntryBase implements LookupSchemaEntry { + + /** Lookup field name represented by this instance. */ + protected final String fieldName; + + /** {@link RowData.FieldGetter} matching RowData type for field represented by this instance. */ + protected final RowData.FieldGetter fieldGetter; + + /** + * Creates new instance. + * + * @param fieldName field name that this instance represents, matching {@link RowData} column + * name. + * @param fieldGetter {@link RowData.FieldGetter} for data type matching {@link RowData} column + * type that this instance represents. + */ + public RowDataLookupSchemaEntryBase(String fieldName, RowData.FieldGetter fieldGetter) { + this.fieldName = fieldName; + this.fieldGetter = fieldGetter; + } + + public String getFieldName() { + return this.fieldName; + } +} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/RowDataSingleValueLookupSchemaEntry.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/RowDataSingleValueLookupSchemaEntry.java similarity index 57% rename from src/main/java/com/getindata/connectors/http/internal/table/lookup/RowDataSingleValueLookupSchemaEntry.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/RowDataSingleValueLookupSchemaEntry.java index e977ca76..31f31e73 100644 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/RowDataSingleValueLookupSchemaEntry.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/RowDataSingleValueLookupSchemaEntry.java @@ -1,14 +1,31 @@ -package com.getindata.connectors.http.internal.table.lookup; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ -import java.util.Collections; -import java.util.List; +package org.apache.flink.connector.http.table.lookup; -import lombok.extern.slf4j.Slf4j; +import org.apache.flink.connector.http.LookupArg; import org.apache.flink.table.data.RowData; import org.apache.flink.table.data.RowData.FieldGetter; import org.apache.flink.table.data.binary.BinaryStringData; -import com.getindata.connectors.http.LookupArg; +import lombok.extern.slf4j.Slf4j; + +import java.util.Collections; +import java.util.List; /** * Implementation of {@link LookupSchemaEntry} for {@link RowData} type that represents single @@ -20,10 +37,10 @@ public class RowDataSingleValueLookupSchemaEntry extends RowDataLookupSchemaEntr /** * Creates new instance. * - * @param fieldName field name that this instance represents, matching {@link RowData} column - * name. + * @param fieldName field name that this instance represents, matching {@link RowData} column + * name. * @param fieldGetter {@link RowData.FieldGetter} for data type matching {@link RowData} column - * type that this instance represents. + * type that this instance represents. */ public RowDataSingleValueLookupSchemaEntry(String fieldName, FieldGetter fieldGetter) { super(fieldName, fieldGetter); @@ -47,8 +64,7 @@ public List convertToLookupArg(RowData lookupKeyRow) { } if (!(value instanceof BinaryStringData)) { - log.debug("Unsupported Key Type {}. 
Trying simple toString().", - value.getClass()); + log.debug("Unsupported Key Type {}. Trying simple toString().", value.getClass()); } return Collections.singletonList(new LookupArg(getFieldName(), value.toString())); @@ -59,16 +75,19 @@ private Object tryGetValue(RowData lookupKeyRow) { return fieldGetter.getFieldOrNull(lookupKeyRow); } catch (ClassCastException e) { throw new RuntimeException( - "Class cast exception on field getter for field " + getFieldName(), e); + "Class cast exception on field getter for field " + getFieldName(), e); } } @lombok.Generated @Override public String toString() { - return "RowDataSingleValueLookupSchemaEntry{" + - "fieldName='" + fieldName + '\'' + - ", fieldGetter=" + fieldGetter + - '}'; + return "RowDataSingleValueLookupSchemaEntry{" + + "fieldName='" + + fieldName + + '\'' + + ", fieldGetter=" + + fieldGetter + + '}'; } } diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/RowTypeLookupSchemaEntry.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/RowTypeLookupSchemaEntry.java similarity index 63% rename from src/main/java/com/getindata/connectors/http/internal/table/lookup/RowTypeLookupSchemaEntry.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/RowTypeLookupSchemaEntry.java index d9812bbf..7276c93a 100644 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/RowTypeLookupSchemaEntry.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/RowTypeLookupSchemaEntry.java @@ -1,13 +1,29 @@ -package com.getindata.connectors.http.internal.table.lookup; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; +package org.apache.flink.connector.http.table.lookup; +import org.apache.flink.connector.http.LookupArg; import org.apache.flink.table.data.RowData; import org.apache.flink.table.data.RowData.FieldGetter; -import com.getindata.connectors.http.LookupArg; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; /** * Implementation of {@link LookupSchemaEntry} for {@link RowData} type that represents multiple @@ -24,10 +40,10 @@ public class RowTypeLookupSchemaEntry extends RowDataLookupSchemaEntryBase convertToLookupArg(RowData lookupKeyRow) { @lombok.Generated @Override public String toString() { - return "RowTypeLookupSchemaEntry{" + - "fieldName='" + fieldName + '\'' + - ", fieldGetter=" + fieldGetter + - ", keyColumns=" + keyColumns + - '}'; + return "RowTypeLookupSchemaEntry{" + + "fieldName='" + + fieldName + + '\'' + + ", fieldGetter=" + + fieldGetter + + ", keyColumns=" + + keyColumns + + '}'; } } diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/Slf4JHttpLookupPostRequestCallback.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/Slf4JHttpLookupPostRequestCallback.java new file mode 100644 index 00000000..3e66d9cd --- /dev/null +++ 
b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/Slf4JHttpLookupPostRequestCallback.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.connector.http.HttpPostRequestCallback; +import org.apache.flink.connector.http.utils.ConfigUtils; + +import lombok.extern.slf4j.Slf4j; + +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.StringJoiner; + +/** + * A {@link HttpPostRequestCallback} that logs pairs of request and response as INFO level + * logs using Slf4j. + * + *

Serving as a default implementation of {@link HttpPostRequestCallback} for the {@link + * HttpLookupTableSource}. + */ +@Slf4j +public class Slf4JHttpLookupPostRequestCallback + implements HttpPostRequestCallback { + + @Override + public void call( + HttpResponse response, + HttpLookupSourceRequestEntry requestEntry, + String endpointUrl, + Map headerMap) { + + HttpRequest httpRequest = requestEntry.getHttpRequest(); + StringJoiner headers = new StringJoiner(";"); + + for (Entry> reqHeaders : httpRequest.headers().map().entrySet()) { + StringJoiner values = new StringJoiner(";"); + for (String value : reqHeaders.getValue()) { + values.add(value); + } + String header = reqHeaders.getKey() + ": [" + values + "]"; + headers.add(header); + } + + if (response == null) { + log.warn("Null Http response for request " + httpRequest.uri().toString()); + + log.info( + "Got response for a request.\n Request:\n URL: {}\n " + + "Method: {}\n Headers: {}\n Params/Body: {}\nResponse: null", + httpRequest.uri().toString(), + httpRequest.method(), + headers, + requestEntry.getLookupQueryInfo()); + } else { + log.info( + "Got response for a request.\n Request:\n URL: {}\n " + + "Method: {}\n Headers: {}\n Params/Body: {}\nResponse: {}\n Body: {}", + httpRequest.uri().toString(), + httpRequest.method(), + headers, + requestEntry.getLookupQueryInfo(), + response, + response.body().replaceAll(ConfigUtils.UNIVERSAL_NEW_LINE_REGEXP, "")); + } + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/Slf4jHttpLookupPostRequestCallbackFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/Slf4jHttpLookupPostRequestCallbackFactory.java new file mode 100644 index 00000000..39e839ed --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/Slf4jHttpLookupPostRequestCallbackFactory.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * 
or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.connector.http.HttpPostRequestCallback; +import org.apache.flink.connector.http.HttpPostRequestCallbackFactory; + +import java.util.HashSet; +import java.util.Set; + +/** Factory for creating {@link Slf4JHttpLookupPostRequestCallback}. 
*/ +public class Slf4jHttpLookupPostRequestCallbackFactory + implements HttpPostRequestCallbackFactory { + + public static final String IDENTIFIER = "slf4j-lookup-logger"; + + @Override + public HttpPostRequestCallback createHttpPostRequestCallback() { + return new Slf4JHttpLookupPostRequestCallback(); + } + + @Override + public String factoryIdentifier() { + return IDENTIFIER; + } + + @Override + public Set> requiredOptions() { + return new HashSet<>(); + } + + @Override + public Set> optionalOptions() { + return new HashSet<>(); + } +} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/TableSourceHelper.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/TableSourceHelper.java similarity index 64% rename from src/main/java/com/getindata/connectors/http/internal/table/lookup/TableSourceHelper.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/TableSourceHelper.java index 3996fed8..1fb63998 100644 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/TableSourceHelper.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/TableSourceHelper.java @@ -1,27 +1,47 @@ -package com.getindata.connectors.http.internal.table.lookup; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ -import java.util.Collections; -import java.util.List; +package org.apache.flink.connector.http.table.lookup; -import lombok.AccessLevel; -import lombok.NoArgsConstructor; import org.apache.flink.table.data.GenericRowData; import org.apache.flink.table.data.RowData; import org.apache.flink.table.types.DataType; import org.apache.flink.table.types.logical.LogicalType; import org.apache.flink.table.types.logical.LogicalTypeRoot; import org.apache.flink.table.types.logical.utils.LogicalTypeChecks; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +import java.util.Collections; +import java.util.List; + import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.isCompositeType; +/** table source helper. */ @NoArgsConstructor(access = AccessLevel.PRIVATE) public final class TableSourceHelper { /** * Returns the first-level field names for the provided {@link DataType}. * - *

Note: This method returns an empty list for every {@link DataType} that is not a - * composite + *

Note: This method returns an empty list for every {@link DataType} that is not a composite * type. + * * @param type logical type * @return List of field names */ @@ -37,6 +57,7 @@ public static List getFieldNames(LogicalType type) { /** * Builds {@link RowData} object based on provided list of values. + * * @param values values to use as {@link RowData} column values. * @return new {@link RowData} instance. */ diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/ElasticSearchLiteQueryCreator.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/ElasticSearchLiteQueryCreator.java new file mode 100644 index 00000000..131a6ef0 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/ElasticSearchLiteQueryCreator.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.connector.http.LookupArg; +import org.apache.flink.connector.http.LookupQueryCreator; +import org.apache.flink.connector.http.table.lookup.LookupQueryInfo; +import org.apache.flink.connector.http.table.lookup.LookupRow; +import org.apache.flink.table.data.RowData; + +import java.util.Collection; +import java.util.stream.Collectors; + +/** + * A {@link LookupQueryCreator} that prepares + * q parameter GET query for ElasticSearch Search API using Lucene query + * string syntax (in first versions of the ElasticSearch called Search + * Lite). + */ +public class ElasticSearchLiteQueryCreator implements LookupQueryCreator { + + private static final String ENCODED_SPACE = "%20"; + private static final String ENCODED_QUOTATION_MARK = "%22"; + + private final LookupRow lookupRow; + + public ElasticSearchLiteQueryCreator(LookupRow lookupRow) { + this.lookupRow = lookupRow; + } + + private static String processLookupArg(LookupArg arg) { + return arg.getArgName() + + ":" + + ENCODED_QUOTATION_MARK + + arg.getArgValue() + + ENCODED_QUOTATION_MARK; + } + + @Override + public LookupQueryInfo createLookupQuery(RowData lookupDataRow) { + Collection lookupArgs = lookupRow.convertToLookupArgs(lookupDataRow); + + var luceneQuery = + lookupArgs.stream() + .map(ElasticSearchLiteQueryCreator::processLookupArg) + .collect(Collectors.joining(ENCODED_SPACE + "AND" + ENCODED_SPACE)); + + String lookupQuery = luceneQuery.isEmpty() ? 
"" : ("q=" + luceneQuery); + + return new LookupQueryInfo(lookupQuery); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/ElasticSearchLiteQueryCreatorFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/ElasticSearchLiteQueryCreatorFactory.java new file mode 100644 index 00000000..6b0e9c2c --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/ElasticSearchLiteQueryCreatorFactory.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.configuration.ReadableConfig; +import org.apache.flink.connector.http.LookupQueryCreator; +import org.apache.flink.connector.http.LookupQueryCreatorFactory; +import org.apache.flink.connector.http.table.lookup.LookupRow; +import org.apache.flink.table.factories.DynamicTableFactory; + +import java.util.Set; + +/** Factory for creating {@link ElasticSearchLiteQueryCreator}. 
*/ +public class ElasticSearchLiteQueryCreatorFactory implements LookupQueryCreatorFactory { + + public static final String IDENTIFIER = "elasticsearch-lite"; + + @Override + public LookupQueryCreator createLookupQueryCreator( + ReadableConfig readableConfig, + LookupRow lookupRow, + DynamicTableFactory.Context dynamicTableFactoryContext) { + return new ElasticSearchLiteQueryCreator(lookupRow); + } + + @Override + public String factoryIdentifier() { + return IDENTIFIER; + } + + @Override + public Set> requiredOptions() { + return Set.of(); + } + + @Override + public Set> optionalOptions() { + return Set.of(); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericGetQueryCreator.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericGetQueryCreator.java new file mode 100644 index 00000000..387a0880 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericGetQueryCreator.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.connector.http.LookupArg; +import org.apache.flink.connector.http.LookupQueryCreator; +import org.apache.flink.connector.http.table.lookup.LookupQueryInfo; +import org.apache.flink.connector.http.table.lookup.LookupRow; +import org.apache.flink.connector.http.utils.uri.NameValuePair; +import org.apache.flink.connector.http.utils.uri.URLEncodedUtils; +import org.apache.flink.table.data.RowData; + +import java.nio.charset.StandardCharsets; +import java.util.Collection; +import java.util.stream.Collectors; + +/** + * A {@link LookupQueryCreator} that builds an "ordinary" GET query, i.e. adds + * joinColumn1=value1&joinColumn2=value2&... to the URI of the endpoint. + */ +public class GenericGetQueryCreator implements LookupQueryCreator { + + private final LookupRow lookupRow; + + public GenericGetQueryCreator(LookupRow lookupRow) { + this.lookupRow = lookupRow; + } + + @Override + public LookupQueryInfo createLookupQuery(RowData lookupDataRow) { + + Collection lookupArgs = lookupRow.convertToLookupArgs(lookupDataRow); + + String lookupQuery = + URLEncodedUtils.format( + lookupArgs.stream() + .map(arg -> new NameValuePair(arg.getArgName(), arg.getArgValue())) + .collect(Collectors.toList()), + StandardCharsets.UTF_8); + + return new LookupQueryInfo(lookupQuery); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericGetQueryCreatorFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericGetQueryCreatorFactory.java new file mode 100644 index 00000000..2894ff12 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericGetQueryCreatorFactory.java @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.configuration.ReadableConfig; +import org.apache.flink.connector.http.LookupQueryCreator; +import org.apache.flink.connector.http.LookupQueryCreatorFactory; +import org.apache.flink.connector.http.table.lookup.LookupRow; +import org.apache.flink.table.factories.DynamicTableFactory; + +import java.util.Set; + +/** Factory for creating {@link GenericGetQueryCreator}. 
*/ +public class GenericGetQueryCreatorFactory implements LookupQueryCreatorFactory { + + public static final String IDENTIFIER = "generic-get-query"; + + @Override + public LookupQueryCreator createLookupQueryCreator( + ReadableConfig readableConfig, + LookupRow lookupRow, + DynamicTableFactory.Context dynamicTableFactoryContext) { + return new GenericGetQueryCreator(lookupRow); + } + + @Override + public String factoryIdentifier() { + return IDENTIFIER; + } + + @Override + public Set> requiredOptions() { + return Set.of(); + } + + @Override + public Set> optionalOptions() { + return Set.of(); + } +} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreator.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreator.java similarity index 53% rename from src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreator.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreator.java index f67a25a0..a5c1f6f1 100644 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreator.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreator.java @@ -1,21 +1,29 @@ /* - * © Copyright IBM Corp. 2025 + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; -import java.nio.charset.StandardCharsets; -import java.util.*; +package org.apache.flink.connector.http.table.lookup.querycreators; -import lombok.extern.slf4j.Slf4j; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.api.common.serialization.SerializationSchema; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode; +import org.apache.flink.connector.http.LookupArg; +import org.apache.flink.connector.http.LookupQueryCreator; +import org.apache.flink.connector.http.table.lookup.LookupQueryInfo; +import org.apache.flink.connector.http.table.lookup.LookupRow; +import org.apache.flink.connector.http.utils.SerializationSchemaUtils; import org.apache.flink.table.api.DataTypes.Field; import org.apache.flink.table.data.GenericRowData; import org.apache.flink.table.data.RowData; @@ -25,32 +33,39 @@ import org.apache.flink.util.FlinkRuntimeException; import org.apache.flink.util.Preconditions; -import com.getindata.connectors.http.LookupArg; -import com.getindata.connectors.http.LookupQueryCreator; -import com.getindata.connectors.http.internal.table.lookup.LookupQueryInfo; -import com.getindata.connectors.http.internal.table.lookup.LookupRow; 
-import com.getindata.connectors.http.internal.utils.SerializationSchemaUtils; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode; + +import lombok.extern.slf4j.Slf4j; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.Collection; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.StringJoiner; /** - * Generic JSON and URL query creator; in addition to be able to map columns to json requests, - * it allows url inserts to be mapped to column names using templating. - *
- * For GETs, column names are mapped to query parameters. e.g. for - * GenericJsonAndUrlQueryCreator.REQUEST_PARAM_FIELDS = "id1;id2" - * and url of http://base. At lookup time with values of id1=1 and id2=2 a call of - * http/base?id1=1&id2=2 will be issued. - *
- For PUT and POST, parameters are mapped to the json body e.g. for - REQUEST_PARAM_FIELDS = "id1;id2" and url of http://base. At lookup time with values of id1=1 and - id2=2 as call of http/base will be issued with a json payload of {"id1":1,"id2":2} + * Generic JSON and URL query creator; in addition to being able to map columns to json requests, it + * allows url inserts to be mapped to column names using templating.
+ * For GETs, column names are mapped to query parameters. e.g. for + * GenericJsonAndUrlQueryCreator.REQUEST_PARAM_FIELDS = "id1;id2" and url of http://base. At + * lookup time with values of id1=1 and id2=2 a call of http/base?id1=1&id2=2 will be issued. *
+ * For PUT and POST, parameters are mapped to the json body e.g. for REQUEST_PARAM_FIELDS = + * "id1;id2" and url of http://base. At lookup time with values of id1=1 and id2=2 a call of + * http/base will be issued with a json payload of {"id1":1,"id2":2}
* For all http methods, url segments can be used to include lookup up values. Using the map from * GenericJsonAndUrlQueryCreator.REQUEST_URL_MAP which has a key of the insert and the - * value of the associated column. - * e.g. for GenericJsonAndUrlQueryCreator.REQUEST_URL_MAP = "key1":"col1" - * and url of http://base/{key1}. At lookup time with values of col1="aaaa" a call of - * http/base/aaaa will be issued. - * + * value of the associated column. e.g. for GenericJsonAndUrlQueryCreator.REQUEST_URL_MAP + * = "key1":"col1" and url of http://base/{key1}. At lookup time with values of col1="aaaa" + * a call of http/base/aaaa will be issued. */ @Slf4j public class GenericJsonAndUrlQueryCreator implements LookupQueryCreator { @@ -61,7 +76,7 @@ public class GenericJsonAndUrlQueryCreator implements LookupQueryCreator { private boolean schemaOpened = false; private LookupRow lookupRow; private final String httpMethod; - private final List requestQueryParamsFields; + private final List requestQueryParamsFields; private final List requestBodyFields; private final Map requestUrlMap; @@ -69,19 +84,19 @@ public class GenericJsonAndUrlQueryCreator implements LookupQueryCreator { * Construct a Generic JSON and URL query creator. * * @param httpMethod the requested http method - * @param serializationSchema serialization schema for RowData + * @param serializationSchema serialization schema for RowData * @param requestQueryParamsFields query param fields * @param requestBodyFields body fields used for PUT and POSTs * @param requestUrlMap url map - * @param lookupRow lookup row itself. + * @param lookupRow lookup row itself. 
*/ - public GenericJsonAndUrlQueryCreator(final String httpMethod, - final SerializationSchema - serializationSchema, - final List requestQueryParamsFields, - final List requestBodyFields, - final Map requestUrlMap, - final LookupRow lookupRow) { + public GenericJsonAndUrlQueryCreator( + final String httpMethod, + final SerializationSchema serializationSchema, + final List requestQueryParamsFields, + final List requestBodyFields, + final Map requestUrlMap, + final LookupRow lookupRow) { this.httpMethod = httpMethod; this.serializationSchema = serializationSchema; this.lookupRow = lookupRow; @@ -89,9 +104,9 @@ public GenericJsonAndUrlQueryCreator(final String httpMethod, this.requestBodyFields = requestBodyFields; this.requestUrlMap = requestUrlMap; } + @VisibleForTesting - void setSerializationSchema(SerializationSchema - serializationSchema) { + void setSerializationSchema(SerializationSchema serializationSchema) { this.serializationSchema = serializationSchema; } @@ -101,12 +116,13 @@ public LookupQueryInfo createLookupQuery(final RowData lookupDataRow) { final String lookupQuery; Map bodyBasedUrlQueryParams = new HashMap<>(); - final Collection lookupArgs = - lookupRow.convertToLookupArgs(lookupDataRow); + final Collection lookupArgs = lookupRow.convertToLookupArgs(lookupDataRow); ObjectNode jsonObject; try { - jsonObject = (ObjectNode) ObjectMapperAdapter.instance().readTree( - serializationSchema.serialize(lookupDataRow)); + jsonObject = + (ObjectNode) + ObjectMapperAdapter.instance() + .readTree(serializationSchema.serialize(lookupDataRow)); } catch (IOException e) { String message = "Unable to parse the lookup arguments to json."; log.error(message, e); @@ -122,33 +138,34 @@ public LookupQueryInfo createLookupQuery(final RowData lookupDataRow) { // and not use/deprecate bodyBasedUrlQueryParams if (httpMethod.equalsIgnoreCase("GET")) { // add the query parameters - lookupQuery = convertToQueryParameters(jsonObjectForQueryParams, - 
StandardCharsets.UTF_8.toString()); + lookupQuery = + convertToQueryParameters( + jsonObjectForQueryParams, StandardCharsets.UTF_8.toString()); } else { // Body-based queries // serialize to a string for the body. try { - lookupQuery = ObjectMapperAdapter.instance() - .writeValueAsString(jsonObject.retain(requestBodyFields)); + lookupQuery = + ObjectMapperAdapter.instance() + .writeValueAsString(jsonObject.retain(requestBodyFields)); } catch (JsonProcessingException e) { final String message = "Unable to convert Json Object to a string"; - throw new RuntimeException(message,e); + throw new RuntimeException(message, e); } // body parameters // use the request json object to scope the required fields and the lookupArgs as values - bodyBasedUrlQueryParams = createBodyBasedParams(lookupArgs, - jsonObjectForQueryParams); + bodyBasedUrlQueryParams = createBodyBasedParams(lookupArgs, jsonObjectForQueryParams); } // add the path map - final Map pathBasedUrlParams = createPathBasedParams(lookupArgs, - requestUrlMap); + final Map pathBasedUrlParams = + createPathBasedParams(lookupArgs, requestUrlMap); return new LookupQueryInfo(lookupQuery, bodyBasedUrlQueryParams, pathBasedUrlParams); - } /** - * Create a Row from a RowData and DataType + * Create a Row from a RowData and DataType. + * * @param lookupRowData the lookup RowData * @param rowType the datatype * @return row return row @@ -170,52 +187,54 @@ static Row rowDataToRow(final RowData lookupRowData, final DataType rowType) { } /** - * Create map of the json key to the lookup argument - * value. This is used for body based content. + * Create map of the json key to the lookup argument value. This is used for body based content. + * * @param args lookup arguments * @param objectNode object node * @return map of field content to the lookup argument value. 
*/ - private Map createBodyBasedParams(final Collection args, - ObjectNode objectNode ) { + private Map createBodyBasedParams( + final Collection args, ObjectNode objectNode) { Map mapOfJsonKeyToLookupArg = new LinkedHashMap<>(); Iterator> iterator = objectNode.fields(); - iterator.forEachRemaining(field -> { - for (final LookupArg arg : args) { - if (arg.getArgName().equals(field.getKey())) { - String keyForMap = field.getKey(); - mapOfJsonKeyToLookupArg.put( - keyForMap, arg.getArgValue()); - } - } - }); + iterator.forEachRemaining( + field -> { + for (final LookupArg arg : args) { + if (arg.getArgName().equals(field.getKey())) { + String keyForMap = field.getKey(); + mapOfJsonKeyToLookupArg.put(keyForMap, arg.getArgValue()); + } + } + }); return mapOfJsonKeyToLookupArg; } + /** - * Create map of the json key to the lookup argument - * value. This is used for path based content. + * Create map of the json key to the lookup argument value. This is used for path based content. + * * @param args lookup arguments * @param urlMap map of insert name to column name * @return map of field content to the lookup argument value. */ - private Map createPathBasedParams(final Collection args, - Map urlMap ) { + private Map createPathBasedParams( + final Collection args, Map urlMap) { Map mapOfJsonKeyToLookupArg = new LinkedHashMap<>(); if (urlMap != null) { - for (String key: urlMap.keySet()) { + for (String key : urlMap.keySet()) { for (final LookupArg arg : args) { if (arg.getArgName().equals(key)) { - mapOfJsonKeyToLookupArg.put( - urlMap.get(key), arg.getArgValue()); + mapOfJsonKeyToLookupArg.put(urlMap.get(key), arg.getArgValue()); } } } } return mapOfJsonKeyToLookupArg; } + /** - * Convert json object to query params string + * Convert json object to query params string. 
+ * * @param jsonObject supplies json object * @param enc encoding string - used in unit test to drive unsupported encoding * @return query params string @@ -225,22 +244,24 @@ static String convertToQueryParameters(final ObjectNode jsonObject, String enc) Preconditions.checkNotNull(jsonObject); final StringJoiner result = new StringJoiner("&"); - jsonObject.fields().forEachRemaining(field -> { - final String fieldName = field.getKey(); - final String fieldValue = field.getValue().asText(); + jsonObject + .fields() + .forEachRemaining( + field -> { + final String fieldName = field.getKey(); + final String fieldValue = field.getValue().asText(); - try { - result.add(fieldName + "=" - + URLEncoder.encode(fieldValue, enc)); - } catch (UnsupportedEncodingException e) { - final String message = - "Failed to encode the value of the query parameter name " - + fieldName - + ": " - + fieldValue; - throw new RuntimeException(message, e); - } - }); + try { + result.add(fieldName + "=" + URLEncoder.encode(fieldValue, enc)); + } catch (UnsupportedEncodingException e) { + final String message = + "Failed to encode the value of the query parameter name " + + fieldName + + ": " + + fieldValue; + throw new RuntimeException(message, e); + } + }); return result.toString(); } @@ -249,9 +270,8 @@ private void checkOpened() { if (!this.schemaOpened) { try { this.serializationSchema.open( - SerializationSchemaUtils - .createSerializationInitContext( - GenericJsonAndUrlQueryCreator.class)); + SerializationSchemaUtils.createSerializationInitContext( + GenericJsonAndUrlQueryCreator.class)); this.schemaOpened = true; } catch (final Exception e) { final String message = diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactory.java new file mode 100644 index 
00000000..a718371f --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactory.java @@ -0,0 +1,157 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.api.common.serialization.SerializationSchema; +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.configuration.ConfigOptions; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.configuration.ReadableConfig; +import org.apache.flink.connector.http.LookupQueryCreator; +import org.apache.flink.connector.http.LookupQueryCreatorFactory; +import org.apache.flink.connector.http.table.lookup.LookupRow; +import org.apache.flink.connector.http.utils.SynchronizedSerializationSchema; +import org.apache.flink.table.connector.format.EncodingFormat; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.factories.DynamicTableFactory; +import org.apache.flink.table.factories.FactoryUtil; +import org.apache.flink.table.factories.SerializationFormatFactory; + +import java.util.List; +import java.util.Map; +import 
java.util.Set; + +import static org.apache.flink.configuration.ConfigOptions.key; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.ASYNC_POLLING; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.LOOKUP_METHOD; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.LOOKUP_REQUEST_FORMAT; + +/** + * Generic JSON and url query creator factory defined configuration to define the columns to be. + * + *

    + *
  1. List of column names to be included in the query params + *
  2. List of column names to be included in the body (for PUT and POST) + *
  3. Map of templated uri segment names to column names + *
+ */ +@SuppressWarnings({"checkstyle:RegexpSingleline", "checkstyle:LineLength"}) +public class GenericJsonAndUrlQueryCreatorFactory implements LookupQueryCreatorFactory { + private static final long serialVersionUID = 1L; + + public static final String ID = "generic-json-url"; + + public static final ConfigOption> REQUEST_QUERY_PARAM_FIELDS = + key("http.request.query-param-fields") + .stringType() + .asList() + .defaultValues() // default to empty list so we do not need to check for null + .withDescription( + "The names of the fields that will be mapped to query parameters." + + " The parameters are separated by semicolons," + + " such as 'param1;param2'."); + public static final ConfigOption> REQUEST_BODY_FIELDS = + key("http.request.body-fields") + .stringType() + .asList() + .defaultValues() // default to empty list so we do not need to check for null + .withDescription( + "The names of the fields that will be mapped to the body." + + " The parameters are separated by semicolons," + + " such as 'param1;param2'."); + public static final ConfigOption> REQUEST_URL_MAP = + ConfigOptions.key("http.request.url-map") + .mapType() + .noDefaultValue() + .withDescription( + "The map of insert names to column names used" + + "as url segments. Parses a string as a map of strings. " + + "
" + + "For example if there are table columns called customerId" + + " and orderId, then specifying value customerId:cid1,orderID:oid" + + " and a url of https://myendpoint/customers/{cid}/orders/{oid}" + + " will mean that the url used for the lookup query will" + + " dynamically pickup the values for customerId, orderId" + + " and use them in the url." + + "
Notes
" + + "The expected format of the map is:" + + "
" + + " key1:value1,key2:value2"); + + @Override + public LookupQueryCreator createLookupQueryCreator( + final ReadableConfig readableConfig, + final LookupRow lookupRow, + final DynamicTableFactory.Context dynamicTableFactoryContext) { + final String httpMethod = readableConfig.get(LOOKUP_METHOD); + final String formatIdentifier = readableConfig.get(LOOKUP_REQUEST_FORMAT); + // get the information from config + final List requestQueryParamsFields = + readableConfig.get(REQUEST_QUERY_PARAM_FIELDS); + final List requestBodyFields = readableConfig.get(REQUEST_BODY_FIELDS); + Map requestUrlMap = readableConfig.get(REQUEST_URL_MAP); + + final SerializationFormatFactory jsonFormatFactory = + FactoryUtil.discoverFactory( + Thread.currentThread().getContextClassLoader(), + SerializationFormatFactory.class, + formatIdentifier); + QueryFormatAwareConfiguration queryFormatAwareConfiguration = + new QueryFormatAwareConfiguration( + LOOKUP_REQUEST_FORMAT.key() + "." + formatIdentifier, + (Configuration) readableConfig); + EncodingFormat> encoder = + jsonFormatFactory.createEncodingFormat( + dynamicTableFactoryContext, queryFormatAwareConfiguration); + + final SerializationSchema jsonSerializationSchema; + if (readableConfig.get(ASYNC_POLLING)) { + jsonSerializationSchema = + new SynchronizedSerializationSchema<>( + encoder.createRuntimeEncoder( + null, lookupRow.getLookupPhysicalRowDataType())); + } else { + jsonSerializationSchema = + encoder.createRuntimeEncoder(null, lookupRow.getLookupPhysicalRowDataType()); + } + // create using config parameter values and specify serialization + // schema from json format. 
+ return new GenericJsonAndUrlQueryCreator( + httpMethod, + jsonSerializationSchema, + requestQueryParamsFields, + requestBodyFields, + requestUrlMap, + lookupRow); + } + + @Override + public String factoryIdentifier() { + return ID; + } + + @Override + public Set> requiredOptions() { + return Set.of(); + } + + @Override + public Set> optionalOptions() { + return Set.of(REQUEST_QUERY_PARAM_FIELDS, REQUEST_BODY_FIELDS, REQUEST_URL_MAP); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreator.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreator.java new file mode 100644 index 00000000..d830f0ac --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreator.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.api.common.serialization.SerializationSchema; +import org.apache.flink.connector.http.LookupQueryCreator; +import org.apache.flink.connector.http.table.lookup.LookupQueryInfo; +import org.apache.flink.connector.http.utils.SerializationSchemaUtils; +import org.apache.flink.table.data.RowData; +import org.apache.flink.util.FlinkRuntimeException; + +import java.nio.charset.StandardCharsets; + +/** A {@link LookupQueryCreator} that builds a Json based body for REST requests. */ +public class GenericJsonQueryCreator implements LookupQueryCreator { + + /** The {@link SerializationSchema} to serialize {@link RowData} object. */ + private final SerializationSchema jsonSerialization; + + private boolean schemaOpened = false; + + public GenericJsonQueryCreator(SerializationSchema jsonSerialization) { + + this.jsonSerialization = jsonSerialization; + } + + /** + * Creates a Json string from given {@link RowData}. + * + * @param lookupDataRow {@link RowData} to serialize into Json string. + * @return Json string created from lookupDataRow argument. 
+ */ + @Override + public LookupQueryInfo createLookupQuery(RowData lookupDataRow) { + checkOpened(); + String lookupQuery = + new String(jsonSerialization.serialize(lookupDataRow), StandardCharsets.UTF_8); + + return new LookupQueryInfo(lookupQuery); + } + + private void checkOpened() { + if (!schemaOpened) { + try { + jsonSerialization.open( + SerializationSchemaUtils.createSerializationInitContext( + GenericJsonQueryCreator.class)); + } catch (Exception e) { + throw new FlinkRuntimeException( + "Failed to initialize serialization schema for GenericJsonQueryCreatorFactory.", + e); + } + schemaOpened = true; + } + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreatorFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreatorFactory.java new file mode 100644 index 00000000..75cb22e2 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreatorFactory.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.api.common.serialization.SerializationSchema; +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.configuration.ReadableConfig; +import org.apache.flink.connector.http.LookupQueryCreator; +import org.apache.flink.connector.http.LookupQueryCreatorFactory; +import org.apache.flink.connector.http.table.lookup.LookupRow; +import org.apache.flink.connector.http.utils.SynchronizedSerializationSchema; +import org.apache.flink.table.connector.format.EncodingFormat; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.factories.DynamicTableFactory; +import org.apache.flink.table.factories.FactoryUtil; +import org.apache.flink.table.factories.SerializationFormatFactory; + +import java.util.Set; + +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.ASYNC_POLLING; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.LOOKUP_REQUEST_FORMAT; + +/** Factory for creating {@link GenericJsonQueryCreatorFactory}. */ +public class GenericJsonQueryCreatorFactory implements LookupQueryCreatorFactory { + + public static final String IDENTIFIER = "generic-json-query"; + + @Override + public LookupQueryCreator createLookupQueryCreator( + ReadableConfig readableConfig, + LookupRow lookupRow, + DynamicTableFactory.Context dynamicTableFactoryContext) { + + String formatIdentifier = readableConfig.get(LOOKUP_REQUEST_FORMAT); + SerializationFormatFactory jsonFormatFactory = + FactoryUtil.discoverFactory( + dynamicTableFactoryContext.getClassLoader(), + SerializationFormatFactory.class, + formatIdentifier); + QueryFormatAwareConfiguration queryFormatAwareConfiguration = + new QueryFormatAwareConfiguration( + LOOKUP_REQUEST_FORMAT.key() + "." 
+ formatIdentifier, + (Configuration) readableConfig); + EncodingFormat> encoder = + jsonFormatFactory.createEncodingFormat( + dynamicTableFactoryContext, queryFormatAwareConfiguration); + + final SerializationSchema serializationSchema; + if (readableConfig.get(ASYNC_POLLING)) { + serializationSchema = + new SynchronizedSerializationSchema<>( + encoder.createRuntimeEncoder( + null, lookupRow.getLookupPhysicalRowDataType())); + } else { + serializationSchema = + encoder.createRuntimeEncoder(null, lookupRow.getLookupPhysicalRowDataType()); + } + + return new GenericJsonQueryCreator(serializationSchema); + } + + @Override + public String factoryIdentifier() { + return IDENTIFIER; + } + + @Override + public Set> requiredOptions() { + return Set.of(); + } + + @Override + public Set> optionalOptions() { + return Set.of(); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/ObjectMapperAdapter.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/ObjectMapperAdapter.java new file mode 100644 index 00000000..1e0b674e --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/ObjectMapperAdapter.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.DeserializationFeature; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.MapperFeature; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.SerializationFeature; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.json.JsonMapper; + +/** Centralizes the use of {@link ObjectMapper}. */ +public class ObjectMapperAdapter { + private static final ObjectMapper MAPPER = initialize(); + + private static ObjectMapper initialize() { + final ObjectMapper mapper = + JsonMapper.builder().configure(MapperFeature.USE_STD_BEAN_NAMING, false).build(); + mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); + mapper.disable(SerializationFeature.WRITE_DATES_WITH_ZONE_ID); + mapper.disable(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE); + return mapper; + } + + public static ObjectMapper instance() { + return MAPPER; + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/PrefixedConfigOption.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/PrefixedConfigOption.java new file mode 100644 index 00000000..60e29565 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/PrefixedConfigOption.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.configuration.FallbackKey; + +import java.lang.reflect.Constructor; +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.util.ArrayList; +import java.util.List; + +/** + * This is a ConfigOption that has an associated config option and prefix. + * + *

Note that this Class used to extend ConfigOption, but at Flink 1.16, there was a new way of + * doing class loaders for custom content, so we could no longer extend ConfigOption. + */ +public class PrefixedConfigOption { + /** configOption to decorate. */ + private ConfigOption configOption; + + public ConfigOption getConfigOption() { + return configOption; + } + + /** + * This constructor creates a new clone of the supplied option 'other' with the prefix prefixing + * the key. We create a new object, because we do not want to mutate a Flink object that we did + * not create. + * + * @param keyPrefix prefix that will be added to decorate the {@link ConfigOption} key. + * @param other original {@link ConfigOption} to clone and decorate. + */ + public PrefixedConfigOption(String keyPrefix, ConfigOption other) { + String prefixedKey = keyPrefix + other.key(); + Class clazz; + boolean isList; + + try { + // get clazz + Field field = other.getClass().getDeclaredField("clazz"); + field.setAccessible(true); + clazz = (Class) field.get(other); + + // get isList + field = other.getClass().getDeclaredField("isList"); + field.setAccessible(true); + isList = (Boolean) field.get(other); + + /* + * Create a new ConfigOption based on other, but with a prefixed key. + * At 1.16 we cannot access the protected fields / constructor in the supplied + * configOption as this object is loaded using a different classloader. + * Without changing Flink to make the constructor, methods and fields public, we need + * to use reflection to access and create the new prefixed ConfigOption. It is not + * great practise to use reflection, but getting round this classloader issue + * necessitates it's use. 
+ */ + Constructor constructor = other.getClass().getDeclaredConstructors()[0]; + constructor.setAccessible(true); + configOption = + (ConfigOption) + constructor.newInstance( + prefixedKey, + clazz, + other.description(), + other.defaultValue(), + isList, + getFallbackKeys(other)); + } catch (InstantiationException + | IllegalAccessException + | InvocationTargetException + | NoSuchFieldException e) { + throw new RuntimeException(e); + } + } + + private static FallbackKey[] getFallbackKeys(ConfigOption other) { + List fallbackKeys = new ArrayList<>(); + for (FallbackKey fallbackKey : other.fallbackKeys()) { + fallbackKeys.add(fallbackKey); + } + return fallbackKeys.toArray(new FallbackKey[0]); + } +} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/QueryFormatAwareConfiguration.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/QueryFormatAwareConfiguration.java similarity index 53% rename from src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/QueryFormatAwareConfiguration.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/QueryFormatAwareConfiguration.java index c91f8905..4efec67c 100644 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/QueryFormatAwareConfiguration.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/lookup/querycreators/QueryFormatAwareConfiguration.java @@ -1,11 +1,28 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ -import java.util.Optional; +package org.apache.flink.connector.http.table.lookup.querycreators; import org.apache.flink.configuration.ConfigOption; import org.apache.flink.configuration.Configuration; import org.apache.flink.table.factories.SerializationFormatFactory; +import java.util.Optional; + /** * An internal extension of Flink's {@link Configuration} class. This implementation uses {@link * PrefixedConfigOption} internally to decorate Flink's {@link ConfigOption} used for {@link @@ -16,19 +33,19 @@ class QueryFormatAwareConfiguration extends Configuration { /** * Format name for {@link SerializationFormatFactory} identifier used as {@code * HttpLookupConnectorOptions#LOOKUP_REQUEST_FORMAT}. - *

- * This will be used as prefix parameter for {@link PrefixedConfigOption}. + * + *

This will be used as prefix parameter for {@link PrefixedConfigOption}. */ private final String queryFormatName; QueryFormatAwareConfiguration(String queryFormatName, Configuration other) { super(other); this.queryFormatName = - (queryFormatName.endsWith(".")) ? queryFormatName : queryFormatName + "."; + (queryFormatName.endsWith(".")) ? queryFormatName : queryFormatName + "."; } /** - * Returns value for {@link ConfigOption} option which key is prefixed with "queryFormatName" + * Returns value for {@link ConfigOption} option which key is prefixed with "queryFormatName". * * @param option option which key will be prefixed with queryFormatName. * @return value for option after adding queryFormatName prefix @@ -36,7 +53,7 @@ class QueryFormatAwareConfiguration extends Configuration { @Override public Optional getOptional(ConfigOption option) { PrefixedConfigOption prefixedConfigOption = - new PrefixedConfigOption<>(queryFormatName, option); + new PrefixedConfigOption<>(queryFormatName, option); return super.getOptional(prefixedConfigOption.getConfigOption()); } } diff --git a/src/main/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicSink.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/HttpDynamicSink.java similarity index 57% rename from src/main/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicSink.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/HttpDynamicSink.java index f634e1d6..4d0e53ce 100644 --- a/src/main/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicSink.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/HttpDynamicSink.java @@ -1,14 +1,34 @@ -package com.getindata.connectors.http.internal.table.sink; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ -import java.util.Properties; -import javax.annotation.Nullable; +package org.apache.flink.connector.http.table.sink; -import lombok.EqualsAndHashCode; -import lombok.extern.slf4j.Slf4j; import org.apache.flink.api.common.serialization.SerializationSchema; import org.apache.flink.configuration.ReadableConfig; import org.apache.flink.connector.base.table.sink.AsyncDynamicTableSink; import org.apache.flink.connector.base.table.sink.AsyncDynamicTableSinkBuilder; +import org.apache.flink.connector.http.HttpPostRequestCallback; +import org.apache.flink.connector.http.HttpSink; +import org.apache.flink.connector.http.HttpSinkBuilder; +import org.apache.flink.connector.http.sink.HttpSinkRequestEntry; +import org.apache.flink.connector.http.sink.httpclient.HttpRequest; +import org.apache.flink.connector.http.sink.httpclient.JavaNetSinkHttpClient; +import org.apache.flink.connector.http.table.SerializationSchemaElementConverter; +import org.apache.flink.connector.http.utils.HttpHeaderUtils; import org.apache.flink.table.connector.ChangelogMode; import org.apache.flink.table.connector.format.EncodingFormat; import org.apache.flink.table.connector.sink.DynamicTableSink; @@ -17,16 +37,15 @@ import org.apache.flink.table.types.DataType; import org.apache.flink.util.Preconditions; -import 
com.getindata.connectors.http.HttpPostRequestCallback; -import com.getindata.connectors.http.HttpSink; -import com.getindata.connectors.http.HttpSinkBuilder; -import com.getindata.connectors.http.internal.sink.HttpSinkRequestEntry; -import com.getindata.connectors.http.internal.sink.httpclient.HttpRequest; -import com.getindata.connectors.http.internal.sink.httpclient.JavaNetSinkHttpClient; -import com.getindata.connectors.http.internal.table.SerializationSchemaElementConverter; -import com.getindata.connectors.http.internal.utils.HttpHeaderUtils; -import static com.getindata.connectors.http.internal.table.sink.HttpDynamicSinkConnectorOptions.INSERT_METHOD; -import static com.getindata.connectors.http.internal.table.sink.HttpDynamicSinkConnectorOptions.URL; +import lombok.EqualsAndHashCode; +import lombok.extern.slf4j.Slf4j; + +import javax.annotation.Nullable; + +import java.util.Properties; + +import static org.apache.flink.connector.http.table.sink.HttpDynamicSinkConnectorOptions.INSERT_METHOD; +import static org.apache.flink.connector.http.table.sink.HttpDynamicSinkConnectorOptions.URL; /** * A dynamic HTTP Sink based on {@link AsyncDynamicTableSink} that adds Table API support for {@link @@ -37,25 +56,23 @@ * *

    *
  • {@code maxBatchSize}: the maximum size of a batch of entries that may be sent to the HTTP - * endpoint;
  • + * endpoint; *
  • {@code maxInFlightRequests}: the maximum number of in flight requests that may exist, if * any more in flight requests need to be initiated once the maximum has been reached, then it - * will be blocked until some have completed;
  • + * will be blocked until some have completed; *
  • {@code maxBufferedRequests}: the maximum number of elements held in the buffer, requests to - * add elements will be blocked while the number of elements in the buffer is at the - * maximum;
  • + * add elements will be blocked while the number of elements in the buffer is at the maximum; *
  • {@code maxBufferSizeInBytes}: the maximum size of a batch of entries that may be sent to - * the HTTP endpoint measured in bytes;
  • + * the HTTP endpoint measured in bytes; *
  • {@code maxTimeInBufferMS}: the maximum amount of time an entry is allowed to live in the - * buffer, if any element reaches this age, the entire buffer will be flushed - * immediately;
  • - *
  • {@code consumedDataType}: the consumed data type of the table;
  • - *
  • {@code encodingFormat}: the format for encoding records;
  • - *
  • {@code httpPostRequestCallback}: the {@link HttpPostRequestCallback} implementation - * for processing of requests and responses;
  • - *
  • {@code tableOptions}: the {@link ReadableConfig} instance with values defined in Table - * API DDL;
  • - *
  • {@code properties}: properties related to the Http Sink.
  • + * buffer, if any element reaches this age, the entire buffer will be flushed immediately; + *
  • {@code consumedDataType}: the consumed data type of the table; + *
  • {@code encodingFormat}: the format for encoding records; + *
  • {@code httpPostRequestCallback}: the {@link HttpPostRequestCallback} implementation for + * processing of requests and responses; + *
  • {@code tableOptions}: the {@link ReadableConfig} instance with values defined in Table API + * DDL; + *
  • {@code properties}: properties related to the Http Sink. *
* *

The following example shows the minimum Table API example to create a {@link HttpDynamicSink} @@ -87,27 +104,31 @@ public class HttpDynamicSink extends AsyncDynamicTableSink private final Properties properties; protected HttpDynamicSink( - @Nullable Integer maxBatchSize, - @Nullable Integer maxInFlightRequests, - @Nullable Integer maxBufferedRequests, - @Nullable Long maxBufferSizeInBytes, - @Nullable Long maxTimeInBufferMS, - DataType consumedDataType, - EncodingFormat> encodingFormat, - HttpPostRequestCallback httpPostRequestCallback, - ReadableConfig tableOptions, - Properties properties) { - super(maxBatchSize, maxInFlightRequests, maxBufferedRequests, maxBufferSizeInBytes, - maxTimeInBufferMS); + @Nullable Integer maxBatchSize, + @Nullable Integer maxInFlightRequests, + @Nullable Integer maxBufferedRequests, + @Nullable Long maxBufferSizeInBytes, + @Nullable Long maxTimeInBufferMS, + DataType consumedDataType, + EncodingFormat> encodingFormat, + HttpPostRequestCallback httpPostRequestCallback, + ReadableConfig tableOptions, + Properties properties) { + super( + maxBatchSize, + maxInFlightRequests, + maxBufferedRequests, + maxBufferSizeInBytes, + maxTimeInBufferMS); this.consumedDataType = - Preconditions.checkNotNull(consumedDataType, "Consumed data type must not be null"); + Preconditions.checkNotNull(consumedDataType, "Consumed data type must not be null"); this.encodingFormat = - Preconditions.checkNotNull(encodingFormat, "Encoding format must not be null"); + Preconditions.checkNotNull(encodingFormat, "Encoding format must not be null"); this.httpPostRequestCallback = - Preconditions.checkNotNull(httpPostRequestCallback, - "Post request callback must not be null"); + Preconditions.checkNotNull( + httpPostRequestCallback, "Post request callback must not be null"); this.tableOptions = - Preconditions.checkNotNull(tableOptions, "Table options must not be null"); + Preconditions.checkNotNull(tableOptions, "Table options must not be null"); this.properties = 
properties; } @@ -119,20 +140,22 @@ public ChangelogMode getChangelogMode(ChangelogMode requestedMode) { @Override public SinkRuntimeProvider getSinkRuntimeProvider(Context context) { SerializationSchema serializationSchema = - encodingFormat.createRuntimeEncoder(context, consumedDataType); + encodingFormat.createRuntimeEncoder(context, consumedDataType); var insertMethod = tableOptions.get(INSERT_METHOD); - HttpSinkBuilder builder = HttpSink - .builder() - .setEndpointUrl(tableOptions.get(URL)) - .setSinkHttpClientBuilder(JavaNetSinkHttpClient::new) - .setHttpPostRequestCallback(httpPostRequestCallback) - // In future header preprocessor could be set via custom factory - .setHttpHeaderPreprocessor(HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor()) - .setElementConverter( - new SerializationSchemaElementConverter(insertMethod, serializationSchema)) - .setProperties(properties); + HttpSinkBuilder builder = + HttpSink.builder() + .setEndpointUrl(tableOptions.get(URL)) + .setSinkHttpClientBuilder(JavaNetSinkHttpClient::new) + .setHttpPostRequestCallback(httpPostRequestCallback) + // In future header preprocessor could be set via custom factory + .setHttpHeaderPreprocessor( + HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor()) + .setElementConverter( + new SerializationSchemaElementConverter( + insertMethod, serializationSchema)) + .setProperties(properties); addAsyncOptionsToSinkBuilder(builder); return SinkV2Provider.of(builder.build()); @@ -141,17 +164,16 @@ public SinkRuntimeProvider getSinkRuntimeProvider(Context context) { @Override public DynamicTableSink copy() { return new HttpDynamicSink( - maxBatchSize, - maxInFlightRequests, - maxBufferedRequests, - maxBufferSizeInBytes, - maxTimeInBufferMS, - consumedDataType, - encodingFormat, - httpPostRequestCallback, - tableOptions, - properties - ); + maxBatchSize, + maxInFlightRequests, + maxBufferedRequests, + maxBufferSizeInBytes, + maxTimeInBufferMS, + consumedDataType, + encodingFormat, + 
httpPostRequestCallback, + tableOptions, + properties); } @Override @@ -159,11 +181,10 @@ public String asSummaryString() { return "HttpSink"; } - /** - * Builder to construct {@link HttpDynamicSink}. - */ + /** Builder to construct {@link HttpDynamicSink}. */ public static class HttpDynamicTableSinkBuilder - extends AsyncDynamicTableSinkBuilder { + extends AsyncDynamicTableSinkBuilder< + HttpSinkRequestEntry, HttpDynamicTableSinkBuilder> { private final Properties properties = new Properties(); @@ -177,7 +198,7 @@ public static class HttpDynamicTableSinkBuilder /** * @param tableOptions the {@link ReadableConfig} consisting of options listed in table - * creation DDL + * creation DDL * @return {@link HttpDynamicTableSinkBuilder} itself */ public HttpDynamicTableSinkBuilder setTableOptions(ReadableConfig tableOptions) { @@ -190,18 +211,18 @@ public HttpDynamicTableSinkBuilder setTableOptions(ReadableConfig tableOptions) * @return {@link HttpDynamicTableSinkBuilder} itself */ public HttpDynamicTableSinkBuilder setEncodingFormat( - EncodingFormat> encodingFormat) { + EncodingFormat> encodingFormat) { this.encodingFormat = encodingFormat; return this; } /** - * @param httpPostRequestCallback the {@link HttpPostRequestCallback} implementation - * for processing of requests and responses + * @param httpPostRequestCallback the {@link HttpPostRequestCallback} implementation for + * processing of requests and responses * @return {@link HttpDynamicTableSinkBuilder} itself */ public HttpDynamicTableSinkBuilder setHttpPostRequestCallback( - HttpPostRequestCallback httpPostRequestCallback) { + HttpPostRequestCallback httpPostRequestCallback) { this.httpPostRequestCallback = httpPostRequestCallback; return this; } @@ -217,6 +238,7 @@ public HttpDynamicTableSinkBuilder setConsumedDataType(DataType consumedDataType /** * Set property for Http Sink. 
+ * * @param propertyName property name * @param propertyValue property value * @return {@link HttpDynamicTableSinkBuilder} itself @@ -227,7 +249,8 @@ public HttpDynamicTableSinkBuilder setProperty(String propertyName, String prope } /** - * Add properties to Http Sink configuration + * Add properties to Http Sink configuration. + * * @param properties properties to add * @return {@link HttpDynamicTableSinkBuilder} itself */ @@ -239,17 +262,16 @@ public HttpDynamicTableSinkBuilder setProperties(Properties properties) { @Override public HttpDynamicSink build() { return new HttpDynamicSink( - getMaxBatchSize(), - getMaxInFlightRequests(), - getMaxBufferedRequests(), - getMaxBufferSizeInBytes(), - getMaxTimeInBufferMS(), - consumedDataType, - encodingFormat, - httpPostRequestCallback, - tableOptions, - properties - ); + getMaxBatchSize(), + getMaxInFlightRequests(), + getMaxBufferedRequests(), + getMaxBufferSizeInBytes(), + getMaxTimeInBufferMS(), + consumedDataType, + encodingFormat, + httpPostRequestCallback, + tableOptions, + properties); } } } diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/HttpDynamicSinkConnectorOptions.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/HttpDynamicSinkConnectorOptions.java new file mode 100644 index 00000000..368447c2 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/HttpDynamicSinkConnectorOptions.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.sink; + +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.configuration.ConfigOptions; + +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SINK_REQUEST_CALLBACK_IDENTIFIER; + +/** Table API options for {@link HttpDynamicSink}. */ +public class HttpDynamicSinkConnectorOptions { + + public static final ConfigOption URL = + ConfigOptions.key("url") + .stringType() + .noDefaultValue() + .withDescription("The HTTP endpoint URL."); + + public static final ConfigOption INSERT_METHOD = + ConfigOptions.key("insert-method") + .stringType() + .defaultValue("POST") + .withDescription("Method used for requests built from SQL's INSERT."); + + public static final ConfigOption REQUEST_CALLBACK_IDENTIFIER = + ConfigOptions.key(SINK_REQUEST_CALLBACK_IDENTIFIER) + .stringType() + .defaultValue(Slf4jHttpPostRequestCallbackFactory.IDENTIFIER); +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/HttpDynamicTableSinkFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/HttpDynamicTableSinkFactory.java new file mode 100644 index 00000000..6d4b2d1e --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/HttpDynamicTableSinkFactory.java @@ -0,0 +1,116 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.sink; + +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.configuration.ReadableConfig; +import org.apache.flink.connector.base.table.AsyncDynamicTableSinkFactory; +import org.apache.flink.connector.base.table.sink.options.AsyncSinkConfigurationValidator; +import org.apache.flink.connector.http.HttpPostRequestCallbackFactory; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.sink.httpclient.HttpRequest; +import org.apache.flink.connector.http.utils.ConfigUtils; +import org.apache.flink.table.connector.sink.DynamicTableSink; +import org.apache.flink.table.factories.FactoryUtil; + +import java.util.Properties; +import java.util.Set; + +import static org.apache.flink.connector.http.table.sink.HttpDynamicSinkConnectorOptions.INSERT_METHOD; +import static org.apache.flink.connector.http.table.sink.HttpDynamicSinkConnectorOptions.REQUEST_CALLBACK_IDENTIFIER; +import static org.apache.flink.connector.http.table.sink.HttpDynamicSinkConnectorOptions.URL; + +/** Factory for creating {@link HttpDynamicSink}. 
*/ +public class HttpDynamicTableSinkFactory extends AsyncDynamicTableSinkFactory { + + public static final String IDENTIFIER = "http-sink"; + + @Override + public DynamicTableSink createDynamicTableSink(Context context) { + final AsyncDynamicSinkContext factoryContext = new AsyncDynamicSinkContext(this, context); + + // This is actually same as calling helper.getOptions(); + ReadableConfig tableOptions = factoryContext.getTableOptions(); + + // Validate configuration + FactoryUtil.createTableFactoryHelper(this, context) + .validateExcept( + // properties coming from + // org.apache.flink.table.api.config.ExecutionConfigOptions + "table.", HttpConnectorConfigConstants.FLINK_CONNECTOR_HTTP); + validateHttpSinkOptions(tableOptions); + + Properties asyncSinkProperties = + new AsyncSinkConfigurationValidator(tableOptions).getValidatedConfigurations(); + + // generics type erasure, so we have to do an unchecked cast + final HttpPostRequestCallbackFactory postRequestCallbackFactory = + FactoryUtil.discoverFactory( + context.getClassLoader(), + HttpPostRequestCallbackFactory.class, // generics type erasure + tableOptions.get(REQUEST_CALLBACK_IDENTIFIER)); + + Properties httpConnectorProperties = + ConfigUtils.getHttpConnectorProperties(context.getCatalogTable().getOptions()); + + HttpDynamicSink.HttpDynamicTableSinkBuilder builder = + new HttpDynamicSink.HttpDynamicTableSinkBuilder() + .setTableOptions(tableOptions) + .setEncodingFormat(factoryContext.getEncodingFormat()) + .setHttpPostRequestCallback( + postRequestCallbackFactory.createHttpPostRequestCallback()) + .setConsumedDataType(factoryContext.getPhysicalDataType()) + .setProperties(httpConnectorProperties); + addAsyncOptionsToBuilder(asyncSinkProperties, builder); + + return builder.build(); + } + + @Override + public String factoryIdentifier() { + return IDENTIFIER; + } + + @Override + public Set> requiredOptions() { + return Set.of(URL, FactoryUtil.FORMAT); + } + + @Override + public Set> optionalOptions() { + 
var options = super.optionalOptions(); + options.add(INSERT_METHOD); + options.add(REQUEST_CALLBACK_IDENTIFIER); + return options; + } + + private void validateHttpSinkOptions(ReadableConfig tableOptions) + throws IllegalArgumentException { + tableOptions + .getOptional(INSERT_METHOD) + .ifPresent( + insertMethod -> { + if (!Set.of("POST", "PUT").contains(insertMethod)) { + throw new IllegalArgumentException( + String.format( + "Invalid option '%s'. It is expected to be either 'POST' or 'PUT'.", + INSERT_METHOD.key())); + } + }); + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/Slf4jHttpPostRequestCallback.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/Slf4jHttpPostRequestCallback.java new file mode 100644 index 00000000..feaf205f --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/Slf4jHttpPostRequestCallback.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.sink; + +import org.apache.flink.connector.http.HttpPostRequestCallback; +import org.apache.flink.connector.http.sink.httpclient.HttpRequest; +import org.apache.flink.connector.http.utils.ConfigUtils; + +import lombok.extern.slf4j.Slf4j; + +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * A {@link HttpPostRequestCallback} that logs pairs of request and response as INFO level + * logs using Slf4j. + * + *

Serving as a default implementation of {@link HttpPostRequestCallback} for the {@link + * HttpDynamicSink}. + */ +@Slf4j +public class Slf4jHttpPostRequestCallback implements HttpPostRequestCallback { + + @Override + public void call( + HttpResponse response, + HttpRequest requestEntry, + String endpointUrl, + Map headerMap) { + + String requestBody = + requestEntry.getElements().stream() + .map(element -> new String(element, StandardCharsets.UTF_8)) + .collect(Collectors.joining()); + + if (response == null) { + log.info( + "Got response for a request.\n Request:\n " + + "Method: {}\n Body: {}\n Response: null", + requestEntry.getMethod(), + requestBody); + } else { + log.info( + "Got response for a request.\n Request:\n " + + "Method: {}\n Body: {}\n Response: {}\n Body: {}", + requestEntry.method, + requestBody, + response, + response.body().replaceAll(ConfigUtils.UNIVERSAL_NEW_LINE_REGEXP, "")); + } + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/Slf4jHttpPostRequestCallbackFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/Slf4jHttpPostRequestCallbackFactory.java new file mode 100644 index 00000000..78d00bff --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/table/sink/Slf4jHttpPostRequestCallbackFactory.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.sink; + +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.connector.http.HttpPostRequestCallback; +import org.apache.flink.connector.http.HttpPostRequestCallbackFactory; +import org.apache.flink.connector.http.sink.httpclient.HttpRequest; + +import java.util.HashSet; +import java.util.Set; + +/** Factory for creating {@link Slf4jHttpPostRequestCallback}. */ +public class Slf4jHttpPostRequestCallbackFactory + implements HttpPostRequestCallbackFactory { + + public static final String IDENTIFIER = "slf4j-logger"; + + @Override + public HttpPostRequestCallback createHttpPostRequestCallback() { + return new Slf4jHttpPostRequestCallback(); + } + + @Override + public String factoryIdentifier() { + return IDENTIFIER; + } + + @Override + public Set> requiredOptions() { + return new HashSet<>(); + } + + @Override + public Set> optionalOptions() { + return new HashSet<>(); + } +} diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/ConfigUtils.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ConfigUtils.java similarity index 59% rename from src/main/java/com/getindata/connectors/http/internal/utils/ConfigUtils.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ConfigUtils.java index 31b60d1f..1c6dbb51 100644 --- a/src/main/java/com/getindata/connectors/http/internal/utils/ConfigUtils.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ConfigUtils.java @@ -1,35 +1,54 
@@ -package com.getindata.connectors.http.internal.utils; - -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils; + +import org.apache.flink.connector.http.config.ConfigException; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.util.StringUtils; import lombok.AccessLevel; import lombok.NoArgsConstructor; -import org.apache.flink.util.StringUtils; +import lombok.experimental.UtilityClass; -import com.getindata.connectors.http.internal.config.ConfigException; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; +import java.util.HashMap; +import java.util.Map; +import java.util.Properties; +/** config utils. */ +@UtilityClass @NoArgsConstructor(access = AccessLevel.NONE) public final class ConfigUtils { /** * A pattern matcher linebreak regexp that represents any Unicode linebreak sequence making it - * effectively equivalent to: - *

-     * {@code
+     * effectively equivalent to.
+     *
+     * 
{@code
      * \u000D\u000A|[\u000A\u000B\u000C\u000D\u0085\u2028\u2029]
-     * }
-     * 
+ * }
*/ public static final String UNIVERSAL_NEW_LINE_REGEXP = "\\R"; private static final String PROPERTY_NAME_DELIMITER = "."; /** - * Convert properties that name starts with given {@code keyPrefix} to Map. - * Values for this property will be cast to {@code valueClazz} type. + * Convert properties that name starts with given {@code keyPrefix} to Map. Values for this + * property will be cast to {@code valueClazz} type. * * @param properties properties to extract keys from. * @param keyPrefix prefix used to match property name with. @@ -38,9 +57,7 @@ public final class ConfigUtils { * @return Map of propertyName to propertyValue. */ public static Map propertiesToMap( - Properties properties, - String keyPrefix, - Class valueClazz) { + Properties properties, String keyPrefix, Class valueClazz) { Map map = new HashMap<>(); for (Map.Entry entry : properties.entrySet()) { @@ -51,9 +68,7 @@ public static Map propertiesToMap( } } else { throw new ConfigException( - entry.getKey().toString(), - entry.getValue(), "Key must be a string." - ); + entry.getKey().toString(), entry.getValue(), "Key must be a string."); } } return map; @@ -66,9 +81,9 @@ public static Map propertiesToMap( * * @param propertyKey Property name to extract the last element from. * @return property last element or the property name if {@code propertyKey} parameter had no - * dot delimiter. + * dot delimiter. * @throws ConfigException when invalid property such as null, empty, blank, ended with dot was - * used. + * used. */ public static String extractPropertyLastElement(String propertyKey) { if (StringUtils.isNullOrWhitespaceOnly(propertyKey)) { @@ -82,25 +97,22 @@ public static String extractPropertyLastElement(String propertyKey) { int delimiterLastIndex = propertyKey.lastIndexOf(PROPERTY_NAME_DELIMITER); if (delimiterLastIndex == propertyKey.length() - 1) { throw new ConfigException( - String.format( - "Invalid property - %s. 
Property name should not end with property delimiter.", - propertyKey) - ); + String.format( + "Invalid property - %s. Property name should not end with property delimiter.", + propertyKey)); } return propertyKey.substring(delimiterLastIndex + 1); } private static void tryAddToConfigMap( - Properties properties, - Class clazz, Map map, - String key) { + Properties properties, Class clazz, Map map, String key) { try { map.put(key, clazz.cast(properties.get(key))); } catch (ClassCastException e) { throw new ConfigException( - String.format("Unable to cast value for property %s to type %s", key, - clazz), e); + String.format("Unable to cast value for property %s to type %s", key, clazz), + e); } } @@ -108,9 +120,12 @@ public static Properties getHttpConnectorProperties(Map tableOpt final Properties httpProperties = new Properties(); tableOptions.entrySet().stream() - .filter(entry -> - entry.getKey().startsWith(HttpConnectorConfigConstants.GID_CONNECTOR_HTTP)) - .forEach(entry -> httpProperties.put(entry.getKey(), entry.getValue())); + .filter( + entry -> + entry.getKey() + .startsWith( + HttpConnectorConfigConstants.FLINK_CONNECTOR_HTTP)) + .forEach(entry -> httpProperties.put(entry.getKey(), entry.getValue())); return httpProperties; } diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ExceptionUtils.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ExceptionUtils.java new file mode 100644 index 00000000..f9eb24da --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ExceptionUtils.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import lombok.experimental.UtilityClass; + +import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.io.UncheckedIOException; + +/** Exception utils. */ +@UtilityClass +@NoArgsConstructor(access = AccessLevel.NONE) +public final class ExceptionUtils { + + public static String stringifyException(Throwable e) { + try (StringWriter stm = new StringWriter(); + PrintWriter wrt = new PrintWriter(stm)) { + + e.printStackTrace(wrt); + wrt.close(); + return stm.toString(); + + } catch (IOException ioException) { + throw new UncheckedIOException(ioException); + } + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/HttpHeaderUtils.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/HttpHeaderUtils.java new file mode 100644 index 00000000..fefd87b9 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/HttpHeaderUtils.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils; + +import org.apache.flink.configuration.ReadableConfig; +import org.apache.flink.connector.http.preprocessor.BasicAuthHeaderValuePreprocessor; +import org.apache.flink.connector.http.preprocessor.ComposeHeaderPreprocessor; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.preprocessor.OIDCAuthHeaderValuePreprocessor; +import org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import lombok.experimental.UtilityClass; +import lombok.extern.slf4j.Slf4j; + +import java.time.Duration; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Properties; +import java.util.stream.Stream; + +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST; + +/** Http header utils. 
*/ +@UtilityClass +@NoArgsConstructor(access = AccessLevel.NONE) +@Slf4j +public final class HttpHeaderUtils { + + public static final String AUTHORIZATION = "Authorization"; + + public static Map prepareHeaderMap( + String headerKeyPrefix, Properties properties, HeaderPreprocessor headerPreprocessor) { + + // at this stage headerMap keys are full property paths not only header names. + Map propertyHeaderMap = + ConfigUtils.propertiesToMap(properties, headerKeyPrefix, String.class); + + // Map with keys pointing to the headerName. + Map headerMap = new HashMap<>(); + + for (Entry headerAndValue : propertyHeaderMap.entrySet()) { + String propertyName = headerAndValue.getKey(); + String headerValue = headerAndValue.getValue(); + log.info( + "prepareHeaderMap propertyName=" + propertyName + ",headerValue" + headerValue); + String headerName = ConfigUtils.extractPropertyLastElement(propertyName); + String preProcessedHeader = + headerPreprocessor.preprocessValueForHeader(headerName, headerValue); + log.info("prepareHeaderMap preProcessedHeader=" + preProcessedHeader); + headerMap.put(headerName, preProcessedHeader); + } + return headerMap; + } + + /** + * Flat map a given Map of header name and header value map to an array containing both header + * names and values. For example, header map of + * + *
{@code
+     * Map.of(
+     * header1, val1,
+     * header2, val2
+     * )
+     * }
+ * + *

will be converter to an array of: + * + *

{@code
+     * String[] headers = {"header1", "val1", "header2", "val2"};
+     * }
+ * + * @param headerMap mapping of header names to header values + * @return an array containing both header names and values + */ + public static String[] toHeaderAndValueArray(Map headerMap) { + return headerMap.entrySet().stream() + .flatMap(entry -> Stream.of(entry.getKey(), entry.getValue())) + .toArray(String[]::new); + } + + public static HeaderPreprocessor createBasicAuthorizationHeaderPreprocessor() { + return createBasicAuthorizationHeaderPreprocessor(false); + } + + public static HeaderPreprocessor createBasicAuthorizationHeaderPreprocessor( + boolean useRawAuthHeader) { + return new ComposeHeaderPreprocessor( + Collections.singletonMap( + AUTHORIZATION, new BasicAuthHeaderValuePreprocessor(useRawAuthHeader))); + } + + public static HeaderPreprocessor createOIDCAuthorizationHeaderPreprocessor( + String oidcAuthURL, String oidcTokenRequest, Optional oidcExpiryReduction) { + return new ComposeHeaderPreprocessor( + Collections.singletonMap( + AUTHORIZATION, + new OIDCAuthHeaderValuePreprocessor( + oidcAuthURL, oidcTokenRequest, oidcExpiryReduction))); + } + + public static HeaderPreprocessor createHeaderPreprocessor(ReadableConfig readableConfig) { + boolean useRawAuthHeader = + readableConfig.get(HttpLookupConnectorOptions.USE_RAW_AUTH_HEADER); + HeaderPreprocessor headerPreprocessor = + HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor(useRawAuthHeader); + log.info("created HeaderPreprocessor for basic useRawAuthHeader"); + return headerPreprocessor; + } + + public static HeaderPreprocessor createOIDCHeaderPreprocessor(ReadableConfig readableConfig) { + HeaderPreprocessor headerPreprocessor = null; + Optional oidcAuthURL = + readableConfig.getOptional(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL); + + if (oidcAuthURL.isPresent()) { + Optional oidcTokenRequest = + readableConfig.getOptional(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST); + + Optional oidcExpiryReduction = + readableConfig.getOptional(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION); + 
headerPreprocessor = + HttpHeaderUtils.createOIDCAuthorizationHeaderPreprocessor( + oidcAuthURL.get(), oidcTokenRequest.get(), oidcExpiryReduction); + log.info("created OIDC HeaderPreprocessor"); + } + return headerPreprocessor; + } +} diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/JavaNetHttpClientFactory.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/JavaNetHttpClientFactory.java similarity index 53% rename from src/main/java/com/getindata/connectors/http/internal/utils/JavaNetHttpClientFactory.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/JavaNetHttpClientFactory.java index 34ddf79b..fb6b74d4 100644 --- a/src/main/java/com/getindata/connectors/http/internal/utils/JavaNetHttpClientFactory.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/JavaNetHttpClientFactory.java @@ -1,4 +1,38 @@ -package com.getindata.connectors.http.internal.utils; +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.utils; + +import org.apache.flink.configuration.ReadableConfig; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.security.SecurityContext; +import org.apache.flink.connector.http.security.SelfSignedTrustManager; +import org.apache.flink.connector.http.table.lookup.HttpLookupConfig; +import org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions; +import org.apache.flink.util.StringUtils; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import lombok.experimental.UtilityClass; +import lombok.extern.slf4j.Slf4j; + +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; import java.net.InetSocketAddress; import java.net.ProxySelector; @@ -10,22 +44,9 @@ import java.util.Optional; import java.util.Properties; import java.util.concurrent.Executor; -import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManager; -import javax.net.ssl.X509TrustManager; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.apache.flink.configuration.ReadableConfig; -import org.apache.flink.util.StringUtils; - -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.security.SecurityContext; -import com.getindata.connectors.http.internal.security.SelfSignedTrustManager; -import com.getindata.connectors.http.internal.table.lookup.HttpLookupConfig; -import com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions; +/** Java Net Http Client Factory. 
*/ +@UtilityClass @Slf4j @NoArgsConstructor(access = AccessLevel.NONE) public class JavaNetHttpClientFactory { @@ -41,28 +62,34 @@ public static HttpClient createClient(HttpLookupConfig options) { SSLContext sslContext = getSslContext(options.getProperties()); - var clientBuilder = HttpClient.newBuilder() - .followRedirects(Redirect.NORMAL) - .sslContext(sslContext); + var clientBuilder = + HttpClient.newBuilder().followRedirects(Redirect.NORMAL).sslContext(sslContext); ReadableConfig readableConfig = options.getReadableConfig(); readableConfig - .getOptional(HttpLookupConnectorOptions.SOURCE_LOOKUP_CONNECTION_TIMEOUT) - .ifPresent(clientBuilder::connectTimeout); - - Optional proxyHost = readableConfig.getOptional(HttpLookupConnectorOptions.SOURCE_LOOKUP_PROXY_HOST); - Optional proxyPort = readableConfig.getOptional(HttpLookupConnectorOptions.SOURCE_LOOKUP_PROXY_PORT); - - if(proxyHost.isPresent() && proxyPort.isPresent()){ - - Optional proxyUsername = readableConfig - .getOptional(HttpLookupConnectorOptions.SOURCE_LOOKUP_PROXY_USERNAME); - Optional proxyPassword = readableConfig - .getOptional(HttpLookupConnectorOptions.SOURCE_LOOKUP_PROXY_PASSWORD); - - ProxyConfig proxyConfig = new ProxyConfig(proxyHost.get(), proxyPort.get(), proxyUsername, proxyPassword); - clientBuilder.proxy(ProxySelector.of(new InetSocketAddress(proxyConfig.getHost(), proxyConfig.getPort()))); + .getOptional(HttpLookupConnectorOptions.SOURCE_LOOKUP_CONNECTION_TIMEOUT) + .ifPresent(clientBuilder::connectTimeout); + + Optional proxyHost = + readableConfig.getOptional(HttpLookupConnectorOptions.SOURCE_LOOKUP_PROXY_HOST); + Optional proxyPort = + readableConfig.getOptional(HttpLookupConnectorOptions.SOURCE_LOOKUP_PROXY_PORT); + + if (proxyHost.isPresent() && proxyPort.isPresent()) { + + Optional proxyUsername = + readableConfig.getOptional( + HttpLookupConnectorOptions.SOURCE_LOOKUP_PROXY_USERNAME); + Optional proxyPassword = + readableConfig.getOptional( + 
HttpLookupConnectorOptions.SOURCE_LOOKUP_PROXY_PASSWORD); + + ProxyConfig proxyConfig = + new ProxyConfig(proxyHost.get(), proxyPort.get(), proxyUsername, proxyPassword); + clientBuilder.proxy( + ProxySelector.of( + new InetSocketAddress(proxyConfig.getHost(), proxyConfig.getPort()))); proxyConfig.getAuthenticator().ifPresent(clientBuilder::authenticator); } @@ -74,7 +101,7 @@ public static HttpClient createClient(HttpLookupConfig options) { * calls. * * @param properties properties used to build {@link SSLContext} - * @param executor {@link Executor} for async calls. + * @param executor {@link Executor} for async calls. * @return new {@link HttpClient} instance. */ public static HttpClient createClient(Properties properties, Executor executor) { @@ -82,20 +109,21 @@ public static HttpClient createClient(Properties properties, Executor executor) SSLContext sslContext = getSslContext(properties); return HttpClient.newBuilder() - .followRedirects(Redirect.NORMAL) - .sslContext(sslContext) - .executor(executor) - .build(); + .followRedirects(Redirect.NORMAL) + .sslContext(sslContext) + .executor(executor) + .build(); } /** * Creates an {@link SSLContext} based on provided properties. + * *
    - *
  • {@link HttpConnectorConfigConstants#ALLOW_SELF_SIGNED}
  • - *
  • {@link HttpConnectorConfigConstants#SERVER_TRUSTED_CERT}
  • - *
  • {@link HttpConnectorConfigConstants#PROP_DELIM}
  • - *
  • {@link HttpConnectorConfigConstants#CLIENT_CERT}
  • - *
  • {@link HttpConnectorConfigConstants#CLIENT_PRIVATE_KEY}
  • + *
  • {@link HttpConnectorConfigConstants#ALLOW_SELF_SIGNED} + *
  • {@link HttpConnectorConfigConstants#SERVER_TRUSTED_CERT} + *
  • {@link HttpConnectorConfigConstants#PROP_DELIM} + *
  • {@link HttpConnectorConfigConstants#CLIENT_CERT} + *
  • {@link HttpConnectorConfigConstants#CLIENT_PRIVATE_KEY} *
* * @param properties properties used to build {@link SSLContext} @@ -105,25 +133,27 @@ private static SSLContext getSslContext(Properties properties) { String keyStorePath = properties.getProperty(HttpConnectorConfigConstants.KEY_STORE_PATH, ""); - boolean selfSignedCert = Boolean.parseBoolean( - properties.getProperty(HttpConnectorConfigConstants.ALLOW_SELF_SIGNED, "false")); + boolean selfSignedCert = + Boolean.parseBoolean( + properties.getProperty( + HttpConnectorConfigConstants.ALLOW_SELF_SIGNED, "false")); - String[] serverTrustedCerts = properties - .getProperty(HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, "") - .split(HttpConnectorConfigConstants.PROP_DELIM); + String[] serverTrustedCerts = + properties + .getProperty(HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, "") + .split(HttpConnectorConfigConstants.PROP_DELIM); - String clientCert = properties - .getProperty(HttpConnectorConfigConstants.CLIENT_CERT, ""); + String clientCert = properties.getProperty(HttpConnectorConfigConstants.CLIENT_CERT, ""); - String clientPrivateKey = properties - .getProperty(HttpConnectorConfigConstants.CLIENT_PRIVATE_KEY, ""); + String clientPrivateKey = + properties.getProperty(HttpConnectorConfigConstants.CLIENT_PRIVATE_KEY, ""); if (StringUtils.isNullOrWhitespaceOnly(keyStorePath) && !selfSignedCert // checking the property in this way so that serverTrustedCerts is not left and null // or empty, which causes the http client to error. 
&& (properties.getProperty(HttpConnectorConfigConstants.SERVER_TRUSTED_CERT) - == null) + == null) && StringUtils.isNullOrWhitespaceOnly(clientCert) && StringUtils.isNullOrWhitespaceOnly(clientPrivateKey)) { try { @@ -141,7 +171,7 @@ private static SSLContext getSslContext(Properties properties) { } if (!StringUtils.isNullOrWhitespaceOnly(clientCert) - && !StringUtils.isNullOrWhitespaceOnly(clientPrivateKey)) { + && !StringUtils.isNullOrWhitespaceOnly(clientPrivateKey)) { securityContext.addMTlsCerts(clientCert, clientPrivateKey); } @@ -151,8 +181,7 @@ private static SSLContext getSslContext(Properties properties) { } private static TrustManager[] getTrustedManagers( - SecurityContext securityContext, - boolean selfSignedCert) { + SecurityContext securityContext, boolean selfSignedCert) { TrustManager[] trustManagers = securityContext.getTrustManagers(); @@ -164,9 +193,12 @@ private static TrustManager[] getTrustedManagers( } private static List wrapWithSelfSignedManagers(TrustManager[] trustManagers) { - log.warn("Creating Trust Managers for self-signed certificates - not Recommended. " - + "Use [" + HttpConnectorConfigConstants.SERVER_TRUSTED_CERT + "] " - + "connector property to add certificated as trusted."); + log.warn( + "Creating Trust Managers for self-signed certificates - not Recommended. 
" + + "Use [" + + HttpConnectorConfigConstants.SERVER_TRUSTED_CERT + + "] " + + "connector property to add certificated as trusted."); List selfSignedManagers = new ArrayList<>(trustManagers.length); for (TrustManager trustManager : trustManagers) { @@ -185,19 +217,19 @@ private static List wrapWithSelfSignedManagers(TrustManager[] trus private static SecurityContext createSecurityContext(Properties properties) { String keyStorePath = - properties.getProperty(HttpConnectorConfigConstants.KEY_STORE_PATH, ""); + properties.getProperty(HttpConnectorConfigConstants.KEY_STORE_PATH, ""); if (StringUtils.isNullOrWhitespaceOnly(keyStorePath)) { return SecurityContext.create(); } else { char[] storePassword = - properties.getProperty(HttpConnectorConfigConstants.KEY_STORE_PASSWORD, "") - .toCharArray(); + properties + .getProperty(HttpConnectorConfigConstants.KEY_STORE_PASSWORD, "") + .toCharArray(); if (storePassword.length == 0) { throw new RuntimeException("Missing password for provided KeyStore"); } return SecurityContext.createFromKeyStore(keyStorePath, storePassword); } } - } diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ProxyConfig.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ProxyConfig.java new file mode 100644 index 00000000..d3422c21 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ProxyConfig.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils; + +import lombok.Getter; + +import java.net.Authenticator; +import java.net.PasswordAuthentication; +import java.util.Optional; + +/** proxy config. */ +@Getter +public class ProxyConfig { + + private final String host; + + private final int port; + + private final Optional authenticator; + + public ProxyConfig( + String host, int port, Optional proxyUsername, Optional proxyPassword) { + this.host = host; + this.port = port; + + if (proxyUsername.isPresent() && proxyPassword.isPresent()) { + this.authenticator = + Optional.of( + new Authenticator() { + @Override + protected PasswordAuthentication getPasswordAuthentication() { + if (getRequestorType().equals(RequestorType.PROXY) + && getRequestingHost().equalsIgnoreCase(host)) { + return new PasswordAuthentication( + proxyUsername.get(), + proxyPassword.get().toCharArray()); + } else { + return null; + } + } + }); + } else { + this.authenticator = Optional.empty(); + } + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/SerializationSchemaUtils.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/SerializationSchemaUtils.java new file mode 100644 index 00000000..a1eee1fc --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/SerializationSchemaUtils.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils; + +import org.apache.flink.metrics.MetricGroup; +import org.apache.flink.metrics.groups.UnregisteredMetricsGroup; +import org.apache.flink.util.SimpleUserCodeClassLoader; +import org.apache.flink.util.UserCodeClassLoader; + +/** Serialization Schema Utils. 
*/ +public final class SerializationSchemaUtils { + + private SerializationSchemaUtils() {} + + public static + org.apache.flink.api.common.serialization.SerializationSchema.InitializationContext + createSerializationInitContext(Class classForClassLoader) { + + return new org.apache.flink.api.common.serialization.SerializationSchema + .InitializationContext() { + + @Override + public MetricGroup getMetricGroup() { + return new UnregisteredMetricsGroup(); + } + + @Override + public UserCodeClassLoader getUserCodeClassLoader() { + return SimpleUserCodeClassLoader.create(classForClassLoader.getClassLoader()); + } + }; + } + + public static + org.apache.flink.api.common.serialization.DeserializationSchema.InitializationContext + createDeserializationInitContext(Class classForClassLoader) { + + return new org.apache.flink.api.common.serialization.DeserializationSchema + .InitializationContext() { + + @Override + public MetricGroup getMetricGroup() { + return new UnregisteredMetricsGroup(); + } + + @Override + public UserCodeClassLoader getUserCodeClassLoader() { + return SimpleUserCodeClassLoader.create(classForClassLoader.getClassLoader()); + } + }; + } +} diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/SynchronizedSerializationSchema.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/SynchronizedSerializationSchema.java similarity index 51% rename from src/main/java/com/getindata/connectors/http/internal/utils/SynchronizedSerializationSchema.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/SynchronizedSerializationSchema.java index 0e47b4f7..d05d9671 100644 --- a/src/main/java/com/getindata/connectors/http/internal/utils/SynchronizedSerializationSchema.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/SynchronizedSerializationSchema.java @@ -1,4 +1,21 @@ -package com.getindata.connectors.http.internal.utils; +/* + * Licensed to the Apache 
Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils; import org.apache.flink.api.common.serialization.SerializationSchema; diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ThreadUtils.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ThreadUtils.java new file mode 100644 index 00000000..b138fc12 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/ThreadUtils.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import lombok.experimental.UtilityClass; +import lombok.extern.slf4j.Slf4j; + +import java.lang.Thread.UncaughtExceptionHandler; + +import static org.apache.flink.connector.http.utils.ExceptionUtils.stringifyException; + +/** Thread utils. */ +@UtilityClass +@Slf4j +@NoArgsConstructor(access = AccessLevel.NONE) +public final class ThreadUtils { + + public static final UncaughtExceptionHandler LOGGING_EXCEPTION_HANDLER = + (t, e) -> log.warn("Thread:" + t + " exited with Exception:" + stringifyException(e)); +} diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/uri/CharArrayBuffer.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/CharArrayBuffer.java similarity index 66% rename from src/main/java/com/getindata/connectors/http/internal/utils/uri/CharArrayBuffer.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/CharArrayBuffer.java index 0aba0ed6..45fbe0f4 100644 --- a/src/main/java/com/getindata/connectors/http/internal/utils/uri/CharArrayBuffer.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/CharArrayBuffer.java @@ -1,45 +1,34 @@ /* - * ==================================================================== - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - * ==================================================================== - * - * This software consists of voluntary contributions made by many - * individuals on behalf of the Apache Software Foundation. For more - * information on the Apache Software Foundation, please see - * . + * http://www.apache.org/licenses/LICENSE-2.0 * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* * ============================= NOTE ================================= * This code has been copied from * https://github.com/apache/httpcomponents-client/tree/rel/v4.5.13 - * and it was changed to use in this project. 
- * ==================================================================== */ -package com.getindata.connectors.http.internal.utils.uri; +package org.apache.flink.connector.http.utils.uri; + +import org.apache.flink.util.Preconditions; import java.io.Serializable; import java.nio.CharBuffer; -import org.apache.flink.util.Preconditions; - -/** - * A resizable char array. - */ +/** A resizable char array. */ final class CharArrayBuffer implements CharSequence, Serializable { private static final long serialVersionUID = -6208952725094867135L; @@ -83,7 +72,7 @@ void append(final String str) { * @param i the index of the desired char value. * @return the char value at the specified index. * @throws IndexOutOfBoundsException if {@code index} is negative or greater than or equal to - * {@link #length()}. + * {@link #length()}. */ @Override public char charAt(final int i) { @@ -110,7 +99,7 @@ public CharSequence subSequence(final int beginIndex, final int endIndex) { } if (beginIndex > endIndex) { throw new IndexOutOfBoundsException( - "beginIndex: " + beginIndex + " > endIndex: " + endIndex); + "beginIndex: " + beginIndex + " > endIndex: " + endIndex); } return CharBuffer.wrap(this.buffer, beginIndex, endIndex); } @@ -125,5 +114,4 @@ private void expand(final int newLen) { public String toString() { return new String(this.buffer, 0, this.len); } - } diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/NameValuePair.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/NameValuePair.java new file mode 100644 index 00000000..4a64d6ef --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/NameValuePair.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils.uri; + +import org.apache.flink.util.Preconditions; + +import lombok.Data; + +/** name value pair. */ +@Data +public class NameValuePair { + + private final String name; + + private final String value; + + /** + * Default Constructor taking a name and a value. The value may be null. + * + * @param name The name. + * @param value The value. + */ + public NameValuePair(final String name, final String value) { + super(); + this.name = Preconditions.checkNotNull(name, "Name may not be null"); + this.value = value; + } +} diff --git a/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/ParserCursor.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/ParserCursor.java new file mode 100644 index 00000000..990a3e31 --- /dev/null +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/ParserCursor.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils.uri; + +import lombok.Getter; + +/** + * This class represents a context of a parsing operation. + * + *
    + *
  • the current position the parsing operation is expected to start at + *
  • the bounds limiting the scope of the parsing operation + *
+ */ +@Getter +class ParserCursor { + + private final int lowerBound; + + private final int upperBound; + + private int pos; + + ParserCursor(final int lowerBound, final int upperBound) { + super(); + if (lowerBound < 0) { + throw new IndexOutOfBoundsException("Lower bound cannot be negative"); + } + if (lowerBound > upperBound) { + throw new IndexOutOfBoundsException("Lower bound cannot be greater then upper bound"); + } + this.lowerBound = lowerBound; + this.upperBound = upperBound; + this.pos = lowerBound; + } + + void updatePos(final int pos) { + if (pos < this.lowerBound) { + throw new IndexOutOfBoundsException( + "pos: " + pos + " < lowerBound: " + this.lowerBound); + } + if (pos > this.upperBound) { + throw new IndexOutOfBoundsException( + "pos: " + pos + " > upperBound: " + this.upperBound); + } + this.pos = pos; + } + + boolean atEnd() { + return this.pos >= this.upperBound; + } + + @Override + public String toString() { + return "[" + this.lowerBound + '>' + this.pos + '>' + this.upperBound + ']'; + } +} diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/uri/TokenParser.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/TokenParser.java similarity index 58% rename from src/main/java/com/getindata/connectors/http/internal/utils/uri/TokenParser.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/TokenParser.java index 0463da50..4d24c4ae 100644 --- a/src/main/java/com/getindata/connectors/http/internal/utils/uri/TokenParser.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/TokenParser.java @@ -1,62 +1,48 @@ /* - * ==================================================================== - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - * ==================================================================== - * - * This software consists of voluntary contributions made by many - * individuals on behalf of the Apache Software Foundation. For more - * information on the Apache Software Foundation, please see - * . + * http://www.apache.org/licenses/LICENSE-2.0 * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* * ============================= NOTE ================================= * This code has been copied from * https://github.com/apache/httpcomponents-client/tree/rel/v4.5.13 - * and it was changed to use in this project. 
- * ==================================================================== */ -package com.getindata.connectors.http.internal.utils.uri; +package org.apache.flink.connector.http.utils.uri; import java.util.BitSet; /** * Low level parser for header field elements. The parsing routines of this class are designed to * produce near zero intermediate garbage and make no intermediate copies of input data. - *

- * This class is immutable and thread safe. + * + *

This class is immutable and thread safe. */ class TokenParser { - /** - * US-ASCII CR, carriage return (13) - */ + /** US-ASCII CR, carriage return (13). */ static final char CR = '\r'; - /** - * US-ASCII LF, line feed (10) - */ + + /** US-ASCII LF, line feed (10.). */ static final char LF = '\n'; - /** - * US-ASCII SP, space (32) - */ + + /** US-ASCII SP, space (32). */ static final char SP = ' '; - /** - * US-ASCII HT, horizontal-tab (9) - */ + + /** US-ASCII HT, horizontal-tab (9). */ static final char HT = '\t'; static final TokenParser INSTANCE = new TokenParser(); @@ -69,15 +55,13 @@ static boolean isWhitespace(final char ch) { * Extracts from the sequence of chars a token terminated with any of the given delimiters * discarding semantically insignificant whitespace characters. * - * @param buf buffer with the sequence of chars to be parsed - * @param cursor defines the bounds and current position of the buffer + * @param buf buffer with the sequence of chars to be parsed + * @param cursor defines the bounds and current position of the buffer * @param delimiters set of delimiting characters. Can be {@code null} if the token is not - * delimited by any character. + * delimited by any character. */ String parseToken( - final CharArrayBuffer buf, - final ParserCursor cursor, - final BitSet delimiters) { + final CharArrayBuffer buf, final ParserCursor cursor, final BitSet delimiters) { final StringBuilder dst = new StringBuilder(); boolean whitespace = false; @@ -103,7 +87,7 @@ String parseToken( * Skips semantically insignificant whitespace characters and moves the cursor to the closest * non-whitespace character. 
* - * @param buf buffer with the sequence of chars to be parsed + * @param buf buffer with the sequence of chars to be parsed * @param cursor defines the bounds and current position of the buffer */ void skipWhiteSpace(final CharArrayBuffer buf, final ParserCursor cursor) { @@ -124,15 +108,17 @@ void skipWhiteSpace(final CharArrayBuffer buf, final ParserCursor cursor) { * Transfers content into the destination buffer until a whitespace character or any of the * given delimiters is encountered. * - * @param buf buffer with the sequence of chars to be parsed - * @param cursor defines the bounds and current position of the buffer + * @param buf buffer with the sequence of chars to be parsed + * @param cursor defines the bounds and current position of the buffer * @param delimiters set of delimiting characters. Can be {@code null} if the value is delimited - * by a whitespace only. - * @param dst destination buffer + * by a whitespace only. + * @param dst destination buffer */ - void copyContent(final CharArrayBuffer buf, final ParserCursor cursor, - final BitSet delimiters, - final StringBuilder dst) { + void copyContent( + final CharArrayBuffer buf, + final ParserCursor cursor, + final BitSet delimiters, + final StringBuilder dst) { int pos = cursor.getPos(); final int indexFrom = cursor.getPos(); final int indexTo = cursor.getUpperBound(); diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/uri/URIBuilder.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/URIBuilder.java similarity index 68% rename from src/main/java/com/getindata/connectors/http/internal/utils/uri/URIBuilder.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/URIBuilder.java index 287e3322..a7c316ee 100644 --- a/src/main/java/com/getindata/connectors/http/internal/utils/uri/URIBuilder.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/URIBuilder.java @@ -1,36 +1,29 @@ /* 
- * ==================================================================== - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - * ==================================================================== - * - * This software consists of voluntary contributions made by many - * individuals on behalf of the Apache Software Foundation. For more - * information on the Apache Software Foundation, please see - * . + * http://www.apache.org/licenses/LICENSE-2.0 * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* * ============================= NOTE ================================= * This code has been copied from * https://github.com/apache/httpcomponents-client/tree/rel/v4.5.13 - * and it was changed to use in this project. - * ==================================================================== */ -package com.getindata.connectors.http.internal.utils.uri; +package org.apache.flink.connector.http.utils.uri; + +import org.apache.flink.util.StringUtils; import java.net.URI; import java.net.URISyntaxException; @@ -39,11 +32,9 @@ import java.util.ArrayList; import java.util.List; -import org.apache.flink.util.StringUtils; - /** - * Builder for {@link URI} instances. - * This class is based on {@code org.apache.httpcomponents.httpclient#URIBuilder} version 4.5.13. + * Builder for {@link URI} instances. This class is based on {@code + * org.apache.httpcomponents.httpclient#URIBuilder} version 4.5.13. */ public class URIBuilder { @@ -92,10 +83,10 @@ public URIBuilder(final URI uri, final Charset charset) { /** * Adds parameter to URI query. The parameter name and value are expected to be unescaped and * may contain non ASCII characters. - *

- * Please note query parameters and custom query component are mutually exclusive. This method - * will remove custom query if present. - *

+ * + *

Please note query parameters and custom query component are mutually exclusive. This + * method will remove custom query if present. + * * @param param parameter to add * @param value value to add * @return the URI builder @@ -112,6 +103,7 @@ public URIBuilder addParameter(final String param, final String value) { /** * Builds a {@link URI} instance. + * * @return URI * @throws URISyntaxException URI syntax Exception */ @@ -166,18 +158,15 @@ private void digestURI(final URI uri) { this.encodedAuthority = uri.getRawAuthority(); this.encodedPath = uri.getRawPath(); this.queryParams = - parseQuery( - uri.getRawQuery(), - this.charset != null ? this.charset : StandardCharsets.UTF_8 - ); + parseQuery( + uri.getRawQuery(), + this.charset != null ? this.charset : StandardCharsets.UTF_8); this.encodedFragment = uri.getRawFragment(); this.encodedQuery = uri.getRawQuery(); } private String encodeUrlForm(final List params) { return URLEncodedUtils.format( - params, this.charset != null ? this.charset : StandardCharsets.UTF_8 - ); + params, this.charset != null ? this.charset : StandardCharsets.UTF_8); } } - diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/uri/URLEncodedUtils.java b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/URLEncodedUtils.java similarity index 77% rename from src/main/java/com/getindata/connectors/http/internal/utils/uri/URLEncodedUtils.java rename to flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/URLEncodedUtils.java index 4b4d8cf5..7a358ef8 100644 --- a/src/main/java/com/getindata/connectors/http/internal/utils/uri/URLEncodedUtils.java +++ b/flink-connector-http/src/main/java/org/apache/flink/connector/http/utils/uri/URLEncodedUtils.java @@ -1,36 +1,29 @@ /* - * ==================================================================== - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - * ==================================================================== - * - * This software consists of voluntary contributions made by many - * individuals on behalf of the Apache Software Foundation. For more - * information on the Apache Software Foundation, please see - * . + * http://www.apache.org/licenses/LICENSE-2.0 * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* * ============================= NOTE ================================= * This code has been copied from * https://github.com/apache/httpcomponents-client/tree/rel/v4.5.13 - * and it was changed to use in this project. - * ==================================================================== */ -package com.getindata.connectors.http.internal.utils.uri; +package org.apache.flink.connector.http.utils.uri; + +import org.apache.flink.util.Preconditions; import java.nio.ByteBuffer; import java.nio.CharBuffer; @@ -41,11 +34,7 @@ import java.util.Collections; import java.util.List; -import org.apache.flink.util.Preconditions; - -/** - * A collection of utilities for encoding URLs. - */ +/** A collection of utilities for encoding URLs. */ public class URLEncodedUtils { private static final char QP_SEP_A = '&'; @@ -59,15 +48,15 @@ public class URLEncodedUtils { private static final BitSet PATH_SEPARATORS = new BitSet(256); /** * Unreserved characters, i.e. alphanumeric, plus: {@code _ - ! . ~ ' ( ) *} - *

- * This list is the same as the {@code unreserved} list in - * RFC 2396 + * + *

This list is the same as the {@code unreserved} list in RFC 2396 */ private static final BitSet UNRESERVED = new BitSet(256); /** * Punctuation characters: , ; : $ & + = - *

- * These are the additional characters allowed by userinfo. + * + *

These are the additional characters allowed by userinfo. */ private static final BitSet PUNCT = new BitSet(256); /** @@ -87,11 +76,10 @@ public class URLEncodedUtils { private static final BitSet URIC = new BitSet(256); /** * Reserved characters, i.e. {@code ;/?:@&=+$,[]} - *

- * This list is the same as the {@code reserved} list in - * RFC 2396 - * as augmented by - * RFC 2732 + * + *

This list is the same as the {@code reserved} list in RFC 2396 as augmented by RFC 2732 */ private static final BitSet RESERVED = new BitSet(256); /** @@ -179,7 +167,7 @@ public class URLEncodedUtils { * {@code ';'} are accepted as parameter separators. * * @param queryComponent URI query component. - * @param charset charset to use when decoding the parameters. + * @param charset charset to use when decoding the parameters. * @return list of query parameters. */ static List parse(final String queryComponent, final Charset charset) { @@ -194,13 +182,13 @@ static List parse(final String queryComponent, final Charset char /** * Returns a list of {@link NameValuePair}s parameters. * - * @param buf text to parse. - * @param charset Encoding to use when decoding the parameters. + * @param buf text to parse. + * @param charset Encoding to use when decoding the parameters. * @param separators element separators. * @return a list of {@link NameValuePair} as built from the URI's query portion. */ static List parse( - final CharArrayBuffer buf, final Charset charset, final char... separators) { + final CharArrayBuffer buf, final Charset charset, final char... separators) { Preconditions.checkNotNull(buf, "Char array buffer cannot be null."); final TokenParser tokenParser = TokenParser.INSTANCE; final BitSet delimSet = new BitSet(); @@ -225,9 +213,9 @@ static List parse( } } if (!name.isEmpty()) { - list.add(new NameValuePair( - decodeFormFields(name, charset), - decodeFormFields(value, charset))); + list.add( + new NameValuePair( + decodeFormFields(name, charset), decodeFormFields(value, charset))); } } return list; @@ -269,19 +257,19 @@ static List splitPathSegments(final CharSequence s) { * Returns a list of URI path segments. * * @param sequence URI path component. - * @param charset parameter charset. + * @param charset parameter charset. * @return list of segments. 
*/ - static List parsePathSegments( - final CharSequence sequence, - final Charset charset) { + static List parsePathSegments(final CharSequence sequence, final Charset charset) { Preconditions.checkNotNull(sequence, "Char sequence cannot be null."); final List list = splitPathSegments(sequence); for (int i = 0; i < list.size(); i++) { list.set( - i, - urlDecode(list.get(i), charset != null ? charset : StandardCharsets.UTF_8, false) - ); + i, + urlDecode( + list.get(i), + charset != null ? charset : StandardCharsets.UTF_8, + false)); } return list; } @@ -290,7 +278,7 @@ static List parsePathSegments( * Returns a string consisting of joint encoded path segments. * * @param segments the segments. - * @param charset parameter charset. + * @param charset parameter charset. * @return URI path component */ static String formatSegments(final Iterable segments, final Charset charset) { @@ -307,13 +295,11 @@ static String formatSegments(final Iterable segments, final Charset char * list of parameters in an HTTP PUT or HTTP POST. * * @param parameters The parameters to include. - * @param charset The encoding to use. + * @param charset The encoding to use. * @return An {@code application/x-www-form-urlencoded} string */ public static String format( - final Iterable parameters, - final Charset charset - ) { + final Iterable parameters, final Charset charset) { return format(parameters, QP_SEP_A, charset); } @@ -321,16 +307,15 @@ public static String format( * Returns a String that is suitable for use as an {@code application/x-www-form-urlencoded} * list of parameters in an HTTP PUT or HTTP POST. * - * @param parameters The parameters to include. - * @param parameterSeparator The parameter separator, by convention, {@code '&'} or {@code - * ';'}. - * @param charset The encoding to use. + * @param parameters The parameters to include. + * @param parameterSeparator The parameter separator, by convention, {@code '&'} or {@code ';'}. + * @param charset The encoding to use. 
* @return An {@code application/x-www-form-urlencoded} string */ static String format( - final Iterable parameters, - final char parameterSeparator, - final Charset charset) { + final Iterable parameters, + final char parameterSeparator, + final Charset charset) { Preconditions.checkNotNull(parameters, "Parameters cannot be null."); final StringBuilder result = new StringBuilder(); for (final NameValuePair parameter : parameters) { @@ -353,10 +338,10 @@ private static List createEmptyList() { } private static String urlEncode( - final String content, - final Charset charset, - final BitSet safeChars, - final boolean blankAsPlus) { + final String content, + final Charset charset, + final BitSet safeChars, + final boolean blankAsPlus) { if (content == null) { return null; } @@ -383,16 +368,14 @@ private static String urlEncode( * Decode/unescape a portion of a URL, to use with the query part ensure {@code plusAsBlank} is * true. * - * @param content the portion to decode - * @param charset the charset to use + * @param content the portion to decode + * @param charset the charset to use * @param plusAsBlank if {@code true}, then convert '+' to space (e.g. for www-url-form-encoded - * content), otherwise leave as is. + * content), otherwise leave as is. * @return encoded string */ private static String urlDecode( - final String content, - final Charset charset, - final boolean plusAsBlank) { + final String content, final Charset charset, final boolean plusAsBlank) { final ByteBuffer bb = ByteBuffer.allocate(content.length()); final CharBuffer cb = CharBuffer.wrap(content); @@ -422,9 +405,9 @@ private static String urlDecode( /** * Encode/escape www-url-form-encoded content. - *

- * Uses the {@link #URLENCODER} set of characters, rather than the {@link #UNRESERVED} set; this - * is for compatibilty with previous releases, URLEncoder.encode() and most browsers. + * + *

Uses the {@link #URLENCODER} set of characters, rather than the {@link #UNRESERVED} set; + * this is for compatibilty with previous releases, URLEncoder.encode() and most browsers. * * @param content the content to encode, will convert space to '+' * @param charset the charset to use @@ -435,9 +418,7 @@ private static String encodeFormFields(final String content, final Charset chars return null; } return urlEncode( - content, - charset != null ? charset : StandardCharsets.UTF_8, URLENCODER, true - ); + content, charset != null ? charset : StandardCharsets.UTF_8, URLENCODER, true); } /** diff --git a/flink-connector-http/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory b/flink-connector-http/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory new file mode 100644 index 00000000..eacea811 --- /dev/null +++ b/flink-connector-http/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory @@ -0,0 +1,28 @@ +# +# /* +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+# */ +# + +org.apache.flink.connector.http.table.lookup.HttpLookupTableSourceFactory +org.apache.flink.connector.http.table.lookup.querycreators.ElasticSearchLiteQueryCreatorFactory +org.apache.flink.connector.http.table.lookup.querycreators.GenericGetQueryCreatorFactory +org.apache.flink.connector.http.table.lookup.querycreators.GenericJsonQueryCreatorFactory +org.apache.flink.connector.http.table.lookup.Slf4jHttpLookupPostRequestCallbackFactory +org.apache.flink.connector.http.table.sink.HttpDynamicTableSinkFactory +org.apache.flink.connector.http.table.sink.Slf4jHttpPostRequestCallbackFactory +org.apache.flink.connector.http.table.lookup.querycreators.GenericJsonAndUrlQueryCreatorFactory \ No newline at end of file diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/ExceptionUtilsTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/ExceptionUtilsTest.java new file mode 100644 index 00000000..6cd4f712 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/ExceptionUtilsTest.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http; + +import org.apache.flink.connector.http.utils.ExceptionUtils; + +import lombok.extern.slf4j.Slf4j; +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThat; + +/** Test for {@link ExceptionUtils} . */ +@Slf4j +class ExceptionUtilsTest { + + @Test + void shouldConvertStackTrace() { + String stringifyException = + ExceptionUtils.stringifyException(new RuntimeException("Test Exception")); + assertThat(stringifyException).contains("java.lang.RuntimeException: Test Exception"); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/HttpPostRequestCallbackFactoryTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/HttpPostRequestCallbackFactoryTest.java new file mode 100644 index 00000000..ef20a3f5 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/HttpPostRequestCallbackFactoryTest.java @@ -0,0 +1,194 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http; + +import org.apache.flink.connector.http.sink.httpclient.HttpRequest; +import org.apache.flink.connector.http.table.lookup.HttpLookupSourceRequestEntry; +import org.apache.flink.connector.http.table.sink.HttpDynamicTableSinkFactory; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.TableResult; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; + +import com.github.tomakehurst.wiremock.WireMockServer; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; + +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.any; +import static com.github.tomakehurst.wiremock.client.WireMock.ok; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo; +import static org.apache.flink.connector.http.TestLookupPostRequestCallbackFactory.TEST_LOOKUP_POST_REQUEST_CALLBACK_IDENT; +import static org.apache.flink.connector.http.TestPostRequestCallbackFactory.TEST_POST_REQUEST_CALLBACK_IDENT; +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** Test for {@link HttpPostRequestCallbackFactory}. 
*/ +public class HttpPostRequestCallbackFactoryTest { + private static final int SERVER_PORT = WireMockServerPortAllocator.getServerPort(); + + private WireMockServer wireMockServer; + protected StreamExecutionEnvironment env; + protected StreamTableEnvironment tEnv; + + private static final ArrayList requestEntries = new ArrayList<>(); + + private static final ArrayList lookupRequestEntries = + new ArrayList<>(); + + private static final ArrayList> responses = new ArrayList<>(); + + @BeforeEach + public void setup() { + wireMockServer = new WireMockServer(SERVER_PORT); + wireMockServer.start(); + + env = StreamExecutionEnvironment.getExecutionEnvironment(); + tEnv = StreamTableEnvironment.create(env); + + requestEntries.clear(); + responses.clear(); + } + + @AfterEach + public void tearDown() { + wireMockServer.stop(); + } + + @ParameterizedTest + @CsvSource(value = {"single, {\"id\":1}", "batch, [{\"id\":1}]"}) + public void httpPostRequestCallbackFactoryTest(String mode, String expectedRequest) + throws ExecutionException, InterruptedException { + wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); + + final String createTable = + String.format( + "CREATE TABLE http (\n" + + " id bigint\n" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'format' = 'json',\n" + + " 'http.sink.request-callback' = '%s',\n" + + " 'http.sink.writer.request.mode' = '%s',\n" + + " 'http.sink.header.Content-Type' = 'application/json'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER, + "http://localhost:" + SERVER_PORT + "/myendpoint", + TEST_POST_REQUEST_CALLBACK_IDENT, + mode); + tEnv.executeSql(createTable); + + final String insert = "INSERT INTO http VALUES (1)"; + tEnv.executeSql(insert).await(); + + assertEquals(1, requestEntries.size()); + assertEquals(1, responses.size()); + + String actualRequest = + requestEntries.get(0).getElements().stream() + .map(element -> new String(element, StandardCharsets.UTF_8)) + 
.collect(Collectors.joining()); + + Assertions.assertThat(actualRequest).isEqualToIgnoringNewLines(expectedRequest); + } + + @Test + public void httpLookupPostRequestCallbackFactoryTest() + throws ExecutionException, InterruptedException { + wireMockServer.stubFor( + any(urlPathEqualTo("/myendpoint")) + .willReturn(aResponse().withStatus(200).withBody("{\"customerId\": 1}"))); + + final String createTable1 = + "CREATE TABLE Orders (\n" + + " proc_time AS PROCTIME(),\n" + + " orderId INT\n" + + ") WITH (\n" + + " 'connector' = 'datagen',\n" + + " 'fields.orderId.kind' = 'sequence',\n" + + " 'fields.orderId.start' = '1',\n" + + " 'fields.orderId.end' = '1'\n" + + ");"; + tEnv.executeSql(createTable1); + + final String createTable2 = + String.format( + "CREATE TABLE Customers (\n" + + " `customerId` INT\n" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'format' = 'json',\n" + + " 'http.source.lookup.request-callback' = '%s'\n" + + ")", + "rest-lookup", + "http://localhost:" + SERVER_PORT + "/myendpoint", + TEST_LOOKUP_POST_REQUEST_CALLBACK_IDENT); + tEnv.executeSql(createTable2); + + final String joinTable = + "SELECT o.`orderId`, c.`customerId`\n" + + " FROM Orders AS o\n" + + " JOIN Customers FOR SYSTEM_TIME AS OF o.`proc_time` AS c\n" + + " ON o.`orderId` = c.`customerId`;"; + + final TableResult resultTable = tEnv.sqlQuery(joinTable).execute(); + resultTable.await(); + + assertEquals(1, lookupRequestEntries.size()); + assertEquals(1, responses.size()); + } + + /** TestPostRequestCallback. */ + public static class TestPostRequestCallback implements HttpPostRequestCallback { + @Override + public void call( + HttpResponse response, + HttpRequest requestEntry, + String endpointUrl, + Map headerMap) { + requestEntries.add(requestEntry); + responses.add(response); + } + } + + /** TestLookupPostRequestCallback. 
*/ + public static class TestLookupPostRequestCallback + implements HttpPostRequestCallback { + @Override + public void call( + HttpResponse response, + HttpLookupSourceRequestEntry requestEntry, + String endpointUrl, + Map headerMap) { + lookupRequestEntries.add(requestEntry); + responses.add(response); + } + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/HttpsConnectionTestBase.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/HttpsConnectionTestBase.java new file mode 100644 index 00000000..20483a84 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/HttpsConnectionTestBase.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http; + +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.sink.httpclient.HttpRequest; +import org.apache.flink.connector.http.table.sink.Slf4jHttpPostRequestCallback; +import org.apache.flink.connector.http.utils.HttpHeaderUtils; + +import com.github.tomakehurst.wiremock.WireMockServer; + +import java.util.Properties; + +/** Https Connection Test Base. 
*/ +public abstract class HttpsConnectionTestBase { + + protected static final String ENDPOINT = "/myendpoint"; + + protected static final String CERTS_PATH = "src/test/resources/security/certs/"; + + protected static final String SERVER_KEYSTORE_PATH = + "src/test/resources/security/certs/serverKeyStore.jks"; + + protected static final String SERVER_TRUSTSTORE_PATH = + "src/test/resources/security/certs/serverTrustStore.jks"; + + protected WireMockServer wireMockServer; + + protected Properties properties; + + protected HeaderPreprocessor headerPreprocessor; + + protected HttpPostRequestCallback postRequestCallback = + new Slf4jHttpPostRequestCallback(); + + public void setUp() { + this.properties = new Properties(); + this.headerPreprocessor = HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor(); + } + + public void tearDown() { + if (wireMockServer != null) { + wireMockServer.stop(); + } + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/StreamTableJob.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/StreamTableJob.java new file mode 100644 index 00000000..24153bc5 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/StreamTableJob.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http; + +import org.apache.flink.api.common.restartstrategy.RestartStrategies; +import org.apache.flink.api.java.utils.ParameterTool; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.Table; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; + +/** Manually runnable example job that joins a datagen stream with the {@code rest-lookup} HTTP connector. */ +public class StreamTableJob { + + public static void main(String[] args) { + + ParameterTool parameters = ParameterTool.fromSystemProperties(); + parameters = parameters.mergeWith(ParameterTool.fromArgs(args)); + + StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); + // env.enableCheckpointing(5000); + env.setRestartStrategy(RestartStrategies.fixedDelayRestart(1000, 1000)); + env.setParallelism(1); + env.disableOperatorChaining(); + env.getConfig().setGlobalJobParameters(parameters); + + StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env); + + tableEnv.executeSql( + "CREATE TABLE Orders (id STRING, id2 STRING, proc_time AS PROCTIME())" + + " WITH (" + + "'connector' = 'datagen', 'rows-per-second' = '1', 'fields.id.kind' = 'sequence'," + + " 'fields.id.start' = '1', 'fields.id.end' = '120'," + + " 'fields.id2.kind' = 'sequence', 'fields.id2.start' = '2'," + + " 'fields.id2.end' = '120')"); + tableEnv.executeSql( + "CREATE TABLE Customers (id STRING, id2 STRING, msg STRING, uuid STRING, isActive STRING, balance STRING) WITH ('connector' = 'rest-lookup'," + + " 'url' = 'http://localhost:" + + WireMockServerPortAllocator.getServerPort() + + "/client', " + + "'asyncPolling' = 'true', " + + "'field.isActive.path' = '$.details.isActive', " + + "'field.balance.path' = '$.details.nestedDetails.balance')"); + + Table resultTable = + tableEnv.sqlQuery( + "SELECT o.id, o.id2, c.msg, c.uuid, c.isActive, c.balance 
FROM Orders AS o " + + "JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c " + + "ON o.id = c.id AND o.id2 = c.id2"); + + /* DataStream rowDataStream = tableEnv.toDataStream(resultTable); + rowDataStream.print();*/ + + // Table result = tableEnv.sqlQuery("SELECT * FROM Orders"); + // Table result = tableEnv.sqlQuery("SELECT * FROM Customers"); + // Table result = tableEnv.sqlQuery("SELECT * FROM T WHERE T.id > 10"); + + resultTable.execute().print(); + + // env.execute(); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/TestHelper.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/TestHelper.java new file mode 100644 index 00000000..2fb422c7 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/TestHelper.java @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; +import org.junit.jupiter.api.Assertions; + +import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.Objects; + +import static org.assertj.core.api.Assertions.assertThat; + +/** Test helper. */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public final class TestHelper { + + private static final TestHelper INSTANCE = new TestHelper(); + + public static String readTestFile(String pathToFile) { + try { + URI uri = Objects.requireNonNull(INSTANCE.getClass().getResource(pathToFile)).toURI(); + return Files.readString(Path.of(uri)); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + public static void assertPropertyArray( + String[] headerArray, String propertyName, String expectedValue) { + // important thing is that we have property followed by its value. + for (int i = 0; i < headerArray.length; i++) { + if (headerArray[i].equals(propertyName)) { + assertThat(headerArray[i + 1]) + .withFailMessage( + "Property Array does not contain property name, value pairs.") + .isEqualTo(expectedValue); + return; + } + } + Assertions.fail( + String.format( + "Missing property name [%s] in header array %s.", + propertyName, Arrays.toString(headerArray))); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/TestLookupPostRequestCallbackFactory.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/TestLookupPostRequestCallbackFactory.java new file mode 100644 index 00000000..898701a2 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/TestLookupPostRequestCallbackFactory.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http; + +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.connector.http.table.lookup.HttpLookupSourceRequestEntry; + +import java.util.HashSet; +import java.util.Set; + +/** Test {@link HttpPostRequestCallbackFactory} implementation for lookup request callbacks. */ +public class TestLookupPostRequestCallbackFactory + implements HttpPostRequestCallbackFactory { + + public static final String TEST_LOOKUP_POST_REQUEST_CALLBACK_IDENT = + "test-lookup-request-callback"; + + @Override + public HttpPostRequestCallback createHttpPostRequestCallback() { + return new HttpPostRequestCallbackFactoryTest.TestLookupPostRequestCallback(); + } + + @Override + public String factoryIdentifier() { + return TEST_LOOKUP_POST_REQUEST_CALLBACK_IDENT; + } + + @Override + public Set> requiredOptions() { + return new HashSet<>(); + } + + @Override + public Set> optionalOptions() { + return new HashSet<>(); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/TestPostRequestCallbackFactory.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/TestPostRequestCallbackFactory.java new file mode 100644 index 00000000..6ab162a9 --- /dev/null +++ 
b/flink-connector-http/src/test/java/org/apache/flink/connector/http/TestPostRequestCallbackFactory.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http; + +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.connector.http.sink.httpclient.HttpRequest; + +import java.util.HashSet; +import java.util.Set; + +/** Test PostRequestCallbackFactory. 
*/ +public class TestPostRequestCallbackFactory implements HttpPostRequestCallbackFactory { + + public static final String TEST_POST_REQUEST_CALLBACK_IDENT = "test-request-callback"; + + @Override + public HttpPostRequestCallback createHttpPostRequestCallback() { + return new HttpPostRequestCallbackFactoryTest.TestPostRequestCallback(); + } + + @Override + public String factoryIdentifier() { + return TEST_POST_REQUEST_CALLBACK_IDENT; + } + + @Override + public Set> requiredOptions() { + return new HashSet<>(); + } + + @Override + public Set> optionalOptions() { + return new HashSet<>(); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/WireMockServerPortAllocator.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/WireMockServerPortAllocator.java new file mode 100644 index 00000000..c8e27b6a --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/WireMockServerPortAllocator.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http; + +/** class to allocate ports. 
*/ +public class WireMockServerPortAllocator { + + public static final Integer PORT_BASE = 8090; + public static final Integer SECURE_PORT_BASE = 9090; + public static Integer nextPortToUse = PORT_BASE; + public static Integer nextSecurePortToUse = SECURE_PORT_BASE; + + public static int getServerPort() { + return ++nextPortToUse; + } + + public static int getSecureServerPort() { + return ++nextSecurePortToUse; + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/app/HttpStubApp.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/app/HttpStubApp.java new file mode 100644 index 00000000..21b814e8 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/app/HttpStubApp.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.app; + +import org.apache.flink.connector.http.WireMockServerPortAllocator; + +import com.github.tomakehurst.wiremock.WireMockServer; +import com.github.tomakehurst.wiremock.core.WireMockConfiguration; +import com.github.tomakehurst.wiremock.stubbing.StubMapping; +import lombok.extern.slf4j.Slf4j; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo; + +/** http stub app. */ +@Slf4j +public class HttpStubApp { + + private static final String URL = "/client"; + + private static WireMockServer wireMockServer; + + @SuppressWarnings("unchecked") + public static void main(String[] args) { + wireMockServer = + new WireMockServer( + WireMockConfiguration.wireMockConfig() + .port(WireMockServerPortAllocator.getServerPort()) + .extensions(JsonTransform.class)); + wireMockServer.start(); + + wireMockServer.addStubMapping(setupServerStub()); + } + + private static StubMapping setupServerStub() { + return wireMockServer.stubFor( + get(urlPathEqualTo(URL)) + .willReturn(aResponse().withTransformers(JsonTransform.NAME))); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/app/JsonTransform.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/app/JsonTransform.java new file mode 100644 index 00000000..6766f44e --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/app/JsonTransform.java @@ -0,0 +1,125 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.app; + +import com.github.tomakehurst.wiremock.common.FileSource; +import com.github.tomakehurst.wiremock.extension.Parameters; +import com.github.tomakehurst.wiremock.extension.ResponseTransformer; +import com.github.tomakehurst.wiremock.http.Request; +import com.github.tomakehurst.wiremock.http.Response; + +import java.util.UUID; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Wiremock Extension that prepares HTTP REST endpoint response body. This extension is stateful, + * every next response will have values like id == counter, id2 == counter + 1 and uuid = + * randomValue value in its response, where counter is incremented for every subsequent request. + * + *

This class is used for AppDemo Wiremock. + */ +public class JsonTransform extends ResponseTransformer { + + public static final String NAME = "JsonTransform"; + + private static final String RESULT_JSON = + "{\n" + + "\t\"id\": \"&COUNTER&\",\n" + + "\t\"id2\": \"&COUNTER_2&\",\n" + + "\t\"uuid\": \"&UUID&\",\n" + + "\t\"picture\": \"http://placehold.it/32x32\",\n" + + "\t\"msg\": \"&PARAM&, cnt: &COUNTER&\",\n" + + "\t\"age\": 30,\n" + + "\t\"eyeColor\": \"green\",\n" + + "\t\"name\": \"Marva Fischer\",\n" + + "\t\"gender\": \"female\",\n" + + "\t\"company\": \"SILODYNE\",\n" + + "\t\"email\": \"marvafischer@silodyne.com\",\n" + + "\t\"phone\": \"+1 (990) 562-2120\",\n" + + "\t\"address\": \"601 Auburn Place, Bynum, New York, 7057\",\n" + + "\t\"about\": \"Proident Lorem et duis nisi tempor elit occaecat laboris" + + " dolore magna Lorem consequat. Deserunt velit minim nisi consectetur duis " + + "amet labore cupidatat. Pariatur sunt occaecat qui reprehenderit ipsum ex culpa " + + "ullamco ex duis adipisicing commodo sunt. Ad cupidatat magna ad in officia " + + "irure aute duis culpa et. Magna esse adipisicing consequat occaecat. 
Excepteur amet " + + "dolore occaecat sit officia dolore elit in cupidatat non anim.\\r\\n\",\n" + + "\t\"registered\": \"2020-07-11T11:13:32 -02:00\",\n" + + "\t\"latitude\": -35.237843,\n" + + "\t\"longitude\": 60.386104,\n" + + "\t\"tags\": [\n" + + "\t\t\"officia\",\n" + + "\t\t\"eiusmod\",\n" + + "\t\t\"labore\",\n" + + "\t\t\"ex\",\n" + + "\t\t\"aliqua\",\n" + + "\t\t\"consectetur\",\n" + + "\t\t\"excepteur\"\n" + + "\t],\n" + + "\t\"friends\": [\n" + + "\t\t{\n" + + "\t\t\t\"id\": 0,\n" + + "\t\t\t\"name\": \"Kemp Newman\"\n" + + "\t\t},\n" + + "\t\t{\n" + + "\t\t\t\"id\": 1,\n" + + "\t\t\t\"name\": \"Sears Blackburn\"\n" + + "\t\t},\n" + + "\t\t{\n" + + "\t\t\t\"id\": 2,\n" + + "\t\t\t\"name\": \"Lula Rogers\"\n" + + "\t\t}\n" + + "\t],\n" + + "\t\"details\": {\n" + + "\t\t\"isActive\": true,\n" + + "\t\t\"nestedDetails\": {\n" + + "\t\t\t\"index\": 0,\n" + + "\t\t\t\"guid\": \"d81fc542-6b49-4d59-8fb9-d57430d4871d\",\n" + + "\t\t\t\"balance\": \"$1,729.34\"\n" + + "\t\t}\n" + + "\t},\n" + + "\t\"greeting\": \"Hello, Marva Fischer! 
You have 7 unread messages.\",\n" + + "\t\"favoriteFruit\": \"banana\"\n" + + "}"; + private final AtomicInteger counter = new AtomicInteger(0); + + @Override + public Response transform( + Request request, Response response, FileSource files, Parameters parameters) { + int cnt = counter.getAndIncrement(); + + return Response.response() + .body(generateResponse(request.getUrl(), cnt)) + .status(response.getStatus()) + .statusMessage(response.getStatusMessage()) + .build(); + } + + @Override + public String getName() { + return NAME; + } + + private String generateResponse(String param, int counter) { + return RESULT_JSON + .replaceAll("&PARAM&", param) + .replaceAll("&COUNTER&", String.valueOf(counter)) + .replaceAll("&COUNTER_2&", String.valueOf(counter + 1)) + .replaceAll("&UUID&", UUID.randomUUID().toString()); + } +} diff --git a/src/test/java/com/getindata/connectors/http/internal/auth/OidcAccessTokenManagerTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/auth/OidcAccessTokenManagerTest.java similarity index 81% rename from src/test/java/com/getindata/connectors/http/internal/auth/OidcAccessTokenManagerTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/auth/OidcAccessTokenManagerTest.java index d02f57da..71538805 100644 --- a/src/test/java/com/getindata/connectors/http/internal/auth/OidcAccessTokenManagerTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/auth/OidcAccessTokenManagerTest.java @@ -1,23 +1,47 @@ -package com.getindata.connectors.http.internal.auth; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.auth; -import java.net.*; +import net.minidev.json.JSONObject; +import org.junit.jupiter.api.Test; + +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLParameters; +import javax.net.ssl.SSLSession; + +import java.net.Authenticator; +import java.net.CookieHandler; +import java.net.ProxySelector; +import java.net.URI; import java.net.http.HttpClient; import java.net.http.HttpHeaders; import java.net.http.HttpRequest; import java.net.http.HttpResponse; import java.time.Duration; -import java.util.*; +import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executor; -import javax.net.ssl.SSLContext; -import javax.net.ssl.SSLParameters; -import javax.net.ssl.SSLSession; -import net.minidev.json.JSONObject; -import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertTrue; +/** Test for {@link OidcAccessTokenManager}. 
*/ public class OidcAccessTokenManagerTest { private static final String BASE_URL = "http://localhost/aaa"; @@ -73,8 +97,8 @@ public void testAuthenticateWithExpiryReduction() throws InterruptedException { authHttpClient.setIsExpired(1); authHttpClient.setAccessToken("Access1"); String url = "http://localhost"; - OidcAccessTokenManager oidcAuth = new OidcAccessTokenManager(authHttpClient, - "abc", url, Duration.ofSeconds(5)); + OidcAccessTokenManager oidcAuth = + new OidcAccessTokenManager(authHttpClient, "abc", url, Duration.ofSeconds(5)); // apply the authorization to the httpRequest String token1 = oidcAuth.authenticate(); @@ -135,8 +159,8 @@ public Optional executor() { } @Override - public HttpResponse send(HttpRequest request, - HttpResponse.BodyHandler responseBodyHandler) { + public HttpResponse send( + HttpRequest request, HttpResponse.BodyHandler responseBodyHandler) { JSONObject json = new JSONObject(); @@ -153,8 +177,7 @@ public HttpResponse send(HttpRequest request, @Override public CompletableFuture> sendAsync( - HttpRequest request, - HttpResponse.BodyHandler responseBodyHandler) { + HttpRequest request, HttpResponse.BodyHandler responseBodyHandler) { return null; } diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/config/ConfigExceptionTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/config/ConfigExceptionTest.java new file mode 100644 index 00000000..0380c921 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/config/ConfigExceptionTest.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.config; + +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThat; + +/** Test for {@link ConfigException}. */ +class ConfigExceptionTest { + + @Test + public void testTemplateMessageWithNull() { + ConfigException exception = new ConfigException("myProp", -1, null); + assertThat(exception.getMessage()).isEqualTo("Invalid value -1 for configuration myProp"); + } + + @Test + public void testTemplateMessage() { + ConfigException exception = new ConfigException("myProp", -1, "Invalid test value."); + assertThat(exception.getMessage()) + .isEqualTo("Invalid value -1 for configuration myProp: Invalid test value."); + } +} diff --git a/src/test/java/com/getindata/connectors/http/internal/retry/HttpClientWithRetryTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/retry/HttpClientWithRetryTest.java similarity index 65% rename from src/test/java/com/getindata/connectors/http/internal/retry/HttpClientWithRetryTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/retry/HttpClientWithRetryTest.java index fe3a7254..9de7803e 100644 --- a/src/test/java/com/getindata/connectors/http/internal/retry/HttpClientWithRetryTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/retry/HttpClientWithRetryTest.java @@ -1,10 +1,25 @@ -package com.getindata.connectors.http.internal.retry; - -import java.io.IOException; -import java.net.http.HttpClient; -import java.net.http.HttpResponse; -import 
java.util.function.Supplier; -import java.util.stream.Stream; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.retry; + +import org.apache.flink.connector.http.HttpStatusCodeValidationFailedException; +import org.apache.flink.connector.http.status.HttpResponseChecker; import io.github.resilience4j.core.IntervalFunction; import io.github.resilience4j.retry.RetryConfig; @@ -15,6 +30,13 @@ import org.junit.jupiter.params.provider.MethodSource; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; + +import java.io.IOException; +import java.net.http.HttpClient; +import java.net.http.HttpResponse; +import java.util.function.Supplier; +import java.util.stream.Stream; + import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; @@ -23,33 +45,30 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import com.getindata.connectors.http.HttpStatusCodeValidationFailedException; -import com.getindata.connectors.http.internal.status.HttpResponseChecker; - - +/** Test for {@link HttpClientWithRetry}. 
*/ @SuppressWarnings("unchecked") @ExtendWith(MockitoExtension.class) class HttpClientWithRetryTest { - @Mock - private HttpClient httpClient; + @Mock private HttpClient httpClient; - @Mock - private HttpResponseChecker responseChecker; + @Mock private HttpResponseChecker responseChecker; private HttpClientWithRetry client; @BeforeEach void setup() { - var retryConfig = RetryConfig.custom() - .maxAttempts(3) - .intervalFunction(IntervalFunction.of(1)) - .build(); + var retryConfig = + RetryConfig.custom() + .maxAttempts(3) + .intervalFunction(IntervalFunction.of(1)) + .build(); client = new HttpClientWithRetry(httpClient, retryConfig, responseChecker); } @Test - void shouldRetryOnIOException() throws IOException, InterruptedException, HttpStatusCodeValidationFailedException { + void shouldRetryOnIOException() + throws IOException, InterruptedException, HttpStatusCodeValidationFailedException { var response = mock(HttpResponse.class); when(httpClient.send(any(), any())).thenThrow(IOException.class).thenReturn(response); when(responseChecker.isSuccessful(response)).thenReturn(true); @@ -83,8 +102,13 @@ void shouldFailAfterExceedingMaxRetryAttempts() throws IOException, InterruptedE when(responseChecker.isSuccessful(response)).thenReturn(false); when(responseChecker.isTemporalError(response)).thenReturn(true); - var exception = assertThrows(HttpStatusCodeValidationFailedException.class, - () -> client.send(mock(Supplier.class), mock(HttpResponse.BodyHandler.class))); + var exception = + assertThrows( + HttpStatusCodeValidationFailedException.class, + () -> + client.send( + mock(Supplier.class), + mock(HttpResponse.BodyHandler.class))); verify(httpClient, times(3)).send(any(), any()); assertEquals(response, exception.getResponse()); @@ -97,8 +121,9 @@ void shouldFailOnError() throws IOException, InterruptedException { when(responseChecker.isSuccessful(response)).thenReturn(false); when(responseChecker.isTemporalError(response)).thenReturn(false); - 
assertThrows(HttpStatusCodeValidationFailedException.class, - () -> client.send(mock(Supplier.class), mock(HttpResponse.BodyHandler.class))); + assertThrows( + HttpStatusCodeValidationFailedException.class, + () -> client.send(mock(Supplier.class), mock(HttpResponse.BodyHandler.class))); verify(httpClient, times(1)).send(any(), any()); } @@ -107,8 +132,9 @@ void shouldFailOnError() throws IOException, InterruptedException { void shouldHandleUncheckedExceptionFromRetry() throws IOException, InterruptedException { when(httpClient.send(any(), any())).thenThrow(RuntimeException.class); - assertThrows(RuntimeException.class, - () -> client.send(mock(Supplier.class), mock(HttpResponse.BodyHandler.class))); + assertThrows( + RuntimeException.class, + () -> client.send(mock(Supplier.class), mock(HttpResponse.BodyHandler.class))); verify(httpClient, times(1)).send(any(), any()); } @@ -132,11 +158,13 @@ private static Stream> failures() { @ParameterizedTest @MethodSource("failures") - void shouldFailOnException(Class exceptionClass) throws IOException, InterruptedException { + void shouldFailOnException(Class exceptionClass) + throws IOException, InterruptedException { when(httpClient.send(any(), any())).thenThrow(exceptionClass); - assertThrows(exceptionClass, - () -> client.send(mock(Supplier.class), mock(HttpResponse.BodyHandler.class))); + assertThrows( + exceptionClass, + () -> client.send(mock(Supplier.class), mock(HttpResponse.BodyHandler.class))); verify(httpClient).send(any(), any()); } diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/retry/RetryConfigProviderTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/retry/RetryConfigProviderTest.java new file mode 100644 index 00000000..1cd7d6de --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/retry/RetryConfigProviderTest.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more 
contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.retry; + +import org.apache.flink.configuration.Configuration; + +import org.junit.jupiter.api.Test; + +import java.util.stream.IntStream; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.mockStatic; + +/** Test for {@link RetryConfigProvider}. 
*/ +class RetryConfigProviderTest { + + @Test + void verifyFixedDelayRetryConfig() { + var config = new Configuration(); + config.setString("http.source.lookup.retry-strategy.type", "fixed-delay"); + config.setString("http.source.lookup.retry-strategy.fixed-delay.delay", "10s"); + config.setInteger("lookup.max-retries", 12); + + var retryConfig = RetryConfigProvider.create(config); + + assertEquals(13, retryConfig.getMaxAttempts()); + IntStream.range(1, 12) + .forEach( + attempt -> + assertEquals( + 10000, retryConfig.getIntervalFunction().apply(attempt))); + } + + @Test + void verifyExponentialDelayConfig() { + var config = new Configuration(); + config.setString("http.source.lookup.retry-strategy.type", "exponential-delay"); + + config.setString( + "http.source.lookup.retry-strategy.exponential-delay.initial-backoff", "15ms"); + config.setString( + "http.source.lookup.retry-strategy.exponential-delay.max-backoff", "120ms"); + config.setInteger( + "http.source.lookup.retry-strategy.exponential-delay.backoff-multiplier", 2); + config.setInteger("lookup.max-retries", 6); + + var retryConfig = RetryConfigProvider.create(config); + var intervalFunction = retryConfig.getIntervalFunction(); + + assertEquals(7, retryConfig.getMaxAttempts()); + assertEquals(15, intervalFunction.apply(1)); + assertEquals(30, intervalFunction.apply(2)); + assertEquals(60, intervalFunction.apply(3)); + assertEquals(120, intervalFunction.apply(4)); + assertEquals(120, intervalFunction.apply(5)); + assertEquals(120, intervalFunction.apply(6)); + } + + @Test + void failWhenStrategyIsUnsupported() { + var config = new Configuration(); + config.setString("http.source.lookup.retry-strategy.type", "dummy"); + + try (var mockedStatic = mockStatic(RetryStrategyType.class)) { + var dummyStrategy = mock(RetryStrategyType.class); + mockedStatic.when(() -> RetryStrategyType.fromCode("dummy")).thenReturn(dummyStrategy); + + assertThrows(IllegalArgumentException.class, () -> 
RetryConfigProvider.create(config)); + } + } +} diff --git a/src/test/java/com/getindata/connectors/http/internal/retry/RetryStrategyTypeTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/retry/RetryStrategyTypeTest.java similarity index 57% rename from src/test/java/com/getindata/connectors/http/internal/retry/RetryStrategyTypeTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/retry/RetryStrategyTypeTest.java index 6a411367..41142057 100644 --- a/src/test/java/com/getindata/connectors/http/internal/retry/RetryStrategyTypeTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/retry/RetryStrategyTypeTest.java @@ -1,15 +1,35 @@ -package com.getindata.connectors.http.internal.retry; - -import java.util.stream.Stream; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.retry; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; + +import java.util.stream.Stream; + import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; +/** Test for {@link RetryStrategyType}. */ class RetryStrategyTypeTest { static Stream inputArguments() { @@ -17,8 +37,7 @@ static Stream inputArguments() { Arguments.of("FIXED-DELAY", RetryStrategyType.FIXED_DELAY), Arguments.of("fixed-delay", RetryStrategyType.FIXED_DELAY), Arguments.of("exponential-delay", RetryStrategyType.EXPONENTIAL_DELAY), - Arguments.of("EXPONENTIAL-DELAY", RetryStrategyType.EXPONENTIAL_DELAY) - ); + Arguments.of("EXPONENTIAL-DELAY", RetryStrategyType.EXPONENTIAL_DELAY)); } @ParameterizedTest @@ -30,13 +49,7 @@ void parseFromCodes(String code, RetryStrategyType expectedType) { } @ParameterizedTest - @ValueSource(strings = { - "fixed_delay", - "FIXED_DELAY", - "ABC", - "FIXED-DELA", - "exponential_delay" - }) + @ValueSource(strings = {"fixed_delay", "FIXED_DELAY", "ABC", "FIXED-DELA", "exponential_delay"}) void failWhenCodeIsIllegal(String code) { assertThrows(IllegalArgumentException.class, () -> RetryStrategyType.fromCode(code)); } diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkBuilderTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkBuilderTest.java new file mode 100644 index 00000000..f0ef779f --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkBuilderTest.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink; + +import org.apache.flink.connector.base.sink.writer.ElementConverter; +import org.apache.flink.connector.http.HttpSink; +import org.apache.flink.connector.http.clients.SinkHttpClient; +import org.apache.flink.connector.http.clients.SinkHttpClientResponse; + +import org.junit.jupiter.api.Test; + +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.concurrent.CompletableFuture; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** Test for {@link HttpSink }. 
*/ +public class HttpSinkBuilderTest { + + private static final ElementConverter ELEMENT_CONVERTER = + (s, context) -> new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8)); + + @Test + public void testEmptyUrl() { + assertThrows( + IllegalArgumentException.class, + () -> + HttpSink.builder() + .setElementConverter(ELEMENT_CONVERTER) + .setSinkHttpClientBuilder( + (properties, + httpPostRequestCallback, + headerPreprocessor, + requestSubmitterFactory) -> new MockHttpClient()) + .setEndpointUrl("") + .build()); + } + + @Test + public void testNullUrl() { + assertThrows( + IllegalArgumentException.class, + () -> + HttpSink.builder() + .setElementConverter(ELEMENT_CONVERTER) + .setSinkHttpClientBuilder( + (properties, + httpPostRequestCallback, + headerPreprocessor, + requestSubmitterFactory) -> new MockHttpClient()) + .build()); + } + + @Test + public void testNullHttpClient() { + assertThrows( + NullPointerException.class, + () -> + HttpSink.builder() + .setElementConverter(ELEMENT_CONVERTER) + .setSinkHttpClientBuilder(null) + .setEndpointUrl("localhost:8000") + .build()); + } + + private static class MockHttpClient implements SinkHttpClient { + + MockHttpClient() {} + + @Override + public CompletableFuture putRequests( + List requestEntries, String endpointUrl) { + throw new RuntimeException("Mock implementation of HttpClient"); + } + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkConnectionTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkConnectionTest.java new file mode 100644 index 00000000..89d84ffb --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkConnectionTest.java @@ -0,0 +1,378 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink; + +import org.apache.flink.configuration.ConfigConstants; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.configuration.MetricOptions; +import org.apache.flink.connector.http.HttpSink; +import org.apache.flink.connector.http.WireMockServerPortAllocator; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.config.SinkRequestSubmitMode; +import org.apache.flink.connector.http.sink.httpclient.JavaNetSinkHttpClient; +import org.apache.flink.metrics.Counter; +import org.apache.flink.metrics.Metric; +import org.apache.flink.metrics.MetricConfig; +import org.apache.flink.metrics.MetricGroup; +import org.apache.flink.metrics.reporter.MetricReporter; +import org.apache.flink.metrics.reporter.MetricReporterFactory; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.testutils.junit.extensions.ContextClassLoaderExtension; + +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.type.TypeReference; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; + +import com.github.tomakehurst.wiremock.WireMockServer; +import 
com.github.tomakehurst.wiremock.http.Fault; +import com.github.tomakehurst.wiremock.stubbing.ServeEvent; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Properties; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.any; +import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; +import static com.github.tomakehurst.wiremock.client.WireMock.serverError; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo; +import static com.github.tomakehurst.wiremock.stubbing.Scenario.STARTED; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** Test for {@link HttpSink }. 
 */ +public class HttpSinkConnectionTest { + + @RegisterExtension + static final ContextClassLoaderExtension CONTEXT_CLASS_LOADER_EXTENSION = + ContextClassLoaderExtension.builder() + .withServiceEntry( + MetricReporterFactory.class, + SendErrorsTestReporterFactory.class.getName()) + .build(); + + private static int serverPort; + + private static int secServerPort; + + private static final Set messageIds = + IntStream.range(0, 50).boxed().collect(Collectors.toSet()); + + private static final List messages = + messageIds.stream() + .map(i -> "{\"http-sink-id\":" + i + "}") + .collect(Collectors.toList()); + + private StreamExecutionEnvironment env; + + private WireMockServer wireMockServer; + + @BeforeEach + public void setUp() { + SendErrorsTestReporterFactory.reset(); + serverPort = WireMockServerPortAllocator.getServerPort(); + secServerPort = WireMockServerPortAllocator.getSecureServerPort(); + + env = + StreamExecutionEnvironment.getExecutionEnvironment( + new Configuration() { + { + setString( + ConfigConstants.METRICS_REPORTER_PREFIX + + "test." + + MetricOptions.REPORTER_FACTORY_CLASS.key(), + SendErrorsTestReporterFactory.class.getName()); + } + }); + + wireMockServer = new WireMockServer(serverPort, secServerPort); + wireMockServer.start(); + } + + @AfterEach + public void tearDown() { + wireMockServer.stop(); + } + + @Test + public void testConnection_singleRequestMode() throws Exception { + + @SuppressWarnings("unchecked") + Function> responseMapper = + response -> { + try { + return new ObjectMapper() + .readValue(response.getRequest().getBody(), HashMap.class); + } catch (IOException e) { + throw new RuntimeException(e); + } + }; + + List> responses = + testConnection(SinkRequestSubmitMode.SINGLE, responseMapper); + + var idsSet = new HashSet<>(messageIds); + for (var request : responses) { + var el = (Integer) request.get("http-sink-id"); + assertTrue(idsSet.contains(el)); + idsSet.remove(el); + } + + // check that we got responses for all requests. 
+ assertTrue(idsSet.isEmpty()); + } + + @Test + public void testConnection_batchRequestMode() throws Exception { + + Function>> responseMapper = + response -> { + try { + return new ObjectMapper() + .readValue( + response.getRequest().getBody(), + new TypeReference>>() {}); + } catch (IOException e) { + throw new RuntimeException(e); + } + }; + + List>> responses = + testConnection(SinkRequestSubmitMode.BATCH, responseMapper); + + var idsSet = new HashSet<>(messageIds); + for (var requests : responses) { + for (var request : requests) { + var el = (Integer) request.get("http-sink-id"); + assertTrue(idsSet.contains(el)); + idsSet.remove(el); + } + } + + // check that we got responses for all requests. + assertTrue(idsSet.isEmpty()); + } + + public List testConnection( + SinkRequestSubmitMode mode, Function responseMapper) + throws Exception { + + String endpoint = "/myendpoint"; + String contentTypeHeader = "application/json"; + + wireMockServer.stubFor( + any(urlPathEqualTo(endpoint)) + .withHeader("Content-Type", equalTo(contentTypeHeader)) + .willReturn( + aResponse() + .withHeader("Content-Type", contentTypeHeader) + .withStatus(200) + .withBody("{}"))); + + var source = env.fromCollection(messages); + var httpSink = + HttpSink.builder() + .setEndpointUrl("http://localhost:" + serverPort + endpoint) + .setElementConverter( + (s, _context) -> + new HttpSinkRequestEntry( + "POST", s.getBytes(StandardCharsets.UTF_8))) + .setSinkHttpClientBuilder(JavaNetSinkHttpClient::new) + .setProperty( + HttpConnectorConfigConstants.SINK_HEADER_PREFIX + "Content-Type", + contentTypeHeader) + .setProperty( + HttpConnectorConfigConstants.SINK_HTTP_REQUEST_MODE, mode.getMode()) + .build(); + source.sinkTo(httpSink); + env.execute("Http Sink test connection"); + + var responses = wireMockServer.getAllServeEvents(); + assertTrue( + responses.stream() + .allMatch( + response -> + Objects.equals(response.getRequest().getUrl(), endpoint))); + assertTrue( + 
responses.stream().allMatch(response -> response.getResponse().getStatus() == 200)); + assertTrue( + responses.stream() + .allMatch( + response -> + Objects.equals(response.getRequest().getUrl(), endpoint))); + assertTrue( + responses.stream().allMatch(response -> response.getResponse().getStatus() == 200)); + + List collect = responses.stream().map(responseMapper).collect(Collectors.toList()); + assertTrue(collect.stream().allMatch(Objects::nonNull)); + return collect; + } + + @Test + public void testServerErrorConnection() throws Exception { + wireMockServer.stubFor( + any(urlPathEqualTo("/myendpoint")) + .withHeader("Content-Type", equalTo("application/json")) + .inScenario("Retry Scenario") + .whenScenarioStateIs(STARTED) + .willReturn(serverError()) + .willSetStateTo("Cause Success")); + wireMockServer.stubFor( + any(urlPathEqualTo("/myendpoint")) + .withHeader("Content-Type", equalTo("application/json")) + .inScenario("Retry Scenario") + .whenScenarioStateIs("Cause Success") + .willReturn(aResponse().withStatus(200)) + .willSetStateTo("Cause Success")); + + var source = env.fromCollection(List.of(messages.get(0))); + var httpSink = + HttpSink.builder() + .setEndpointUrl("http://localhost:" + serverPort + "/myendpoint") + .setElementConverter( + (s, _context) -> + new HttpSinkRequestEntry( + "POST", s.getBytes(StandardCharsets.UTF_8))) + .setSinkHttpClientBuilder(JavaNetSinkHttpClient::new) + .build(); + source.sinkTo(httpSink); + env.execute("Http Sink test failed connection"); + + assertEquals(1, SendErrorsTestReporterFactory.getCount()); + // TODO: reintroduce along with the retries + // var postedRequests = wireMockServer + // .findAll(postRequestedFor(urlPathEqualTo("/myendpoint"))); + // assertEquals(2, postedRequests.size()); + // assertEquals(postedRequests.get(0).getBodyAsString(), + // postedRequests.get(1).getBodyAsString()); + } + + @Test + public void testFailedConnection() throws Exception { + wireMockServer.stubFor( + 
any(urlPathEqualTo("/myendpoint")) + .withHeader("Content-Type", equalTo("application/json")) + .inScenario("Retry Scenario") + .whenScenarioStateIs(STARTED) + .willReturn(aResponse().withFault(Fault.EMPTY_RESPONSE)) + .willSetStateTo("Cause Success")); + + wireMockServer.stubFor( + any(urlPathEqualTo("/myendpoint")) + .withHeader("Content-Type", equalTo("application/json")) + .inScenario("Retry Scenario") + .whenScenarioStateIs("Cause Success") + .willReturn(aResponse().withStatus(200)) + .willSetStateTo("Cause Success")); + + var source = env.fromCollection(List.of(messages.get(0))); + var httpSink = + HttpSink.builder() + .setEndpointUrl("http://localhost:" + serverPort + "/myendpoint") + .setElementConverter( + (s, _context) -> + new HttpSinkRequestEntry( + "POST", s.getBytes(StandardCharsets.UTF_8))) + .setSinkHttpClientBuilder(JavaNetSinkHttpClient::new) + .build(); + source.sinkTo(httpSink); + env.execute("Http Sink test failed connection"); + + assertEquals(1, SendErrorsTestReporterFactory.getCount()); + // var postedRequests = wireMockServer + // .findAll(postRequestedFor(urlPathEqualTo("/myendpoint"))); + // assertEquals(2, postedRequests.size()); + // assertEquals(postedRequests.get(0).getBodyAsString(), + // postedRequests.get(1).getBodyAsString()); + } + + @Test + public void testFailedConnection404OnIncludeList() throws Exception { + wireMockServer.stubFor( + any(urlPathEqualTo("/myendpoint")) + .withHeader("Content-Type", equalTo("application/json")) + .willReturn(aResponse().withBody("404 body").withStatus(404))); + + var source = env.fromCollection(List.of(messages.get(0))); + var httpSink = + HttpSink.builder() + .setEndpointUrl("http://localhost:" + serverPort + "/myendpoint") + .setElementConverter( + (s, _context) -> + new HttpSinkRequestEntry( + "POST", s.getBytes(StandardCharsets.UTF_8))) + .setSinkHttpClientBuilder(JavaNetSinkHttpClient::new) + .setProperty("http.sink.error.code.exclude", "404, 405") + .setProperty("http.sink.error.code", 
"4XX") + .build(); + source.sinkTo(httpSink); + env.execute("Http Sink test failed connection"); + + assertEquals(0, SendErrorsTestReporterFactory.getCount()); + } + + /** must be public because of the reflection. */ + public static class SendErrorsTestReporterFactory + implements MetricReporter, MetricReporterFactory { + static volatile List numRecordsSendErrors = null; + + public static long getCount() { + return numRecordsSendErrors.stream().map(Counter::getCount).reduce(0L, Long::sum); + } + + public static void reset() { + numRecordsSendErrors = new ArrayList<>(); + } + + @Override + public void open(MetricConfig metricConfig) {} + + @Override + public void close() {} + + @Override + public void notifyOfAddedMetric(Metric metric, String s, MetricGroup metricGroup) { + + if ("numRecordsSendErrors".equals(s)) { + numRecordsSendErrors.add((Counter) metric); + } + } + + @Override + public void notifyOfRemovedMetric(Metric metric, String s, MetricGroup metricGroup) {} + + @Override + public MetricReporter createMetricReporter(Properties properties) { + return new SendErrorsTestReporterFactory(); + } + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkWriterStateSerializerTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkWriterStateSerializerTest.java new file mode 100644 index 00000000..e40552f3 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkWriterStateSerializerTest.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink; + +import org.apache.flink.connector.base.sink.writer.BufferedRequestState; +import org.apache.flink.connector.base.sink.writer.ElementConverter; + +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import static org.apache.flink.connector.base.sink.writer.AsyncSinkWriterTestUtils.assertThatBufferStatesAreEqual; +import static org.apache.flink.connector.base.sink.writer.AsyncSinkWriterTestUtils.getTestState; + +/** Test for {@link HttpSinkWriter }. */ +public class HttpSinkWriterStateSerializerTest { + + private static final ElementConverter ELEMENT_CONVERTER = + (s, _context) -> new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8)); + + @Test + public void testSerializeAndDeserialize() throws IOException { + BufferedRequestState expectedState = + getTestState( + ELEMENT_CONVERTER, + httpSinkRequestEntry -> + Math.toIntExact(httpSinkRequestEntry.getSizeInBytes())); + + HttpSinkWriterStateSerializer serializer = new HttpSinkWriterStateSerializer(); + BufferedRequestState actualState = + serializer.deserialize(1, serializer.serialize(expectedState)); + + assertThatBufferStatesAreEqual(actualState, expectedState); + } +} diff --git a/src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkWriterTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkWriterTest.java similarity index 56% rename from src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkWriterTest.java 
rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkWriterTest.java index db1975ed..865daadd 100644 --- a/src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkWriterTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/HttpSinkWriterTest.java @@ -1,57 +1,72 @@ -package com.getindata.connectors.http.internal.sink; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.sink; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Properties; -import java.util.concurrent.CompletableFuture; -import java.util.function.Consumer; - -import lombok.extern.slf4j.Slf4j; import org.apache.flink.api.connector.sink2.Sink.InitContext; import org.apache.flink.connector.base.sink.writer.BufferedRequestState; import org.apache.flink.connector.base.sink.writer.ElementConverter; +import org.apache.flink.connector.http.clients.SinkHttpClient; +import org.apache.flink.connector.http.clients.SinkHttpClientResponse; import org.apache.flink.metrics.Counter; import org.apache.flink.metrics.groups.OperatorIOMetricGroup; import org.apache.flink.metrics.groups.SinkWriterMetricGroup; + +import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Properties; +import java.util.concurrent.CompletableFuture; +import java.util.function.Consumer; + import static org.mockito.ArgumentMatchers.anyList; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import com.getindata.connectors.http.internal.SinkHttpClient; -import com.getindata.connectors.http.internal.SinkHttpClientResponse; - +/** Test for {@link HttpSinkWriter }. 
*/ @Slf4j @ExtendWith(MockitoExtension.class) class HttpSinkWriterTest { private HttpSinkWriter httpSinkWriter; - @Mock - private ElementConverter elementConverter; + @Mock private ElementConverter elementConverter; - @Mock - private InitContext context; + @Mock private InitContext context; - @Mock - private SinkHttpClient httpClient; + @Mock private SinkHttpClient httpClient; // To work with Flink 1.15 and Flink 1.16 @Mock(lenient = true) private SinkWriterMetricGroup metricGroup; - @Mock - private OperatorIOMetricGroup operatorIOMetricGroup; + @Mock private OperatorIOMetricGroup operatorIOMetricGroup; - @Mock - private Counter errorCounter; + @Mock private Counter errorCounter; @BeforeEach public void setUp() { @@ -61,19 +76,20 @@ public void setUp() { Collection> stateBuffer = new ArrayList<>(); - this.httpSinkWriter = new HttpSinkWriter<>( - elementConverter, - context, - 10, - 10, - 100, - 10, - 10, - 10, - "http://localhost/client", - httpClient, - stateBuffer, - new Properties()); + this.httpSinkWriter = + new HttpSinkWriter<>( + elementConverter, + context, + 10, + 10, + 100, + 10, + 10, + 10, + "http://localhost/client", + httpClient, + stateBuffer, + new Properties()); } @Test @@ -86,7 +102,7 @@ public void testErrorMetric() throws InterruptedException { HttpSinkRequestEntry request = new HttpSinkRequestEntry("PUT", "hello".getBytes()); Consumer> requestResult = - httpSinkRequestEntries -> log.info(String.valueOf(httpSinkRequestEntries)); + httpSinkRequestEntries -> log.info(String.valueOf(httpSinkRequestEntries)); List requestEntries = Collections.singletonList(request); this.httpSinkWriter.submitRequestEntries(requestEntries, requestResult); diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitterFactoryTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitterFactoryTest.java new file mode 100644 index 00000000..e1efbdd8 --- /dev/null 
+++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitterFactoryTest.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink.httpclient; + +import org.apache.flink.connector.http.config.ConfigException; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +import java.util.Properties; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** Test for {@link BatchRequestSubmitterFactory}. 
*/ +class BatchRequestSubmitterFactoryTest { + + @ParameterizedTest + @ValueSource(ints = {0, -1}) + public void shouldThrowIfInvalidDefaultSize(int invalidArgument) { + assertThrows( + IllegalArgumentException.class, + () -> new BatchRequestSubmitterFactory(invalidArgument)); + } + + @Test + public void shouldCreateSubmitterWithDefaultBatchSize() { + + int defaultBatchSize = 10; + BatchRequestSubmitter submitter = + new BatchRequestSubmitterFactory(defaultBatchSize) + .createSubmitter(new Properties(), new String[0]); + + assertThat(submitter.getBatchSize()).isEqualTo(defaultBatchSize); + } + + @ParameterizedTest + @ValueSource(strings = {"1", "2"}) + public void shouldCreateSubmitterWithCustomBatchSize(String batchSize) { + + Properties properties = new Properties(); + properties.setProperty( + HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, batchSize); + + BatchRequestSubmitter submitter = + new BatchRequestSubmitterFactory(10).createSubmitter(properties, new String[0]); + + assertThat(submitter.getBatchSize()).isEqualTo(Integer.valueOf(batchSize)); + } + + @ParameterizedTest + @ValueSource(strings = {"0", "-1"}) + public void shouldThrowIfBatchSizeToSmall(String invalidBatchSize) { + + Properties properties = new Properties(); + properties.setProperty( + HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, invalidBatchSize); + + BatchRequestSubmitterFactory factory = new BatchRequestSubmitterFactory(10); + + assertThrows( + ConfigException.class, () -> factory.createSubmitter(properties, new String[0])); + } + + @ParameterizedTest + @ValueSource(strings = {"1.1", "2,2", "hello"}) + public void shouldThrowIfInvalidBatchSize(String invalidBatchSize) { + + Properties properties = new Properties(); + properties.setProperty( + HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, invalidBatchSize); + + BatchRequestSubmitterFactory factory = new BatchRequestSubmitterFactory(10); + + assertThrows( + ConfigException.class, () -> 
factory.createSubmitter(properties, new String[0])); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitterTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitterTest.java new file mode 100644 index 00000000..12705ac4 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/BatchRequestSubmitterTest.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.sink.httpclient; + +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.sink.HttpSinkRequestEntry; + +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.MethodSource; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.net.http.HttpClient; +import java.util.List; +import java.util.Properties; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +/** Test for {@link BatchRequestSubmitter}. 
*/ +@ExtendWith(MockitoExtension.class) +class BatchRequestSubmitterTest { + + @Mock private HttpClient mockHttpClient; + + @ParameterizedTest + @CsvSource(value = {"50, 1", "5, 1", "3, 2", "2, 3", "1, 5"}) + public void submitBatches(int batchSize, int expectedNumberOfBatchRequests) { + + Properties properties = new Properties(); + properties.setProperty( + HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, + String.valueOf(batchSize)); + + when(mockHttpClient.sendAsync(any(), any())).thenReturn(new CompletableFuture<>()); + + BatchRequestSubmitter submitter = + new BatchRequestSubmitter(properties, new String[0], mockHttpClient); + + submitter.submit( + "http://hello.pl", + IntStream.range(0, 5) + .mapToObj(val -> new HttpSinkRequestEntry("PUT", new byte[0])) + .collect(Collectors.toList())); + + verify(mockHttpClient, times(expectedNumberOfBatchRequests)).sendAsync(any(), any()); + } + + private static Stream httpRequestMethods() { + return Stream.of( + Arguments.of(List.of("PUT", "PUT", "PUT", "PUT", "POST"), 2), + Arguments.of(List.of("PUT", "PUT", "PUT", "POST", "PUT"), 3), + Arguments.of(List.of("POST", "PUT", "POST", "POST", "PUT"), 4)); + } + + @ParameterizedTest + @MethodSource("httpRequestMethods") + public void shouldSplitBatchPerHttpMethod( + List httpMethods, int expectedNumberOfBatchRequests) { + + Properties properties = new Properties(); + properties.setProperty( + HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, String.valueOf(50)); + + when(mockHttpClient.sendAsync(any(), any())).thenReturn(new CompletableFuture<>()); + + BatchRequestSubmitter submitter = + new BatchRequestSubmitter(properties, new String[0], mockHttpClient); + + submitter.submit( + "http://hello.pl", + httpMethods.stream() + .map(method -> new HttpSinkRequestEntry(method, new byte[0])) + .collect(Collectors.toList())); + + verify(mockHttpClient, times(expectedNumberOfBatchRequests)).sendAsync(any(), any()); + } +} diff --git 
a/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/JavaNetSinkHttpClientConnectionTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/JavaNetSinkHttpClientConnectionTest.java new file mode 100644 index 00000000..ef959b85 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/JavaNetSinkHttpClientConnectionTest.java @@ -0,0 +1,329 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.sink.httpclient; + +import org.apache.flink.connector.http.HttpsConnectionTestBase; +import org.apache.flink.connector.http.WireMockServerPortAllocator; +import org.apache.flink.connector.http.clients.SinkHttpClientResponse; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.sink.HttpSinkRequestEntry; + +import com.github.tomakehurst.wiremock.WireMockServer; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.ValueSource; + +import java.io.File; +import java.util.Collections; +import java.util.Properties; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.any; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** Test for {@link JavaNetSinkHttpClient }. 
*/ +class JavaNetSinkHttpClientConnectionTest extends HttpsConnectionTestBase { + + private RequestSubmitterFactory perRequestSubmitterFactory; + + private RequestSubmitterFactory batchRequestSubmitterFactory; + + public static int serverPort; + + public static int httpsServerPort; + + @BeforeEach + public void setUp() { + super.setUp(); + serverPort = WireMockServerPortAllocator.getServerPort(); + httpsServerPort = WireMockServerPortAllocator.getSecureServerPort(); + + this.perRequestSubmitterFactory = new PerRequestRequestSubmitterFactory(); + this.batchRequestSubmitterFactory = new BatchRequestSubmitterFactory(50); + } + + @AfterEach + public void tearDown() { + super.tearDown(); + } + + @Test + public void testHttpConnection() { + + wireMockServer = new WireMockServer(serverPort); + wireMockServer.start(); + mockEndPoint(wireMockServer); + + testSinkClientForConnection( + new Properties(), "http://localhost:", serverPort, perRequestSubmitterFactory); + + testSinkClientForConnection( + new Properties(), "http://localhost:", serverPort, batchRequestSubmitterFactory); + } + + @Test + public void testHttpsConnectionWithSelfSignedCert() { + + File keyStoreFile = new File(SERVER_KEYSTORE_PATH); + + wireMockServer = + new WireMockServer( + options() + .httpsPort(httpsServerPort) + .httpDisabled(true) + .keystorePath(keyStoreFile.getAbsolutePath()) + .keystorePassword("password") + .keyManagerPassword("password")); + + wireMockServer.start(); + mockEndPoint(wireMockServer); + + properties.setProperty(HttpConnectorConfigConstants.ALLOW_SELF_SIGNED, "true"); + + testSinkClientForConnection( + properties, "https://localhost:", httpsServerPort, perRequestSubmitterFactory); + + testSinkClientForConnection( + properties, "https://localhost:", httpsServerPort, batchRequestSubmitterFactory); + } + + @ParameterizedTest + @ValueSource(strings = {"ca.crt", "server.crt", "ca_server_bundle.cert.pem"}) + public void testHttpsConnectionWithAddedCerts(String certName) { + + File 
keyStoreFile = new File(SERVER_KEYSTORE_PATH); + File trustedCert = new File(CERTS_PATH + certName); + + wireMockServer = + new WireMockServer( + options() + .httpsPort(httpsServerPort) + .httpDisabled(true) + .keystorePath(keyStoreFile.getAbsolutePath()) + .keystorePassword("password") + .keyManagerPassword("password")); + + wireMockServer.start(); + mockEndPoint(wireMockServer); + + properties.setProperty( + HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, trustedCert.getAbsolutePath()); + + testSinkClientForConnection( + properties, "https://localhost:", httpsServerPort, perRequestSubmitterFactory); + + testSinkClientForConnection( + properties, "https://localhost:", httpsServerPort, batchRequestSubmitterFactory); + } + + @ParameterizedTest + @ValueSource(strings = {"clientPrivateKey.pem", "clientPrivateKey.der"}) + public void testMTlsConnection(String clientPrivateKeyName) { + + File keyStoreFile = new File(SERVER_KEYSTORE_PATH); + File trustStoreFile = new File(SERVER_TRUSTSTORE_PATH); + File serverTrustedCert = new File(CERTS_PATH + "ca.crt"); + + File clientCert = new File(CERTS_PATH + "client.crt"); + File clientPrivateKey = new File(CERTS_PATH + clientPrivateKeyName); + + this.wireMockServer = + new WireMockServer( + options() + .httpDisabled(true) + .httpsPort(httpsServerPort) + .keystorePath(keyStoreFile.getAbsolutePath()) + .keystorePassword("password") + .keyManagerPassword("password") + .needClientAuth(true) + .trustStorePath(trustStoreFile.getAbsolutePath()) + .trustStorePassword("password")); + + wireMockServer.start(); + mockEndPoint(wireMockServer); + + properties.setProperty( + HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, + serverTrustedCert.getAbsolutePath()); + properties.setProperty( + HttpConnectorConfigConstants.CLIENT_CERT, clientCert.getAbsolutePath()); + properties.setProperty( + HttpConnectorConfigConstants.CLIENT_PRIVATE_KEY, + clientPrivateKey.getAbsolutePath()); + + testSinkClientForConnection( + properties, 
"https://localhost:", httpsServerPort, perRequestSubmitterFactory); + + testSinkClientForConnection( + properties, "https://localhost:", httpsServerPort, batchRequestSubmitterFactory); + } + + @Test + public void testMTlsConnectionUsingKeyStore() { + String password = "password"; + + String clientKeyStoreName = "client_keyStore.p12"; + String serverKeyStoreName = "serverKeyStore.jks"; + String serverTrustStoreName = "serverTrustStore.jks"; + + File clientKeyStoreFile = new File(CERTS_PATH + clientKeyStoreName); + File serverKeyStoreFile = new File(CERTS_PATH + serverKeyStoreName); + File serverTrustStoreFile = new File(CERTS_PATH + serverTrustStoreName); + File serverTrustedCert = new File(CERTS_PATH + "ca_server_bundle.cert.pem"); + + this.wireMockServer = + new WireMockServer( + options() + .httpDisabled(true) + .httpsPort(httpsServerPort) + .keystorePath(serverKeyStoreFile.getAbsolutePath()) + .keystorePassword(password) + .keyManagerPassword(password) + .needClientAuth(true) + .trustStorePath(serverTrustStoreFile.getAbsolutePath()) + .trustStorePassword(password)); + + wireMockServer.start(); + mockEndPoint(wireMockServer); + + properties.setProperty(HttpConnectorConfigConstants.KEY_STORE_PASSWORD, password); + properties.setProperty( + HttpConnectorConfigConstants.KEY_STORE_PATH, clientKeyStoreFile.getAbsolutePath()); + properties.setProperty( + HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, + serverTrustedCert.getAbsolutePath()); + + testSinkClientForConnection( + properties, "https://localhost:", httpsServerPort, perRequestSubmitterFactory); + + testSinkClientForConnection( + properties, "https://localhost:", httpsServerPort, batchRequestSubmitterFactory); + } + + @ParameterizedTest + @CsvSource( + value = { + "invalid.crt, client.crt, clientPrivateKey.pem", + "ca.crt, invalid.crt, clientPrivateKey.pem", + "ca.crt, client.crt, invalid.pem" + }) + public void shouldThrowOnInvalidPath( + String serverCertName, String clientCertName, String clientKeyName) { 
+ + File serverTrustedCert = new File(CERTS_PATH + serverCertName); + File clientCert = new File(CERTS_PATH + clientCertName); + File clientPrivateKey = new File(CERTS_PATH + clientKeyName); + + properties.setProperty( + HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, + serverTrustedCert.getAbsolutePath()); + properties.setProperty( + HttpConnectorConfigConstants.CLIENT_CERT, clientCert.getAbsolutePath()); + properties.setProperty( + HttpConnectorConfigConstants.CLIENT_PRIVATE_KEY, + clientPrivateKey.getAbsolutePath()); + + assertAll( + () -> { + assertThrows( + RuntimeException.class, + () -> + new JavaNetSinkHttpClient( + properties, + postRequestCallback, + headerPreprocessor, + perRequestSubmitterFactory)); + assertThrows( + RuntimeException.class, + () -> + new JavaNetSinkHttpClient( + properties, + postRequestCallback, + headerPreprocessor, + batchRequestSubmitterFactory)); + }); + } + + @ParameterizedTest + @ValueSource(strings = {"user:password", "Basic dXNlcjpwYXNzd29yZA=="}) + public void shouldConnectWithBasicAuth(String authorizationHeaderValue) { + + wireMockServer = new WireMockServer(serverPort); + wireMockServer.start(); + mockEndPointWithBasicAuth(wireMockServer); + + properties.setProperty( + HttpConnectorConfigConstants.SINK_HEADER_PREFIX + "Authorization", + authorizationHeaderValue); + + testSinkClientForConnection( + properties, "http://localhost:", serverPort, perRequestSubmitterFactory); + + testSinkClientForConnection( + properties, "http://localhost:", serverPort, batchRequestSubmitterFactory); + } + + private void testSinkClientForConnection( + Properties properties, + String endpointUrl, + int httpsServerPort, + RequestSubmitterFactory requestSubmitterFactory) { + + try { + JavaNetSinkHttpClient client = + new JavaNetSinkHttpClient( + properties, + postRequestCallback, + headerPreprocessor, + requestSubmitterFactory); + HttpSinkRequestEntry requestEntry = new HttpSinkRequestEntry("GET", new byte[0]); + SinkHttpClientResponse response = 
+ client.putRequests( + Collections.singletonList(requestEntry), + endpointUrl + httpsServerPort + ENDPOINT) + .get(); + + assertThat(response.getSuccessfulRequests()).isNotEmpty(); + assertThat(response.getFailedRequests()).isEmpty(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private void mockEndPoint(WireMockServer wireMockServer) { + wireMockServer.stubFor( + any(urlPathEqualTo(ENDPOINT)) + .willReturn(aResponse().withStatus(200).withBody("{}"))); + } + + private void mockEndPointWithBasicAuth(WireMockServer wireMockServer) { + + wireMockServer.stubFor( + any(urlPathEqualTo(ENDPOINT)) + .withBasicAuth("user", "password") + .willReturn(aResponse().withStatus(200).withBody("{}"))); + } +} diff --git a/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetSinkHttpClientTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/JavaNetSinkHttpClientTest.java similarity index 51% rename from src/test/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetSinkHttpClientTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/JavaNetSinkHttpClientTest.java index c12b2699..0edffe65 100644 --- a/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetSinkHttpClientTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/JavaNetSinkHttpClientTest.java @@ -1,8 +1,28 @@ -package com.getindata.connectors.http.internal.sink.httpclient; - -import java.net.http.HttpClient; -import java.util.Properties; -import java.util.stream.Stream; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink.httpclient; + +import org.apache.flink.connector.http.HttpPostRequestCallback; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.table.sink.Slf4jHttpPostRequestCallback; +import org.apache.flink.connector.http.utils.HttpHeaderUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -14,23 +34,25 @@ import org.mockito.Mock; import org.mockito.MockedStatic; import org.mockito.junit.jupiter.MockitoExtension; -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.*; -import com.getindata.connectors.http.HttpPostRequestCallback; -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.table.sink.Slf4jHttpPostRequestCallback; -import com.getindata.connectors.http.internal.utils.HttpHeaderUtils; -import static com.getindata.connectors.http.TestHelper.assertPropertyArray; +import java.net.http.HttpClient; +import java.util.Properties; +import java.util.stream.Stream; + +import static org.apache.flink.connector.http.TestHelper.assertPropertyArray; +import static 
org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.mockStatic; +import static org.mockito.Mockito.when; +/** Test for {@link JavaNetSinkHttpClient }. */ @ExtendWith(MockitoExtension.class) class JavaNetSinkHttpClientTest { private static MockedStatic httpClientStaticMock; - @Mock - private HttpClient.Builder httpClientBuilder; + @Mock private HttpClient.Builder httpClientBuilder; + @BeforeAll public static void beforeAll() { httpClientStaticMock = mockStatic(HttpClient.class); @@ -59,9 +81,8 @@ public void setUp() { private static Stream provideSubmitterFactory() { return Stream.of( - Arguments.of(new PerRequestRequestSubmitterFactory()), - Arguments.of(new BatchRequestSubmitterFactory(50)) - ); + Arguments.of(new PerRequestRequestSubmitterFactory()), + Arguments.of(new BatchRequestSubmitterFactory(50))); } @ParameterizedTest @@ -69,12 +90,11 @@ private static Stream provideSubmitterFactory() { public void shouldBuildClientWithoutHeaders(RequestSubmitterFactory requestSubmitterFactory) { JavaNetSinkHttpClient client = - new JavaNetSinkHttpClient( - new Properties(), - postRequestCallback, - this.headerPreprocessor, - requestSubmitterFactory - ); + new JavaNetSinkHttpClient( + new Properties(), + postRequestCallback, + this.headerPreprocessor, + requestSubmitterFactory); assertThat(client.getHeadersAndValues()).isEmpty(); } @@ -87,25 +107,22 @@ public void shouldBuildClientWithHeaders(RequestSubmitterFactory requestSubmitte properties.setProperty("property", "val1"); properties.setProperty("my.property", "val2"); properties.setProperty( - HttpConnectorConfigConstants.SINK_HEADER_PREFIX + "Origin", - "https://developer.mozilla.org") - ; + HttpConnectorConfigConstants.SINK_HEADER_PREFIX + "Origin", + "https://developer.mozilla.org"); properties.setProperty( - HttpConnectorConfigConstants.SINK_HEADER_PREFIX + "Cache-Control", - "no-cache, no-store, max-age=0, must-revalidate" - ); + 
HttpConnectorConfigConstants.SINK_HEADER_PREFIX + "Cache-Control", + "no-cache, no-store, max-age=0, must-revalidate"); properties.setProperty( - HttpConnectorConfigConstants.SINK_HEADER_PREFIX + "Access-Control-Allow-Origin", - "*" - ); + HttpConnectorConfigConstants.SINK_HEADER_PREFIX + "Access-Control-Allow-Origin", + "*"); // WHEN - JavaNetSinkHttpClient client = new JavaNetSinkHttpClient( - properties, - postRequestCallback, - headerPreprocessor, - requestSubmitterFactory - ); + JavaNetSinkHttpClient client = + new JavaNetSinkHttpClient( + properties, + postRequestCallback, + headerPreprocessor, + requestSubmitterFactory); String[] headersAndValues = client.getHeadersAndValues(); assertThat(headersAndValues).hasSize(6); @@ -113,10 +130,9 @@ public void shouldBuildClientWithHeaders(RequestSubmitterFactory requestSubmitte // assert that we have property followed by its value. assertPropertyArray(headersAndValues, "Origin", "https://developer.mozilla.org"); assertPropertyArray( - headersAndValues, - "Cache-Control", "no-cache, no-store, max-age=0, must-revalidate" - ); + headersAndValues, + "Cache-Control", + "no-cache, no-store, max-age=0, must-revalidate"); assertPropertyArray(headersAndValues, "Access-Control-Allow-Origin", "*"); } - } diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/status/ComposeHttpStatusCodeCheckerTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/status/ComposeHttpStatusCodeCheckerTest.java new file mode 100644 index 00000000..52380bd2 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/sink/httpclient/status/ComposeHttpStatusCodeCheckerTest.java @@ -0,0 +1,181 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.sink.httpclient.status; + +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.status.ComposeHttpStatusCodeChecker; +import org.apache.flink.connector.http.status.ComposeHttpStatusCodeChecker.ComposeHttpStatusCodeCheckerConfig; +import org.apache.flink.util.StringUtils; + +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; + +import java.util.Arrays; +import java.util.List; +import java.util.Properties; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** Test for {@link ComposeHttpStatusCodeChecker}. 
*/ +class ComposeHttpStatusCodeCheckerTest { + + private static final String STRING_CODES = "403, 100,200, 300, , 303 ,200"; + + private static final List CODES = + Arrays.stream(STRING_CODES.split(HttpConnectorConfigConstants.PROP_DELIM)) + .filter(code -> !StringUtils.isNullOrWhitespaceOnly(code)) + .map(String::trim) + .mapToInt(Integer::parseInt) + .boxed() + .collect(Collectors.toList()); + + private ComposeHttpStatusCodeChecker codeChecker; + + @BeforeAll + public static void beforeAll() { + assertThat(CODES).isNotEmpty(); + } + + private static Stream propertiesArguments() { + return Stream.of( + Arguments.of(new Properties()), + Arguments.of(prepareErrorCodeProperties("", "")), + Arguments.of(prepareErrorCodeProperties(" ", " ")), + Arguments.of(prepareErrorCodeProperties(",,,", ",,,,"))); + } + + @ParameterizedTest + @MethodSource("propertiesArguments") + public void shouldPassOnDefault(Properties properties) { + + ComposeHttpStatusCodeCheckerConfig checkerConfig = prepareCheckerConfig(properties); + + codeChecker = new ComposeHttpStatusCodeChecker(checkerConfig); + + assertAll( + () -> { + assertThat(codeChecker.isErrorCode(100)).isFalse(); + assertThat(codeChecker.isErrorCode(200)).isFalse(); + assertThat(codeChecker.isErrorCode(500)).isTrue(); + assertThat(codeChecker.isErrorCode(501)).isTrue(); + assertThat(codeChecker.isErrorCode(400)).isTrue(); + assertThat(codeChecker.isErrorCode(404)).isTrue(); + }); + } + + @Test + public void shouldParseIncludeList() { + + Properties properties = new Properties(); + properties.setProperty( + HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODE_INCLUDE_LIST, STRING_CODES); + properties.setProperty( + HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST, "1XX, 2XX, 3XX, 4XX, 5XX"); + + ComposeHttpStatusCodeCheckerConfig checkerConfig = prepareCheckerConfig(properties); + + codeChecker = new ComposeHttpStatusCodeChecker(checkerConfig); + + assertAll( + () -> { + CODES.forEach(code -> 
assertThat(codeChecker.isErrorCode(code)).isFalse()); + + assertThat(codeChecker.isErrorCode(301)) + .withFailMessage( + "Not in include list but matches 3XX range. " + + "Should be considered as error code.") + .isTrue(); + }); + } + + @Test + public void shouldParseErrorCodeList() { + + Properties properties = new Properties(); + properties.setProperty( + HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST, STRING_CODES); + + ComposeHttpStatusCodeCheckerConfig checkerConfig = prepareCheckerConfig(properties); + + codeChecker = new ComposeHttpStatusCodeChecker(checkerConfig); + + assertAll(() -> CODES.forEach(code -> assertThat(codeChecker.isErrorCode(code)).isTrue())); + } + + @Test + public void shouldParseErrorCodeRange() { + + Properties properties = new Properties(); + properties.setProperty( + HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST, "1xx, 2XX "); + + List codes = List.of(100, 110, 200, 220); + + ComposeHttpStatusCodeCheckerConfig checkerConfig = prepareCheckerConfig(properties); + + codeChecker = new ComposeHttpStatusCodeChecker(checkerConfig); + + assertAll( + () -> { + codes.forEach(code -> assertThat(codeChecker.isErrorCode(code)).isTrue()); + + assertThat(codeChecker.isErrorCode(303)) + .withFailMessage( + "Out ot Error code type range therefore should be not marked as error code.") + .isFalse(); + }); + } + + @ParameterizedTest + @ValueSource(strings = {"X", "XXX", " X X", "1X1", "XX1", "XX1XX", "XX1 XX"}) + public void shouldThrowOnInvalidCodeRange(String listCode) { + + Properties properties = new Properties(); + properties.setProperty(HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST, listCode); + + ComposeHttpStatusCodeCheckerConfig checkerConfig = prepareCheckerConfig(properties); + + assertThrows(Exception.class, () -> new ComposeHttpStatusCodeChecker(checkerConfig)); + } + + private static Properties prepareErrorCodeProperties(String errorCodeList, String includeList) { + Properties properties = new Properties(); + 
properties.setProperty( + HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODE_INCLUDE_LIST, includeList); + properties.setProperty( + HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST, errorCodeList); + return properties; + } + + private ComposeHttpStatusCodeCheckerConfig prepareCheckerConfig(Properties properties) { + return ComposeHttpStatusCodeCheckerConfig.builder() + .properties(properties) + .includeListPrefix(HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODE_INCLUDE_LIST) + .errorCodePrefix(HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST) + .build(); + } +} diff --git a/src/test/java/com/getindata/connectors/http/internal/status/HttpCodesParserTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/status/HttpCodesParserTest.java similarity index 55% rename from src/test/java/com/getindata/connectors/http/internal/status/HttpCodesParserTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/status/HttpCodesParserTest.java index 502ad9d8..ddb8e746 100644 --- a/src/test/java/com/getindata/connectors/http/internal/status/HttpCodesParserTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/status/HttpCodesParserTest.java @@ -1,62 +1,76 @@ -package com.getindata.connectors.http.internal.status; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ -import java.util.Arrays; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import java.util.stream.Stream; +package org.apache.flink.connector.http.status; + +import org.apache.flink.util.ConfigurationException; import lombok.Builder; import lombok.Getter; import lombok.NonNull; import lombok.Singular; -import org.apache.flink.util.ConfigurationException; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; + +import java.util.Arrays; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import java.util.stream.Stream; + import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +/** Test http codes parser. 
*/ class HttpCodesParserTest { @ParameterizedTest - @ValueSource(strings = { - "6XX", - "1XXX", - "600", - "99", - "1XX,11", - "abc", - "!1XX", - "1 2 3", - "1X X" - }) + @ValueSource(strings = {"6XX", "1XXX", "600", "99", "1XX,11", "abc", "!1XX", "1 2 3", "1X X"}) void failWhenCodeExpressionIsInvalid(String codeExpression) { - assertThrows(ConfigurationException.class, - () -> HttpCodesParser.parse(codeExpression)); + assertThrows(ConfigurationException.class, () -> HttpCodesParser.parse(codeExpression)); } private static Stream inputArgsStream() { return Stream.of( - InputArgs.builder().codeExpression("2XX,404,!203,!205") + InputArgs.builder() + .codeExpression("2XX,404,!203,!205") .expectedCodes(range(200, 300, 203, 205)) .expectedCode(404) .build(), - InputArgs.builder().codeExpression(" 400, 401 , 403, 500,501, !502") + InputArgs.builder() + .codeExpression(" 400, 401 , 403, 500,501, !502") .expectedCodes(List.of(400, 401, 403, 500, 501)) .build(), - InputArgs.builder().codeExpression("!405,1XX, 2XX ,404,!202,405") + InputArgs.builder() + .codeExpression("!405,1XX, 2XX ,404,!202,405") .expectedCodes(range(100, 300, 202)) .expectedCode(404) .build(), - InputArgs.builder().codeExpression("!404, 4XX") + InputArgs.builder() + .codeExpression("!404, 4XX") .expectedCodes(range(400, 500, 404)) .build(), - InputArgs.builder().codeExpression("2xX,!401,3Xx,4xx") + InputArgs.builder() + .codeExpression("2xX,!401,3Xx,4xx") .expectedCodes(range(200, 500, 401)) - .build() - ); + .build()); } @ParameterizedTest @@ -76,7 +90,8 @@ void parseCodeExpressionTest(InputArgs inputArgs) throws ConfigurationException private static List range(int start, int endExclusive, int... 
exclusions) { var exclusionSet = Arrays.stream(exclusions).boxed().collect(Collectors.toSet()); - return IntStream.range(start, endExclusive).boxed() + return IntStream.range(start, endExclusive) + .boxed() .filter(item -> !exclusionSet.contains(item)) .collect(Collectors.toList()); } @@ -84,9 +99,7 @@ private static List range(int start, int endExclusive, int... exclusion @Builder @Getter private static class InputArgs { - @NonNull - private final String codeExpression; - @Singular - private final Set expectedCodes; + @NonNull private final String codeExpression; + @Singular private final Set expectedCodes; } } diff --git a/src/test/java/com/getindata/connectors/http/internal/status/HttpResponseCheckerTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/status/HttpResponseCheckerTest.java similarity index 70% rename from src/test/java/com/getindata/connectors/http/internal/status/HttpResponseCheckerTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/status/HttpResponseCheckerTest.java index 4cc27052..bde482c4 100644 --- a/src/test/java/com/getindata/connectors/http/internal/status/HttpResponseCheckerTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/status/HttpResponseCheckerTest.java @@ -1,35 +1,58 @@ -package com.getindata.connectors.http.internal.status; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.status; -import java.net.http.HttpResponse; -import java.util.Set; -import java.util.stream.Stream; -import static java.util.Collections.emptySet; +import org.apache.flink.util.ConfigurationException; import lombok.Getter; import lombok.NonNull; import lombok.RequiredArgsConstructor; -import org.apache.flink.util.ConfigurationException; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; + +import java.net.http.HttpResponse; +import java.util.Set; +import java.util.stream.Stream; + +import static java.util.Collections.emptySet; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +/** Test for {@link HttpResponseChecker }. 
*/ class HttpResponseCheckerTest { @Test void failWhenTheSameCodeIsMarkedSuccessAndError() { - assertThrows(ConfigurationException.class, - () -> new HttpResponseChecker(Set.of(404), Set.of(404))); + assertThrows( + ConfigurationException.class, + () -> new HttpResponseChecker(Set.of(404), Set.of(404))); } @Test void failWhenSuccessListIsEmpty() { - assertThrows(ConfigurationException.class, - () -> new HttpResponseChecker(emptySet(), Set.of(500))); + assertThrows( + ConfigurationException.class, + () -> new HttpResponseChecker(emptySet(), Set.of(500))); } private static Stream testData() { @@ -86,10 +109,8 @@ private void assertError(HttpResponseChecker checker, HttpResponse response) @RequiredArgsConstructor @Getter private static class InputArgs { - @NonNull - private final Integer code; - @NonNull - private final CodeType codeType; + @NonNull private final Integer code; + @NonNull private final CodeType codeType; HttpResponse getResponse() { var response = mock(HttpResponse.class); @@ -99,8 +120,8 @@ HttpResponse getResponse() { } private enum CodeType { - SUCCESSFUL, TEMPORAL_ERROR, ERROR + SUCCESSFUL, + TEMPORAL_ERROR, + ERROR } } - - diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/BasicAuthHeaderValuePreprocessorTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/BasicAuthHeaderValuePreprocessorTest.java new file mode 100644 index 00000000..6df61e0a --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/BasicAuthHeaderValuePreprocessorTest.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http; + +import org.apache.flink.connector.http.preprocessor.BasicAuthHeaderValuePreprocessor; + +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; + +import static org.assertj.core.api.Assertions.assertThat; + +/** Test for {@link BasicAuthHeaderValuePreprocessor}. */ +class BasicAuthHeaderValuePreprocessorTest { + + @ParameterizedTest + @CsvSource({ + "user:password, Basic dXNlcjpwYXNzd29yZA==, false", + "Basic dXNlcjpwYXNzd29yZA==, Basic dXNlcjpwYXNzd29yZA==, false", + "abc123, abc123, true", + "Basic dXNlcjpwYXNzd29yZA==, Basic dXNlcjpwYXNzd29yZA==, true", + "Bearer dXNlcjpwYXNzd29yZA==, Bearer dXNlcjpwYXNzd29yZA==, true" + }) + public void testAuthorizationHeaderPreprocess( + String headerRawValue, String expectedHeaderValue, boolean useRawAuthHeader) { + BasicAuthHeaderValuePreprocessor preprocessor = + new BasicAuthHeaderValuePreprocessor(useRawAuthHeader); + String headerValue = preprocessor.preprocessHeaderValue(headerRawValue); + assertThat(headerValue).isEqualTo(expectedHeaderValue); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/ComposeHeaderPreprocessorTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/ComposeHeaderPreprocessorTest.java new file mode 100644 index 00000000..3610b260 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/ComposeHeaderPreprocessorTest.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software 
Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http; + +import org.apache.flink.connector.http.preprocessor.ComposeHeaderPreprocessor; + +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; + +import static org.assertj.core.api.Assertions.assertThat; + +/** Test for {@link ComposeHeaderPreprocessor}. 
*/ +public class ComposeHeaderPreprocessorTest { + @ParameterizedTest + @CsvSource({ + "a, a", + "a123, a123", + "user:password, user:password", + "Basic dXNlcjpwYXNzd29yZA==, Basic dXNlcjpwYXNzd29yZA==" + }) + public void testNoPreprocessors(String rawValue, String expectedValue) { + var noPreprocessorHeaderPreprocessor = new ComposeHeaderPreprocessor(null); + var obtainedValue = + noPreprocessorHeaderPreprocessor.preprocessValueForHeader("someHeader", rawValue); + assertThat(obtainedValue).isEqualTo(expectedValue); + } +} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/AsyncHttpTableLookupFunctionTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/AsyncHttpTableLookupFunctionTest.java similarity index 52% rename from src/test/java/com/getindata/connectors/http/internal/table/lookup/AsyncHttpTableLookupFunctionTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/AsyncHttpTableLookupFunctionTest.java index fb627480..682d3a31 100644 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/AsyncHttpTableLookupFunctionTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/AsyncHttpTableLookupFunctionTest.java @@ -1,4 +1,34 @@ -package com.getindata.connectors.http.internal.table.lookup; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.table.data.GenericRowData; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.functions.FunctionContext; + +import lombok.extern.slf4j.Slf4j; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; import java.util.ArrayList; import java.util.Collection; @@ -12,35 +42,24 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; + import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; - -import lombok.extern.slf4j.Slf4j; -import org.apache.flink.table.data.GenericRowData; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.functions.FunctionContext; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.Mockito; -import org.mockito.junit.jupiter.MockitoExtension; +import static org.apache.flink.connector.http.table.lookup.TableSourceHelper.buildGenericRowData; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import 
static com.getindata.connectors.http.internal.table.lookup.TableSourceHelper.buildGenericRowData; - +/** Test for {@link AsyncHttpTableLookupFunction}. */ @Slf4j @ExtendWith(MockitoExtension.class) class AsyncHttpTableLookupFunctionTest { private final int[] rowKeys = {1, 2, 4, 12, 3}; - @Mock - private HttpTableLookupFunction decorate; + @Mock private HttpTableLookupFunction decorate; private AsyncHttpTableLookupFunction asyncFunction; @@ -70,17 +89,17 @@ void shouldEvaluateInAsyncWay() throws InterruptedException { CompletableFuture> future = new CompletableFuture<>(); asyncFunction.eval(future, key); future.whenComplete( - (rs, t) -> { - result.addAll(rs); - latch.countDown(); - }); + (rs, t) -> { + result.addAll(rs); + latch.countDown(); + }); } assertThat(latch.await(3, TimeUnit.SECONDS)) - .withFailMessage( - "Future complete in AsyncHttpTableLookupFunction was not called" - + " for at lest one event.") - .isEqualTo(true); + .withFailMessage( + "Future complete in AsyncHttpTableLookupFunction was not called" + + " for at lest one event.") + .isEqualTo(true); assertThat(result.size()).isEqualTo(rowKeys.length); assertThat(threadNames.size()).isEqualTo(rowKeys.length); @@ -99,29 +118,28 @@ void shouldHandleExceptionOnOneThread() throws InterruptedException { for (int key : rowKeys) { CompletableFuture> future = new CompletableFuture<>(); asyncFunction.eval(future, key); - future - .whenComplete( - (rs, t) -> { - if (t != null) { - log.error(t.getMessage(), t); - } - - result.addAll(rs); - latch.countDown(); - }) - .exceptionally( - throwable -> { - wasException.set(true); - latch.countDown(); - return emptyList(); - }); + future.whenComplete( + (rs, t) -> { + if (t != null) { + log.error(t.getMessage(), t); + } + + result.addAll(rs); + latch.countDown(); + }) + .exceptionally( + throwable -> { + wasException.set(true); + latch.countDown(); + return emptyList(); + }); } assertThat(latch.await(3, TimeUnit.SECONDS)) - .withFailMessage( - "Future complete in 
AsyncHttpTableLookupFunction was not called" - + " for at lest one event.") - .isEqualTo(true); + .withFailMessage( + "Future complete in AsyncHttpTableLookupFunction was not called" + + " for at lest one event.") + .isEqualTo(true); assertThat(wasException).isTrue(); @@ -143,8 +161,7 @@ void shouldHandleEmptyCollectionResult() throws InterruptedException { for (int key : rowKeys) { CompletableFuture> future = new CompletableFuture<>(); asyncFunction.eval(future, key); - future - .whenComplete( + future.whenComplete( (rs, t) -> { if (t != null) { log.error(t.getMessage(), t); @@ -157,16 +174,16 @@ void shouldHandleEmptyCollectionResult() throws InterruptedException { } assertThat(latch.await(3, TimeUnit.SECONDS)) - .withFailMessage( - "Future complete in AsyncHttpTableLookupFunction was not called" - + " for at lest one event.") - .isEqualTo(true); + .withFailMessage( + "Future complete in AsyncHttpTableLookupFunction was not called" + + " for at lest one event.") + .isEqualTo(true); assertThat(completeCount.get()) - .withFailMessage( - "Future complete in AsyncHttpTableLookupFunction was not called" - + " for at lest one event.") - .isEqualTo(rowKeys.length); + .withFailMessage( + "Future complete in AsyncHttpTableLookupFunction was not called" + + " for at lest one event.") + .isEqualTo(rowKeys.length); // -1 since one will have one empty result. 
assertThat(result.size()).isEqualTo(rowKeys.length - 1); @@ -176,43 +193,46 @@ void shouldHandleEmptyCollectionResult() throws InterruptedException { private void mockPolling() { when(decorate.lookup(any())) - .thenAnswer( - invocationOnMock -> { - threadNames.add(Thread.currentThread().getName()); - // make sure we pile up all keyRows on threads - barrier.await(); - return singletonList(buildGenericRowData( - singletonList(invocationOnMock.getArgument(0)))); - }); + .thenAnswer( + invocationOnMock -> { + threadNames.add(Thread.currentThread().getName()); + // make sure we pile up all keyRows on threads + barrier.await(); + return singletonList( + buildGenericRowData( + singletonList(invocationOnMock.getArgument(0)))); + }); } private void mockPollingWithException() { when(decorate.lookup(any())) - .thenAnswer( - invocationOnMock -> { - threadNames.add(Thread.currentThread().getName()); - // make sure we pile up all keyRows on threads - barrier.await(); - Integer argument = ((GenericRowData) invocationOnMock.getArgument(0)).getInt(0); - if (argument == 12) { - throw new RuntimeException("Exception On problematic item"); - } - return singletonList(buildGenericRowData(singletonList(argument))); - }); + .thenAnswer( + invocationOnMock -> { + threadNames.add(Thread.currentThread().getName()); + // make sure we pile up all keyRows on threads + barrier.await(); + Integer argument = + ((GenericRowData) invocationOnMock.getArgument(0)).getInt(0); + if (argument == 12) { + throw new RuntimeException("Exception On problematic item"); + } + return singletonList(buildGenericRowData(singletonList(argument))); + }); } private void mockPollingWithEmptyList() { when(decorate.lookup(any())) - .thenAnswer( - invocationOnMock -> { - threadNames.add(Thread.currentThread().getName()); - // make sure we pile up all keyRows on threads - barrier.await(); - Integer argument = ((GenericRowData) invocationOnMock.getArgument(0)).getInt(0); - if (argument == 12) { - return emptyList(); - } - 
return singletonList(buildGenericRowData(singletonList(argument))); - }); + .thenAnswer( + invocationOnMock -> { + threadNames.add(Thread.currentThread().getName()); + // make sure we pile up all keyRows on threads + barrier.await(); + Integer argument = + ((GenericRowData) invocationOnMock.getArgument(0)).getInt(0); + if (argument == 12) { + return emptyList(); + } + return singletonList(buildGenericRowData(singletonList(argument))); + }); } } diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/BodyBasedRequestFactoryTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/BodyBasedRequestFactoryTest.java similarity index 57% rename from src/test/java/com/getindata/connectors/http/internal/table/lookup/BodyBasedRequestFactoryTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/BodyBasedRequestFactoryTest.java index 756af45f..2b3282d1 100644 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/BodyBasedRequestFactoryTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/BodyBasedRequestFactoryTest.java @@ -1,28 +1,52 @@ -package com.getindata.connectors.http.internal.table.lookup; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import java.net.URI; import java.util.Collection; +import java.util.List; import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; -import org.jetbrains.annotations.NotNull; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.MethodSource; -import org.testcontainers.shaded.com.google.common.collect.ImmutableList; import static org.assertj.core.api.Assertions.assertThat; +/** Test for {@link BodyBasedRequestFactory}. */ public class BodyBasedRequestFactoryTest { @ParameterizedTest @MethodSource("configProvider") void testconstructUri(TestSpec testSpec) throws Exception { - LookupQueryInfo lookupQueryInfo = new LookupQueryInfo(testSpec.url, - testSpec.bodyBasedUrlQueryParams, - testSpec.pathBasedUrlParams); - HttpLookupConfig httpLookupConfig = HttpLookupConfig.builder() - .lookupMethod(testSpec.lookupMethod) - .url(testSpec.url) - .useAsync(false) - .build(); + LookupQueryInfo lookupQueryInfo = + new LookupQueryInfo( + testSpec.url, + testSpec.bodyBasedUrlQueryParams, + testSpec.pathBasedUrlParams); + HttpLookupConfig httpLookupConfig = + HttpLookupConfig.builder() + .lookupMethod(testSpec.lookupMethod) + .url(testSpec.url) + .useAsync(false) + .build(); BodyBasedRequestFactory bodyBasedRequestFactory = new BodyBasedRequestFactory("test", null, null, httpLookupConfig); @@ -38,11 +62,12 @@ private static class TestSpec { String lookupMethod; String expected; - private TestSpec(Map bodyBasedUrlQueryParams, - Map pathBasedUrlParams, - String url, - String lookupMethod, - String expected) { + private TestSpec( + Map bodyBasedUrlQueryParams, + Map pathBasedUrlParams, + String url, + String lookupMethod, + String expected) 
{ this.bodyBasedUrlQueryParams = bodyBasedUrlQueryParams; this.pathBasedUrlParams = pathBasedUrlParams; this.url = url; @@ -68,57 +93,53 @@ public String toString() { } static Collection configProvider() { - return ImmutableList.builder() - .addAll(getTestSpecs("GET")) - .addAll(getTestSpecs("POST")) - .build(); + return Stream.concat(getTestSpecs("GET").stream(), getTestSpecs("POST").stream()) + .collect(Collectors.toList()); } - @NotNull - private static ImmutableList getTestSpecs(String lookupMethod) { - return ImmutableList.of( + private static List getTestSpecs(String lookupMethod) { + return List.of( // 1 path param new TestSpec( null, - Map. of("param1", "value1"), + Map.of("param1", "value1"), "http://service/{param1}", lookupMethod, "http://service/value1"), // 2 path param new TestSpec( null, - Map. of("param1", "value1", "param2", "value2"), + Map.of("param1", "value1", "param2", "value2"), "http://service/{param1}/param2/{param2}", lookupMethod, "http://service/value1/param2/value2"), // 1 query param new TestSpec( - Map. of("param3", "value3"), + Map.of("param3", "value3"), null, "http://service", lookupMethod, "http://service?param3=value3"), // 1 query param with a parameter on base url new TestSpec( - Map. of("param3", "value3"), + Map.of("param3", "value3"), null, "http://service?extrakey=extravalue", lookupMethod, "http://service?extrakey=extravalue¶m3=value3"), // 2 query params new TestSpec( - Map. of("param3", "value3", "param4", "value4"), + Map.of("param3", "value3", "param4", "value4"), null, "http://service", lookupMethod, "http://service?param3=value3¶m4=value4"), // 2 query params and 2 path params new TestSpec( - Map. of("param3", "value3", "param4", "value4"), - Map. 
of("param1", "value1", "param2", "value2"), + Map.of("param3", "value3", "param4", "value4"), + Map.of("param1", "value1", "param2", "value2"), "http://service/{param1}/param2/{param2}", lookupMethod, - "http://service/value1/param2/value2?param3=value3¶m4=value4") - ); + "http://service/value1/param2/value2?param3=value3¶m4=value4")); } } diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceFactoryTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceFactoryTest.java new file mode 100644 index 00000000..08e031ab --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceFactoryTest.java @@ -0,0 +1,139 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.connector.http.WireMockServerPortAllocator; +import org.apache.flink.table.api.DataTypes; +import org.apache.flink.table.api.TableConfig; +import org.apache.flink.table.api.ValidationException; +import org.apache.flink.table.catalog.Column; +import org.apache.flink.table.catalog.ResolvedSchema; +import org.apache.flink.table.catalog.UniqueConstraint; +import org.apache.flink.table.connector.source.DynamicTableSource; + +import org.junit.jupiter.api.Test; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.apache.flink.table.factories.utils.FactoryMocks.createTableSource; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.junit.jupiter.api.Assertions.assertFalse; + +/** Test for {@link HttpLookupTableSourceFactory}. 
*/ +public class HttpLookupTableSourceFactoryTest { + + private static final ResolvedSchema SCHEMA = + new ResolvedSchema( + Arrays.asList( + Column.physical("id", DataTypes.STRING().notNull()), + Column.physical("msg", DataTypes.STRING().notNull()), + Column.physical("uuid", DataTypes.STRING().notNull()), + Column.physical( + "details", + DataTypes.ROW( + DataTypes.FIELD( + "isActive", DataTypes.BOOLEAN()), + DataTypes.FIELD( + "nestedDetails", + DataTypes.ROW( + DataTypes.FIELD( + "balance", + DataTypes.STRING())))) + .notNull())), + Collections.emptyList(), + UniqueConstraint.primaryKey("id", List.of("id"))); + + @Test + void validateHttpLookupSourceOptions() { + + HttpLookupTableSourceFactory httpLookupTableSourceFactory = + new HttpLookupTableSourceFactory(); + TableConfig tableConfig = new TableConfig(); + httpLookupTableSourceFactory.validateHttpLookupSourceOptions(tableConfig); + tableConfig.set( + HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL.key(), "aaa"); + + try { + httpLookupTableSourceFactory.validateHttpLookupSourceOptions(tableConfig); + assertFalse(true, "Expected an error."); + } catch (IllegalArgumentException e) { + // expected + } + // should now work. 
+ tableConfig.set( + HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST.key(), "bbb"); + + httpLookupTableSourceFactory.validateHttpLookupSourceOptions(tableConfig); + } + + @Test + void shouldCreateForMandatoryFields() { + Map options = getMandatoryOptions(); + DynamicTableSource source = createTableSource(SCHEMA, options); + assertThat(source).isNotNull(); + assertThat(source).isInstanceOf(HttpLookupTableSource.class); + } + + @Test + void shouldThrowIfMissingUrl() { + Map options = Collections.singletonMap("connector", "rest-lookup"); + assertThatExceptionOfType(ValidationException.class) + .isThrownBy(() -> createTableSource(SCHEMA, options)); + } + + @Test + void shouldAcceptWithUrlArgs() { + Map options = getOptions(Map.of("url-args", "id;msg")); + DynamicTableSource source = createTableSource(SCHEMA, options); + assertThat(source).isNotNull(); + assertThat(source).isInstanceOf(HttpLookupTableSource.class); + } + + @Test + void shouldHandleEmptyUrlArgs() { + Map options = getOptions(Collections.emptyMap()); + DynamicTableSource source = createTableSource(SCHEMA, options); + assertThat(source).isNotNull(); + assertThat(source).isInstanceOf(HttpLookupTableSource.class); + } + + private Map getMandatoryOptions() { + return Map.of( + "connector", "rest-lookup", + "url", "http://localhost:" + WireMockServerPortAllocator.PORT_BASE + "/service", + "format", "json"); + } + + private Map getOptions(Map optionalOptions) { + if (optionalOptions.isEmpty()) { + return getMandatoryOptions(); + } + + Map allOptions = new HashMap<>(); + allOptions.putAll(getMandatoryOptions()); + allOptions.putAll(optionalOptions); + + return allOptions; + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceITCaseTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceITCaseTest.java new file mode 100644 index 00000000..a1c4d154 --- /dev/null +++ 
b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceITCaseTest.java @@ -0,0 +1,1118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.api.common.RuntimeExecutionMode; +import org.apache.flink.api.common.restartstrategy.RestartStrategies; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.configuration.ExecutionOptions; +import org.apache.flink.connector.http.WireMockServerPortAllocator; +import org.apache.flink.streaming.api.CheckpointingMode; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.TableResult; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; +import org.apache.flink.table.connector.source.lookup.cache.LookupCache; +import org.apache.flink.table.data.GenericRowData; +import org.apache.flink.table.data.binary.BinaryStringData; +import org.apache.flink.table.runtime.functions.table.lookup.LookupCacheManager; +import org.apache.flink.table.test.lookup.cache.LookupCacheAssert; +import org.apache.flink.types.Row; +import org.apache.flink.util.CloseableIterator; 
+import org.apache.flink.util.StringUtils; + +import com.github.tomakehurst.wiremock.WireMockServer; +import com.github.tomakehurst.wiremock.client.MappingBuilder; +import com.github.tomakehurst.wiremock.core.WireMockConfiguration; +import com.github.tomakehurst.wiremock.matching.StringValuePattern; +import com.github.tomakehurst.wiremock.stubbing.Scenario; +import com.github.tomakehurst.wiremock.stubbing.StubMapping; +import lombok.extern.slf4j.Slf4j; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.io.File; +import java.util.Collection; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.getRequestedFor; +import static com.github.tomakehurst.wiremock.client.WireMock.matching; +import static com.github.tomakehurst.wiremock.client.WireMock.matchingJsonPath; +import static com.github.tomakehurst.wiremock.client.WireMock.post; +import static com.github.tomakehurst.wiremock.client.WireMock.put; +import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertAll; 
+import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** Test for {@link HttpLookupTableSource} connection. */ +@Slf4j +@ExtendWith(MockitoExtension.class) +class HttpLookupTableSourceITCaseTest { + + private static int serverPort; + + private static int secServerPort; + + private static final String CERTS_PATH = "src/test/resources/security/certs/"; + + private static final String SERVER_KEYSTORE_PATH = + "src/test/resources/security/certs/serverKeyStore.jks"; + + private static final String SERVER_TRUSTSTORE_PATH = + "src/test/resources/security/certs/serverTrustStore.jks"; + + private static final String ENDPOINT = "/client"; + + /** Comparator for Flink SQL result. */ + private static final Comparator ROW_COMPARATOR = + (row1, row2) -> { + String row1Id = (String) Objects.requireNonNull(row1.getField("id")); + String row2Id = (String) Objects.requireNonNull(row2.getField("id")); + + return row1Id.compareTo(row2Id); + }; + + private StreamTableEnvironment tEnv; + + private WireMockServer wireMockServer; + + @SuppressWarnings("unchecked") + @BeforeEach + void setup() { + + File keyStoreFile = new File(SERVER_KEYSTORE_PATH); + File trustStoreFile = new File(SERVER_TRUSTSTORE_PATH); + serverPort = WireMockServerPortAllocator.getServerPort(); + secServerPort = WireMockServerPortAllocator.getSecureServerPort(); + wireMockServer = + new WireMockServer( + WireMockConfiguration.wireMockConfig() + .port(serverPort) + .httpsPort(secServerPort) + .keystorePath(keyStoreFile.getAbsolutePath()) + .keystorePassword("password") + .keyManagerPassword("password") + .needClientAuth(true) + .trustStorePath(trustStoreFile.getAbsolutePath()) + .trustStorePassword("password") + .extensions(JsonTransform.class)); + wireMockServer.start(); + + StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); + env.setRestartStrategy(RestartStrategies.noRestart()); + Configuration config = new 
Configuration(); + config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.STREAMING); + env.configure(config, getClass().getClassLoader()); + env.enableCheckpointing(1000, CheckpointingMode.EXACTLY_ONCE); + env.setParallelism( + 1); // wire mock server has problem with scenario state during parallel execution + + tEnv = StreamTableEnvironment.create(env); + } + + @AfterEach + void tearDown() { + wireMockServer.stop(); + } + + @ParameterizedTest + @ValueSource(strings = {"", "GET", "POST", "PUT"}) + void testHttpLookupJoin(String methodName) throws Exception { + + // GIVEN + if (StringUtils.isNullOrWhitespaceOnly(methodName) || methodName.equalsIgnoreCase("GET")) { + setupServerStub(wireMockServer); + } else { + setUpServerBodyStub( + methodName, + wireMockServer, + List.of(matchingJsonPath("$.id"), matchingJsonPath("$.id2"))); + } + + String lookupTable = + "CREATE TABLE Customers (" + + "id STRING," + + "id2 STRING," + + "msg STRING," + + "uuid STRING," + + "details ROW<" + + "isActive BOOLEAN," + + "nestedDetails ROW<" + + "balance STRING" + + ">" + + ">" + + ") WITH (" + + "'format' = 'json'," + + "'connector' = 'rest-lookup'," + + ((StringUtils.isNullOrWhitespaceOnly(methodName)) + ? 
"" + : "'lookup-method' = '" + methodName + "',") + + "'url' = 'http://localhost:" + + serverPort + + "/client'," + + "'http.source.lookup.header.Content-Type' = 'application/json'," + + "'asyncPolling' = 'true'," + + "'table.exec.async-lookup.buffer-capacity' = '50'," + + "'table.exec.async-lookup.timeout' = '120s'" + + ")"; + + // WHEN + SortedSet rows = testLookupJoin(lookupTable, 4); + + // THEN + assertEnrichedRows(rows); + } + + @Test + void testHttpLookupJoinNoDataFromEndpoint() { + + // GIVEN + setupServerStubEmptyResponse(wireMockServer); + + String lookupTable = + "CREATE TABLE Customers (" + + "id STRING," + + "id2 STRING," + + "msg STRING," + + "uuid STRING," + + "details ROW<" + + "isActive BOOLEAN," + + "nestedDetails ROW<" + + "balance STRING" + + ">" + + ">" + + ") WITH (" + + "'format' = 'json'," + + "'connector' = 'rest-lookup'," + + "'url' = 'http://localhost:" + + serverPort + + "/client'," + + "'http.source.lookup.header.Content-Type' = 'application/json'," + + "'asyncPolling' = 'true'" + + ")"; + + // WHEN/THEN + assertThrows(TimeoutException.class, () -> testLookupJoin(lookupTable, 4)); + } + + @Test + void testLookupWithRetry() throws Exception { + wireMockServer.stubFor( + get(urlPathEqualTo(ENDPOINT)) + .inScenario("retry") + .whenScenarioStateIs(Scenario.STARTED) + .withHeader("Content-Type", equalTo("application/json")) + .withQueryParam("id", matching("[0-9]+")) + .withQueryParam("id2", matching("[0-9]+")) + .willReturn(aResponse().withBody(new byte[0]).withStatus(501)) + .willSetStateTo("temporal_issue_gone")); + wireMockServer.stubFor( + get(urlPathEqualTo(ENDPOINT)) + .inScenario("retry") + .whenScenarioStateIs("temporal_issue_gone") + .withHeader("Content-Type", equalTo("application/json")) + .withQueryParam("id", matching("[0-9]+")) + .withQueryParam("id2", matching("[0-9]+")) + .willReturn( + aResponse().withTransformers(JsonTransform.NAME).withStatus(200))); + + var lookupTable = + "CREATE TABLE Customers (" + + "id STRING," + + 
"id2 STRING," + + "msg STRING," + + "uuid STRING," + + "details ROW<" + + "isActive BOOLEAN," + + "nestedDetails ROW<" + + "balance STRING" + + ">" + + ">" + + ") WITH (" + + "'format' = 'json'," + + "'connector' = 'rest-lookup'," + + "'url' = 'http://localhost:" + + serverPort + + "/client'," + + "'lookup.max-retries' = '3'," + + "'http.source.lookup.header.Content-Type' = 'application/json'," + + "'http.source.lookup.retry-strategy.type' = 'fixed-delay'," + + "'http.source.lookup.retry-strategy.fixed-delay.delay' = '1ms'," + + "'http.source.lookup.success-codes' = '2XX'," + + "'http.source.lookup.retry-codes' = '501'" + + ")"; + + var result = testLookupJoin(lookupTable, 1); + + assertEquals(1, result.size()); + wireMockServer.verify(2, getRequestedFor(urlPathEqualTo(ENDPOINT))); + } + + @Test + void testLookupIgnoreResponse() throws Exception { + wireMockServer.stubFor( + get(urlPathEqualTo(ENDPOINT)) + .inScenario("404_on_first") + .whenScenarioStateIs(Scenario.STARTED) + .withHeader("Content-Type", equalTo("application/json")) + .withQueryParam("id", matching("[0-9]+")) + .withQueryParam("id2", matching("[0-9]+")) + .willReturn(aResponse().withBody(JsonTransform.NAME).withStatus(404)) + .willSetStateTo("second_request")); + wireMockServer.stubFor( + get(urlPathEqualTo(ENDPOINT)) + .inScenario("404_on_first") + .whenScenarioStateIs("second_request") + .withHeader("Content-Type", equalTo("application/json")) + .withQueryParam("id", matching("[0-9]+")) + .withQueryParam("id2", matching("[0-9]+")) + .willReturn( + aResponse().withTransformers(JsonTransform.NAME).withStatus(200))); + + var lookupTable = + "CREATE TABLE Customers (" + + "id STRING," + + "id2 STRING," + + "msg STRING," + + "uuid STRING," + + "details ROW<" + + "isActive BOOLEAN," + + "nestedDetails ROW<" + + "balance STRING" + + ">" + + ">" + + ") WITH (" + + "'format' = 'json'," + + "'connector' = 'rest-lookup'," + + "'url' = 'http://localhost:" + + serverPort + + "/client'," + + 
"'http.source.lookup.header.Content-Type' = 'application/json'," + + "'http.source.lookup.success-codes' = '2XX,404'," + + "'http.source.lookup.ignored-response-codes' = '404'" + + ")"; + + var result = testLookupJoin(lookupTable, 3); + + assertEquals(2, result.size()); + wireMockServer.verify(3, getRequestedFor(urlPathEqualTo(ENDPOINT))); + } + + @Test + void testHttpsMTlsLookupJoin() throws Exception { + + // GIVEN + File serverTrustedCert = new File(CERTS_PATH + "ca.crt"); + File clientCert = new File(CERTS_PATH + "client.crt"); + File clientPrivateKey = new File(CERTS_PATH + "clientPrivateKey.pem"); + + setupServerStub(wireMockServer); + + String lookupTable = + String.format( + "CREATE TABLE Customers (" + + "id STRING," + + "id2 STRING," + + "msg STRING," + + "uuid STRING," + + "details ROW<" + + "isActive BOOLEAN," + + "nestedDetails ROW<" + + "balance STRING" + + ">" + + ">" + + ") WITH (" + + "'format' = 'json'," + + "'connector' = 'rest-lookup'," + + "'url' = 'https://localhost:" + + secServerPort + + "/client'," + + "'http.source.lookup.header.Content-Type' = 'application/json'," + + "'asyncPolling' = 'true'," + + "'http.security.cert.server' = '%s'," + + "'http.security.cert.client' = '%s'," + + "'http.security.key.client' = '%s'" + + ")", + serverTrustedCert.getAbsolutePath(), + clientCert.getAbsolutePath(), + clientPrivateKey.getAbsolutePath()); + + // WHEN + SortedSet rows = testLookupJoin(lookupTable, 4); + + // THEN + assertEnrichedRows(rows); + } + + @Test + void testLookupJoinProjectionPushDown() throws Exception { + + // GIVEN + setUpServerBodyStub( + "POST", + wireMockServer, + List.of( + matchingJsonPath("$.row.aStringColumn"), + matchingJsonPath("$.row.anIntColumn"), + matchingJsonPath("$.row.aFloatColumn"))); + + String fields = + "`row` ROW<`aStringColumn` STRING, `anIntColumn` INT, `aFloatColumn` FLOAT>\n"; + + String sourceTable = + "CREATE TABLE Orders (\n" + + " proc_time AS PROCTIME(),\n" + + " id STRING,\n" + + fields + + ") WITH (" + 
+ "'connector' = 'datagen'," + + "'rows-per-second' = '1'," + + "'fields.id.kind' = 'sequence'," + + "'fields.id.start' = '1'," + + "'fields.id.end' = '5'" + + ")"; + + String lookupTable = + "CREATE TABLE Customers (\n" + + " `enrichedInt` INT,\n" + + " `enrichedString` STRING,\n" + + " \n" + + fields + + ") WITH (" + + "'format' = 'json'," + + "'lookup-request.format' = 'json'," + + "'lookup-request.format.json.fail-on-missing-field' = 'true'," + + "'connector' = 'rest-lookup'," + + "'lookup-method' = 'POST'," + + "'url' = 'http://localhost:" + + serverPort + + "/client'," + + "'http.source.lookup.header.Content-Type' = 'application/json'," + + "'asyncPolling' = 'true'" + + ")"; + + tEnv.executeSql(sourceTable); + tEnv.executeSql(lookupTable); + + // WHEN + // SQL query that performs JOIN on both tables. + String joinQuery = + "CREATE TEMPORARY VIEW lookupResult AS " + + "SELECT o.id, o.`row`, c.enrichedInt, c.enrichedString FROM Orders AS o" + + " JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c" + + " ON (\n" + + " o.`row` = c.`row`\n" + + ")"; + + tEnv.executeSql(joinQuery); + + // SQL query that performs a projection pushdown to limit the number of columns + String lastQuery = "SELECT r.id, r.enrichedInt FROM lookupResult r;"; + + TableResult result = tEnv.executeSql(lastQuery); + result.await(15, TimeUnit.SECONDS); + + // THEN + SortedSet collectedRows = getCollectedRows(result); + + collectedRows.stream().forEach(row -> assertThat(row.getArity()).isEqualTo(2)); + + assertThat(collectedRows.size()).isEqualTo(5); + } + + @Test + void testLookupJoinProjectionPushDownNested() throws Exception { + + // GIVEN + setUpServerBodyStub( + "POST", + wireMockServer, + List.of( + matchingJsonPath("$.row.aStringColumn"), + matchingJsonPath("$.row.anIntColumn"), + matchingJsonPath("$.row.aFloatColumn"))); + + String fields = + "`row` ROW<`aStringColumn` STRING, `anIntColumn` INT, `aFloatColumn` FLOAT>\n"; + + String sourceTable = + "CREATE TABLE Orders (\n" + + " 
proc_time AS PROCTIME(),\n" + + " id STRING,\n" + + fields + + ") WITH (" + + "'connector' = 'datagen'," + + "'rows-per-second' = '1'," + + "'fields.id.kind' = 'sequence'," + + "'fields.id.start' = '1'," + + "'fields.id.end' = '5'" + + ")"; + + String lookupTable = + "CREATE TABLE Customers (\n" + + " `enrichedInt` INT,\n" + + " `enrichedString` STRING,\n" + + " \n" + + fields + + ") WITH (" + + "'format' = 'json'," + + "'lookup-request.format' = 'json'," + + "'lookup-request.format.json.fail-on-missing-field' = 'true'," + + "'connector' = 'rest-lookup'," + + "'lookup-method' = 'POST'," + + "'url' = 'http://localhost:" + + serverPort + + "/client'," + + "'http.source.lookup.header.Content-Type' = 'application/json'," + + "'asyncPolling' = 'true'" + + ")"; + + tEnv.executeSql(sourceTable); + tEnv.executeSql(lookupTable); + + // WHEN + // SQL query that performs JOIN on both tables. + String joinQuery = + "CREATE TEMPORARY VIEW lookupResult AS " + + "SELECT o.id, o.`row`, c.enrichedInt, c.enrichedString FROM Orders AS o" + + " JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c" + + " ON (\n" + + " o.`row` = c.`row`\n" + + ")"; + + tEnv.executeSql(joinQuery); + + // SQL query that performs a project pushdown to take a subset of columns with nested value + String lastQuery = "SELECT r.id, r.enrichedInt, r.`row`.aStringColumn FROM lookupResult r;"; + + TableResult result = tEnv.executeSql(lastQuery); + result.await(15, TimeUnit.SECONDS); + + // THEN + SortedSet collectedRows = getCollectedRows(result); + + collectedRows.stream().forEach(row -> assertThat(row.getArity()).isEqualTo(3)); + + assertThat(collectedRows.size()).isEqualTo(5); + } + + @Test + void testLookupJoinOnRowType() throws Exception { + + // GIVEN + setUpServerBodyStub( + "POST", + wireMockServer, + List.of( + matchingJsonPath("$.row.aStringColumn"), + matchingJsonPath("$.row.anIntColumn"), + matchingJsonPath("$.row.aFloatColumn"))); + + String fields = + "`row` ROW<`aStringColumn` STRING, 
`anIntColumn` INT, `aFloatColumn` FLOAT>\n"; + + String sourceTable = + "CREATE TABLE Orders (\n" + + " proc_time AS PROCTIME(),\n" + + " id STRING,\n" + + fields + + ") WITH (" + + "'connector' = 'datagen'," + + "'rows-per-second' = '1'," + + "'fields.id.kind' = 'sequence'," + + "'fields.id.start' = '1'," + + "'fields.id.end' = '5'" + + ")"; + + String lookupTable = + "CREATE TABLE Customers (\n" + + " `enrichedInt` INT,\n" + + " `enrichedString` STRING,\n" + + " \n" + + fields + + ") WITH (" + + "'format' = 'json'," + + "'lookup-request.format' = 'json'," + + "'lookup-request.format.json.fail-on-missing-field' = 'true'," + + "'connector' = 'rest-lookup'," + + "'lookup-method' = 'POST'," + + "'url' = 'http://localhost:" + + serverPort + + "/client'," + + "'http.source.lookup.header.Content-Type' = 'application/json'," + + "'asyncPolling' = 'true'" + + ")"; + + tEnv.executeSql(sourceTable); + tEnv.executeSql(lookupTable); + + // WHEN + // SQL query that performs JOIN on both tables. + String joinQuery = + "SELECT o.id, o.`row`, c.enrichedInt, c.enrichedString FROM Orders AS o" + + " JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c" + + " ON (\n" + + " o.`row` = c.`row`\n" + + ")"; + + TableResult result = tEnv.executeSql(joinQuery); + result.await(15, TimeUnit.SECONDS); + + // THEN + SortedSet collectedRows = getCollectedRows(result); + + // TODO add assert on values + assertThat(collectedRows.size()).isEqualTo(5); + } + + @Test + void testLookupJoinOnRowTypeAndRootColumn() throws Exception { + + // GIVEN + setUpServerBodyStub( + "POST", + wireMockServer, + List.of( + matchingJsonPath("$.enrichedString"), + matchingJsonPath("$.row.aStringColumn"), + matchingJsonPath("$.row.anIntColumn"), + matchingJsonPath("$.row.aFloatColumn"))); + + String fields = + "`row` ROW<`aStringColumn` STRING, `anIntColumn` INT, `aFloatColumn` FLOAT>\n"; + + String sourceTable = + "CREATE TABLE Orders (\n" + + " proc_time AS PROCTIME(),\n" + + " id STRING,\n" + + fields + + ") WITH 
(" + + "'connector' = 'datagen'," + + "'rows-per-second' = '1'," + + "'fields.id.kind' = 'sequence'," + + "'fields.id.start' = '1'," + + "'fields.id.end' = '5'" + + ")"; + + String lookupTable = + "CREATE TABLE Customers (\n" + + " `enrichedInt` INT,\n" + + " `enrichedString` STRING,\n" + + " \n" + + fields + + ") WITH (" + + "'format' = 'json'," + + "'lookup-request.format' = 'json'," + + "'lookup-request.format.json.fail-on-missing-field' = 'true'," + + "'connector' = 'rest-lookup'," + + "'lookup-method' = 'POST'," + + "'url' = 'http://localhost:" + + serverPort + + "/client'," + + "'http.source.lookup.header.Content-Type' = 'application/json'," + + "'asyncPolling' = 'true'" + + ")"; + + tEnv.executeSql(sourceTable); + tEnv.executeSql(lookupTable); + + // WHEN + // SQL query that performs JOIN on both tables. + String joinQuery = + "SELECT o.id, o.`row`, c.enrichedInt, c.enrichedString FROM Orders AS o" + + " JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c" + + " ON (\n" + + " o.id = c.enrichedString AND\n" + + " o.`row` = c.`row`\n" + + ")"; + + TableResult result = tEnv.executeSql(joinQuery); + result.await(15, TimeUnit.SECONDS); + + // THEN + SortedSet collectedRows = getCollectedRows(result); + + // TODO add assert on values + assertThat(collectedRows.size()).isEqualTo(5); + } + + @Test + void testLookupJoinOnRowWithRowType() throws Exception { + testLookupJoinOnRowWithRowTypeImpl(); + } + + @ParameterizedTest + @CsvSource({ + "user:password, Basic dXNlcjpwYXNzd29yZA==, false", + "Basic dXNlcjpwYXNzd29yZA==, Basic dXNlcjpwYXNzd29yZA==, false", + "abc123, abc123, true", + "Basic dXNlcjpwYXNzd29yZA==, Basic dXNlcjpwYXNzd29yZA==, true", + "Bearer dXNlcjpwYXNzd29yZA==, Bearer dXNlcjpwYXNzd29yZA==, true" + }) + void testLookupWithUseRawAuthHeader( + String authHeaderRawValue, String expectedAuthHeaderValue, boolean useRawAuthHeader) + throws Exception { + + // Test with http.source.lookup.use-raw-authorization-header set to either + // true or false, and 
asserting Authorization header is processed as expected, either with + // transformation for Basic Auth, or kept as-is when it is not used for Basic Auth. + testLookupJoinOnRowWithRowTypeImpl( + authHeaderRawValue, expectedAuthHeaderValue, useRawAuthHeader); + } + + private void testLookupJoinOnRowWithRowTypeImpl() throws Exception { + testLookupJoinOnRowWithRowTypeImpl(null, null, false); + } + + private void testLookupJoinOnRowWithRowTypeImpl( + String authHeaderRawValue, String expectedAuthHeaderValue, boolean useRawAuthHeader) + throws Exception { + + // GIVEN + setUpServerBodyStub( + "POST", + wireMockServer, + List.of( + matchingJsonPath("$.nestedRow.aStringColumn"), + matchingJsonPath("$.nestedRow.anIntColumn"), + matchingJsonPath("$.nestedRow.aRow.anotherStringColumn"), + matchingJsonPath("$.nestedRow.aRow.anotherIntColumn")), + // For testing the http.source.lookup.use-raw-authorization-header + // configuration parameter: + expectedAuthHeaderValue != null ? "Authorization" : null, + expectedAuthHeaderValue // expected value of extra header + ); + + String fields = + " `nestedRow` ROW<" + + " `aStringColumn` STRING," + + " `anIntColumn` INT," + + " `aRow` ROW<`anotherStringColumn` STRING, `anotherIntColumn` INT>" + + " >\n"; + + String sourceTable = + "CREATE TABLE Orders (\n" + + " proc_time AS PROCTIME(),\n" + + " id STRING,\n" + + fields + + ") WITH (" + + "'connector' = 'datagen'," + + "'rows-per-second' = '1'," + + "'fields.id.kind' = 'sequence'," + + "'fields.id.start' = '1'," + + "'fields.id.end' = '5'" + + ")"; + + String useRawAuthHeaderString = useRawAuthHeader ? 
"'true'" : "'false'"; + + String lookupTable = + "CREATE TABLE Customers (\n" + + " `enrichedInt` INT,\n" + + " `enrichedString` STRING,\n" + + " \n" + + fields + + ") WITH (" + + "'format' = 'json'," + + "'connector' = 'rest-lookup'," + + "'lookup-method' = 'POST'," + + "'url' = 'http://localhost:" + + serverPort + + "/client'," + + "'http.source.lookup.header.Content-Type' = 'application/json'," + + (authHeaderRawValue != null + ? ("'http.source.lookup.use-raw-authorization-header' = " + + useRawAuthHeaderString + + "," + + "'http.source.lookup.header.Authorization' = '" + + authHeaderRawValue + + "',") + : "") + + "'asyncPolling' = 'true'" + + ")"; + + tEnv.executeSql(sourceTable); + tEnv.executeSql(lookupTable); + + // SQL query that performs JOIN on both tables. + String joinQuery = + "SELECT o.id, o.`nestedRow`, c.enrichedInt, c.enrichedString FROM Orders AS o" + + " JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c" + + " ON (\n" + + " o.`nestedRow` = c.`nestedRow`\n" + + ")"; + + TableResult result = tEnv.executeSql(joinQuery); + result.await(15, TimeUnit.SECONDS); + + // THEN + SortedSet collectedRows = getCollectedRows(result); + + // TODO add assert on values + assertThat(collectedRows.size()).isEqualTo(5); + } + + @Test + void testNestedLookupJoinWithoutCast() throws Exception { + + // TODO ADD MORE ASSERTS + // GIVEN + setUpServerBodyStub( + "POST", + wireMockServer, + List.of( + matchingJsonPath("$.bool"), + matchingJsonPath("$.tinyint"), + matchingJsonPath("$.smallint"), + matchingJsonPath("$.map"), + matchingJsonPath("$.doubles"), + matchingJsonPath("$.multiSet"), + matchingJsonPath("$.time"), + matchingJsonPath("$.map2map"))); + + String fields = + " `bool` BOOLEAN,\n" + + " `tinyint` TINYINT,\n" + + " `smallint` SMALLINT,\n" + + " `idInt` INT,\n" + + " `bigint` BIGINT,\n" + + " `float` FLOAT,\n" + + " `name` STRING,\n" + + " `decimal` DECIMAL(9, 6),\n" + + " `doubles` ARRAY,\n" + + " `date` DATE,\n" + + " `time` TIME(0),\n" + + " `timestamp3` 
TIMESTAMP(3),\n" + + " `timestamp9` TIMESTAMP(9),\n" + + " `timestampWithLocalZone` TIMESTAMP_LTZ(9),\n" + + " `map` MAP,\n" + + " `multiSet` MULTISET,\n" + + " `map2map` MAP>,\n" + + " `row` ROW<`aStringColumn` STRING, `anIntColumn` INT, `aFloatColumn` FLOAT>,\n" + + " `nestedRow` ROW<" + + " `aStringColumn` STRING," + + " `anIntColumn` INT," + + " `aRow` ROW<`anotherStringColumn` STRING, `anotherIntColumn` INT>" + + " >,\n" + + " `aTable` ARRAY>\n"; + + String sourceTable = + "CREATE TABLE Orders (\n" + + "id STRING," + + " proc_time AS PROCTIME(),\n" + + fields + + ") WITH (" + + "'connector' = 'datagen'," + + "'rows-per-second' = '1'," + + "'fields.id.kind' = 'sequence'," + + "'fields.id.start' = '1'," + + "'fields.id.end' = '5'" + + ")"; + + String lookupTable = + "CREATE TABLE Customers (\n" + + " `enrichedInt` INT,\n" + + " `enrichedString` STRING,\n" + + " \n" + + fields + + ") WITH (" + + "'format' = 'json'," + + "'lookup-request.format' = 'json'," + + "'lookup-request.format.json.fail-on-missing-field' = 'true'," + + "'lookup-method' = 'POST'," + + "'connector' = 'rest-lookup'," + + "'url' = 'http://localhost:" + + serverPort + + "/client'," + + "'http.source.lookup.header.Content-Type' = 'application/json'," + + "'asyncPolling' = 'true'" + + ")"; + + tEnv.executeSql(sourceTable); + tEnv.executeSql(lookupTable); + + // SQL query that performs JOIN on both tables. 
+ String joinQuery = + "SELECT o.id, o.name, c.enrichedInt, c.enrichedString FROM Orders AS o" + + " JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c" + + " ON (\n" + + " o.`bool` = c.`bool` AND\n" + + " o.`tinyint` = c.`tinyint` AND\n" + + " o.`smallint` = c.`smallint` AND\n" + + " o.idInt = c.idInt AND\n" + + " o.`bigint` = c.`bigint` AND\n" + + " o.`float` = c.`float` AND\n" + + " o.name = c.name AND\n" + + " o.`decimal` = c.`decimal` AND\n" + + " o.doubles = c.doubles AND\n" + + " o.`date` = c.`date` AND\n" + + " o.`time` = c.`time` AND\n" + + " o.timestamp3 = c.timestamp3 AND\n" + + " o.timestamp9 = c.timestamp9 AND\n" + + " o.timestampWithLocalZone = c.timestampWithLocalZone AND\n" + + " o.`map` = c.`map` AND\n" + + " o.`multiSet` = c.`multiSet` AND\n" + + " o.map2map = c.map2map AND\n" + + " o.`row` = c.`row` AND\n" + + " o.nestedRow = c.nestedRow AND\n" + + " o.aTable = c.aTable\n" + + ")"; + + TableResult result = tEnv.executeSql(joinQuery); + result.await(15, TimeUnit.SECONDS); + + // THEN + SortedSet collectedRows = getCollectedRows(result); + + // TODO add assert on values + assertThat(collectedRows.size()).isEqualTo(5); + } + + @ParameterizedTest + @ValueSource(booleans = {true, false}) + void testHttpLookupJoinWithCache(boolean isAsync) throws Exception { + // GIVEN + LookupCacheManager.keepCacheOnRelease(true); + + setupServerStub(wireMockServer); + + String lookupTable = + "CREATE TABLE Customers (" + + "id STRING," + + "id2 STRING," + + "msg STRING," + + "uuid STRING," + + "details ROW<" + + "isActive BOOLEAN," + + "nestedDetails ROW<" + + "balance STRING" + + ">" + + ">" + + ") WITH (" + + "'format' = 'json'," + + "'connector' = 'rest-lookup'," + + "'lookup-method' = 'GET'," + + "'url' = 'http://localhost:" + + serverPort + + "/client'," + + "'http.source.lookup.header.Content-Type' = 'application/json'," + + (isAsync ? 
"'asyncPolling' = 'true'," : "") + + "'lookup.cache' = 'partial'," + + "'lookup.partial-cache.max-rows' = '100'" + + ")"; + + // WHEN + SortedSet rows = testLookupJoin(lookupTable, 4); + + // THEN + try { + assertEnrichedRows(rows); + + LookupCacheAssert.assertThat(getCache()) + .hasSize(4) + .containsKey( + GenericRowData.of( + BinaryStringData.fromString("3"), + BinaryStringData.fromString("4"))) + .containsKey( + GenericRowData.of( + BinaryStringData.fromString("4"), + BinaryStringData.fromString("5"))) + .containsKey( + GenericRowData.of( + BinaryStringData.fromString("1"), + BinaryStringData.fromString("2"))) + .containsKey( + GenericRowData.of( + BinaryStringData.fromString("2"), + BinaryStringData.fromString("3"))); + } finally { + LookupCacheManager.getInstance().checkAllReleased(); + LookupCacheManager.getInstance().clear(); + LookupCacheManager.keepCacheOnRelease(false); + } + } + + private LookupCache getCache() { + Map managedCaches = + LookupCacheManager.getInstance().getManagedCaches(); + assertThat(managedCaches).as("There should be only 1 shared cache registered").hasSize(1); + return managedCaches.get(managedCaches.keySet().iterator().next()).getCache(); + } + + private SortedSet testLookupJoin(String lookupTable, int maxRows) throws Exception { + + String sourceTable = + "CREATE TABLE Orders (" + + "id STRING," + + " id2 STRING," + + " proc_time AS PROCTIME()" + + ") WITH (" + + "'connector' = 'datagen'," + + "'rows-per-second' = '1'," + + "'fields.id.kind' = 'sequence'," + + "'fields.id.start' = '1'," + + "'fields.id.end' = '" + + maxRows + + "'," + + "'fields.id2.kind' = 'sequence'," + + "'fields.id2.start' = '2'," + + "'fields.id2.end' = '" + + (maxRows + 1) + + "'" + + ")"; + + tEnv.executeSql(sourceTable); + tEnv.executeSql(lookupTable); + + // WHEN + // SQL query that performs JOIN on both tables. 
+ String joinQuery = + "SELECT o.id, o.id2, c.msg, c.uuid, c.isActive, c.balance FROM Orders AS o " + "JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c " + "ON o.id = c.id " + "AND o.id2 = c.id2"; + + TableResult result = tEnv.executeSql(joinQuery); + result.await(15, TimeUnit.SECONDS); + + // THEN + return getCollectedRows(result); + } + + private void assertEnrichedRows(Collection collectedRows) { + // validate every row and its columns. + assertAll( + () -> { + assertThat(collectedRows.size()).isEqualTo(4); + int intElement = 0; + for (Row row : collectedRows) { + intElement++; + assertThat(row.getArity()).isEqualTo(6); + + // "id" and "id2" columns should be different for every row. + assertThat(row.getField("id")).isEqualTo(String.valueOf(intElement)); + assertThat(row.getField("id2")).isEqualTo(String.valueOf(intElement + 1)); + + assertThat(row.getField("uuid")) + .isEqualTo("fbb68a46-80a9-46da-9d40-314b5287079c"); + assertThat(row.getField("isActive")).isEqualTo(true); + assertThat(row.getField("balance")).isEqualTo("$1,729.34"); + } + }); + } + + private SortedSet getCollectedRows(TableResult result) throws Exception { + + // We want to sort the result by "id" to make validation easier. 
+ SortedSet collectedRows = new TreeSet<>(ROW_COMPARATOR); + try (CloseableIterator joinResult = result.collect()) { + while (joinResult.hasNext()) { + Row row = joinResult.next(); + log.info("Collected row " + row); + collectedRows.add(row); + } + } + return collectedRows; + } + + private void setupServerStub(WireMockServer wireMockServer) { + StubMapping stubMapping = + wireMockServer.stubFor( + get(urlPathEqualTo(ENDPOINT)) + .withHeader("Content-Type", equalTo("application/json")) + .withQueryParam("id", matching("[0-9]+")) + .withQueryParam("id2", matching("[0-9]+")) + .willReturn(aResponse().withTransformers(JsonTransform.NAME))); + + wireMockServer.addStubMapping(stubMapping); + } + + private void setupServerStubEmptyResponse(WireMockServer wireMockServer) { + StubMapping stubMapping = + wireMockServer.stubFor( + get(urlPathEqualTo(ENDPOINT)) + .withHeader("Content-Type", equalTo("application/json")) + .withQueryParam("id", matching("[0-9]+")) + .withQueryParam("id2", matching("[0-9]+")) + .willReturn(aResponse().withBody(new byte[0]))); + + wireMockServer.addStubMapping(stubMapping); + } + + private void setUpServerBodyStub( + String methodName, + WireMockServer wireMockServer, + List matchingJsonPaths) { + setUpServerBodyStub(methodName, wireMockServer, matchingJsonPaths, null, null); + } + + private void setUpServerBodyStub( + String methodName, + WireMockServer wireMockServer, + List matchingJsonPaths, + String extraHeader, + String expectedExtraHeaderValue) { + + MappingBuilder methodStub = + (methodName.equalsIgnoreCase("PUT") + ? put(urlEqualTo(ENDPOINT)) + : post(urlEqualTo(ENDPOINT))); + + methodStub.withHeader("Content-Type", equalTo("application/json")); + + if (extraHeader != null && expectedExtraHeaderValue != null) { + methodStub.withHeader(extraHeader, equalTo(expectedExtraHeaderValue)); + } + + // TODO think about writing custom matcher that will check node values against regexp + // or real values. Currently we check only if JsonPath exists. 
Also, we should check if + // there are no extra fields. + for (StringValuePattern pattern : matchingJsonPaths) { + methodStub.withRequestBody(pattern); + } + + methodStub.willReturn(aResponse().withTransformers(JsonTransform.NAME)); + + StubMapping stubMapping = wireMockServer.stubFor(methodStub); + + wireMockServer.addStubMapping(stubMapping); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceTest.java new file mode 100644 index 00000000..8b4451af --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/HttpLookupTableSourceTest.java @@ -0,0 +1,258 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.configuration.ConfigOptions; +import org.apache.flink.connector.http.WireMockServerPortAllocator; +import org.apache.flink.metrics.groups.CacheMetricGroup; +import org.apache.flink.table.api.DataTypes; +import org.apache.flink.table.catalog.Column; +import org.apache.flink.table.catalog.ResolvedSchema; +import org.apache.flink.table.catalog.UniqueConstraint; +import org.apache.flink.table.connector.source.LookupTableSource; +import org.apache.flink.table.connector.source.lookup.AsyncLookupFunctionProvider; +import org.apache.flink.table.connector.source.lookup.LookupFunctionProvider; +import org.apache.flink.table.connector.source.lookup.PartialCachingAsyncLookupProvider; +import org.apache.flink.table.connector.source.lookup.PartialCachingLookupProvider; +import org.apache.flink.table.connector.source.lookup.cache.LookupCache; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.runtime.connector.source.LookupRuntimeProviderContext; +import org.apache.flink.table.types.DataType; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import javax.annotation.Nullable; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.apache.flink.connector.http.table.lookup.HttpLookupTableSourceFactory.row; +import static org.apache.flink.table.factories.utils.FactoryMocks.createTableSource; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** Test for {@link HttpLookupTableSource}. 
*/ +class HttpLookupTableSourceTest { + + public static final DataType PHYSICAL_ROW_DATA_TYPE = + row(List.of(DataTypes.FIELD("id", DataTypes.STRING().notNull()))); + + private static final ResolvedSchema SCHEMA = + new ResolvedSchema( + Arrays.asList( + Column.physical("id", DataTypes.STRING().notNull()), + Column.physical("msg", DataTypes.STRING().notNull()), + Column.physical("uuid", DataTypes.STRING().notNull()), + Column.physical( + "details", + DataTypes.ROW( + DataTypes.FIELD( + "isActive", DataTypes.BOOLEAN()), + DataTypes.FIELD( + "nestedDetails", + DataTypes.ROW( + DataTypes.FIELD( + "balance", + DataTypes.STRING())))) + .notNull())), + Collections.emptyList(), + UniqueConstraint.primaryKey("id", List.of("id"))); + + // lookupKey index {{0}} means first column. + private final int[][] lookupKey = {{0}}; + + @BeforeEach + public void setUp() { + + LookupRow expectedLookupRow = new LookupRow(); + expectedLookupRow.addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "id", + RowData.createFieldGetter( + DataTypes.STRING().notNull().getLogicalType(), 0))); + expectedLookupRow.setLookupPhysicalRowDataType(PHYSICAL_ROW_DATA_TYPE); + } + + @Test + @SuppressWarnings("unchecked") + void shouldCreateTableSourceWithParams() { + HttpLookupTableSource tableSource = + (HttpLookupTableSource) createTableSource(SCHEMA, getOptions()); + + LookupTableSource.LookupRuntimeProvider lookupProvider = + tableSource.getLookupRuntimeProvider(new LookupRuntimeProviderContext(lookupKey)); + HttpTableLookupFunction tableFunction = + (HttpTableLookupFunction) + ((LookupFunctionProvider) lookupProvider).createLookupFunction(); + + LookupRow actualLookupRow = tableFunction.getLookupRow(); + assertThat(actualLookupRow).isNotNull(); + assertThat(actualLookupRow.getLookupEntries()).isNotEmpty(); + assertThat(actualLookupRow.getLookupPhysicalRowDataType()) + .isEqualTo(PHYSICAL_ROW_DATA_TYPE); + + HttpLookupConfig actualLookupConfig = tableFunction.getOptions(); + 
assertThat(actualLookupConfig).isNotNull(); + assertThat( + actualLookupConfig + .getReadableConfig() + .get(ConfigOptions.key("connector").stringType().noDefaultValue())) + .withFailMessage( + "Readable config probably was not passed from Table Factory or it is empty.") + .isNotNull(); + } + + @Test + @SuppressWarnings("unchecked") + void shouldCreateAsyncTableSourceWithParams() { + Map options = getOptionsWithAsync(); + + HttpLookupTableSource tableSource = + (HttpLookupTableSource) createTableSource(SCHEMA, options); + + AsyncLookupFunctionProvider lookupProvider = + (AsyncLookupFunctionProvider) + tableSource.getLookupRuntimeProvider( + new LookupRuntimeProviderContext(lookupKey)); + + AsyncHttpTableLookupFunction tableFunction = + (AsyncHttpTableLookupFunction) lookupProvider.createAsyncLookupFunction(); + + LookupRow actualLookupRow = tableFunction.getLookupRow(); + assertThat(actualLookupRow).isNotNull(); + assertThat(actualLookupRow.getLookupEntries()).isNotEmpty(); + assertThat(actualLookupRow.getLookupPhysicalRowDataType()) + .isEqualTo(PHYSICAL_ROW_DATA_TYPE); + + HttpLookupConfig actualLookupConfig = tableFunction.getOptions(); + assertThat(actualLookupConfig).isNotNull(); + assertThat(actualLookupConfig.isUseAsync()).isTrue(); + assertThat( + actualLookupConfig + .getReadableConfig() + .get(HttpLookupConnectorOptions.ASYNC_POLLING)) + .withFailMessage( + "Readable config probably was not passed" + + " from Table Factory or it is empty.") + .isTrue(); + } + + @ParameterizedTest + @MethodSource("configProvider") + void testGetLookupRuntimeProvider(TestSpec testSpec) { + LookupCache cache = + new LookupCache() { + @Override + public void open(CacheMetricGroup cacheMetricGroup) {} + + @Nullable + @Override + public Collection getIfPresent(RowData rowData) { + return null; + } + + @Override + public Collection put( + RowData rowData, Collection collection) { + return null; + } + + @Override + public void invalidate(RowData rowData) {} + + @Override + public 
long size() { + return 0; + } + + @Override + public void close() throws Exception {} + }; + + HttpLookupConfig options = HttpLookupConfig.builder().useAsync(testSpec.isAsync).build(); + LookupTableSource.LookupRuntimeProvider lookupRuntimeProvider = + getLookupRuntimeProvider(testSpec.hasCache ? cache : null, options); + assertTrue(testSpec.expected.isInstance(lookupRuntimeProvider)); + } + + private static class TestSpec { + + boolean hasCache; + boolean isAsync; + + Class expected; + + private TestSpec(boolean hasCache, boolean isAsync, Class expected) { + this.hasCache = hasCache; + this.isAsync = isAsync; + this.expected = expected; + } + + @Override + public String toString() { + return "TestSpec{" + + "hasCache=" + + hasCache + + ", isAsync=" + + isAsync + + ", expected=" + + expected + + '}'; + } + } + + static Collection configProvider() { + return List.of( + new TestSpec(false, false, LookupFunctionProvider.class), + new TestSpec(true, false, PartialCachingLookupProvider.class), + new TestSpec(false, true, AsyncLookupFunctionProvider.class), + new TestSpec(true, true, PartialCachingAsyncLookupProvider.class)); + } + + private static LookupTableSource.LookupRuntimeProvider getLookupRuntimeProvider( + LookupCache cache, HttpLookupConfig options) { + HttpLookupTableSource tableSource = + new HttpLookupTableSource(null, options, null, null, cache); + int[][] lookupKeys = {{1, 2}}; + LookupTableSource.LookupContext lookupContext = + new LookupRuntimeProviderContext(lookupKeys); + return tableSource.getLookupRuntimeProvider(null, null, null); + } + + private Map getOptionsWithAsync() { + Map options = getOptions(); + options = new HashMap<>(options); + options.put("asyncPolling", "true"); + return options; + } + + private Map getOptions() { + return Map.of( + "connector", "rest-lookup", + "url", "http://localhost:" + WireMockServerPortAllocator.PORT_BASE + "/service", + "format", "json"); + } +} diff --git 
a/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientConnectionTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientConnectionTest.java similarity index 67% rename from src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientConnectionTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientConnectionTest.java index 59fbb265..ba1fa0c5 100644 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientConnectionTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientConnectionTest.java @@ -1,18 +1,32 @@ -package com.getindata.connectors.http.internal.table.lookup; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup; -import java.util.Collection; -import java.util.Iterator; -import java.util.List; -import java.util.Properties; -import java.util.stream.Stream; - -import com.github.tomakehurst.wiremock.WireMockServer; -import com.github.tomakehurst.wiremock.client.MappingBuilder; -import com.github.tomakehurst.wiremock.matching.RequestPatternBuilder; -import com.github.tomakehurst.wiremock.stubbing.StubMapping; import org.apache.flink.api.common.serialization.DeserializationSchema; import org.apache.flink.api.common.serialization.SerializationSchema; import org.apache.flink.configuration.Configuration; +import org.apache.flink.connector.http.WireMockServerPortAllocator; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.table.lookup.querycreators.GenericGetQueryCreator; +import org.apache.flink.connector.http.table.lookup.querycreators.GenericJsonQueryCreator; +import org.apache.flink.connector.http.utils.HttpHeaderUtils; +import org.apache.flink.connector.http.utils.SerializationSchemaUtils; import org.apache.flink.formats.json.JsonFormatFactory; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.connector.source.DynamicTableSource; @@ -23,6 +37,11 @@ import org.apache.flink.table.runtime.connector.source.LookupRuntimeProviderContext; import org.apache.flink.table.types.DataType; import org.apache.flink.util.ConfigurationException; + +import com.github.tomakehurst.wiremock.WireMockServer; +import com.github.tomakehurst.wiremock.client.MappingBuilder; +import com.github.tomakehurst.wiremock.matching.RequestPatternBuilder; +import com.github.tomakehurst.wiremock.stubbing.StubMapping; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -36,6 +55,13 @@ import org.junit.jupiter.params.provider.ValueSource; import org.mockito.Mock; import 
org.mockito.junit.jupiter.MockitoExtension; + +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Properties; +import java.util.stream.Stream; + import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; import static com.github.tomakehurst.wiremock.client.WireMock.equalToJson; @@ -45,21 +71,16 @@ import static com.github.tomakehurst.wiremock.client.WireMock.put; import static com.github.tomakehurst.wiremock.client.WireMock.putRequestedFor; import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; +import static org.apache.flink.connector.http.TestHelper.readTestFile; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.RESULT_TYPE; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_HTTP_IGNORED_RESPONSE_CODES; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_HTTP_SUCCESS_CODES; +import static org.apache.flink.connector.http.table.lookup.HttpLookupTableSourceFactory.row; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.fail; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.table.lookup.querycreators.GenericGetQueryCreator; -import com.getindata.connectors.http.internal.table.lookup.querycreators.GenericJsonQueryCreator; -import com.getindata.connectors.http.internal.utils.HttpHeaderUtils; -import com.getindata.connectors.http.internal.utils.SerializationSchemaUtils; -import static com.getindata.connectors.http.TestHelper.readTestFile; -import static com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants.RESULT_TYPE; -import static 
com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_HTTP_IGNORED_RESPONSE_CODES; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_HTTP_SUCCESS_CODES; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSourceFactory.row; - +/** Test for {@link JavaNetHttpPollingClient} connection. */ @ExtendWith(MockitoExtension.class) class JavaNetHttpPollingClientConnectionTest { @@ -72,8 +93,7 @@ class JavaNetHttpPollingClientConnectionTest { private static WireMockServer wireMockServer; - @Mock - private Context dynamicTableFactoryContext; + @Mock private Context dynamicTableFactoryContext; private DynamicTableSource.Context dynamicTableSourceContext; @@ -87,9 +107,11 @@ class JavaNetHttpPollingClientConnectionTest { private DataType lookupPhysicalDataType; + public static final int SERVER_PORT = WireMockServerPortAllocator.getServerPort(); + @BeforeAll static void setUpAll() { - wireMockServer = new WireMockServer(); + wireMockServer = new WireMockServer(SERVER_PORT); wireMockServer.start(); } @@ -105,22 +127,19 @@ void setUp() { int[][] lookupKey = {{}}; this.dynamicTableSourceContext = new LookupRuntimeProviderContext(lookupKey); - this.lookupRowData = GenericRowData.of( - StringData.fromString("1"), - StringData.fromString("2") - ); + this.lookupRowData = + GenericRowData.of(StringData.fromString("1"), StringData.fromString("2")); - this.lookupPhysicalDataType = row(List.of( - DataTypes.FIELD("id", DataTypes.STRING()), - DataTypes.FIELD("uuid", DataTypes.STRING()) - ) - ); + this.lookupPhysicalDataType = + row( + List.of( + DataTypes.FIELD("id", DataTypes.STRING()), + DataTypes.FIELD("uuid", DataTypes.STRING()))); this.properties = new Properties(); this.properties.setProperty( HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_PREFIX + "Content-Type", - "application/json" - ); + "application/json"); this.properties.setProperty(RESULT_TYPE, 
"single-value"); this.configuration = new Configuration(); @@ -149,8 +168,8 @@ void shouldQuery200WithParams() throws ConfigurationException { assertThat(results).hasSize(1); RowData result = results.iterator().next(); assertThat(result.getArity()).isEqualTo(4); - assertThat(result.getString(1) - .toString()).isEqualTo("Returned HTTP message for parameter PARAM, COUNTER"); + assertThat(result.getString(1).toString()) + .isEqualTo("Returned HTTP message for parameter PARAM, COUNTER"); RowData detailsRow = result.getRow(3, 2); assertThat(detailsRow.getBoolean(0)).isEqualTo(true); @@ -165,7 +184,8 @@ void shouldQuery200WithBodyParams(String methodName) throws ConfigurationExcepti // GIVEN this.stubMapping = setUpServerBodyStub(methodName); - JavaNetHttpPollingClient pollingClient = setUpPollingClient(setUpBodyRequestFactory(methodName)); + JavaNetHttpPollingClient pollingClient = + setUpPollingClient(setUpBodyRequestFactory(methodName)); // WHEN Collection results = pollingClient.pull(lookupRowData); @@ -183,8 +203,8 @@ void shouldQuery200WithBodyParams(String methodName) throws ConfigurationExcepti assertThat(results).hasSize(1); RowData result = results.iterator().next(); assertThat(result.getArity()).isEqualTo(4); - assertThat(result.getString(1) - .toString()).isEqualTo("Returned HTTP message for parameter PARAM, COUNTER"); + assertThat(result.getString(1).toString()) + .isEqualTo("Returned HTTP message for parameter PARAM, COUNTER"); RowData detailsRow = result.getRow(3, 2); assertThat(detailsRow.getBoolean(0)).isEqualTo(true); @@ -198,8 +218,7 @@ private static Stream clientErrorCodeConfig() { Arguments.of("2XX", "", false), Arguments.of("2XX", "201", true), Arguments.of("200,201,202", "202", false), - Arguments.of("200,201", "202", false) - ); + Arguments.of("200,201", "202", false)); } @Test @@ -268,13 +287,14 @@ void shouldQuery200WithArrayResultWithNulls() throws ConfigurationException { void shouldHandleCodeBasedOnConfiguration( String 
successCodesExpression, String ignoredResponseCodesExpression, - boolean isExpectedResponseEmpty - ) throws ConfigurationException { + boolean isExpectedResponseEmpty) + throws ConfigurationException { // GIVEN this.stubMapping = setUpServerStub(201); configuration.setString(SOURCE_LOOKUP_HTTP_SUCCESS_CODES, successCodesExpression); - configuration.setString(SOURCE_LOOKUP_HTTP_IGNORED_RESPONSE_CODES, ignoredResponseCodesExpression); + configuration.setString( + SOURCE_LOOKUP_HTTP_IGNORED_RESPONSE_CODES, ignoredResponseCodesExpression); JavaNetHttpPollingClient pollingClient = setUpPollingClient(); // WHEN @@ -314,21 +334,20 @@ void shouldProcessWithMissingArguments() throws ConfigurationException { "Basic dXNlcjpwYXNzd29yZA==, false", "Basic dXNlcjpwYXNzd29yZA==, true" }) - public void shouldConnectWithBasicAuth(String authorizationHeaderValue, - boolean useRawAuthHeader) throws ConfigurationException { + public void shouldConnectWithBasicAuth( + String authorizationHeaderValue, boolean useRawAuthHeader) + throws ConfigurationException { // GIVEN this.stubMapping = setupServerStubForBasicAuth(); properties.setProperty( HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_PREFIX + "Authorization", - authorizationHeaderValue - ); + authorizationHeaderValue); properties.setProperty( HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_USE_RAW, - Boolean.toString(useRawAuthHeader) - ); + Boolean.toString(useRawAuthHeader)); JavaNetHttpPollingClient pollingClient = setUpPollingClient(); @@ -341,8 +360,8 @@ public void shouldConnectWithBasicAuth(String authorizationHeaderValue, assertThat(results).hasSize(1); RowData result = results.iterator().next(); assertThat(result.getArity()).isEqualTo(4); - assertThat(result.getString(1) - .toString()).isEqualTo("Returned HTTP message for parameter PARAM, COUNTER"); + assertThat(result.getString(1).toString()) + .isEqualTo("Returned HTTP message for parameter PARAM, COUNTER"); RowData detailsRow = result.getRow(3, 2); 
assertThat(detailsRow.getBoolean(0)).isEqualTo(true); @@ -360,22 +379,25 @@ public JavaNetHttpPollingClient setUpPollingClient() throws ConfigurationExcepti } private GetRequestFactory setUpGetRequestFactory() { - LookupRow lookupRow = new LookupRow() - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry("id", - RowData.createFieldGetter( - DataTypes.STRING().getLogicalType(), - 0))) - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry("uuid", - RowData.createFieldGetter( - DataTypes.STRING().getLogicalType(), - 1)) - ); + LookupRow lookupRow = + new LookupRow() + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "id", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "uuid", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 1))); lookupRow.setLookupPhysicalRowDataType(lookupPhysicalDataType); - boolean useRawAuthHeader = Boolean.parseBoolean( - (String) properties.get(HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_USE_RAW)); + boolean useRawAuthHeader = + Boolean.parseBoolean( + (String) + properties.get( + HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_USE_RAW)); return new GetRequestFactory( new GenericGetQueryCreator(lookupRow), @@ -384,8 +406,7 @@ private GetRequestFactory setUpGetRequestFactory() { .url(getBaseUrl()) .readableConfig(configuration) .properties(properties) - .build() - ); + .build()); } private BodyBasedRequestFactory setUpBodyRequestFactory(String methodName) { @@ -395,41 +416,44 @@ private BodyBasedRequestFactory setUpBodyRequestFactory(String methodName) { .createEncodingFormat(dynamicTableFactoryContext, new Configuration()) .createRuntimeEncoder(null, lookupPhysicalDataType); - boolean useRawAuthHeader = Boolean.parseBoolean( - (String) properties.get(HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_USE_RAW)); + boolean useRawAuthHeader = + Boolean.parseBoolean( + (String) + properties.get( + 
HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_USE_RAW)); return new BodyBasedRequestFactory( methodName, new GenericJsonQueryCreator(jsonSerializer), HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor(useRawAuthHeader), + HttpLookupConfig.builder().url(getBaseUrl()).properties(properties).build()); + } + + private JavaNetHttpPollingClient setUpPollingClient(HttpRequestFactory requestFactory) + throws ConfigurationException { + + HttpLookupConfig lookupConfig = HttpLookupConfig.builder() .url(getBaseUrl()) + .readableConfig(configuration) .properties(properties) - .build() - ); - } + .httpPostRequestCallback(new Slf4JHttpLookupPostRequestCallback()) + .build(); - private JavaNetHttpPollingClient setUpPollingClient( - HttpRequestFactory requestFactory) throws ConfigurationException { - - HttpLookupConfig lookupConfig = HttpLookupConfig.builder() - .url(getBaseUrl()) - .readableConfig(configuration) - .properties(properties) - .httpPostRequestCallback(new Slf4JHttpLookupPostRequestCallback()) - .build(); - - DataType physicalDataType = DataTypes.ROW( - DataTypes.FIELD("id", DataTypes.STRING()), - DataTypes.FIELD("msg", DataTypes.STRING()), - DataTypes.FIELD("uuid", DataTypes.STRING()), - DataTypes.FIELD("details", DataTypes.ROW( - DataTypes.FIELD("isActive", DataTypes.BOOLEAN()), - DataTypes.FIELD("nestedDetails", DataTypes.ROW( - DataTypes.FIELD("balance", DataTypes.STRING()) - )) - )) - ); + DataType physicalDataType = + DataTypes.ROW( + DataTypes.FIELD("id", DataTypes.STRING()), + DataTypes.FIELD("msg", DataTypes.STRING()), + DataTypes.FIELD("uuid", DataTypes.STRING()), + DataTypes.FIELD( + "details", + DataTypes.ROW( + DataTypes.FIELD("isActive", DataTypes.BOOLEAN()), + DataTypes.FIELD( + "nestedDetails", + DataTypes.ROW( + DataTypes.FIELD( + "balance", DataTypes.STRING())))))); DeserializationSchema schemaDecoder = new JsonFormatFactory() @@ -457,13 +481,15 @@ private StubMapping setUpServerStub(int status) { .willReturn( aResponse() 
.withStatus(status) - .withBody(readTestFile(SAMPLES_FOLDER + "HttpResult.json")))); + .withBody( + readTestFile(SAMPLES_FOLDER + "HttpResult.json")))); } private StubMapping setUpServerBodyStub(String methodName) { - MappingBuilder methodStub = (methodName.equalsIgnoreCase("PUT") ? - put(urlEqualTo(ENDPOINT)) : - post(urlEqualTo(ENDPOINT))); + MappingBuilder methodStub = + (methodName.equalsIgnoreCase("PUT") + ? put(urlEqualTo(ENDPOINT)) + : post(urlEqualTo(ENDPOINT))); return wireMockServer.stubFor( methodStub @@ -472,7 +498,8 @@ private StubMapping setUpServerBodyStub(String methodName) { .willReturn( aResponse() .withStatus(200) - .withBody(readTestFile(SAMPLES_FOLDER + "HttpResult.json")))); + .withBody( + readTestFile(SAMPLES_FOLDER + "HttpResult.json")))); } private StubMapping setUpServerStubArrayResult(int status) { @@ -482,7 +509,10 @@ private StubMapping setUpServerStubArrayResult(int status) { .willReturn( aResponse() .withStatus(status) - .withBody(readTestFile(SAMPLES_FOLDER_ARRAY_RESULT + "HttpResult.json")))); + .withBody( + readTestFile( + SAMPLES_FOLDER_ARRAY_RESULT + + "HttpResult.json")))); } private StubMapping setUpServerStubArrayResultWithNulls(int status) { @@ -492,17 +522,21 @@ private StubMapping setUpServerStubArrayResultWithNulls(int status) { .willReturn( aResponse() .withStatus(status) - .withBody(readTestFile( - SAMPLES_FOLDER_ARRAY_RESULT_WITH_NULLS + "HttpResult.json")))); + .withBody( + readTestFile( + SAMPLES_FOLDER_ARRAY_RESULT_WITH_NULLS + + "HttpResult.json")))); } private StubMapping setupServerStubForBasicAuth() { - return wireMockServer.stubFor(get(urlEqualTo(ENDPOINT + "?id=1&uuid=2")) - .withHeader("Content-Type", equalTo("application/json")) - .withBasicAuth("user", "password") - .willReturn( - aResponse() - .withStatus(200) - .withBody(readTestFile(SAMPLES_FOLDER + "HttpResult.json")))); + return wireMockServer.stubFor( + get(urlEqualTo(ENDPOINT + "?id=1&uuid=2")) + .withHeader("Content-Type", 
equalTo("application/json")) + .withBasicAuth("user", "password") + .willReturn( + aResponse() + .withStatus(200) + .withBody( + readTestFile(SAMPLES_FOLDER + "HttpResult.json")))); } } diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientFactoryTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientFactoryTest.java new file mode 100644 index 00000000..4a16a173 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientFactoryTest.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.api.common.serialization.DeserializationSchema; +import org.apache.flink.table.data.RowData; +import org.apache.flink.util.ConfigurationException; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; + +/** Test for {@link JavaNetHttpPollingClientFactory}. 
*/ +class JavaNetHttpPollingClientFactoryTest { + + private JavaNetHttpPollingClientFactory factory; + + @BeforeEach + public void setUp() { + factory = new JavaNetHttpPollingClientFactory(mock(GetRequestFactory.class)); + } + + @Test + @SuppressWarnings("unchecked") + void shouldCreateClient() throws ConfigurationException { + + assertThat( + factory.createPollClient( + HttpLookupConfig.builder().build(), + (DeserializationSchema) mock(DeserializationSchema.class))) + .isInstanceOf(JavaNetHttpPollingClient.class); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientHttpsConnectionTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientHttpsConnectionTest.java new file mode 100644 index 00000000..13ce9cd4 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientHttpsConnectionTest.java @@ -0,0 +1,363 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.api.common.serialization.DeserializationSchema; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.connector.http.HttpsConnectionTestBase; +import org.apache.flink.connector.http.WireMockServerPortAllocator; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.table.lookup.querycreators.GenericGetQueryCreator; +import org.apache.flink.connector.http.utils.HttpHeaderUtils; +import org.apache.flink.connector.http.utils.SerializationSchemaUtils; +import org.apache.flink.formats.json.JsonFormatFactory; +import org.apache.flink.table.api.DataTypes; +import org.apache.flink.table.connector.source.DynamicTableSource; +import org.apache.flink.table.data.GenericRowData; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.data.StringData; +import org.apache.flink.table.factories.DynamicTableFactory.Context; +import org.apache.flink.table.runtime.connector.source.LookupRuntimeProviderContext; +import org.apache.flink.table.types.DataType; +import org.apache.flink.util.ConfigurationException; + +import com.github.tomakehurst.wiremock.WireMockServer; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.ValueSource; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.io.File; +import java.time.Duration; +import java.util.Collection; +import java.util.List; +import java.util.Optional; +import java.util.Properties; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static 
com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options; +import static org.apache.flink.connector.http.TestHelper.readTestFile; +import static org.apache.flink.connector.http.table.lookup.HttpLookupTableSourceFactory.row; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** Test for {@link JavaNetHttpPollingClient} with https. */ +@ExtendWith(MockitoExtension.class) +public class JavaNetHttpPollingClientHttpsConnectionTest extends HttpsConnectionTestBase { + + private static final String SAMPLES_FOLDER = "/http/"; + + private static final String ENDPOINT = "/service"; + + @Mock private Context dynamicTableFactoryContext; + + private DynamicTableSource.Context dynamicTableSourceContext; + + private JavaNetHttpPollingClientFactory pollingClientFactory; + + private RowData lookupRowData; + + private DataType lookupPhysicalDataType; + + public static int httpServerPort; + + @BeforeEach + public void setUp() { + super.setUp(); + httpServerPort = WireMockServerPortAllocator.getSecureServerPort(); + int[][] lookupKey = {{0, 1}}; + this.dynamicTableSourceContext = new LookupRuntimeProviderContext(lookupKey); + + this.lookupRowData = + GenericRowData.of(StringData.fromString("1"), StringData.fromString("2")); + + this.lookupPhysicalDataType = + row( + List.of( + DataTypes.FIELD("id", DataTypes.STRING()), + DataTypes.FIELD("uuid", DataTypes.STRING()))); + } + + @AfterEach + public void tearDown() { + super.tearDown(); + } + + @Test + public void testHttpsConnectionWithSelfSignedCert() throws ConfigurationException { + + File keyStoreFile = new File(SERVER_KEYSTORE_PATH); + + wireMockServer = + new WireMockServer( + options() + .httpsPort(httpServerPort) + .httpDisabled(true) + .keystorePath(keyStoreFile.getAbsolutePath()) + 
.keystorePassword("password") + .keyManagerPassword("password")); + + wireMockServer.start(); + setupServerStub(); + properties.setProperty(HttpConnectorConfigConstants.ALLOW_SELF_SIGNED, "true"); + + setupAndTestConnection(); + } + + @ParameterizedTest + @ValueSource(strings = {"ca.crt", "server.crt"}) + public void testHttpsConnectionWithAddedCerts(String certName) throws ConfigurationException { + + File keyStoreFile = new File(SERVER_KEYSTORE_PATH); + File trustedCert = new File(CERTS_PATH + certName); + + wireMockServer = + new WireMockServer( + options() + .httpsPort(httpServerPort) + .httpDisabled(true) + .keystorePath(keyStoreFile.getAbsolutePath()) + .keystorePassword("password") + .keyManagerPassword("password")); + + wireMockServer.start(); + setupServerStub(); + properties.setProperty( + HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, trustedCert.getAbsolutePath()); + setupAndTestConnection(); + } + + @ParameterizedTest + @ValueSource(strings = {"clientPrivateKey.pem", "clientPrivateKey.der"}) + public void testMTlsConnection(String clientPrivateKeyName) throws ConfigurationException { + + File keyStoreFile = new File(SERVER_KEYSTORE_PATH); + File trustStoreFile = new File(SERVER_TRUSTSTORE_PATH); + File serverTrustedCert = new File(CERTS_PATH + "ca.crt"); + + File clientCert = new File(CERTS_PATH + "client.crt"); + File clientPrivateKey = new File(CERTS_PATH + clientPrivateKeyName); + + this.wireMockServer = + new WireMockServer( + options() + .httpDisabled(true) + .httpsPort(httpServerPort) + .keystorePath(keyStoreFile.getAbsolutePath()) + .keystorePassword("password") + .keyManagerPassword("password") + .needClientAuth(true) + .trustStorePath(trustStoreFile.getAbsolutePath()) + .trustStorePassword("password")); + + wireMockServer.start(); + setupServerStub(); + properties.setProperty( + HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, + serverTrustedCert.getAbsolutePath()); + properties.setProperty( + HttpConnectorConfigConstants.CLIENT_CERT, 
clientCert.getAbsolutePath()); + properties.setProperty( + HttpConnectorConfigConstants.CLIENT_PRIVATE_KEY, + clientPrivateKey.getAbsolutePath()); + setupAndTestConnection(); + } + + @Test + public void testMTlsConnectionUsingKeyStore() throws ConfigurationException { + String password = "password"; + + String clientKeyStoreName = "client_keyStore.p12"; + String serverKeyStoreName = "serverKeyStore.jks"; + String serverTrustStoreName = "serverTrustStore.jks"; + + File clientKeyStoreFile = new File(CERTS_PATH + clientKeyStoreName); + File serverKeyStoreFile = new File(CERTS_PATH + serverKeyStoreName); + File serverTrustStoreFile = new File(CERTS_PATH + serverTrustStoreName); + File serverTrustedCert = new File(CERTS_PATH + "ca_server_bundle.cert.pem"); + + this.wireMockServer = + new WireMockServer( + options() + .httpDisabled(true) + .httpsPort(httpServerPort) + .keystorePath(serverKeyStoreFile.getAbsolutePath()) + .keystorePassword("password") + .keyManagerPassword("password") + .needClientAuth(true) + .trustStorePath(serverTrustStoreFile.getAbsolutePath()) + .trustStorePassword("password")); + + wireMockServer.start(); + setupServerStub(); + properties.setProperty(HttpConnectorConfigConstants.KEY_STORE_PASSWORD, password); + properties.setProperty( + HttpConnectorConfigConstants.KEY_STORE_PATH, clientKeyStoreFile.getAbsolutePath()); + properties.setProperty( + HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, + serverTrustedCert.getAbsolutePath()); + setupAndTestConnection(); + } + + private void setupAndTestConnection() throws ConfigurationException { + // test with basic auth + setupAndTestConnectionWithAuth( + HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor()); + // test with OIDC auth + setupAndTestConnectionWithAuth( + HttpHeaderUtils.createOIDCAuthorizationHeaderPreprocessor( + "http://abc", "aaa", Optional.of(Duration.ofSeconds(5)))); + } + + private void setupAndTestConnectionWithAuth(HeaderPreprocessor headerPreprocessor) + throws 
ConfigurationException { + setUpPollingClientFactory(wireMockServer.baseUrl(), headerPreprocessor); + testPollingClientConnection(); + } + + @ParameterizedTest + @CsvSource( + value = { + "invalid.crt, client.crt, clientPrivateKey.pem", + "ca.crt, invalid.crt, clientPrivateKey.pem", + "ca.crt, client.crt, invalid.pem" + }) + public void shouldThrowOnInvalidPath( + String serverCertName, String clientCertName, String clientKeyName) { + + File serverTrustedCert = new File(CERTS_PATH + serverCertName); + File clientCert = new File(CERTS_PATH + clientCertName); + File clientPrivateKey = new File(CERTS_PATH + clientKeyName); + + properties.setProperty( + HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, + serverTrustedCert.getAbsolutePath()); + properties.setProperty( + HttpConnectorConfigConstants.CLIENT_CERT, clientCert.getAbsolutePath()); + properties.setProperty( + HttpConnectorConfigConstants.CLIENT_PRIVATE_KEY, + clientPrivateKey.getAbsolutePath()); + + assertThrows(RuntimeException.class, () -> setUpPollingClient(properties)); + } + + private void testPollingClientConnection() throws ConfigurationException { + JavaNetHttpPollingClient pollingClient = setUpPollingClient(properties); + Collection result = pollingClient.pull(lookupRowData); + + assertResult(result); + } + + private JavaNetHttpPollingClient setUpPollingClient(Properties properties) + throws ConfigurationException { + + HttpLookupConfig lookupConfig = + HttpLookupConfig.builder() + .url("https://localhost:" + httpServerPort + ENDPOINT) + .properties(properties) + .httpPostRequestCallback(new Slf4JHttpLookupPostRequestCallback()) + .build(); + + DataType physicalDataType = + DataTypes.ROW( + DataTypes.FIELD("id", DataTypes.STRING()), + DataTypes.FIELD("msg", DataTypes.STRING()), + DataTypes.FIELD("uuid", DataTypes.STRING()), + DataTypes.FIELD( + "details", + DataTypes.ROW( + DataTypes.FIELD("isActive", DataTypes.BOOLEAN()), + DataTypes.FIELD( + "nestedDetails", + DataTypes.ROW( + DataTypes.FIELD( + 
"balance", DataTypes.STRING())))))); + + DeserializationSchema schemaDecoder = + new JsonFormatFactory() + .createDecodingFormat(dynamicTableFactoryContext, new Configuration()) + .createRuntimeDecoder(dynamicTableSourceContext, physicalDataType); + + try { + schemaDecoder.open( + SerializationSchemaUtils.createDeserializationInitContext( + JavaNetHttpPollingClientConnectionTest.class)); + } catch (Exception e) { + throw new RuntimeException("Unable to open schema decoder: " + e.getMessage(), e); + } + + return pollingClientFactory.createPollClient(lookupConfig, schemaDecoder); + } + + private void setupServerStub() { + wireMockServer.stubFor( + get(urlEqualTo("/service?id=1&uuid=2")) + .willReturn( + aResponse() + .withStatus(200) + .withBody( + readTestFile(SAMPLES_FOLDER + "HttpResult.json")))); + } + + private void setUpPollingClientFactory(String baseUrl, HeaderPreprocessor headerPreprocessor) { + + LookupRow lookupRow = + new LookupRow() + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "id", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "uuid", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 1))); + lookupRow.setLookupPhysicalRowDataType(lookupPhysicalDataType); + + GetRequestFactory requestFactory = + new GetRequestFactory( + new GenericGetQueryCreator(lookupRow), + headerPreprocessor, + HttpLookupConfig.builder().url(baseUrl + ENDPOINT).build()); + this.pollingClientFactory = new JavaNetHttpPollingClientFactory(requestFactory); + } + + private void assertResult(Collection results) { + assertThat(results).hasSize(1); + RowData result = results.iterator().next(); + assertThat(result.getArity()).isEqualTo(4); + assertThat(result.getString(1).toString()) + .isEqualTo("Returned HTTP message for parameter PARAM, COUNTER"); + + RowData detailsRow = result.getRow(3, 2); + assertThat(detailsRow.getBoolean(0)).isEqualTo(true); + + RowData 
nestedDetailsRow = detailsRow.getRow(1, 1); + assertThat(nestedDetailsRow.getString(0).toString()).isEqualTo("$1,729.34"); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientTest.java new file mode 100644 index 00000000..9eb7cf64 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientTest.java @@ -0,0 +1,237 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.api.common.serialization.DeserializationSchema; +import org.apache.flink.api.common.serialization.SerializationSchema; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.connector.http.config.HttpConnectorConfigConstants; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.table.lookup.querycreators.GenericGetQueryCreator; +import org.apache.flink.connector.http.table.lookup.querycreators.GenericJsonQueryCreator; +import org.apache.flink.connector.http.utils.HttpHeaderUtils; +import org.apache.flink.formats.json.JsonFormatFactory; +import org.apache.flink.table.api.DataTypes; +import org.apache.flink.table.data.GenericRowData; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.data.StringData; +import org.apache.flink.table.factories.DynamicTableFactory; +import org.apache.flink.table.types.DataType; +import org.apache.flink.util.ConfigurationException; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import static org.apache.flink.connector.http.TestHelper.assertPropertyArray; +import static org.apache.flink.connector.http.table.lookup.HttpLookupTableSourceFactory.row; +import static org.assertj.core.api.Assertions.assertThat; + +/** Test for {@link JavaNetHttpPollingClient}. 
*/ +@ExtendWith(MockitoExtension.class) +public class JavaNetHttpPollingClientTest { + + @Mock private HttpClient httpClient; + + @Mock private DeserializationSchema decoder; + + @Mock private LookupRow lookupRow; + + @Mock private DynamicTableFactory.Context dynamicTableFactoryContext; + private HeaderPreprocessor headerPreprocessor; + + private HttpLookupConfig options; + + private static final String BASE_URL = "http://localhost.com"; + + @BeforeEach + public void setUp() { + this.headerPreprocessor = HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor(); + this.options = HttpLookupConfig.builder().url(BASE_URL).build(); + } + + @Test + public void shouldBuildClientWithoutHeaders() throws ConfigurationException { + + JavaNetHttpPollingClient client = + new JavaNetHttpPollingClient( + httpClient, + decoder, + options, + new GetRequestFactory( + new GenericGetQueryCreator(lookupRow), + headerPreprocessor, + options)); + + assertThat(((GetRequestFactory) client.getRequestFactory()).getHeadersAndValues()) + .isEmpty(); + } + + @Test + public void shouldBuildGetClientUri() throws ConfigurationException { + // GIVEN + JavaNetHttpPollingClient client = + new JavaNetHttpPollingClient( + httpClient, + decoder, + options, + new GetRequestFactory( + new GenericGetQueryCreator(lookupRow), + headerPreprocessor, + options)); + + DataType lookupPhysicalDataType = + row( + List.of( + DataTypes.FIELD("id", DataTypes.STRING()), + DataTypes.FIELD("uuid", DataTypes.STRING()))); + + RowData lookupRowData = + GenericRowData.of(StringData.fromString("1"), StringData.fromString("2")); + + LookupRow lookupRow = + new LookupRow() + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "id", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "uuid", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 1))); + lookupRow.setLookupPhysicalRowDataType(lookupPhysicalDataType); + 
+ GenericGetQueryCreator queryCreator = new GenericGetQueryCreator(lookupRow); + LookupQueryInfo lookupQueryInfo = queryCreator.createLookupQuery(lookupRowData); + + // WHEN + URI uri = ((GetRequestFactory) client.getRequestFactory()).constructGetUri(lookupQueryInfo); + + // THEN + assertThat(uri.toString()).isEqualTo(BASE_URL + "?id=1&uuid=2"); + } + + @Test + public void shouldBuildBodyBasedClientUri() { + // GIVEN + DataType lookupPhysicalDataType = + row( + List.of( + DataTypes.FIELD("id", DataTypes.STRING()), + DataTypes.FIELD("uuid", DataTypes.STRING()))); + + LookupRow lookupRow = + new LookupRow() + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "id", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "uuid", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 1))); + + lookupRow.setLookupPhysicalRowDataType(lookupPhysicalDataType); + + SerializationSchema jsonSerializer = + new JsonFormatFactory() + .createEncodingFormat(dynamicTableFactoryContext, new Configuration()) + .createRuntimeEncoder(null, lookupPhysicalDataType); + + BodyBasedRequestFactory requestFactory = + new BodyBasedRequestFactory( + "POST", + new GenericJsonQueryCreator(jsonSerializer), + HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor(), + HttpLookupConfig.builder().url(BASE_URL).build()); + + Map urlBodyBasedQueryParameters = new LinkedHashMap<>(); + urlBodyBasedQueryParameters.put("key1", "value1"); + urlBodyBasedQueryParameters.put("key2", "value2"); + + LookupQueryInfo lookupQueryInfo = + new LookupQueryInfo("{}", urlBodyBasedQueryParameters, null); + + // WHEN + HttpRequest httpRequest = requestFactory.setUpRequestMethod(lookupQueryInfo).build(); + + // THEN + assertThat(httpRequest.uri().toString()).isEqualTo(BASE_URL + "?key1=value1&key2=value2"); + } + + @Test + public void shouldBuildClientWithHeaders() throws ConfigurationException { + + // GIVEN + 
Properties properties = new Properties(); + properties.setProperty("property", "val1"); + properties.setProperty("my.property", "val2"); + properties.setProperty( + HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_PREFIX + "Origin", + "https://developer.mozilla.org"); + + properties.setProperty( + HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_PREFIX + "Cache-Control", + "no-cache, no-store, max-age=0, must-revalidate"); + properties.setProperty( + HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_PREFIX + + "Access-Control-Allow-Origin", + "*"); + + // WHEN + HttpLookupConfig lookupConfig = HttpLookupConfig.builder().properties(properties).build(); + + JavaNetHttpPollingClient client = + new JavaNetHttpPollingClient( + httpClient, + decoder, + lookupConfig, + new GetRequestFactory( + new GenericGetQueryCreator(lookupRow), + headerPreprocessor, + lookupConfig)); + + String[] headersAndValues = + ((GetRequestFactory) client.getRequestFactory()).getHeadersAndValues(); + assertThat(headersAndValues).hasSize(6); + + // THEN + // assert that we have property followed by its value. + assertPropertyArray(headersAndValues, "Origin", "https://developer.mozilla.org"); + assertPropertyArray( + headersAndValues, + "Cache-Control", + "no-cache, no-store, max-age=0, must-revalidate"); + assertPropertyArray(headersAndValues, "Access-Control-Allow-Origin", "*"); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientWithWireTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientWithWireTest.java new file mode 100644 index 00000000..cefb4b2b --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JavaNetHttpPollingClientWithWireTest.java @@ -0,0 +1,191 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.api.common.RuntimeExecutionMode; +import org.apache.flink.api.common.restartstrategy.RestartStrategies; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.configuration.ExecutionOptions; +import org.apache.flink.connector.http.WireMockServerPortAllocator; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; +import org.apache.flink.connector.http.utils.HttpHeaderUtils; +import org.apache.flink.streaming.api.CheckpointingMode; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.util.ConfigurationException; + +import com.github.tomakehurst.wiremock.WireMockServer; +import com.github.tomakehurst.wiremock.core.WireMockConfiguration; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.io.File; +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.time.Duration; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; +import static 
com.github.tomakehurst.wiremock.client.WireMock.post; +import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; +import static org.apache.flink.connector.http.TestHelper.readTestFile; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; + +/** Test for {@link JavaNetHttpPollingClient} with wire. */ +public class JavaNetHttpPollingClientWithWireTest { + private static final String BASE_URL = "http://localhost.com"; + + private static final String SAMPLES_FOLDER = "/auth/"; + private static final int SERVER_PORT = WireMockServerPortAllocator.getServerPort(); + + private static final int HTTPS_SERVER_PORT = WireMockServerPortAllocator.getSecureServerPort(); + + private static final String SERVER_KEYSTORE_PATH = + "src/test/resources/security/certs/serverKeyStore.jks"; + + private static final String SERVER_TRUSTSTORE_PATH = + "src/test/resources/security/certs/serverTrustStore.jks"; + + private static final String ENDPOINT = "/auth"; + private static final String BEARER_REQUEST = "Bearer Dummy"; + + private WireMockServer wireMockServer; + + @SuppressWarnings("unchecked") + @BeforeEach + public void setup() { + + File keyStoreFile = new File(SERVER_KEYSTORE_PATH); + File trustStoreFile = new File(SERVER_TRUSTSTORE_PATH); + + wireMockServer = + new WireMockServer( + WireMockConfiguration.wireMockConfig() + .port(SERVER_PORT) + .httpsPort(HTTPS_SERVER_PORT) + .keystorePath(keyStoreFile.getAbsolutePath()) + .keystorePassword("password") + .keyManagerPassword("password") + .needClientAuth(true) + 
.trustStorePath(trustStoreFile.getAbsolutePath()) + .trustStorePassword("password") + .extensions(JsonTransform.class)); + wireMockServer.start(); + + StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); + env.setRestartStrategy(RestartStrategies.noRestart()); + Configuration config = new Configuration(); + config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.STREAMING); + env.configure(config, getClass().getClassLoader()); + env.enableCheckpointing(1000, CheckpointingMode.EXACTLY_ONCE); + } + + @AfterEach + public void tearDown() { + wireMockServer.stop(); + } + + @Test + public void shouldUpdateHttpRequestIfRequiredGet() throws ConfigurationException { + HttpRequest httpRequest = + HttpRequest.newBuilder() + .GET() + .uri(URI.create(BASE_URL)) + .timeout(Duration.ofSeconds(1)) + .setHeader("Origin", "*") + .setHeader("X-Content-Type-Options", "nosniff") + .setHeader("Content-Type", "application/json") + .build(); + shouldUpdateHttpRequestIfRequired(httpRequest); + } + + @Test + public void shouldUpdateHttpRequestIfRequiredPut() throws ConfigurationException { + HttpRequest httpRequest = + HttpRequest.newBuilder() + .PUT(HttpRequest.BodyPublishers.ofString("foo")) + .uri(URI.create(BASE_URL)) + .timeout(Duration.ofSeconds(1)) + .setHeader("Origin", "*") + .setHeader("X-Content-Type-Options", "nosniff") + .setHeader("Content-Type", "application/json") + .build(); + shouldUpdateHttpRequestIfRequired(httpRequest); + } + + private void shouldUpdateHttpRequestIfRequired(HttpRequest httpRequest) + throws ConfigurationException { + setUpServerBodyStub(); + JavaNetHttpPollingClient client = + new JavaNetHttpPollingClient( + mock(HttpClient.class), + null, + HttpLookupConfig.builder().url(BASE_URL).build(), + null); + LookupQueryInfo lookupQueryInfo = null; + HttpLookupSourceRequestEntry request = + new HttpLookupSourceRequestEntry(httpRequest, lookupQueryInfo); + + Configuration configuration = new Configuration(); + 
HeaderPreprocessor oidcHeaderPreProcessor = + HttpHeaderUtils.createOIDCHeaderPreprocessor(configuration); + HttpRequest newHttpRequest = + client.updateHttpRequestIfRequired(request, oidcHeaderPreProcessor); + assertThat(httpRequest).isEqualTo(newHttpRequest); + configuration.setString( + SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL.key(), + "http://localhost:" + SERVER_PORT + "/auth"); + configuration.setString(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST, BEARER_REQUEST); + configuration.set(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION, Duration.ofSeconds(1L)); + client = + new JavaNetHttpPollingClient( + mock(HttpClient.class), + null, + HttpLookupConfig.builder() + .url(BASE_URL) + .readableConfig(configuration) + .build(), + null); + oidcHeaderPreProcessor = HttpHeaderUtils.createOIDCHeaderPreprocessor(configuration); + // change oidcHeaderPreProcessor to use the mock http client for the authentication flow + newHttpRequest = client.updateHttpRequestIfRequired(request, oidcHeaderPreProcessor); + assertThat(httpRequest).isNotEqualTo(newHttpRequest); + assertThat(httpRequest.headers().map().keySet().size()).isEqualTo(3); + assertThat(newHttpRequest.headers().map().keySet().size()).isEqualTo(4); + assertThat(httpRequest.headers().map().get("Content-Type")) + .isEqualTo(newHttpRequest.headers().map().get("Content-Type")); + } + + private void setUpServerBodyStub() { + wireMockServer.stubFor( + post(urlEqualTo(ENDPOINT)) + .withHeader("Content-Type", equalTo("application/x-www-form-urlencoded")) + .withRequestBody(equalTo(BEARER_REQUEST)) + .willReturn( + aResponse() + .withStatus(200) + .withBody( + readTestFile(SAMPLES_FOLDER + "AuthResult.json")))); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JsonTransform.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JsonTransform.java new file mode 100644 index 00000000..564349a2 --- /dev/null +++ 
b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/JsonTransform.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import com.github.tomakehurst.wiremock.common.FileSource; +import com.github.tomakehurst.wiremock.extension.Parameters; +import com.github.tomakehurst.wiremock.extension.ResponseTransformer; +import com.github.tomakehurst.wiremock.http.Request; +import com.github.tomakehurst.wiremock.http.Response; + +import java.util.concurrent.atomic.AtomicInteger; + +/** + * Wiremock Extension that prepares HTTP REST endpoint response body. This extension is stateful, + * every next response will have id == counter and id2 == counter + 1 value in its response, where + * counter is incremented for every subsequent request. 
+ */ +public class JsonTransform extends ResponseTransformer { + + public static final String NAME = "JsonTransform"; + + private static final String RESULT_JSON = + "{\n" + + "\t\"id\": \"&COUNTER&\",\n" + + "\t\"id2\": \"&COUNTER_2&\",\n" + + "\t\"uuid\": \"fbb68a46-80a9-46da-9d40-314b5287079c\",\n" + + "\t\"picture\": \"http://placehold.it/32x32\",\n" + + "\t\"msg\": \"&PARAM&, cnt: &COUNTER&\",\n" + + "\t\"age\": 30,\n" + + "\t\"eyeColor\": \"green\",\n" + + "\t\"name\": \"Marva Fischer\",\n" + + "\t\"gender\": \"female\",\n" + + "\t\"company\": \"SILODYNE\",\n" + + "\t\"email\": \"marvafischer@silodyne.com\",\n" + + "\t\"phone\": \"+1 (990) 562-2120\",\n" + + "\t\"address\": \"601 Auburn Place, Bynum, New York, 7057\",\n" + + "\t\"about\": \"Proident Lorem et duis nisi tempor elit occaecat laboris" + + " dolore magna Lorem consequat. Deserunt velit minim nisi consectetur duis " + + "amet labore cupidatat. Pariatur sunt occaecat qui reprehenderit ipsum ex culpa " + + "ullamco ex duis adipisicing commodo sunt. Ad cupidatat magna ad in officia " + + "irure aute duis culpa et. Magna esse adipisicing consequat occaecat. 
Excepteur amet " + + "dolore occaecat sit officia dolore elit in cupidatat non anim.\\r\\n\",\n" + + "\t\"registered\": \"2020-07-11T11:13:32 -02:00\",\n" + + "\t\"latitude\": -35.237843,\n" + + "\t\"longitude\": 60.386104,\n" + + "\t\"tags\": [\n" + + "\t\t\"officia\",\n" + + "\t\t\"eiusmod\",\n" + + "\t\t\"labore\",\n" + + "\t\t\"ex\",\n" + + "\t\t\"aliqua\",\n" + + "\t\t\"consectetur\",\n" + + "\t\t\"excepteur\"\n" + + "\t],\n" + + "\t\"friends\": [\n" + + "\t\t{\n" + + "\t\t\t\"id\": 0,\n" + + "\t\t\t\"name\": \"Kemp Newman\"\n" + + "\t\t},\n" + + "\t\t{\n" + + "\t\t\t\"id\": 1,\n" + + "\t\t\t\"name\": \"Sears Blackburn\"\n" + + "\t\t},\n" + + "\t\t{\n" + + "\t\t\t\"id\": 2,\n" + + "\t\t\t\"name\": \"Lula Rogers\"\n" + + "\t\t}\n" + + "\t],\n" + + "\t\"details\": {\n" + + "\t\t\"isActive\": true,\n" + + "\t\t\"nestedDetails\": {\n" + + "\t\t\t\"index\": 0,\n" + + "\t\t\t\"guid\": \"d81fc542-6b49-4d59-8fb9-d57430d4871d\",\n" + + "\t\t\t\"balance\": \"$1,729.34\"\n" + + "\t\t}\n" + + "\t},\n" + + "\t\"greeting\": \"Hello, Marva Fischer! 
You have 7 unread messages.\",\n" + + "\t\"favoriteFruit\": \"banana\"\n" + + "}"; + private final AtomicInteger counter = new AtomicInteger(0); + + @Override + public Response transform( + Request request, Response response, FileSource files, Parameters parameters) { + int cnt = counter.getAndIncrement(); + + return Response.response() + .body(generateResponse(request.getUrl(), cnt)) + .status(response.getStatus()) + .statusMessage(response.getStatusMessage()) + .build(); + } + + @Override + public String getName() { + return NAME; + } + + private String generateResponse(String param, int counter) { + return RESULT_JSON + .replaceAll("&PARAM&", param) + .replaceAll("&COUNTER&", String.valueOf(counter)) + .replaceAll("&COUNTER_2&", String.valueOf(counter + 1)); + } + + @Override + public boolean applyGlobally() { + return false; + } +} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/LookupQueryInfoTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/LookupQueryInfoTest.java similarity index 69% rename from src/test/java/com/getindata/connectors/http/internal/table/lookup/LookupQueryInfoTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/LookupQueryInfoTest.java index b5fa2d49..c6277468 100644 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/LookupQueryInfoTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/LookupQueryInfoTest.java @@ -1,10 +1,30 @@ -package com.getindata.connectors.http.internal.table.lookup; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.junit.jupiter.api.Test; import java.util.Map; -import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; +/** Test for {@link LookupQueryInfo} . */ class LookupQueryInfoTest { private LookupQueryInfo lookupQueryInfo; @@ -21,6 +41,7 @@ public void testConfiguredLookupQuery() { assertThat(lookupQueryInfo.hasBodyBasedUrlQueryParameters()).isTrue(); assertThat(lookupQueryInfo.getBodyBasedUrlQueryParameters()).isEqualTo("key1=value1"); } + @Test public void testEmptyLookupQueryInfo() { lookupQueryInfo = new LookupQueryInfo(null, null, null); @@ -46,30 +67,28 @@ public void test1BodyParam() { public void test1PathParam() { Map pathBasedUrlPathParameters = Map.of("key1", "value1"); - lookupQueryInfo = new LookupQueryInfo("http://service/{key1}", - null, pathBasedUrlPathParameters); + lookupQueryInfo = + new LookupQueryInfo("http://service/{key1}", null, pathBasedUrlPathParameters); assertThat(lookupQueryInfo.hasLookupQuery()).isTrue(); assertThat(lookupQueryInfo.hasPathBasedUrlParameters()).isTrue(); assertThat(lookupQueryInfo.getPathBasedUrlParameters()) .isEqualTo(pathBasedUrlPathParameters); } + @Test public void test2Path2BodyParams() { - Map pathBasedUrlPathParameters = - Map.of("key1", "value1", "key2", "value2"); - Map bodyBasedQueryParameters = - Map.of("key3", "value3", "key4", 
"value4"); + Map pathBasedUrlPathParameters = Map.of("key1", "value1", "key2", "value2"); + Map bodyBasedQueryParameters = Map.of("key3", "value3", "key4", "value4"); - lookupQueryInfo = new LookupQueryInfo(null, - bodyBasedQueryParameters, pathBasedUrlPathParameters); + lookupQueryInfo = + new LookupQueryInfo(null, bodyBasedQueryParameters, pathBasedUrlPathParameters); assertThat(lookupQueryInfo.hasLookupQuery()).isFalse(); assertThat(lookupQueryInfo.hasPathBasedUrlParameters()).isTrue(); assertThat(lookupQueryInfo.getPathBasedUrlParameters()) .isEqualTo(pathBasedUrlPathParameters); - assertThat(lookupQueryInfo.hasBodyBasedUrlQueryParameters()) - .isTrue(); + assertThat(lookupQueryInfo.hasBodyBasedUrlQueryParameters()).isTrue(); assertThat(lookupQueryInfo.getBodyBasedUrlQueryParameters()) .isEqualTo("key3=value3&key4=value4"); } diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/RowDataSingleValueLookupSchemaEntryTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/RowDataSingleValueLookupSchemaEntryTest.java new file mode 100644 index 00000000..e8e1aa1d --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/RowDataSingleValueLookupSchemaEntryTest.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.connector.http.LookupArg; +import org.apache.flink.table.api.DataTypes; +import org.apache.flink.table.data.GenericRowData; +import org.apache.flink.table.data.RowData; + +import org.junit.jupiter.api.Test; + +import java.util.List; + +import static org.assertj.core.api.Assertions.assertThat; + +/** Test for {@link RowDataSingleValueLookupSchemaEntry}. */ +class RowDataSingleValueLookupSchemaEntryTest { + + // TODO Convert this to parametrized test and check all Flink types (Int, String etc). + @Test + public void shouldConvertFromSingleValue() { + + RowDataSingleValueLookupSchemaEntry entry = + new RowDataSingleValueLookupSchemaEntry( + "col1", RowData.createFieldGetter(DataTypes.BOOLEAN().getLogicalType(), 0)); + + List lookupArgs = entry.convertToLookupArg(GenericRowData.of(true)); + + assertThat(lookupArgs).containsExactly(new LookupArg("col1", "true")); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/RowTypeLookupSchemaEntryTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/RowTypeLookupSchemaEntryTest.java new file mode 100644 index 00000000..6224a955 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/RowTypeLookupSchemaEntryTest.java @@ -0,0 +1,177 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.connector.http.LookupArg; +import org.apache.flink.table.api.DataTypes; +import org.apache.flink.table.data.GenericRowData; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.data.StringData; + +import org.junit.jupiter.api.Test; + +import java.util.List; + +import static org.assertj.core.api.Assertions.assertThat; + +/** Test for {@link RowTypeLookupSchemaEntry}. 
*/ +class RowTypeLookupSchemaEntryTest { + + @Test + public void testEmptyRow() { + // GIVEN + RowTypeLookupSchemaEntry lookupSchemaEntry = + new RowTypeLookupSchemaEntry( + "aRow", + RowData.createFieldGetter( + DataTypes.ROW(DataTypes.FIELD("col1", DataTypes.STRING())) + .getLogicalType(), + 0)) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "col1", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))); + + GenericRowData rowData = new GenericRowData(1); + rowData.setField(0, null); + + // WHEN + List lookupArgs = lookupSchemaEntry.convertToLookupArg(rowData); + + // THEN + assertThat(lookupArgs).isEmpty(); + } + + @Test + public void testRowWithMultipleSingleValues() { + + // GIVEN + RowTypeLookupSchemaEntry lookupSchemaEntry = + new RowTypeLookupSchemaEntry( + "aRow", + RowData.createFieldGetter( + DataTypes.ROW( + DataTypes.FIELD("col1", DataTypes.STRING()), + DataTypes.FIELD("col2", DataTypes.STRING()), + DataTypes.FIELD("col3", DataTypes.STRING())) + .getLogicalType(), + 0)) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "col1", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "col2", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 1))) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "col3", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 2))); + + GenericRowData rowData = + GenericRowData.of( + GenericRowData.of( + StringData.fromString("val1"), + StringData.fromString("val2"), + StringData.fromString("val3"))); + + // WHEN + List lookupArgs = lookupSchemaEntry.convertToLookupArg(rowData); + + // THEN + assertThat(lookupArgs) + .containsExactly( + new LookupArg("col1", "val1"), + new LookupArg("col2", "val2"), + new LookupArg("col3", "val3")); + } + + @Test + public void testRowWithNestedRowValues() { + + // GIVEN + RowTypeLookupSchemaEntry 
nestedRowLookupSchemaEntry = + new RowTypeLookupSchemaEntry( + "aRow", + RowData.createFieldGetter( + DataTypes.FIELD( + "nestedRow", + DataTypes.ROW( + DataTypes.FIELD( + "col1", DataTypes.STRING()), + DataTypes.FIELD( + "col2", + DataTypes.STRING()))) + .getDataType() + .getLogicalType(), + 0)) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "col1", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "col2", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 1))); + + RowTypeLookupSchemaEntry rootSchemaEntry = + new RowTypeLookupSchemaEntry( + "aRow", + RowData.createFieldGetter( + DataTypes.ROW( + DataTypes.ROW( + DataTypes.FIELD( + "nestedRow", + DataTypes.ROW( + DataTypes.FIELD( + "col1", + DataTypes + .STRING()), + DataTypes.FIELD( + "col2", + DataTypes + .STRING())))), + DataTypes.FIELD("col3", DataTypes.STRING()) + .getDataType()) + .getLogicalType(), + 0)) + .addLookupEntry(nestedRowLookupSchemaEntry); + + GenericRowData rowData = + GenericRowData.of( + GenericRowData.of( + GenericRowData.of( + StringData.fromString("val1"), + StringData.fromString("val2")))); + + // WHEN + List lookupArgs = rootSchemaEntry.convertToLookupArg(rowData); + + // THEN + assertThat(lookupArgs) + .containsExactly(new LookupArg("col1", "val1"), new LookupArg("col2", "val2")); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/TableSourceHelperTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/TableSourceHelperTest.java new file mode 100644 index 00000000..11940a6d --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/TableSourceHelperTest.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup; + +import org.apache.flink.table.types.DataType; +import org.apache.flink.table.types.logical.LogicalType; +import org.apache.flink.table.types.logical.LogicalTypeRoot; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.when; + +/** Test for {@link TableSourceHelper}. 
*/ +@ExtendWith(MockitoExtension.class) +class TableSourceHelperTest { + + @Mock private DataType dataType; + + @Mock private LogicalType logicalType; + + @BeforeEach + public void setUp() { + when(dataType.getLogicalType()).thenReturn(logicalType); + } + + @Test + void testNotComposite() { + when(logicalType.getTypeRoot()).thenReturn(LogicalTypeRoot.BIGINT); + + assertThat(TableSourceHelper.getFieldNames(dataType.getLogicalType())).isEmpty(); + } +} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/CustomFormatFactory.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/CustomFormatFactory.java similarity index 60% rename from src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/CustomFormatFactory.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/CustomFormatFactory.java index fb4b8443..4c4a4c10 100644 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/CustomFormatFactory.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/CustomFormatFactory.java @@ -1,7 +1,22 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ -import java.util.Collections; -import java.util.Set; +package org.apache.flink.connector.http.table.lookup.querycreators; import org.apache.flink.api.common.serialization.SerializationSchema; import org.apache.flink.configuration.ConfigOption; @@ -14,20 +29,21 @@ import org.apache.flink.table.factories.SerializationFormatFactory; import org.apache.flink.table.factories.TestFormatFactory.EncodingFormatMock; +import java.util.Collections; +import java.util.Set; + +/** Custom format factory. */ public class CustomFormatFactory implements SerializationFormatFactory { public static final String IDENTIFIER = "query-creator-test-format"; public static final String REQUIRED_OPTION = "required-option-one"; - /** - * TODO remove static - used for testing only - */ + /** TODO remove static - used for testing only. 
*/ static boolean requiredOptionsWereUsed = false; @Override public EncodingFormat> createEncodingFormat( - Context context, - ReadableConfig readableConfig) { + Context context, ReadableConfig readableConfig) { FactoryUtil.validateFactoryOptions(this, readableConfig); return new EncodingFormatMock(","); } diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/CustomJsonFormatFactory.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/CustomJsonFormatFactory.java similarity index 60% rename from src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/CustomJsonFormatFactory.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/CustomJsonFormatFactory.java index 9fdde425..48a61d9f 100644 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/CustomJsonFormatFactory.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/CustomJsonFormatFactory.java @@ -1,7 +1,22 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ -import java.util.Collections; -import java.util.Set; +package org.apache.flink.connector.http.table.lookup.querycreators; import org.apache.flink.api.common.serialization.SerializationSchema; import org.apache.flink.configuration.ConfigOption; @@ -14,21 +29,22 @@ import org.apache.flink.table.factories.FactoryUtil; import org.apache.flink.table.factories.SerializationFormatFactory; +import java.util.Collections; +import java.util.Set; + +/** Custom json format factory. */ public class CustomJsonFormatFactory extends JsonFormatFactory implements SerializationFormatFactory { public static final String IDENTIFIER = "json-query-creator-test-format"; public static final String REQUIRED_OPTION = "required-option-one"; - /** - * Consider removing this static only used for testing only - */ + /** Consider removing this static only used for testing only. */ static boolean requiredOptionsWereUsed = false; @Override public EncodingFormat> createEncodingFormat( - Context context, - ReadableConfig readableConfig) { + Context context, ReadableConfig readableConfig) { FactoryUtil.validateFactoryOptions(this, readableConfig); return super.createEncodingFormat(context, readableConfig); } diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/ElasticSearchLiteQueryCreatorTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/ElasticSearchLiteQueryCreatorTest.java new file mode 100644 index 00000000..8b3d7a3b --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/ElasticSearchLiteQueryCreatorTest.java @@ -0,0 +1,158 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.connector.http.table.lookup.LookupRow; +import org.apache.flink.connector.http.table.lookup.RowDataSingleValueLookupSchemaEntry; +import org.apache.flink.table.api.DataTypes; +import org.apache.flink.table.data.DecimalData; +import org.apache.flink.table.data.GenericRowData; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.data.StringData; +import org.apache.flink.table.types.DataType; + +import org.junit.jupiter.api.Test; + +import java.math.BigDecimal; +import java.util.List; + +import static org.apache.flink.connector.http.table.lookup.HttpLookupTableSourceFactory.row; +import static org.assertj.core.api.Assertions.assertThat; + +/** ElasticSearchLiteQueryCreatorTest. 
*/ +public class ElasticSearchLiteQueryCreatorTest { + + @Test + public void testWithEmptyLookupResult() { + + // GIVEN + LookupRow lookupRow = new LookupRow(); + lookupRow.setLookupPhysicalRowDataType(DataTypes.STRING()); + + GenericRowData lookupDataRow = GenericRowData.of(StringData.fromString("val1")); + + // WHEN + var queryCreator = new ElasticSearchLiteQueryCreator(lookupRow); + var createdQuery = queryCreator.createLookupQuery(lookupDataRow); + + // THEN + assertThat(createdQuery.getLookupQuery()).isEqualTo(""); + assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); + } + + @Test + public void testQueryCreationForSingleQueryStringParam() { + + // GIVEN + LookupRow lookupRow = + new LookupRow() + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "key1", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))); + lookupRow.setLookupPhysicalRowDataType(DataTypes.STRING()); + + GenericRowData lookupDataRow = GenericRowData.of(StringData.fromString("val1")); + + // WHEN + var queryCreator = new ElasticSearchLiteQueryCreator(lookupRow); + var createdQuery = queryCreator.createLookupQuery(lookupDataRow); + + // THEN + assertThat(createdQuery.getLookupQuery()).isEqualTo("q=key1:%22val1%22"); + assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); + } + + @Test + public void testQueryCreationForSingleQueryIntParam() { + + // GIVEN + BigDecimal decimalValue = BigDecimal.valueOf(10); + DataType decimalValueType = + DataTypes.DECIMAL(decimalValue.precision(), decimalValue.scale()); + + LookupRow lookupRow = + new LookupRow() + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "key1", + RowData.createFieldGetter( + decimalValueType.getLogicalType(), 0))); + lookupRow.setLookupPhysicalRowDataType(decimalValueType); + + GenericRowData lookupDataRow = + GenericRowData.of( + DecimalData.fromBigDecimal( + decimalValue, decimalValue.precision(), decimalValue.scale())); + + // WHEN + var queryCreator = new 
ElasticSearchLiteQueryCreator(lookupRow); + var createdQuery = queryCreator.createLookupQuery(lookupDataRow); + + // THEN + assertThat(createdQuery.getLookupQuery()).isEqualTo("q=key1:%2210%22"); + assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); + } + + @Test + public void testGenericGetQueryCreationForMultipleQueryParam() { + + // GIVEN + LookupRow lookupRow = + new LookupRow() + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "key1", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "key2", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 1))) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "key3", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 2))); + + lookupRow.setLookupPhysicalRowDataType( + row( + List.of( + DataTypes.FIELD("key1", DataTypes.STRING()), + DataTypes.FIELD("key2", DataTypes.STRING()), + DataTypes.FIELD("key3", DataTypes.STRING())))); + + GenericRowData lookupDataRow = + GenericRowData.of( + StringData.fromString("val1"), + StringData.fromString("val2"), + StringData.fromString("3")); + + // WHEN + var queryCreator = new ElasticSearchLiteQueryCreator(lookupRow); + var createdQuery = queryCreator.createLookupQuery(lookupDataRow); + + // THEN + assertThat(createdQuery.getLookupQuery()) + .isEqualTo("q=key1:%22val1%22%20AND%20key2:%22val2%22%20AND%20key3:%223%22"); + assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericGetQueryCreatorTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericGetQueryCreatorTest.java new file mode 100644 index 00000000..2fe5d3a5 --- /dev/null +++ 
b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericGetQueryCreatorTest.java @@ -0,0 +1,208 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.connector.http.table.lookup.LookupRow; +import org.apache.flink.connector.http.table.lookup.RowDataSingleValueLookupSchemaEntry; +import org.apache.flink.connector.http.table.lookup.RowTypeLookupSchemaEntry; +import org.apache.flink.table.api.DataTypes; +import org.apache.flink.table.data.DecimalData; +import org.apache.flink.table.data.GenericRowData; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.data.StringData; +import org.apache.flink.table.types.DataType; + +import org.junit.jupiter.api.Test; + +import java.math.BigDecimal; +import java.util.List; + +import static org.apache.flink.connector.http.table.lookup.HttpLookupTableSourceFactory.row; +import static org.assertj.core.api.Assertions.assertThat; + +/** Test for {@link GenericGetQueryCreator}. 
*/ +public class GenericGetQueryCreatorTest { + + @Test + public void testQueryCreationForSingleQueryStringParam() { + + // GIVEN + LookupRow lookupRow = + new LookupRow() + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "key1", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))); + lookupRow.setLookupPhysicalRowDataType(DataTypes.STRING()); + + GenericRowData lookupDataRow = GenericRowData.of(StringData.fromString("val1")); + + // WHEN + var queryCreator = new GenericGetQueryCreator(lookupRow); + var createdQuery = queryCreator.createLookupQuery(lookupDataRow); + + // THEN + assertThat(createdQuery.getLookupQuery()).isEqualTo("key1=val1"); + assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); + } + + @Test + public void testQueryCreationForSingleQueryIntParam() { + + // GIVEN + BigDecimal decimalValue = BigDecimal.valueOf(10); + DataType decimalValueType = + DataTypes.DECIMAL(decimalValue.precision(), decimalValue.scale()); + + LookupRow lookupRow = + new LookupRow() + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "key1", + RowData.createFieldGetter( + decimalValueType.getLogicalType(), 0))); + lookupRow.setLookupPhysicalRowDataType(decimalValueType); + + GenericRowData lookupDataRow = + GenericRowData.of( + DecimalData.fromBigDecimal( + decimalValue, decimalValue.precision(), decimalValue.scale())); + + // WHEN + var queryCreator = new GenericGetQueryCreator(lookupRow); + var createdQuery = queryCreator.createLookupQuery(lookupDataRow); + + // THEN + assertThat(createdQuery.getLookupQuery()).isEqualTo("key1=10"); + assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); + } + + @Test + public void testQueryCreationForMultipleQueryParam() { + + // GIVEN + LookupRow lookupRow = + new LookupRow() + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "key1", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))) + .addLookupEntry( + new 
RowDataSingleValueLookupSchemaEntry( + "key2", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 1))) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "key3", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 2))); + + lookupRow.setLookupPhysicalRowDataType( + row( + List.of( + DataTypes.FIELD("key1", DataTypes.STRING()), + DataTypes.FIELD("key2", DataTypes.STRING()), + DataTypes.FIELD("key3", DataTypes.STRING())))); + + GenericRowData lookupDataRow = + GenericRowData.of( + StringData.fromString("val1"), + StringData.fromString("val2"), + StringData.fromString("3")); + + // WHEN + var queryCreator = new GenericGetQueryCreator(lookupRow); + var createdQuery = queryCreator.createLookupQuery(lookupDataRow); + + // THEN + assertThat(createdQuery.getLookupQuery()).isEqualTo("key1=val1&key2=val2&key3=3"); + assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); + } + + @Test + public void testQueryCreationForRowType() { + + // GIVEN + LookupRow lookupRow = + new LookupRow() + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "col1", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))) + .addLookupEntry( + new RowTypeLookupSchemaEntry( + "aRow", + RowData.createFieldGetter( + DataTypes.FIELD( + "aRow", + DataTypes.ROW( + DataTypes.FIELD( + "col2", + DataTypes + .STRING()), + DataTypes.FIELD( + "col3", + DataTypes + .STRING()))) + .getDataType() + .getLogicalType(), + 1)) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "col2", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), + 0))) + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "col3", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), + 1)))); + + // ROW> + lookupRow.setLookupPhysicalRowDataType( + row( + List.of( + DataTypes.FIELD("col1", DataTypes.STRING()), + DataTypes.FIELD( + "aRow", + DataTypes.ROW( + DataTypes.FIELD("col2", DataTypes.STRING()), + 
DataTypes.FIELD("col3", DataTypes.STRING())))))); + + GenericRowData lookupDataRow = + GenericRowData.of( + StringData.fromString("val1"), + GenericRowData.of( + StringData.fromString("val2"), StringData.fromString("val3"))); + + // WHEN + var queryCreator = new GenericGetQueryCreator(lookupRow); + var createdQuery = queryCreator.createLookupQuery(lookupDataRow); + + // THEN + assertThat(createdQuery.getLookupQuery()).isEqualTo("col1=val1&col2=val2&col3=val3"); + assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactoryTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactoryTest.java new file mode 100644 index 00000000..1f78257e --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactoryTest.java @@ -0,0 +1,138 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.configuration.Configuration; +import org.apache.flink.connector.http.LookupQueryCreator; +import org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions; +import org.apache.flink.connector.http.table.lookup.LookupQueryInfo; +import org.apache.flink.connector.http.table.lookup.LookupRow; +import org.apache.flink.connector.http.table.lookup.RowDataSingleValueLookupSchemaEntry; +import org.apache.flink.table.api.DataTypes; +import org.apache.flink.table.catalog.Column; +import org.apache.flink.table.catalog.ResolvedSchema; +import org.apache.flink.table.data.GenericRowData; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.data.StringData; +import org.apache.flink.table.factories.DynamicTableFactory; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.util.List; + +import static org.apache.flink.connector.http.table.lookup.HttpLookupTableSourceFactory.row; +import static org.apache.flink.connector.http.table.lookup.querycreators.GenericJsonAndUrlQueryCreatorFactory.REQUEST_BODY_FIELDS; +import static org.apache.flink.connector.http.table.lookup.querycreators.GenericJsonAndUrlQueryCreatorFactory.REQUEST_QUERY_PARAM_FIELDS; +import static org.apache.flink.connector.http.table.lookup.querycreators.GenericJsonAndUrlQueryCreatorFactory.REQUEST_URL_MAP; +import static org.apache.flink.connector.http.table.lookup.querycreators.QueryCreatorUtils.getTableContext; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** Test for {@link GenericJsonAndUrlQueryCreatorFactory}. 
*/ +class GenericJsonAndUrlQueryCreatorFactoryTest { + private Configuration config = new Configuration(); + + private DynamicTableFactory.Context tableContext; + + @BeforeEach + public void setUp() { + CustomJsonFormatFactory.requiredOptionsWereUsed = false; + this.tableContext = + getTableContext( + this.config, + ResolvedSchema.of(Column.physical("key1", DataTypes.STRING()))); + } + + @Test + public void lookupQueryInfoTestStr() { + assertThat(CustomJsonFormatFactory.requiredOptionsWereUsed) + .withFailMessage( + "CustomJsonFormat was not cleared, " + + "make sure `CustomJsonFormatFactory.requiredOptionsWereUsed" + + "= false` " + + "was called before this test execution.") + .isFalse(); + + this.config.setString("lookup-request.format", CustomJsonFormatFactory.IDENTIFIER); + this.config.setString( + String.format( + "lookup-request.format.%s.%s", + CustomJsonFormatFactory.IDENTIFIER, + CustomJsonFormatFactory.REQUIRED_OPTION), + "optionValue"); + this.config.set(REQUEST_QUERY_PARAM_FIELDS, List.of("key1")); + // with sync + createUsingFactory(false); + // with async + createUsingFactory(true); + } + + @Test + public void lookupQueryInfoTestRequiredConfig() { + GenericJsonAndUrlQueryCreatorFactory genericJsonAndUrlQueryCreatorFactory = + new GenericJsonAndUrlQueryCreatorFactory(); + assertThrows( + RuntimeException.class, + () -> { + genericJsonAndUrlQueryCreatorFactory.createLookupQueryCreator( + config, null, null); + }); + // do not specify REQUEST_ARG_PATHS_CONFIG + assertThrows( + RuntimeException.class, + () -> { + genericJsonAndUrlQueryCreatorFactory.createLookupQueryCreator( + config, null, null); + }); + } + + private void createUsingFactory(boolean async) { + this.config.setBoolean(HttpLookupConnectorOptions.ASYNC_POLLING, async); + LookupRow lookupRow = + new LookupRow() + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "key1", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))); + + 
lookupRow.setLookupPhysicalRowDataType( + row(List.of(DataTypes.FIELD("key1", DataTypes.STRING())))); + LookupQueryCreator lookupQueryCreator = + new GenericJsonAndUrlQueryCreatorFactory() + .createLookupQueryCreator(config, lookupRow, tableContext); + GenericRowData lookupRowData = GenericRowData.of(StringData.fromString("val1")); + + LookupQueryInfo lookupQueryInfo = lookupQueryCreator.createLookupQuery(lookupRowData); + assertThat(CustomJsonFormatFactory.requiredOptionsWereUsed).isTrue(); + assertThat(lookupQueryInfo.hasLookupQuery()).isTrue(); + assertThat(lookupQueryInfo.hasBodyBasedUrlQueryParameters()).isFalse(); + assertThat(lookupQueryInfo.hasPathBasedUrlParameters()).isFalse(); + } + + @Test + void optionsTests() { + GenericJsonAndUrlQueryCreatorFactory factory = new GenericJsonAndUrlQueryCreatorFactory(); + assertThat(factory.requiredOptions()).isEmpty(); + assertThat(factory.optionalOptions()).contains(REQUEST_QUERY_PARAM_FIELDS); + assertThat(factory.optionalOptions()).contains(REQUEST_BODY_FIELDS); + assertThat(factory.optionalOptions()).contains(REQUEST_URL_MAP); + } +} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorTest.java similarity index 52% rename from src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorTest.java index ae3b85ee..570846b3 100644 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorTest.java @@ -1,16 +1,28 @@ /* - * © Copyright 
IBM Corp. 2025 + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ -package com.getindata.connectors.http.internal.table.lookup.querycreators; -import java.util.List; -import java.util.Map; +package org.apache.flink.connector.http.table.lookup.querycreators; import org.apache.flink.api.common.serialization.SerializationSchema; import org.apache.flink.configuration.Configuration; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode; +import org.apache.flink.connector.http.table.lookup.LookupQueryInfo; +import org.apache.flink.connector.http.table.lookup.LookupRow; +import org.apache.flink.connector.http.table.lookup.RowDataSingleValueLookupSchemaEntry; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.catalog.Column; import org.apache.flink.table.catalog.ResolvedSchema; @@ -21,20 +33,25 @@ import org.apache.flink.table.types.FieldsDataType; import org.apache.flink.types.Row; import org.apache.flink.util.Preconditions; -import org.jetbrains.annotations.NotNull; + +import 
org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode; + import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; + +import java.util.List; +import java.util.Map; + +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.LOOKUP_METHOD; +import static org.apache.flink.connector.http.table.lookup.HttpLookupTableSourceFactory.row; +import static org.apache.flink.connector.http.table.lookup.querycreators.QueryCreatorUtils.getTableContext; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertThrows; -import com.getindata.connectors.http.internal.table.lookup.LookupQueryInfo; -import com.getindata.connectors.http.internal.table.lookup.LookupRow; -import com.getindata.connectors.http.internal.table.lookup.RowDataSingleValueLookupSchemaEntry; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.LOOKUP_METHOD; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSourceFactory.row; -import static com.getindata.connectors.http.internal.table.lookup.querycreators.QueryCreatorUtils.getTableContext; - +/** Test for {@link GenericJsonQueryCreator}. */ class GenericJsonAndUrlQueryCreatorTest { private static final String KEY_1 = "key1"; private static final String KEY_2 = "key2"; @@ -44,32 +61,31 @@ class GenericJsonAndUrlQueryCreatorTest { private static final List QUERY_PARAMS = List.of(KEY_1); // Path param ArgPath required a stringified json object. As we have PersonBean // we can use that. 
- private static final Map URL_PARAMS = Map.of(KEY_1, KEY_1); - private static final DataType DATATYPE_1 = row(List.of( - DataTypes.FIELD(KEY_1, DataTypes.STRING()) - )); - private static final DataType DATATYPE_1_2 = row(List.of( - DataTypes.FIELD(KEY_1, DataTypes.STRING()), - DataTypes.FIELD(KEY_2, DataTypes.STRING()) - )); - private static final ResolvedSchema RESOLVED_SCHEMA = ResolvedSchema.of(Column.physical(KEY_1, - DataTypes.STRING())); + private static final Map urlParams = Map.of(KEY_1, KEY_1); + private static final DataType DATATYPE_1 = + row(List.of(DataTypes.FIELD(KEY_1, DataTypes.STRING()))); + private static final DataType DATATYPE_1_2 = + row( + List.of( + DataTypes.FIELD(KEY_1, DataTypes.STRING()), + DataTypes.FIELD(KEY_2, DataTypes.STRING()))); + private static final ResolvedSchema RESOLVED_SCHEMA = + ResolvedSchema.of(Column.physical(KEY_1, DataTypes.STRING())); private static final RowData ROWDATA = getRowData(1, VALUE); @ParameterizedTest - @ValueSource(strings = {"GET", "PUT", "POST" }) + @ValueSource(strings = {"GET", "PUT", "POST"}) public void createLookupQueryTestStrAllOps(String operation) { - //GIVEN + // GIVEN LookupRow lookupRow = getLookupRow(KEY_1); Configuration config = getConfiguration(operation); GenericJsonAndUrlQueryCreator universalJsonQueryCreator = - (GenericJsonAndUrlQueryCreator) new GenericJsonAndUrlQueryCreatorFactory() - .createLookupQueryCreator( - config, - lookupRow, - getTableContext(config, - RESOLVED_SCHEMA) - ); + (GenericJsonAndUrlQueryCreator) + new GenericJsonAndUrlQueryCreatorFactory() + .createLookupQueryCreator( + config, + lookupRow, + getTableContext(config, RESOLVED_SCHEMA)); // WHEN var createdQuery = universalJsonQueryCreator.createLookupQuery(ROWDATA); // THEN @@ -86,71 +102,71 @@ public void createLookupQueryTestStrAllOps(String operation) { @Test public void createLookupQueryTest() { // GIVEN - List query_params = List.of(KEY_1, KEY_2); - final String URL_INSERT = "AAA"; - Map url_params = 
Map.of(KEY_1, URL_INSERT); + List queryParams = List.of(KEY_1, KEY_2); + final String urlInsert = "AAA"; + Map urlParams = Map.of(KEY_1, urlInsert); LookupRow lookupRow = getLookupRow(KEY_1, KEY_2); - ResolvedSchema resolvedSchema = ResolvedSchema.of( - Column.physical(KEY_1, DataTypes.STRING()), - Column.physical(KEY_2, DataTypes.STRING())); + ResolvedSchema resolvedSchema = + ResolvedSchema.of( + Column.physical(KEY_1, DataTypes.STRING()), + Column.physical(KEY_2, DataTypes.STRING())); Configuration config = getConfiguration("GET"); - config.set(GenericJsonAndUrlQueryCreatorFactory.REQUEST_QUERY_PARAM_FIELDS, query_params); - config.set(GenericJsonAndUrlQueryCreatorFactory.REQUEST_URL_MAP, url_params); + config.set(GenericJsonAndUrlQueryCreatorFactory.REQUEST_QUERY_PARAM_FIELDS, queryParams); + config.set(GenericJsonAndUrlQueryCreatorFactory.REQUEST_URL_MAP, urlParams); lookupRow.setLookupPhysicalRowDataType(DATATYPE_1_2); GenericJsonAndUrlQueryCreator genericJsonAndUrlQueryCreator = - (GenericJsonAndUrlQueryCreator) new GenericJsonAndUrlQueryCreatorFactory() - .createLookupQueryCreator( - config, - lookupRow, - getTableContext(config, - resolvedSchema) - ); + (GenericJsonAndUrlQueryCreator) + new GenericJsonAndUrlQueryCreatorFactory() + .createLookupQueryCreator( + config, lookupRow, getTableContext(config, resolvedSchema)); var row = getRowData(2, VALUE); row.setField(1, StringData.fromString(VALUE)); // WHEN var createdQuery = genericJsonAndUrlQueryCreator.createLookupQuery(row); // THEN - assertThat(createdQuery.getPathBasedUrlParameters().get(URL_INSERT)).isEqualTo(VALUE); + assertThat(createdQuery.getPathBasedUrlParameters().get(urlInsert)).isEqualTo(VALUE); assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); - assertThat(createdQuery.getLookupQuery()).isEqualTo(KEY_1 + "=" + VALUE - + "&" + KEY_2 + "=" + VALUE); + assertThat(createdQuery.getLookupQuery()) + .isEqualTo(KEY_1 + "=" + VALUE + "&" + KEY_2 + "=" + VALUE); } @Test public void 
failSerializationOpenTest() { // GIVEN LookupRow lookupRow = getLookupRow(KEY_1); - ResolvedSchema resolvedSchema = ResolvedSchema.of(Column.physical(KEY_1, - DataTypes.STRING())); + ResolvedSchema resolvedSchema = + ResolvedSchema.of(Column.physical(KEY_1, DataTypes.STRING())); Configuration config = getConfiguration("GET"); lookupRow.setLookupPhysicalRowDataType(DATATYPE_1); GenericJsonAndUrlQueryCreator genericJsonAndUrlQueryCreator = - (GenericJsonAndUrlQueryCreator) new GenericJsonAndUrlQueryCreatorFactory() - .createLookupQueryCreator( - config, - lookupRow, - getTableContext(config, - resolvedSchema) - ); + (GenericJsonAndUrlQueryCreator) + new GenericJsonAndUrlQueryCreatorFactory() + .createLookupQueryCreator( + config, lookupRow, getTableContext(config, resolvedSchema)); // create a SerializationSchema that throws and exception in open - SerializationSchema mockSerialiationSchema = new SerializationSchema() { - @Override - public void open(InitializationContext context) throws Exception { - throw new Exception("Exception for testing"); - } - @Override - public byte[] serialize(RowData element) { - return new byte[0]; - } - }; + SerializationSchema mockSerialiationSchema = + new SerializationSchema() { + @Override + public void open(InitializationContext context) throws Exception { + throw new Exception("Exception for testing"); + } + + @Override + public byte[] serialize(RowData element) { + return new byte[0]; + } + }; // WHEN genericJsonAndUrlQueryCreator.setSerializationSchema(mockSerialiationSchema); var row = new GenericRowData(1); // THEN - assertThrows(RuntimeException.class, () -> { - genericJsonAndUrlQueryCreator.createLookupQuery(row); - }); + assertThrows( + RuntimeException.class, + () -> { + genericJsonAndUrlQueryCreator.createLookupQuery(row); + }); } + @Test void convertToQueryParametersUnsupportedEncodingTest() { // GIVEN @@ -159,27 +175,28 @@ void convertToQueryParametersUnsupportedEncodingTest() { // WHEN JsonNode personNode = 
mapper.valueToTree(person); // THEN - assertThrows(RuntimeException.class, () -> { - GenericJsonAndUrlQueryCreator.convertToQueryParameters( - (ObjectNode) personNode, "bad encoding"); - }); + assertThrows( + RuntimeException.class, + () -> { + GenericJsonAndUrlQueryCreator.convertToQueryParameters( + (ObjectNode) personNode, "bad encoding"); + }); } + @Test void rowDataToRowTest() { // GIVEN // String final String value = VALUE; int intValue = 10; - GenericRowData rowData = GenericRowData.of( - StringData.fromString(value), - intValue, - intValue - ); - DataType dataType = row(List.of( - DataTypes.FIELD(KEY_1, DataTypes.STRING()), - DataTypes.FIELD(KEY_2, DataTypes.DATE()), - DataTypes.FIELD(KEY_3, DataTypes.TIMESTAMP_LTZ()) - )); + GenericRowData rowData = + GenericRowData.of(StringData.fromString(value), intValue, intValue); + DataType dataType = + row( + List.of( + DataTypes.FIELD(KEY_1, DataTypes.STRING()), + DataTypes.FIELD(KEY_2, DataTypes.DATE()), + DataTypes.FIELD(KEY_3, DataTypes.TIMESTAMP_LTZ()))); // WHEN Row row = rowDataToRow(rowData, dataType); // THEN @@ -188,7 +205,7 @@ void rowDataToRowTest() { assertThat(row.getField(KEY_3).equals("1970-01-01T00:00:00.010Z")); } - private static void validateCreatedQueryForGet( LookupQueryInfo createdQuery) { + private static void validateCreatedQueryForGet(LookupQueryInfo createdQuery) { // check there is no body params and we have the expected lookup query assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); assertThat(createdQuery.getLookupQuery()).isEqualTo(KEY_1 + "=" + VALUE); @@ -196,46 +213,38 @@ private static void validateCreatedQueryForGet( LookupQueryInfo createdQuery) { private static void validateCreatedQueryForPutAndPost(LookupQueryInfo createdQuery) { // check we have the expected body params and lookup query - assertThat(createdQuery - .getBodyBasedUrlQueryParameters()) - .isEqualTo(KEY_1 + "=" + VALUE); - assertThat(createdQuery.getLookupQuery()).isEqualTo( - "{\"" - + KEY_1 - + 
"\":\"" + VALUE - + "\"}"); + assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEqualTo(KEY_1 + "=" + VALUE); + assertThat(createdQuery.getLookupQuery()) + .isEqualTo("{\"" + KEY_1 + "\":\"" + VALUE + "\"}"); } - private static @NotNull GenericRowData getRowData(int arity, String value) { + private static GenericRowData getRowData(int arity, String value) { var row = new GenericRowData(arity); row.setField(0, StringData.fromString(value)); return row; } - private static @NotNull Configuration getConfiguration(String operation) { + private static Configuration getConfiguration(String operation) { Configuration config = new Configuration(); - config.set(GenericJsonAndUrlQueryCreatorFactory.REQUEST_QUERY_PARAM_FIELDS, - QUERY_PARAMS); + config.set(GenericJsonAndUrlQueryCreatorFactory.REQUEST_QUERY_PARAM_FIELDS, QUERY_PARAMS); if (!operation.equals("GET")) { // add the body content for PUT and POST - config.set(GenericJsonAndUrlQueryCreatorFactory.REQUEST_BODY_FIELDS, - QUERY_PARAMS); + config.set(GenericJsonAndUrlQueryCreatorFactory.REQUEST_BODY_FIELDS, QUERY_PARAMS); } - config.set(GenericJsonAndUrlQueryCreatorFactory.REQUEST_URL_MAP, URL_PARAMS); + config.set(GenericJsonAndUrlQueryCreatorFactory.REQUEST_URL_MAP, urlParams); config.setString(LOOKUP_METHOD, operation); return config; } - private static @NotNull LookupRow getLookupRow(String... keys ) { + private static LookupRow getLookupRow(String... 
keys) { LookupRow lookupRow = new LookupRow(); for (int keyNumber = 0; keyNumber < keys.length; keyNumber++) { lookupRow.addLookupEntry( - new RowDataSingleValueLookupSchemaEntry( - keys[keyNumber], - RowData.createFieldGetter( - DataTypes.STRING().getLogicalType(), keyNumber) - )); + new RowDataSingleValueLookupSchemaEntry( + keys[keyNumber], + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), keyNumber))); lookupRow.setLookupPhysicalRowDataType(DATATYPE_1); } return lookupRow; diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreatorFactoryTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreatorFactoryTest.java new file mode 100644 index 00000000..4cf4c3f0 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreatorFactoryTest.java @@ -0,0 +1,111 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.configuration.Configuration; +import org.apache.flink.connector.http.table.lookup.LookupRow; +import org.apache.flink.connector.http.table.lookup.RowDataSingleValueLookupSchemaEntry; +import org.apache.flink.table.api.DataTypes; +import org.apache.flink.table.api.Schema; +import org.apache.flink.table.catalog.CatalogTable; +import org.apache.flink.table.catalog.Column; +import org.apache.flink.table.catalog.ObjectIdentifier; +import org.apache.flink.table.catalog.ResolvedCatalogTable; +import org.apache.flink.table.catalog.ResolvedSchema; +import org.apache.flink.table.data.RowData; +import org.apache.flink.table.factories.DynamicTableFactory; +import org.apache.flink.table.factories.FactoryUtil; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import java.util.List; + +import static org.apache.flink.connector.http.table.lookup.HttpLookupTableSourceFactory.row; +import static org.assertj.core.api.Assertions.assertThat; + +/** Test for {@link GenericJsonQueryCreatorFactory }. 
*/ +class GenericJsonQueryCreatorFactoryTest { + + private Configuration config; + private LookupRow lookupRow; + + private DynamicTableFactory.Context tableContext; + + @BeforeEach + public void setUp() { + this.config = new Configuration(); + this.lookupRow = new LookupRow(); + lookupRow = + new LookupRow() + .addLookupEntry( + new RowDataSingleValueLookupSchemaEntry( + "key1", + RowData.createFieldGetter( + DataTypes.STRING().getLogicalType(), 0))); + + lookupRow.setLookupPhysicalRowDataType( + row(List.of(DataTypes.FIELD("key1", DataTypes.STRING())))); + + CustomFormatFactory.requiredOptionsWereUsed = false; + + ResolvedSchema resolvedSchema = + ResolvedSchema.of(Column.physical("key1", DataTypes.STRING())); + + this.tableContext = + new FactoryUtil.DefaultDynamicTableContext( + ObjectIdentifier.of("default", "default", "test"), + new ResolvedCatalogTable( + CatalogTable.of( + Schema.newBuilder() + .fromResolvedSchema(resolvedSchema) + .build(), + null, + Collections.emptyList(), + Collections.emptyMap()), + resolvedSchema), + Collections.emptyMap(), + config, + Thread.currentThread().getContextClassLoader(), + false); + } + + @Test + public void shouldPassPropertiesToQueryCreatorFormat() { + assertThat(CustomFormatFactory.requiredOptionsWereUsed) + .withFailMessage( + "CustomFormatFactory was not cleared, " + + "make sure `CustomFormatFactory.requiredOptionsWereUsed = false` " + + "was called before this test execution.") + .isFalse(); + + this.config.setString("lookup-request.format", CustomFormatFactory.IDENTIFIER); + this.config.setString( + String.format( + "lookup-request.format.%s.%s", + CustomFormatFactory.IDENTIFIER, CustomFormatFactory.REQUIRED_OPTION), + "optionValue"); + + new GenericJsonQueryCreatorFactory() + .createLookupQueryCreator(config, lookupRow, tableContext); + + assertThat(CustomFormatFactory.requiredOptionsWereUsed).isTrue(); + } +} diff --git 
a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreatorTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreatorTest.java similarity index 57% rename from src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreatorTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreatorTest.java index 664d2027..7241a2eb 100644 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreatorTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/GenericJsonQueryCreatorTest.java @@ -1,9 +1,26 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ -import java.util.List; +package org.apache.flink.connector.http.table.lookup.querycreators; import org.apache.flink.api.common.serialization.SerializationSchema; import org.apache.flink.configuration.Configuration; +import org.apache.flink.connector.http.table.lookup.LookupQueryInfo; import org.apache.flink.formats.json.JsonFormatFactory; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.data.GenericRowData; @@ -11,37 +28,39 @@ import org.apache.flink.table.data.StringData; import org.apache.flink.table.factories.DynamicTableFactory.Context; import org.apache.flink.table.types.DataType; + import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; -import static org.assertj.core.api.Assertions.assertThat; -import com.getindata.connectors.http.internal.table.lookup.LookupQueryInfo; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSourceFactory.row; +import java.util.List; + +import static org.apache.flink.connector.http.table.lookup.HttpLookupTableSourceFactory.row; +import static org.assertj.core.api.Assertions.assertThat; +/** Test for {@link GenericJsonQueryCreator }. 
*/ @ExtendWith(MockitoExtension.class) class GenericJsonQueryCreatorTest { - @Mock - private Context dynamicTableFactoryContext; + @Mock private Context dynamicTableFactoryContext; private GenericJsonQueryCreator jsonQueryCreator; @BeforeEach public void setUp() { - DataType lookupPhysicalDataType = row(List.of( - DataTypes.FIELD("id", DataTypes.INT()), - DataTypes.FIELD("uuid", DataTypes.STRING()) - ) - ); + DataType lookupPhysicalDataType = + row( + List.of( + DataTypes.FIELD("id", DataTypes.INT()), + DataTypes.FIELD("uuid", DataTypes.STRING()))); SerializationSchema jsonSerializer = - new JsonFormatFactory() - .createEncodingFormat(dynamicTableFactoryContext, new Configuration()) - .createRuntimeEncoder(null, lookupPhysicalDataType); + new JsonFormatFactory() + .createEncodingFormat(dynamicTableFactoryContext, new Configuration()) + .createRuntimeEncoder(null, lookupPhysicalDataType); this.jsonQueryCreator = new GenericJsonQueryCreator(jsonSerializer); } diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/PersonBean.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/PersonBean.java new file mode 100644 index 00000000..d156fdb0 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/PersonBean.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import lombok.Data; + +/** bean for testins. */ +@Data +public class PersonBean { + private final String firstName; + private final String lastName; +} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/QueryCreatorUtils.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/QueryCreatorUtils.java similarity index 54% rename from src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/QueryCreatorUtils.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/QueryCreatorUtils.java index 6ccdb444..f985df16 100644 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/QueryCreatorUtils.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/QueryCreatorUtils.java @@ -1,9 +1,26 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ -import java.util.Collections; +package org.apache.flink.connector.http.table.lookup.querycreators; import org.apache.flink.api.common.serialization.SerializationSchema; import org.apache.flink.configuration.Configuration; +import org.apache.flink.connector.http.table.lookup.LookupRow; import org.apache.flink.table.api.Schema; import org.apache.flink.table.catalog.CatalogTable; import org.apache.flink.table.catalog.ObjectIdentifier; @@ -15,11 +32,12 @@ import org.apache.flink.table.factories.FactoryUtil; import org.apache.flink.table.factories.SerializationFormatFactory; -import com.getindata.connectors.http.internal.table.lookup.LookupRow; +import java.util.Collections; +/** Query Creator utils. 
*/ public class QueryCreatorUtils { - public static DynamicTableFactory.Context getTableContext(Configuration config, - ResolvedSchema resolvedSchema) { + public static DynamicTableFactory.Context getTableContext( + Configuration config, ResolvedSchema resolvedSchema) { return new FactoryUtil.DefaultDynamicTableContext( ObjectIdentifier.of("default", "default", "test"), @@ -33,25 +51,23 @@ public static DynamicTableFactory.Context getTableContext(Configuration config, Collections.emptyMap(), config, Thread.currentThread().getContextClassLoader(), - false - ); + false); } - public static SerializationSchema getRowDataSerializationSchema(LookupRow lookupRow, - DynamicTableFactory.Context dynamicTableFactoryContext, - String formatIdentifier, - QueryFormatAwareConfiguration queryFormatAwareConfiguration) { + + public static SerializationSchema getRowDataSerializationSchema( + LookupRow lookupRow, + DynamicTableFactory.Context dynamicTableFactoryContext, + String formatIdentifier, + QueryFormatAwareConfiguration queryFormatAwareConfiguration) { SerializationFormatFactory jsonFormatFactory = FactoryUtil.discoverFactory( dynamicTableFactoryContext.getClassLoader(), SerializationFormatFactory.class, - formatIdentifier - ); - - EncodingFormat> - encoder = jsonFormatFactory.createEncodingFormat( - dynamicTableFactoryContext, - queryFormatAwareConfiguration - ); + formatIdentifier); + + EncodingFormat> encoder = + jsonFormatFactory.createEncodingFormat( + dynamicTableFactoryContext, queryFormatAwareConfiguration); final SerializationSchema serializationSchema = encoder.createRuntimeEncoder(null, lookupRow.getLookupPhysicalRowDataType()); diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/QueryFormatAwareConfigurationTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/QueryFormatAwareConfigurationTest.java new file mode 100644 index 00000000..4215c53a --- /dev/null 
+++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/lookup/querycreators/QueryFormatAwareConfigurationTest.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.lookup.querycreators; + +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.configuration.ConfigOptions; +import org.apache.flink.configuration.Configuration; + +import org.junit.jupiter.api.Test; + +import java.util.Collections; +import java.util.Optional; + +import static org.assertj.core.api.Assertions.assertThat; + +/** Test for {@link QueryFormatAwareConfiguration }. 
*/ +class QueryFormatAwareConfigurationTest { + + private static final ConfigOption configOption = + ConfigOptions.key("key").stringType().noDefaultValue(); + + @Test + public void testWithDot() { + QueryFormatAwareConfiguration queryConfig = + new QueryFormatAwareConfiguration( + "prefix.", + Configuration.fromMap(Collections.singletonMap("prefix.key", "val"))); + + Optional optional = queryConfig.getOptional(configOption); + assertThat(optional.get()).isEqualTo("val"); + } + + @Test + public void testWithoutDot() { + QueryFormatAwareConfiguration queryConfig = + new QueryFormatAwareConfiguration( + "prefix", + Configuration.fromMap(Collections.singletonMap("prefix.key", "val"))); + + Optional optional = queryConfig.getOptional(configOption); + assertThat(optional.get()).isEqualTo("val"); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/BatchRequestHttpDynamicSinkInsertTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/BatchRequestHttpDynamicSinkInsertTest.java new file mode 100644 index 00000000..56002085 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/BatchRequestHttpDynamicSinkInsertTest.java @@ -0,0 +1,364 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.sink; + +import org.apache.flink.connector.http.WireMockServerPortAllocator; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; + +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException; +import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; + +import com.github.tomakehurst.wiremock.WireMockServer; +import com.github.tomakehurst.wiremock.http.RequestMethod; +import com.github.tomakehurst.wiremock.verification.LoggedRequest; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.io.File; +import java.util.Arrays; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static com.github.tomakehurst.wiremock.client.WireMock.any; +import static com.github.tomakehurst.wiremock.client.WireMock.anyRequestedFor; +import static com.github.tomakehurst.wiremock.client.WireMock.ok; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options; +import static org.apache.flink.connector.http.TestHelper.readTestFile; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; + +/** Test Batch Request Http Dynamic Sink Insert. 
*/ +public class BatchRequestHttpDynamicSinkInsertTest { + + private static int serverPort; + + private static int httpsServerPort; + + private static final String CERTS_PATH = "src/test/resources/security/certs/"; + + private static final String SERVER_KEYSTORE_PATH = + "src/test/resources/security/certs/serverKeyStore.jks"; + + private static final String SERVER_TRUSTSTORE_PATH = + "src/test/resources/security/certs/serverTrustStore.jks"; + + protected StreamExecutionEnvironment env; + + protected StreamTableEnvironment tEnv; + + private WireMockServer wireMockServer; + + @BeforeEach + public void setup() { + File keyStoreFile = new File(SERVER_KEYSTORE_PATH); + File trustStoreFile = new File(SERVER_TRUSTSTORE_PATH); + serverPort = WireMockServerPortAllocator.getServerPort(); + httpsServerPort = WireMockServerPortAllocator.getSecureServerPort(); + + this.wireMockServer = + new WireMockServer( + options() + .port(serverPort) + .httpsPort(httpsServerPort) + .keystorePath(keyStoreFile.getAbsolutePath()) + .keystorePassword("password") + .keyManagerPassword("password") + .needClientAuth(true) + .trustStorePath(trustStoreFile.getAbsolutePath()) + .trustStorePassword("password")); + + wireMockServer.start(); + + env = StreamExecutionEnvironment.getExecutionEnvironment(); + tEnv = StreamTableEnvironment.create(env); + } + + @AfterEach + public void tearDown() { + wireMockServer.stop(); + } + + private static Stream requestBatch() { + return Stream.of( + Arguments.of(50, "allInOneBatch.txt"), + Arguments.of(5, "allInOneBatch.txt"), + Arguments.of(3, "twoBatches.txt"), + Arguments.of(2, "threeBatches.txt"), + Arguments.of(1, "fourSingleEventBatches.txt")); + } + + @ParameterizedTest + @MethodSource("requestBatch") + public void testHttpDynamicSinkDefaultPost(int requestBatchSize, String expectedRequests) + throws Exception { + + wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); + String contentTypeHeaderValue = "application/json"; + + final 
String createTable = + String.format( + "CREATE TABLE http (\n" + + " id bigint,\n" + + " first_name string,\n" + + " last_name string,\n" + + " gender string,\n" + + " stock string,\n" + + " currency string,\n" + + " tx_date timestamp(3)\n" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'format' = 'json',\n" + + " 'http.sink.request.batch.size' = '%s',\n" + + " 'http.sink.header.Content-Type' = '%s'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER, + "http://localhost:" + serverPort + "/myendpoint", + requestBatchSize, + contentTypeHeaderValue); + + tEnv.executeSql(createTable); + + final String insert = + "INSERT INTO http\n" + + "VALUES\n" + + " (1, 'Ninette', 'Clee', 'Female', 'CDZI', 'RUB', TIMESTAMP '2021-08-24 15:22:59'),\n" + + " (2, 'Rob', 'Zombie', 'Male', 'DGICA', 'GBP', TIMESTAMP '2021-10-25 20:53:54'), \n" + + " (3, 'Adam', 'Jones', 'Male', 'DGICA', 'PLN', TIMESTAMP '2021-10-26 20:53:54'), \n" + + " (4, 'Danny', 'Carey', 'Male', 'DGICA', 'USD', TIMESTAMP '2021-10-27 20:53:54'), \n" + + " (5, 'Bob', 'Dylan', 'Male', 'DGICA', 'USD', TIMESTAMP '2021-10-28 20:53:54')"; + tEnv.executeSql(insert).await(); + + verifyRequests(expectedRequests); + } + + @ParameterizedTest + @MethodSource("requestBatch") + public void testHttpDynamicSinkPut(int requestBatchSize, String expectedRequests) + throws Exception { + + wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); + String contentTypeHeaderValue = "application/json"; + + final String createTable = + String.format( + "CREATE TABLE http (\n" + + " id bigint,\n" + + " first_name string,\n" + + " last_name string,\n" + + " gender string,\n" + + " stock string,\n" + + " currency string,\n" + + " tx_date timestamp(3)\n" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'insert-method' = 'PUT',\n" + + " 'format' = 'json',\n" + + " 'http.sink.request.batch.size' = '%s',\n" + + " 'http.sink.header.Content-Type' = '%s'\n" + + ")", + 
HttpDynamicTableSinkFactory.IDENTIFIER, + "http://localhost:" + serverPort + "/myendpoint", + requestBatchSize, + contentTypeHeaderValue); + + tEnv.executeSql(createTable); + + final String insert = + "INSERT INTO http\n" + + "VALUES\n" + + " (1, 'Ninette', 'Clee', 'Female', 'CDZI', 'RUB', TIMESTAMP '2021-08-24 15:22:59'),\n" + + " (2, 'Rob', 'Zombie', 'Male', 'DGICA', 'GBP', TIMESTAMP '2021-10-25 20:53:54'), \n" + + " (3, 'Adam', 'Jones', 'Male', 'DGICA', 'PLN', TIMESTAMP '2021-10-26 20:53:54'), \n" + + " (4, 'Danny', 'Carey', 'Male', 'DGICA', 'USD', TIMESTAMP '2021-10-27 20:53:54'), \n" + + " (5, 'Bob', 'Dylan', 'Male', 'DGICA', 'USD', TIMESTAMP '2021-10-28 20:53:54')"; + tEnv.executeSql(insert).await(); + + verifyRequests(expectedRequests); + } + + private void verifyRequests(String expectedResponse) { + ObjectMapper mapper = new ObjectMapper(); + + var postedRequests = + wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))).stream() + .map(LoggedRequest::getBodyAsString) + .map(content -> getJsonSipleString(mapper, content)) + .collect(Collectors.toList()); + + var expectedResponses = + Arrays.stream(readTestFile("/json/sink/" + expectedResponse).split("#-----#")) + .map(content -> getJsonSipleString(mapper, content)) + .collect(Collectors.toList()); + + // TODO this ideally should use containsExactlyElementsOf however Wiremock uses multiple + // threads to add events to its internal journal which can break the order of + // received events. Probably using the WireMock Scenarios feature could help here and + // allow verifying the order. Or maybe there is some other solution for that. 
+ assertThat(postedRequests).containsExactlyInAnyOrderElementsOf(expectedResponses); + } + + private static String getJsonSipleString(ObjectMapper mapper, String content) { + try { + return mapper.readTree(content).toString(); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + @Test + public void testHttpDynamicSinkRawFormat() throws Exception { + wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); + String contentTypeHeaderValue = "application/octet-stream"; + + final String createTable = + String.format( + "CREATE TABLE http (\n" + + " last_name string" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'format' = 'raw',\n" + + " 'http.sink.header.Content-Type' = '%s'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER, + "http://localhost:" + serverPort + "/myendpoint", + contentTypeHeaderValue); + + tEnv.executeSql(createTable); + + final String insert = "INSERT INTO http VALUES ('Clee'), ('John')"; + tEnv.executeSql(insert).await(); + + var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); + assertEquals(1, postedRequests.size()); + + var request = postedRequests.get(0); + assertEquals("[Clee,John]", request.getBodyAsString()); + assertEquals(RequestMethod.POST, request.getMethod()); + assertEquals(contentTypeHeaderValue, request.getHeader("Content-Type")); + } + + @Test + public void testHttpRequestWithHeadersFromDdl() + throws ExecutionException, InterruptedException { + String originHeaderValue = "*"; + String xContentTypeOptionsHeaderValue = "nosniff"; + String contentTypeHeaderValue = "application/json"; + + wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); + + final String createTable = + String.format( + "CREATE TABLE http (\n" + + " last_name string" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'format' = 'raw',\n" + + " 'http.sink.header.Origin' = '%s',\n" + + " 
'http.sink.header.X-Content-Type-Options' = '%s',\n" + + " 'http.sink.header.Content-Type' = '%s'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER, + "http://localhost:" + serverPort + "/myendpoint", + originHeaderValue, + xContentTypeOptionsHeaderValue, + contentTypeHeaderValue); + + tEnv.executeSql(createTable); + + final String insert = "INSERT INTO http VALUES ('Clee'), ('John')"; + tEnv.executeSql(insert).await(); + + var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); + assertEquals(1, postedRequests.size()); + + var request = postedRequests.get(0); + assertEquals("[Clee,John]", request.getBodyAsString()); + assertEquals(RequestMethod.POST, request.getMethod()); + assertEquals(contentTypeHeaderValue, request.getHeader("Content-Type")); + assertEquals(originHeaderValue, request.getHeader("Origin")); + assertEquals(xContentTypeOptionsHeaderValue, request.getHeader("X-Content-Type-Options")); + } + + @Test + public void testHttpsWithMTls() throws Exception { + + File serverTrustedCert = new File(CERTS_PATH + "ca.crt"); + + File clientCert = new File(CERTS_PATH + "client.crt"); + File clientPrivateKey = new File(CERTS_PATH + "clientPrivateKey.pem"); + + wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); + String contentTypeHeaderValue = "application/json"; + + final String createTable = + String.format( + "CREATE TABLE http (\n" + + " id bigint,\n" + + " first_name string,\n" + + " last_name string,\n" + + " gender string,\n" + + " stock string,\n" + + " currency string,\n" + + " tx_date timestamp(3)\n" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'format' = 'json',\n" + + " 'http.sink.header.Content-Type' = '%s',\n" + + " 'http.security.cert.server' = '%s',\n" + + " 'http.security.cert.client' = '%s',\n" + + " 'http.security.key.client' = '%s'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER, + "https://localhost:" + httpsServerPort + "/myendpoint", + 
contentTypeHeaderValue, + serverTrustedCert.getAbsolutePath(), + clientCert.getAbsolutePath(), + clientPrivateKey.getAbsolutePath()); + + tEnv.executeSql(createTable); + + final String insert = + "INSERT INTO http\n" + + "VALUES (1, 'Ninette', 'Clee', 'Female', 'CDZI', 'RUB', " + + "TIMESTAMP '2021-08-24 15:22:59')"; + tEnv.executeSql(insert).await(); + + var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); + assertEquals(1, postedRequests.size()); + + var request = postedRequests.get(0); + assertEquals( + "[{\"id\":1,\"first_name\":\"Ninette\",\"last_name\":\"Clee\"," + + "\"gender\":\"Female\",\"stock\":\"CDZI\",\"currency\":\"RUB\"," + + "\"tx_date\":\"2021-08-24 15:22:59\"}]", + request.getBodyAsString()); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/HttpDynamicSinkTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/HttpDynamicSinkTest.java new file mode 100644 index 00000000..a4a0f882 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/HttpDynamicSinkTest.java @@ -0,0 +1,134 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.sink; + +import org.apache.flink.configuration.Configuration; +import org.apache.flink.connector.http.table.sink.HttpDynamicSink.HttpDynamicTableSinkBuilder; +import org.apache.flink.table.connector.ChangelogMode; +import org.apache.flink.table.factories.FactoryUtil; +import org.apache.flink.table.factories.TestFormatFactory; +import org.apache.flink.table.types.AtomicDataType; +import org.apache.flink.table.types.logical.BooleanType; + +import org.junit.jupiter.api.Test; + +import static org.apache.flink.connector.http.table.sink.HttpDynamicSinkConnectorOptions.INSERT_METHOD; +import static org.apache.flink.connector.http.table.sink.HttpDynamicSinkConnectorOptions.URL; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; + +/** Test for {@link HttpDynamicSink}. */ +public class HttpDynamicSinkTest { + + @Test + public void testAsSummaryString() { + var mockFormat = new TestFormatFactory.EncodingFormatMock(",", ChangelogMode.insertOnly()); + + HttpDynamicSink dynamicSink = + new HttpDynamicTableSinkBuilder() + .setTableOptions(new Configuration()) + .setConsumedDataType(new AtomicDataType(new BooleanType(false))) + .setEncodingFormat(mockFormat) + .setHttpPostRequestCallback(new Slf4jHttpPostRequestCallback()) + .build(); + + assertThat(dynamicSink.asSummaryString()).isEqualTo("HttpSink"); + } + + @Test + public void copyEqualityTest() { + var mockFormat = new TestFormatFactory.EncodingFormatMock(",", ChangelogMode.insertOnly()); + var sink = + new HttpDynamicTableSinkBuilder() + .setTableOptions( + new Configuration() { + { + this.set(URL, "localhost:8123"); + this.set(INSERT_METHOD, "POST"); + this.set(FactoryUtil.FORMAT, "json"); + } + }) + .setConsumedDataType(new AtomicDataType(new BooleanType(false))) + .setEncodingFormat(mockFormat) + .setHttpPostRequestCallback(new 
Slf4jHttpPostRequestCallback()) + .build(); + + assertEquals(sink, sink.copy()); + assertEquals(sink.hashCode(), sink.copy().hashCode()); + } + + private HttpDynamicTableSinkBuilder getSinkBuilder() { + var mockFormat = new TestFormatFactory.EncodingFormatMock(",", ChangelogMode.insertOnly()); + var consumedDataType = new AtomicDataType(new BooleanType(false)); + + return new HttpDynamicTableSinkBuilder() + .setTableOptions( + new Configuration() { + { + this.set(URL, "localhost:8123"); + this.set(INSERT_METHOD, "POST"); + this.set(FactoryUtil.FORMAT, "json"); + } + }) + .setConsumedDataType(consumedDataType) + .setEncodingFormat(mockFormat) + .setHttpPostRequestCallback(new Slf4jHttpPostRequestCallback()) + .setMaxBatchSize(1); + } + + @Test + public void nonEqualsTest() { + var sink = getSinkBuilder().build(); + var sinkBatchSize = getSinkBuilder().setMaxBatchSize(10).build(); + var sinkSinkConfig = + getSinkBuilder() + .setTableOptions( + new Configuration() { + { + this.set(URL, "localhost:8124"); + this.set(INSERT_METHOD, "POST"); + this.set(FactoryUtil.FORMAT, "json"); + } + }) + .build(); + var sinkDataType = + getSinkBuilder() + .setConsumedDataType(new AtomicDataType(new BooleanType(true))) + .build(); + var sinkFormat = + getSinkBuilder() + .setEncodingFormat( + new TestFormatFactory.EncodingFormatMock(";", ChangelogMode.all())) + .build(); + var sinkHttpPostRequestCallback = + getSinkBuilder() + .setHttpPostRequestCallback(new Slf4jHttpPostRequestCallback()) + .build(); + + assertEquals(sink, sink); + assertNotEquals(null, sink); + assertNotEquals("test-string", sink); + assertNotEquals(sink, sinkBatchSize); + assertNotEquals(sink, sinkSinkConfig); + assertNotEquals(sink, sinkDataType); + assertNotEquals(sink, sinkFormat); + assertNotEquals(sink, sinkHttpPostRequestCallback); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/HttpDynamicTableSinkFactoryTest.java 
b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/HttpDynamicTableSinkFactoryTest.java new file mode 100644 index 00000000..055fc513 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/HttpDynamicTableSinkFactoryTest.java @@ -0,0 +1,113 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.table.sink; + +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.ValidationException; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** + * Unfortunately it seems that Flink is lazy with connector instantiation, so one has to call INSERT + * in order to test the Factory. 
+ */ +public class HttpDynamicTableSinkFactoryTest { + + protected StreamExecutionEnvironment env; + protected StreamTableEnvironment tEnv; + + @BeforeEach + public void setup() { + env = StreamExecutionEnvironment.getExecutionEnvironment(); + tEnv = StreamTableEnvironment.create(env); + } + + @Test + public void requiredOptionsTest() { + final String noFormatOptionCreate = + String.format( + "CREATE TABLE formatHttp (\n" + + " id bigint\n" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER, "http://localhost/"); + tEnv.executeSql(noFormatOptionCreate); + assertThrows( + ValidationException.class, + () -> tEnv.executeSql("INSERT INTO formatHttp VALUES (1)").await()); + + final String noUrlOptionCreate = + String.format( + "CREATE TABLE urlHttp (\n" + + " id bigint\n" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'format' = 'json'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER); + tEnv.executeSql(noUrlOptionCreate); + assertThrows( + ValidationException.class, + () -> tEnv.executeSql("INSERT INTO urlHttp VALUES (1)").await()); + } + + @Test + public void validateHttpSinkOptionsTest() { + final String invalidInsertMethod = + String.format( + "CREATE TABLE http (\n" + + " id bigint\n" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'format' = 'json',\n" + + " 'insert-method' = 'GET'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER, "http://localhost/"); + tEnv.executeSql(invalidInsertMethod); + assertThrows( + ValidationException.class, + () -> tEnv.executeSql("INSERT INTO http VALUES (1)").await()); + } + + @Test + public void nonexistentOptionsTest() { + final String invalidInsertMethod = + String.format( + "CREATE TABLE http (\n" + + " id bigint\n" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'format' = 'json',\n" + + " 'some-random-totally-unexisting-option-!g*Av#' = '7123'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER, 
"http://localhost/"); + tEnv.executeSql(invalidInsertMethod); + assertThrows( + ValidationException.class, + () -> tEnv.executeSql("INSERT INTO http VALUES (1)").await()); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/PerRequestHttpDynamicSinkInsertTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/PerRequestHttpDynamicSinkInsertTest.java new file mode 100644 index 00000000..03d0d2bc --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/table/sink/PerRequestHttpDynamicSinkInsertTest.java @@ -0,0 +1,331 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.table.sink; + +import org.apache.flink.connector.http.WireMockServerPortAllocator; +import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; +import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; + +import com.github.tomakehurst.wiremock.WireMockServer; +import com.github.tomakehurst.wiremock.http.RequestMethod; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.io.File; +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.ExecutionException; + +import static com.github.tomakehurst.wiremock.client.WireMock.any; +import static com.github.tomakehurst.wiremock.client.WireMock.anyRequestedFor; +import static com.github.tomakehurst.wiremock.client.WireMock.ok; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** Test Per Request Request. 
*/ +public class PerRequestHttpDynamicSinkInsertTest { + + private static int serverPort; + + private static int secureServerPort; + + private static final String CERTS_PATH = "src/test/resources/security/certs/"; + + private static final String SERVER_KEYSTORE_PATH = + "src/test/resources/security/certs/serverKeyStore.jks"; + + private static final String SERVER_TRUSTSTORE_PATH = + "src/test/resources/security/certs/serverTrustStore.jks"; + + protected StreamExecutionEnvironment env; + + protected StreamTableEnvironment tEnv; + + private WireMockServer wireMockServer; + + @BeforeEach + public void setup() { + File keyStoreFile = new File(SERVER_KEYSTORE_PATH); + File trustStoreFile = new File(SERVER_TRUSTSTORE_PATH); + serverPort = WireMockServerPortAllocator.getServerPort(); + secureServerPort = WireMockServerPortAllocator.getSecureServerPort(); + + this.wireMockServer = + new WireMockServer( + options() + .port(serverPort) + .httpsPort(secureServerPort) + .keystorePath(keyStoreFile.getAbsolutePath()) + .keystorePassword("password") + .keyManagerPassword("password") + .needClientAuth(true) + .trustStorePath(trustStoreFile.getAbsolutePath()) + .trustStorePassword("password")); + + wireMockServer.start(); + + env = StreamExecutionEnvironment.getExecutionEnvironment(); + tEnv = StreamTableEnvironment.create(env); + } + + @AfterEach + public void tearDown() { + wireMockServer.stop(); + } + + @Test + public void testHttpDynamicSinkDefaultPost() throws Exception { + wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); + String contentTypeHeaderValue = "application/json"; + + final String createTable = + String.format( + "CREATE TABLE http (\n" + + " id bigint,\n" + + " first_name string,\n" + + " last_name string,\n" + + " gender string,\n" + + " stock string,\n" + + " currency string,\n" + + " tx_date timestamp(3)\n" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'format' = 'json',\n" + + " 
'http.sink.writer.request.mode' = 'single',\n" + + " 'http.sink.header.Content-Type' = '%s'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER, + "http://localhost:" + serverPort + "/myendpoint", + contentTypeHeaderValue); + + tEnv.executeSql(createTable); + + final String insert = + "INSERT INTO http\n" + + "VALUES (1, 'Ninette', 'Clee', 'Female', 'CDZI', 'RUB', " + + "TIMESTAMP '2021-08-24 15:22:59')"; + tEnv.executeSql(insert).await(); + + var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); + assertEquals(1, postedRequests.size()); + + var request = postedRequests.get(0); + assertEquals( + "{\"id\":1,\"first_name\":\"Ninette\",\"last_name\":\"Clee\"," + + "\"gender\":\"Female\",\"stock\":\"CDZI\",\"currency\":\"RUB\"," + + "\"tx_date\":\"2021-08-24 15:22:59\"}", + request.getBodyAsString()); + assertEquals(RequestMethod.POST, request.getMethod()); + assertEquals(contentTypeHeaderValue, request.getHeader("Content-Type")); + } + + @Test + public void testHttpDynamicSinkPut() throws Exception { + wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); + String contentTypeHeaderValue = "application/json"; + + final String createTable = + String.format( + "CREATE TABLE http (\n" + + " id bigint,\n" + + " first_name string,\n" + + " last_name string,\n" + + " gender string,\n" + + " stock string,\n" + + " currency string,\n" + + " tx_date timestamp(3)\n" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'insert-method' = 'PUT',\n" + + " 'format' = 'json',\n" + + " 'http.sink.writer.request.mode' = 'single',\n" + + " 'http.sink.header.Content-Type' = '%s'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER, + "http://localhost:" + serverPort + "/myendpoint", + contentTypeHeaderValue); + + tEnv.executeSql(createTable); + + final String insert = + "INSERT INTO http\n" + + "VALUES\n" + + " (1, 'Ninette', 'Clee', 'Female', 'CDZI', 'RUB', TIMESTAMP '2021-08-24 15:22:59'),\n" + + " (2, 
'Hedy', 'Hedgecock', 'Female', 'DGICA', 'CNY', " + + "TIMESTAMP '2021-10-24 20:53:54')"; + tEnv.executeSql(insert).await(); + + var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); + assertEquals(2, postedRequests.size()); + + var jsonRequests = + new HashSet<>( + Set.of( + "{\"id\":1,\"first_name\":\"Ninette\",\"last_name\":\"Clee\",\"gender\":\"Female\"," + + "\"stock\":\"CDZI\",\"currency\":\"RUB\",\"tx_date\":\"2021-08-24 15:22:59\"}", + "{\"id\":2,\"first_name\":\"Hedy\",\"last_name\":\"Hedgecock\",\"gender\":\"Female\"," + + "\"stock\":\"DGICA\",\"currency\":\"CNY\",\"tx_date\":\"2021-10-24 20:53:54\"}")); + for (var request : postedRequests) { + assertEquals(RequestMethod.PUT, request.getMethod()); + assertEquals(contentTypeHeaderValue, request.getHeader("Content-Type")); + assertTrue(jsonRequests.contains(request.getBodyAsString())); + jsonRequests.remove(request.getBodyAsString()); + } + } + + @Test + public void testHttpDynamicSinkRawFormat() throws Exception { + wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); + String contentTypeHeaderValue = "application/octet-stream"; + + final String createTable = + String.format( + "CREATE TABLE http (\n" + + " last_name string" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'format' = 'raw',\n" + + " 'http.sink.writer.request.mode' = 'single',\n" + + " 'http.sink.header.Content-Type' = '%s'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER, + "http://localhost:" + serverPort + "/myendpoint", + contentTypeHeaderValue); + + tEnv.executeSql(createTable); + + final String insert = "INSERT INTO http VALUES ('Clee')"; + tEnv.executeSql(insert).await(); + + var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); + assertEquals(1, postedRequests.size()); + + var request = postedRequests.get(0); + assertEquals("Clee", request.getBodyAsString()); + assertEquals(RequestMethod.POST, 
request.getMethod()); + assertEquals(contentTypeHeaderValue, request.getHeader("Content-Type")); + } + + @Test + public void testHttpRequestWithHeadersFromDdl() + throws ExecutionException, InterruptedException { + String originHeaderValue = "*"; + String xContentTypeOptionsHeaderValue = "nosniff"; + String contentTypeHeaderValue = "application/json"; + + wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); + + final String createTable = + String.format( + "CREATE TABLE http (\n" + + " last_name string" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'format' = 'raw',\n" + + " 'http.sink.writer.request.mode' = 'single',\n" + + " 'http.sink.header.Origin' = '%s',\n" + + " 'http.sink.header.X-Content-Type-Options' = '%s',\n" + + " 'http.sink.header.Content-Type' = '%s'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER, + "http://localhost:" + serverPort + "/myendpoint", + originHeaderValue, + xContentTypeOptionsHeaderValue, + contentTypeHeaderValue); + + tEnv.executeSql(createTable); + + final String insert = "INSERT INTO http VALUES ('Clee')"; + tEnv.executeSql(insert).await(); + + var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); + assertEquals(1, postedRequests.size()); + + var request = postedRequests.get(0); + assertEquals("Clee", request.getBodyAsString()); + assertEquals(RequestMethod.POST, request.getMethod()); + assertEquals(contentTypeHeaderValue, request.getHeader("Content-Type")); + assertEquals(originHeaderValue, request.getHeader("Origin")); + assertEquals(xContentTypeOptionsHeaderValue, request.getHeader("X-Content-Type-Options")); + } + + @Test + public void testHttpsWithMTls() throws Exception { + + File serverTrustedCert = new File(CERTS_PATH + "ca.crt"); + + File clientCert = new File(CERTS_PATH + "client.crt"); + File clientPrivateKey = new File(CERTS_PATH + "clientPrivateKey.pem"); + + 
wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); + String contentTypeHeaderValue = "application/json"; + + final String createTable = + String.format( + "CREATE TABLE http (\n" + + " id bigint,\n" + + " first_name string,\n" + + " last_name string,\n" + + " gender string,\n" + + " stock string,\n" + + " currency string,\n" + + " tx_date timestamp(3)\n" + + ") with (\n" + + " 'connector' = '%s',\n" + + " 'url' = '%s',\n" + + " 'format' = 'json',\n" + + " 'http.sink.writer.request.mode' = 'single',\n" + + " 'http.sink.header.Content-Type' = '%s',\n" + + " 'http.security.cert.server' = '%s',\n" + + " 'http.security.cert.client' = '%s',\n" + + " 'http.security.key.client' = '%s'\n" + + ")", + HttpDynamicTableSinkFactory.IDENTIFIER, + "https://localhost:" + secureServerPort + "/myendpoint", + contentTypeHeaderValue, + serverTrustedCert.getAbsolutePath(), + clientCert.getAbsolutePath(), + clientPrivateKey.getAbsolutePath()); + + tEnv.executeSql(createTable); + + final String insert = + "INSERT INTO http\n" + + "VALUES (1, 'Ninette', 'Clee', 'Female', 'CDZI', 'RUB', " + + "TIMESTAMP '2021-08-24 15:22:59')"; + tEnv.executeSql(insert).await(); + + var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); + assertEquals(1, postedRequests.size()); + + var request = postedRequests.get(0); + assertEquals( + "{\"id\":1,\"first_name\":\"Ninette\",\"last_name\":\"Clee\"," + + "\"gender\":\"Female\",\"stock\":\"CDZI\",\"currency\":\"RUB\"," + + "\"tx_date\":\"2021-08-24 15:22:59\"}", + request.getBodyAsString()); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/ConfigUtilsTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/ConfigUtilsTest.java new file mode 100644 index 00000000..b6cd7933 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/ConfigUtilsTest.java @@ -0,0 +1,292 @@ +/* + * Licensed to the 
Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils; + +import org.apache.flink.connector.http.WireMockServerPortAllocator; +import org.apache.flink.connector.http.config.ConfigException; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +import java.net.Authenticator; +import java.net.InetAddress; +import java.net.MalformedURLException; +import java.net.PasswordAuthentication; +import java.net.UnknownHostException; +import java.util.Arrays; +import java.util.Map; +import java.util.Optional; +import java.util.Properties; + +import static org.apache.flink.connector.http.TestHelper.assertPropertyArray; +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +class ConfigUtilsTest { + public static final int PORT = WireMockServerPortAllocator.getServerPort(); + public static final int PROXY_PORT = WireMockServerPortAllocator.getServerPort(); + + @Test + public void shouldExtractPropertiesToMap() { + Properties properties = new Properties(); + properties.setProperty("property", "val1"); + properties.setProperty("my.property", 
"val2"); + properties.setProperty("my.super.property", "val3"); + properties.setProperty("my.property.detail", "val4"); + properties.setProperty("my.property.extra", "val5"); + properties.setProperty("another.my.property.extra", "val6"); + + Map mappedProperties = + ConfigUtils.propertiesToMap(properties, "my.property", String.class); + + assertThat(mappedProperties).hasSize(3); + assertThat(mappedProperties) + .containsAllEntriesOf( + Map.of( + "my.property", "val2", + "my.property.detail", "val4", + "my.property.extra", "val5")); + } + + @Test + public void shouldConvertNoProperty() { + Properties properties = new Properties(); + properties.setProperty("property", "val1"); + properties.setProperty("my.property", "val2"); + properties.setProperty("my.super.property", "val3"); + + Map mappedProperties = + ConfigUtils.propertiesToMap(properties, "my.custom", String.class); + assertThat(mappedProperties).isEmpty(); + } + + @Test + public void shouldGetProxyConfigWithAuthenticator() throws UnknownHostException { + String proxyHost = "proxy"; + Integer proxyPort = PROXY_PORT; + Optional proxyUsername = Optional.of("username"); + Optional proxyPassword = Optional.of("password"); + + ProxyConfig proxyConfig = + new ProxyConfig(proxyHost, proxyPort, proxyUsername, proxyPassword); + assertThat(proxyConfig.getHost().equals("proxy")); + assertThat(proxyConfig.getAuthenticator().isPresent()); + + PasswordAuthentication auth = + proxyConfig + .getAuthenticator() + .orElseGet(null) + .requestPasswordAuthenticationInstance( + "proxy", // host + InetAddress.getByName("127.0.0.1"), // address + PROXY_PORT, // port + "http", // protocol + "Please authenticate", // prompt + "basic", // scheme + null, // URL + Authenticator.RequestorType.SERVER // Requestor type + ); + + PasswordAuthentication auth2 = + proxyConfig + .getAuthenticator() + .orElseGet(null) + .requestPasswordAuthenticationInstance( + "proxy", // host + InetAddress.getByName("127.0.0.1"), // address + PROXY_PORT, // 
port + "http", // protocol + "Please authenticate", // prompt + "basic", // scheme + null, // URL + Authenticator.RequestorType.PROXY // Requestor type + ); + + assertThat(auth).isNull(); + assertThat(auth2).isNotNull(); + assertThat(auth2.getUserName().equals("username")).isTrue(); + assertThat(Arrays.equals(auth2.getPassword(), "password".toCharArray())).isTrue(); + } + + @Test + public void shouldGetProxyConfigWithAuthenticatorServer() throws UnknownHostException { + String proxyHost = "proxy"; + Integer proxyPort = PORT; + Optional proxyUsername = Optional.of("username"); + Optional proxyPassword = Optional.of("password"); + + ProxyConfig proxyConfig = + new ProxyConfig(proxyHost, proxyPort, proxyUsername, proxyPassword); + assertThat(proxyConfig.getHost().equals("proxy")).isTrue(); + assertThat(proxyConfig.getAuthenticator().isPresent()).isTrue(); + + PasswordAuthentication auth = + proxyConfig + .getAuthenticator() + .orElseGet(null) + .requestPasswordAuthenticationInstance( + "proxy", // host + InetAddress.getByName("127.0.0.1"), // address + PORT, // port + "http", // protocol + "Please authenticate", // prompt + "basic", // scheme + null, // URL + Authenticator.RequestorType.SERVER // Requestor type + ); + + PasswordAuthentication auth2 = + proxyConfig + .getAuthenticator() + .orElseGet(null) + .requestPasswordAuthenticationInstance( + "proxy", // host + InetAddress.getByName("127.0.0.1"), // address + PORT, // port + "http", // protocol + "Please authenticate", // prompt + "basic", // scheme + null, // URL + Authenticator.RequestorType.PROXY // Requestor type + ); + + assertThat(auth).isNull(); + assertThat(auth2).isNotNull(); + } + + @Test + public void shouldGetProxyConfigWithAuthenticatorWrongHost() throws UnknownHostException { + String proxyHost = "proxy"; + Integer proxyPort = PORT; + Optional proxyUsername = Optional.of("username"); + Optional proxyPassword = Optional.of("password"); + + ProxyConfig proxyConfig = + new ProxyConfig(proxyHost, 
proxyPort, proxyUsername, proxyPassword); + assertThat(proxyConfig.getHost().equals("proxy")).isTrue(); + assertThat(proxyConfig.getAuthenticator().isPresent()).isTrue(); + + PasswordAuthentication auth = + proxyConfig + .getAuthenticator() + .get() + .requestPasswordAuthenticationInstance( + "wrong", // host + InetAddress.getByName("127.0.0.1"), // address + PORT, // port + "http", // protocol + "Please authenticate", // prompt + "basic", // scheme + null, // URL + Authenticator.RequestorType.PROXY // Requestor type + ); + + PasswordAuthentication auth2 = + proxyConfig + .getAuthenticator() + .orElseGet(null) + .requestPasswordAuthenticationInstance( + "proxy", // host + InetAddress.getByName("127.0.0.1"), // address + PORT, // port + "http", // protocol + "Please authenticate", // prompt + "basic", // scheme + null, // URL + Authenticator.RequestorType.PROXY // Requestor type + ); + + assertThat(auth).isNull(); + assertThat(auth2).isNotNull(); + } + + @Test + public void shouldGetProxyConfigWithoutAuthenticator() + throws MalformedURLException, UnknownHostException { + String proxyHost = "proxy"; + Optional proxyUsername = Optional.of("username"); + Optional proxyPassword = Optional.empty(); + + ProxyConfig proxyConfig = new ProxyConfig(proxyHost, 80, proxyUsername, proxyPassword); + assertThat(proxyConfig.getHost().equals("proxy")).isTrue(); + assertThat(proxyConfig.getAuthenticator().isEmpty()).isTrue(); + } + + @Test + public void shouldHandleInvalidPropertyType() { + + Properties properties = new Properties(); + properties.put("a.property", 1); + + // Should ignore "invalid" property since does not match the prefix + Map mappedProperties = + ConfigUtils.propertiesToMap(properties, "my.custom", String.class); + assertThat(mappedProperties).isEmpty(); + + // should throw on invalid value, when name matches the prefix. 
+ assertThatThrownBy( + () -> ConfigUtils.propertiesToMap(properties, "a.property", String.class)) + .isInstanceOf(ConfigException.class); + + // should throw on non String key regardless of key prefix. + Properties nonStringProperties = new Properties(); + nonStringProperties.put(new Object(), 1); + assertThatThrownBy( + () -> + ConfigUtils.propertiesToMap( + nonStringProperties, "a.property", String.class)) + .isInstanceOf(ConfigException.class); + } + + @ParameterizedTest(name = "Property full name - {0}") + @ValueSource(strings = {"property", "my.property", "my.super.property", ".my.super.property"}) + public void shouldGetPropertyName(String fullPropertyName) { + + String propertyLastElement = ConfigUtils.extractPropertyLastElement(fullPropertyName); + assertThat(propertyLastElement).isEqualTo("property"); + } + + @ParameterizedTest(name = "Property full name - {0}") + @ValueSource(strings = {"", " ", "my.super.property.", ".", "..."}) + public void shouldThrowOnInvalidProperty(String invalidProperty) { + + assertThatThrownBy(() -> ConfigUtils.extractPropertyLastElement(invalidProperty)) + .isInstanceOf(ConfigException.class); + } + + @Test + public void flatMapPropertyMap() { + Map propertyMap = + Map.of( + "propertyOne", "val1", + "propertyTwo", "val2", + "propertyThree", "val3"); + + String[] propertyArray = HttpHeaderUtils.toHeaderAndValueArray(propertyMap); + + // size is == propertyMap.key size + propertyMap.value.size + assertThat(propertyArray).hasSize(6); + + // assert that we have property followed by its value. 
+ assertPropertyArray(propertyArray, "propertyOne", "val1"); + assertPropertyArray(propertyArray, "propertyTwo", "val2"); + assertPropertyArray(propertyArray, "propertyThree", "val3"); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/HttpHeaderUtilsTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/HttpHeaderUtilsTest.java new file mode 100644 index 00000000..7b712234 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/HttpHeaderUtilsTest.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.flink.connector.http.utils; + +import org.apache.flink.configuration.Configuration; +import org.apache.flink.connector.http.preprocessor.HeaderPreprocessor; + +import org.junit.jupiter.api.Test; + +import java.time.Duration; + +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION; +import static org.apache.flink.connector.http.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST; +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; + +/** tests for {@link HttpHeaderUtils}. */ +public class HttpHeaderUtilsTest { + @Test + void shouldCreateOIDCHeaderPreprocessorTest() { + Configuration configuration = new Configuration(); + HeaderPreprocessor headerPreprocessor = + HttpHeaderUtils.createOIDCHeaderPreprocessor(configuration); + assertThat(headerPreprocessor).isNull(); + configuration.setString(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL.key(), "http://aaa"); + configuration.setString(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST.key(), "ccc"); + configuration.set(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION, Duration.ofSeconds(1)); + headerPreprocessor = HttpHeaderUtils.createOIDCHeaderPreprocessor(configuration); + assertThat(headerPreprocessor).isNotNull(); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/JavaNetHttpClientFactoryTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/JavaNetHttpClientFactoryTest.java new file mode 100644 index 00000000..01cd3c19 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/JavaNetHttpClientFactoryTest.java @@ -0,0 +1,141 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils; + +import org.apache.flink.configuration.Configuration; +import org.apache.flink.connector.http.WireMockServerPortAllocator; +import org.apache.flink.connector.http.table.lookup.HttpLookupConfig; +import org.apache.flink.connector.http.table.lookup.Slf4JHttpLookupPostRequestCallback; +import org.apache.flink.util.concurrent.ExecutorThreadFactory; + +import org.junit.jupiter.api.Test; + +import java.net.Authenticator; +import java.net.InetAddress; +import java.net.PasswordAuthentication; +import java.net.UnknownHostException; +import java.net.http.HttpClient; +import java.util.Arrays; +import java.util.Properties; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_PROXY_HOST; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_PROXY_PASSWORD; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_PROXY_PORT; +import static org.apache.flink.connector.http.config.HttpConnectorConfigConstants.SOURCE_PROXY_USERNAME; +import static org.assertj.core.api.Assertions.assertThat; + +class JavaNetHttpClientFactoryTest { + public static 
final int SERVER_PORT = WireMockServerPortAllocator.getServerPort(); + public static final int PROXY_SERVER_PORT = WireMockServerPortAllocator.PORT_BASE - 1; + + @Test + public void shouldGetClientWithAuthenticator() throws UnknownHostException { + Properties properties = new Properties(); + Configuration configuration = new Configuration(); + configuration.setString(SOURCE_PROXY_HOST, "google"); + configuration.setString(SOURCE_PROXY_PORT, String.valueOf(PROXY_SERVER_PORT)); + configuration.setString(SOURCE_PROXY_USERNAME, "username"); + configuration.setString(SOURCE_PROXY_PASSWORD, "password"); + + HttpLookupConfig lookupConfig = + HttpLookupConfig.builder() + .url("https://google.com") + .readableConfig(configuration) + .properties(properties) + .httpPostRequestCallback(new Slf4JHttpLookupPostRequestCallback()) + .build(); + + HttpClient client = JavaNetHttpClientFactory.createClient(lookupConfig); + + assertThat(client.authenticator().isPresent()).isTrue(); + assertThat(client.proxy().isPresent()).isTrue(); + + PasswordAuthentication auth = + client.authenticator() + .get() + .requestPasswordAuthenticationInstance( + "google", // host + InetAddress.getByName("127.0.0.1"), // address + SERVER_PORT, // port + "http", // protocol + "Please authenticate", // prompt + "basic", // scheme + null, // URL + Authenticator.RequestorType.PROXY // Requestor type + ); + + assertThat(auth.getUserName().equals("username")).isTrue(); + assertThat(Arrays.equals(auth.getPassword(), "password".toCharArray())).isTrue(); + } + + @Test + public void shouldGetClientWithoutAuthenticator() throws UnknownHostException { + Properties properties = new Properties(); + Configuration configuration = new Configuration(); + configuration.setString(SOURCE_PROXY_HOST, "google"); + configuration.setString(SOURCE_PROXY_PORT, String.valueOf(PROXY_SERVER_PORT)); + + HttpLookupConfig lookupConfig = + HttpLookupConfig.builder() + .url("https://google.com") + .readableConfig(configuration) + 
.properties(properties) + .httpPostRequestCallback(new Slf4JHttpLookupPostRequestCallback()) + .build(); + + HttpClient client = JavaNetHttpClientFactory.createClient(lookupConfig); + + assertThat(client.authenticator().isEmpty()).isTrue(); + assertThat(client.proxy().isPresent()).isTrue(); + } + + @Test + public void shouldGetClientWithoutProxy() { + Properties properties = new Properties(); + Configuration configuration = new Configuration(); + + HttpLookupConfig lookupConfig = + HttpLookupConfig.builder() + .url("https://google.com") + .readableConfig(configuration) + .properties(properties) + .httpPostRequestCallback(new Slf4JHttpLookupPostRequestCallback()) + .build(); + + HttpClient client = JavaNetHttpClientFactory.createClient(lookupConfig); + assertThat(client.authenticator().isEmpty()).isTrue(); + assertThat(client.proxy().isEmpty()).isTrue(); + } + + @Test + public void shouldGetClientWithExecutor() { + Properties properties = new Properties(); + ExecutorService httpClientExecutor = + Executors.newFixedThreadPool( + 1, + new ExecutorThreadFactory( + "http-sink-client-batch-request-worker", + ThreadUtils.LOGGING_EXCEPTION_HANDLER)); + + HttpClient client = JavaNetHttpClientFactory.createClient(properties, httpClientExecutor); + assertThat(client.followRedirects().equals(HttpClient.Redirect.NORMAL)).isTrue(); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/CharArrayBufferTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/CharArrayBufferTest.java new file mode 100644 index 00000000..db65cece --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/CharArrayBufferTest.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils.uri; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.stream.Stream; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertThrows; + +class CharArrayBufferTest { + + @Test + public void testInvalidCapacity() { + assertThrows(IllegalArgumentException.class, () -> new CharArrayBuffer(0)); + } + + @Test + public void testExpandCapacity() { + String testText = "Hello My Friend"; + + CharArrayBuffer charArrayBuffer = new CharArrayBuffer(1); + charArrayBuffer.append(testText); + + assertThat(charArrayBuffer.length()).isEqualTo(testText.length()); + } + + @Test + public void testSubSequence() { + String testText = "Hello My Friend"; + + CharArrayBuffer charArrayBuffer = new CharArrayBuffer(1); + charArrayBuffer.append(testText); + + assertAll( + () -> { + Assertions.assertThrows( + IndexOutOfBoundsException.class, + () -> charArrayBuffer.subSequence(-1, 1)); + Assertions.assertThrows( + IndexOutOfBoundsException.class, + () -> charArrayBuffer.subSequence(1, -1)); + Assertions.assertThrows( + 
IndexOutOfBoundsException.class, + () -> charArrayBuffer.subSequence(2, 1)); + Assertions.assertThrows( + IndexOutOfBoundsException.class, + () -> charArrayBuffer.subSequence(2, testText.length() + 5)); + assertThat(charArrayBuffer.subSequence(2, 10).toString()) + .isEqualTo("llo My Fri"); + }); + } + + private static Stream appendArgs() { + return Stream.of( + Arguments.of("", "baseString"), + Arguments.of(" ", "baseString "), + Arguments.of(null, "baseStringnull")); + } + + @ParameterizedTest + @MethodSource("appendArgs") + public void testAppend(String stringToAppend, String expected) { + CharArrayBuffer charArrayBuffer = new CharArrayBuffer(1); + charArrayBuffer.append("baseString"); + + assertAll( + () -> { + assertThat(charArrayBuffer.toString()).isEqualTo("baseString"); + charArrayBuffer.append(stringToAppend); + assertThat(charArrayBuffer.toString()).isEqualTo(expected); + }); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/ParserCursorTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/ParserCursorTest.java new file mode 100644 index 00000000..f83135b4 --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/ParserCursorTest.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils.uri; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertAll; +import static org.junit.jupiter.api.Assertions.assertThrows; + +/** Test for {@link ParserCursor}. */ +class ParserCursorTest { + + @Test + public void testBoundsValidation() { + + assertAll( + () -> { + assertThrows(IndexOutOfBoundsException.class, () -> new ParserCursor(-1, 1)); + assertThrows(IndexOutOfBoundsException.class, () -> new ParserCursor(1, -1)); + }); + } + + @Test + public void testUpdatePostValidation() { + ParserCursor cursor = new ParserCursor(1, 2); + + assertAll( + () -> { + assertThrows(IndexOutOfBoundsException.class, () -> cursor.updatePos(0)); + assertThrows(IndexOutOfBoundsException.class, () -> cursor.updatePos(3)); + }); + } +} diff --git a/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/TokenParserTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/TokenParserTest.java new file mode 100644 index 00000000..cc096e8c --- /dev/null +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/TokenParserTest.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils.uri; + +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; + +import static org.assertj.core.api.Assertions.assertThat; + +class TokenParserTest { + + @ParameterizedTest + @CsvSource({"a,a", "aa,aa", "a a,a a", "a ,a", " a,a"}) + public void testParse(String toParse, String expected) { + + CharArrayBuffer charBuff = new CharArrayBuffer(toParse.length()); + charBuff.append(toParse); + + TokenParser tokenParser = new TokenParser(); + String actual = + tokenParser.parseToken(charBuff, new ParserCursor(0, toParse.length()), null); + + assertThat(actual).isEqualTo(expected); + } +} diff --git a/src/test/java/com/getindata/connectors/http/internal/utils/uri/URIBuilderTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/URIBuilderTest.java similarity index 54% rename from src/test/java/com/getindata/connectors/http/internal/utils/uri/URIBuilderTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/URIBuilderTest.java index 5e21689e..43da9688 100644 --- a/src/test/java/com/getindata/connectors/http/internal/utils/uri/URIBuilderTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/URIBuilderTest.java @@ -1,37 +1,62 @@ -package com.getindata.connectors.http.internal.utils.uri; 
+/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils.uri; + +import org.apache.flink.connector.http.LookupArg; +import org.apache.flink.connector.http.WireMockServerPortAllocator; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; import java.net.URI; import java.net.URISyntaxException; import java.util.List; import java.util.stream.Stream; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; import static org.assertj.core.api.Assertions.assertThat; -import com.getindata.connectors.http.LookupArg; - +/** Tests {@link URIBuilder}. 
*/ class URIBuilderTest { - private static final String HOST_URL = "http://localhost:8080/client"; + private static final String PORT = String.valueOf(WireMockServerPortAllocator.PORT_BASE); + + private static final String HOST_URL = "http://localhost:" + PORT; - private static final String HOST_URL_WITH_PARAMS = "http://localhost:8080/client?val=1"; + private static final String HOST_URL_CLIENT = HOST_URL + "/client"; - private static final String HOST_URL_WITH_PARAMS_MARK = "http://localhost:8080/client?"; + private static final String HOST_URL_WITH_PARAMS_MARK = HOST_URL_CLIENT + "?"; - private static final String HOST_URL_WITH_END = "http://localhost:8080/client/"; + private static final String HOST_URL_WITH_PARAMS = HOST_URL_WITH_PARAMS_MARK + "val=1"; + + private static final String HOST_URL_WITH_END = HOST_URL_CLIENT + "/"; private static final String HOST_URL_NO_PORT = "http://localhost/client"; private static final String HOST_URL_NO_SCHEMA = "localhost/client"; - private static final String HOST_URL_NO_PATH = "http://localhost:8080"; + private static final String HOST_URL_NO_PATH = HOST_URL; - private static final String HOST_URL_USER_INFO = "http://userMe@localhost:8080/client"; + private static final String HOST_URL_USER_INFO = "http://userMe@localhost:" + PORT + "/client"; - private static final String IPv4_URL = "http://127.0.0.1:8080/client"; + private static final String IPv4_URL = "http://127.0.0.1:" + PORT + "/client"; private static final String IPv4_URL_NO_PORT = "http://127.0.0.1/client"; @@ -45,36 +70,37 @@ class URIBuilderTest { public static Stream uriArgs() { return Stream.of( - Arguments.of( - List.of(new LookupArg("id", "1"), new LookupArg("name", "2")), - "?id=1&name=2"), - Arguments.of(List.of(new LookupArg("id", ""), new LookupArg("name", "")), - "?id=&name="), - Arguments.of( - List.of(new LookupArg("id", " "), new LookupArg("name", " ")), - "?id=+&name=+"), - Arguments.of( - List.of(new LookupArg("id", null), new LookupArg("name", 
null)), - "?id&name"), - Arguments.of( - List.of(new LookupArg("id", "1"), new LookupArg("name", "what+%25+is+in+HTTP+URL")), - "?id=1&name=what%2B%2525%2Bis%2Bin%2BHTTP%2BURL") - ); + Arguments.of( + List.of(new LookupArg("id", "1"), new LookupArg("name", "2")), + "?id=1&name=2"), + Arguments.of( + List.of(new LookupArg("id", ""), new LookupArg("name", "")), "?id=&name="), + Arguments.of( + List.of(new LookupArg("id", " "), new LookupArg("name", " ")), + "?id=+&name=+"), + Arguments.of( + List.of(new LookupArg("id", null), new LookupArg("name", null)), + "?id&name"), + Arguments.of( + List.of( + new LookupArg("id", "1"), + new LookupArg("name", "what+%25+is+in+HTTP+URL")), + "?id=1&name=what%2B%2525%2Bis%2Bin%2BHTTP%2BURL")); } @ParameterizedTest @MethodSource("uriArgs") public void shouldBuildUriForHost(List arguments, String expectedUriArgs) - throws URISyntaxException { - uriBuilder = new URIBuilder(HOST_URL); + throws URISyntaxException { + uriBuilder = new URIBuilder(HOST_URL_CLIENT); - testUriBuilder(arguments, uriBuilder, HOST_URL, expectedUriArgs); + testUriBuilder(arguments, uriBuilder, HOST_URL_CLIENT, expectedUriArgs); } @ParameterizedTest @MethodSource("uriArgs") public void shouldBuildUriForHostNoPort(List arguments, String expectedUriArgs) - throws URISyntaxException { + throws URISyntaxException { uriBuilder = new URIBuilder(HOST_URL_NO_PORT); testUriBuilder(arguments, uriBuilder, HOST_URL_NO_PORT, expectedUriArgs); @@ -83,7 +109,7 @@ public void shouldBuildUriForHostNoPort(List arguments, String expect @ParameterizedTest @MethodSource("uriArgs") public void shouldBuildUriForIPv4(List arguments, String expectedUriArgs) - throws URISyntaxException { + throws URISyntaxException { uriBuilder = new URIBuilder(IPv4_URL); testUriBuilder(arguments, uriBuilder, IPv4_URL, expectedUriArgs); @@ -92,7 +118,7 @@ public void shouldBuildUriForIPv4(List arguments, String expectedUriA @ParameterizedTest @MethodSource("uriArgs") public void 
shouldBuildUriForIPv4NoPort(List arguments, String expectedUriArgs) - throws URISyntaxException { + throws URISyntaxException { uriBuilder = new URIBuilder(IPv4_URL_NO_PORT); testUriBuilder(arguments, uriBuilder, IPv4_URL_NO_PORT, expectedUriArgs); @@ -101,7 +127,7 @@ public void shouldBuildUriForIPv4NoPort(List arguments, String expect @ParameterizedTest @MethodSource("uriArgs") public void shouldBuildUriForHostNoSchema(List arguments, String expectedUriArgs) - throws URISyntaxException { + throws URISyntaxException { uriBuilder = new URIBuilder(HOST_URL_NO_SCHEMA); testUriBuilder(arguments, uriBuilder, HOST_URL_NO_SCHEMA, expectedUriArgs); @@ -110,7 +136,7 @@ public void shouldBuildUriForHostNoSchema(List arguments, String expe @ParameterizedTest @MethodSource("uriArgs") public void shouldBuildUriForIPv4NoSchema(List arguments, String expectedUriArgs) - throws URISyntaxException { + throws URISyntaxException { uriBuilder = new URIBuilder(IPv4_URL_NO_SCHEMA); testUriBuilder(arguments, uriBuilder, IPv4_URL_NO_SCHEMA, expectedUriArgs); @@ -119,7 +145,7 @@ public void shouldBuildUriForIPv4NoSchema(List arguments, String expe @ParameterizedTest @MethodSource("uriArgs") public void shouldBuildUriForHostUserInfo(List arguments, String expectedUriArgs) - throws URISyntaxException { + throws URISyntaxException { uriBuilder = new URIBuilder(HOST_URL_USER_INFO); testUriBuilder(arguments, uriBuilder, HOST_URL_USER_INFO, expectedUriArgs); @@ -128,7 +154,7 @@ public void shouldBuildUriForHostUserInfo(List arguments, String expe @ParameterizedTest @MethodSource("uriArgs") public void shouldBuildUriForIPv4UserInfo(List arguments, String expectedUriArgs) - throws URISyntaxException { + throws URISyntaxException { uriBuilder = new URIBuilder(IPv4_URL_USER_INFO); testUriBuilder(arguments, uriBuilder, IPv4_URL_USER_INFO, expectedUriArgs); @@ -137,7 +163,7 @@ public void shouldBuildUriForIPv4UserInfo(List arguments, String expe @ParameterizedTest @MethodSource("uriArgs") public void 
shouldBuildUriForHostNoPath(List arguments, String expectedUriArgs) - throws URISyntaxException { + throws URISyntaxException { uriBuilder = new URIBuilder(HOST_URL_NO_PATH); testUriBuilder(arguments, uriBuilder, HOST_URL_NO_PATH, expectedUriArgs); @@ -146,7 +172,7 @@ public void shouldBuildUriForHostNoPath(List arguments, String expect @ParameterizedTest @MethodSource("uriArgs") public void shouldBuildUriForIPv4NoPath(List arguments, String expectedUriArgs) - throws URISyntaxException { + throws URISyntaxException { uriBuilder = new URIBuilder(IPv4_URL_NO_PATH); testUriBuilder(arguments, uriBuilder, IPv4_URL_NO_PATH, expectedUriArgs); @@ -155,35 +181,33 @@ public void shouldBuildUriForIPv4NoPath(List arguments, String expect @ParameterizedTest @MethodSource("uriArgs") public void shouldBuildUriForHostWithParams(List arguments, String expectedUriArgs) - throws URISyntaxException { + throws URISyntaxException { uriBuilder = new URIBuilder(HOST_URL_WITH_PARAMS); testUriBuilder( - arguments, - uriBuilder, - HOST_URL_WITH_PARAMS, - expectedUriArgs.replaceFirst("\\?", "&") - ); + arguments, + uriBuilder, + HOST_URL_WITH_PARAMS, + expectedUriArgs.replaceFirst("\\?", "&")); } @ParameterizedTest @MethodSource("uriArgs") public void shouldBuildUriForHostParamsMark(List arguments, String expectedUriArgs) - throws URISyntaxException { + throws URISyntaxException { uriBuilder = new URIBuilder(HOST_URL_WITH_PARAMS_MARK); testUriBuilder( - arguments, - uriBuilder, - HOST_URL_WITH_PARAMS_MARK, - expectedUriArgs.replaceFirst("\\?", "") - ); + arguments, + uriBuilder, + HOST_URL_WITH_PARAMS_MARK, + expectedUriArgs.replaceFirst("\\?", "")); } @ParameterizedTest @MethodSource("uriArgs") public void shouldBuildUriForHostUrlEnd(List arguments, String expectedUriArgs) - throws URISyntaxException { + throws URISyntaxException { uriBuilder = new URIBuilder(HOST_URL_WITH_END); testUriBuilder(arguments, uriBuilder, HOST_URL_WITH_END, expectedUriArgs); @@ -192,11 +216,11 @@ public void 
shouldBuildUriForHostUrlEnd(List arguments, String expect @Test public void testHierarchicalUri() throws Exception { final URI uri = - new URI("http", "stuff", "localhost", 80, "/some stuff", "param=stuff", "fragment"); + new URI("http", "stuff", "localhost", 80, "/some stuff", "param=stuff", "fragment"); final URIBuilder uribuilder = new URIBuilder(uri); final URI result = uribuilder.build(); - assertThat(result).isEqualTo( - new URI("http://stuff@localhost:80/some%20stuff?param=stuff#fragment")); + assertThat(result) + .isEqualTo(new URI("http://stuff@localhost:80/some%20stuff?param=stuff#fragment")); } @Test @@ -210,34 +234,44 @@ public void testOpaqueUri() throws Exception { @Test public void testParameterWithSpecialChar() throws Exception { final URI uri = new URI("http", null, "localhost", 80, "/", "param=stuff", null); - final URIBuilder uribuilder = new URIBuilder(uri).addParameter("param", "1 + 1 = 2") - .addParameter("param", "blah&blah"); + final URIBuilder uribuilder = + new URIBuilder(uri) + .addParameter("param", "1 + 1 = 2") + .addParameter("param", "blah&blah"); final URI result = uribuilder.build(); assertThat(result) - .isEqualTo(new URI("http://localhost:80/?param=stuff¶m=1+%2B+1+%3D+2&" + - "param=blah%26blah") - ); + .isEqualTo( + new URI( + "http://localhost:80/?param=stuff¶m=1+%2B+1+%3D+2&" + + "param=blah%26blah")); } @Test public void testAddParameter() throws Exception { final URI uri = new URI("http", null, "localhost", 80, "/", "param=stuff&blah&blah", null); - final URIBuilder uribuilder = new URIBuilder(uri).addParameter("param", "some other stuff") - .addParameter("blah", "blah"); + final URIBuilder uribuilder = + new URIBuilder(uri) + .addParameter("param", "some other stuff") + .addParameter("blah", "blah"); final URI result = uribuilder.build(); assertThat(result) - .isEqualTo(new URI("http://localhost:80/?param=stuff&blah&blah&" + - "param=some+other+stuff&blah=blah") - ); + .isEqualTo( + new URI( + 
"http://localhost:80/?param=stuff&blah&blah&" + + "param=some+other+stuff&blah=blah")); } @Test public void testQueryEncoding() throws Exception { - final URI uri1 = new URI("https://somehost.com/stuff?client_id=1234567890" + - "&redirect_uri=https%3A%2F%2Fsomehost.com%2Fblah+blah%2F"); - final URI uri2 = new URIBuilder("https://somehost.com/stuff") - .addParameter("client_id", "1234567890") - .addParameter("redirect_uri", "https://somehost.com/blah blah/").build(); + final URI uri1 = + new URI( + "https://somehost.com/stuff?client_id=1234567890" + + "&redirect_uri=https%3A%2F%2Fsomehost.com%2Fblah+blah%2F"); + final URI uri2 = + new URIBuilder("https://somehost.com/stuff") + .addParameter("client_id", "1234567890") + .addParameter("redirect_uri", "https://somehost.com/blah blah/") + .build(); assertThat(uri2).isEqualTo(uri1); } @@ -251,15 +285,14 @@ private void testUriBuilder( List arguments, URIBuilder uriBuilder, String baseUrl, - String expectedUriArgs) throws URISyntaxException { + String expectedUriArgs) + throws URISyntaxException { for (LookupArg arg : arguments) { uriBuilder.addParameter(arg.getArgName(), arg.getArgValue()); } URI uri = uriBuilder.build(); - System.out.println(uri); assertThat(uri.toString()).isEqualTo(baseUrl + expectedUriArgs); } } - diff --git a/src/test/java/com/getindata/connectors/http/internal/utils/uri/URLEncodedUtilsTest.java b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/URLEncodedUtilsTest.java similarity index 53% rename from src/test/java/com/getindata/connectors/http/internal/utils/uri/URLEncodedUtilsTest.java rename to flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/URLEncodedUtilsTest.java index d96237f2..a76dee6f 100644 --- a/src/test/java/com/getindata/connectors/http/internal/utils/uri/URLEncodedUtilsTest.java +++ b/flink-connector-http/src/test/java/org/apache/flink/connector/http/utils/uri/URLEncodedUtilsTest.java @@ -1,4 +1,24 @@ -package 
com.getindata.connectors.http.internal.utils.uri; +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.flink.connector.http.utils.uri; + +import org.junit.jupiter.api.Test; import java.nio.charset.StandardCharsets; import java.util.ArrayList; @@ -6,18 +26,17 @@ import java.util.Collections; import java.util.List; -import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; +/** test for {@link URLEncodedUtils}. 
*/ class URLEncodedUtilsTest { @Test public void testFormatSegments() { String segments = - URLEncodedUtils.formatSegments( - List.of("segmentOne", "segmentTwo", "segmentThree"), - StandardCharsets.UTF_8 - ); + URLEncodedUtils.formatSegments( + List.of("segmentOne", "segmentTwo", "segmentThree"), + StandardCharsets.UTF_8); assertThat(segments).isEqualTo("/segmentOne/segmentTwo/segmentThree"); } @@ -25,7 +44,7 @@ public void testFormatSegments() { @Test public void testFormatEmptySegments() { String segments = - URLEncodedUtils.formatSegments(Collections.emptyList(), StandardCharsets.UTF_8); + URLEncodedUtils.formatSegments(Collections.emptyList(), StandardCharsets.UTF_8); assertThat(segments).isEqualTo(""); } @@ -35,24 +54,21 @@ public void testFormatNullSegments() { List segmentList = new ArrayList<>(); segmentList.add(null); - String segments = - URLEncodedUtils.formatSegments(segmentList, StandardCharsets.UTF_8); + String segments = URLEncodedUtils.formatSegments(segmentList, StandardCharsets.UTF_8); assertThat(segments).isEqualTo("/null"); } @Test public void testNullParse() { - List components = - URLEncodedUtils.parse(null, StandardCharsets.UTF_8); + List components = URLEncodedUtils.parse(null, StandardCharsets.UTF_8); assertThat(components).isEmpty(); } @Test public void testBlankParse() { - List components = - URLEncodedUtils.parse(" ", StandardCharsets.UTF_8); + List components = URLEncodedUtils.parse(" ", StandardCharsets.UTF_8); assertThat(components).isEmpty(); } @@ -60,7 +76,7 @@ public void testBlankParse() { @Test public void testParse() { List components = - URLEncodedUtils.parse("Hello Me=val1", StandardCharsets.UTF_8); + URLEncodedUtils.parse("Hello Me=val1", StandardCharsets.UTF_8); assertThat(components).hasSize(1); assertThat(components.get(0)).isEqualTo(new NameValuePair("Hello Me", "val1")); @@ -92,20 +108,17 @@ public void testSplitEmptySegments() { @Test public void testParseSegments() { - List segments = URLEncodedUtils.parsePathSegments( 
- "/hello//%world", - StandardCharsets.UTF_8 - ); + List segments = + URLEncodedUtils.parsePathSegments("/hello//%world", StandardCharsets.UTF_8); assertThat(segments).isEqualTo(List.of("hello", "", "%world")); } @Test public void testParseSegmentsComplex() { - List segments = URLEncodedUtils.parsePathSegments( - "/hello//world?q=what+%25+is+in+HTTP", - StandardCharsets.UTF_8 - ); + List segments = + URLEncodedUtils.parsePathSegments( + "/hello//world?q=what+%25+is+in+HTTP", StandardCharsets.UTF_8); assertThat(segments).isEqualTo(List.of("hello", "", "world?q=what+%+is+in+HTTP")); } diff --git a/flink-connector-http/src/test/resources/META-INF/services/org.apache.flink.table.factories.Factory b/flink-connector-http/src/test/resources/META-INF/services/org.apache.flink.table.factories.Factory new file mode 100644 index 00000000..367a6f23 --- /dev/null +++ b/flink-connector-http/src/test/resources/META-INF/services/org.apache.flink.table.factories.Factory @@ -0,0 +1,24 @@ +# +# /* +# * Licensed to the Apache Software Foundation (ASF) under one +# * or more contributor license agreements. See the NOTICE file +# * distributed with this work for additional information +# * regarding copyright ownership. The ASF licenses this file +# * to you under the Apache License, Version 2.0 (the +# * "License"); you may not use this file except in compliance +# * with the License. You may obtain a copy of the License at +# * +# * http://www.apache.org/licenses/LICENSE-2.0 +# * +# * Unless required by applicable law or agreed to in writing, software +# * distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+# */ +# + +org.apache.flink.connector.http.TestPostRequestCallbackFactory +org.apache.flink.connector.http.TestLookupPostRequestCallbackFactory +org.apache.flink.connector.http.table.lookup.querycreators.CustomFormatFactory +org.apache.flink.connector.http.table.lookup.querycreators.CustomJsonFormatFactory \ No newline at end of file diff --git a/src/test/resources/auth/AuthResult.json b/flink-connector-http/src/test/resources/auth/AuthResult.json similarity index 100% rename from src/test/resources/auth/AuthResult.json rename to flink-connector-http/src/test/resources/auth/AuthResult.json diff --git a/src/test/resources/http-array-result-with-nulls/HttpResult.json b/flink-connector-http/src/test/resources/http-array-result-with-nulls/HttpResult.json similarity index 100% rename from src/test/resources/http-array-result-with-nulls/HttpResult.json rename to flink-connector-http/src/test/resources/http-array-result-with-nulls/HttpResult.json diff --git a/src/test/resources/http-array-result/HttpResult.json b/flink-connector-http/src/test/resources/http-array-result/HttpResult.json similarity index 100% rename from src/test/resources/http-array-result/HttpResult.json rename to flink-connector-http/src/test/resources/http-array-result/HttpResult.json diff --git a/src/test/resources/http/HttpResult.json b/flink-connector-http/src/test/resources/http/HttpResult.json similarity index 100% rename from src/test/resources/http/HttpResult.json rename to flink-connector-http/src/test/resources/http/HttpResult.json diff --git a/src/test/resources/json/sink/allInOneBatch.txt b/flink-connector-http/src/test/resources/json/sink/allInOneBatch.txt similarity index 100% rename from src/test/resources/json/sink/allInOneBatch.txt rename to flink-connector-http/src/test/resources/json/sink/allInOneBatch.txt diff --git a/src/test/resources/json/sink/fourSingleEventBatches.txt b/flink-connector-http/src/test/resources/json/sink/fourSingleEventBatches.txt similarity index 100% rename from 
src/test/resources/json/sink/fourSingleEventBatches.txt rename to flink-connector-http/src/test/resources/json/sink/fourSingleEventBatches.txt diff --git a/src/test/resources/json/sink/threeBatches.txt b/flink-connector-http/src/test/resources/json/sink/threeBatches.txt similarity index 100% rename from src/test/resources/json/sink/threeBatches.txt rename to flink-connector-http/src/test/resources/json/sink/threeBatches.txt diff --git a/src/test/resources/json/sink/twoBatches.txt b/flink-connector-http/src/test/resources/json/sink/twoBatches.txt similarity index 100% rename from src/test/resources/json/sink/twoBatches.txt rename to flink-connector-http/src/test/resources/json/sink/twoBatches.txt diff --git a/src/test/resources/security/certs/ca.crt b/flink-connector-http/src/test/resources/security/certs/ca.crt similarity index 100% rename from src/test/resources/security/certs/ca.crt rename to flink-connector-http/src/test/resources/security/certs/ca.crt diff --git a/src/test/resources/security/certs/ca.key b/flink-connector-http/src/test/resources/security/certs/ca.key similarity index 100% rename from src/test/resources/security/certs/ca.key rename to flink-connector-http/src/test/resources/security/certs/ca.key diff --git a/src/test/resources/security/certs/ca.pass.key b/flink-connector-http/src/test/resources/security/certs/ca.pass.key similarity index 100% rename from src/test/resources/security/certs/ca.pass.key rename to flink-connector-http/src/test/resources/security/certs/ca.pass.key diff --git a/src/test/resources/security/certs/ca.srl b/flink-connector-http/src/test/resources/security/certs/ca.srl similarity index 100% rename from src/test/resources/security/certs/ca.srl rename to flink-connector-http/src/test/resources/security/certs/ca.srl diff --git a/src/test/resources/security/certs/ca_server_bundle.cert.pem b/flink-connector-http/src/test/resources/security/certs/ca_server_bundle.cert.pem similarity index 100% rename from 
src/test/resources/security/certs/ca_server_bundle.cert.pem rename to flink-connector-http/src/test/resources/security/certs/ca_server_bundle.cert.pem diff --git a/src/test/resources/security/certs/client.crt b/flink-connector-http/src/test/resources/security/certs/client.crt similarity index 100% rename from src/test/resources/security/certs/client.crt rename to flink-connector-http/src/test/resources/security/certs/client.crt diff --git a/src/test/resources/security/certs/client.csr b/flink-connector-http/src/test/resources/security/certs/client.csr similarity index 100% rename from src/test/resources/security/certs/client.csr rename to flink-connector-http/src/test/resources/security/certs/client.csr diff --git a/src/test/resources/security/certs/client.key b/flink-connector-http/src/test/resources/security/certs/client.key similarity index 100% rename from src/test/resources/security/certs/client.key rename to flink-connector-http/src/test/resources/security/certs/client.key diff --git a/src/test/resources/security/certs/client.pass.key b/flink-connector-http/src/test/resources/security/certs/client.pass.key similarity index 100% rename from src/test/resources/security/certs/client.pass.key rename to flink-connector-http/src/test/resources/security/certs/client.pass.key diff --git a/src/test/resources/security/certs/clientPrivateKey.der b/flink-connector-http/src/test/resources/security/certs/clientPrivateKey.der similarity index 100% rename from src/test/resources/security/certs/clientPrivateKey.der rename to flink-connector-http/src/test/resources/security/certs/clientPrivateKey.der diff --git a/src/test/resources/security/certs/clientPrivateKey.pem b/flink-connector-http/src/test/resources/security/certs/clientPrivateKey.pem similarity index 100% rename from src/test/resources/security/certs/clientPrivateKey.pem rename to flink-connector-http/src/test/resources/security/certs/clientPrivateKey.pem diff --git 
a/src/test/resources/security/certs/client_keyStore.p12 b/flink-connector-http/src/test/resources/security/certs/client_keyStore.p12 similarity index 100% rename from src/test/resources/security/certs/client_keyStore.p12 rename to flink-connector-http/src/test/resources/security/certs/client_keyStore.p12 diff --git a/src/test/resources/security/certs/server.crt b/flink-connector-http/src/test/resources/security/certs/server.crt similarity index 100% rename from src/test/resources/security/certs/server.crt rename to flink-connector-http/src/test/resources/security/certs/server.crt diff --git a/src/test/resources/security/certs/server.csr b/flink-connector-http/src/test/resources/security/certs/server.csr similarity index 100% rename from src/test/resources/security/certs/server.csr rename to flink-connector-http/src/test/resources/security/certs/server.csr diff --git a/src/test/resources/security/certs/server.key b/flink-connector-http/src/test/resources/security/certs/server.key similarity index 100% rename from src/test/resources/security/certs/server.key rename to flink-connector-http/src/test/resources/security/certs/server.key diff --git a/src/test/resources/security/certs/server.pass.key b/flink-connector-http/src/test/resources/security/certs/server.pass.key similarity index 100% rename from src/test/resources/security/certs/server.pass.key rename to flink-connector-http/src/test/resources/security/certs/server.pass.key diff --git a/src/test/resources/security/certs/serverKeyStore.jks b/flink-connector-http/src/test/resources/security/certs/serverKeyStore.jks similarity index 100% rename from src/test/resources/security/certs/serverKeyStore.jks rename to flink-connector-http/src/test/resources/security/certs/serverKeyStore.jks diff --git a/src/test/resources/security/certs/serverTrustStore.jks b/flink-connector-http/src/test/resources/security/certs/serverTrustStore.jks similarity index 100% rename from src/test/resources/security/certs/serverTrustStore.jks 
rename to flink-connector-http/src/test/resources/security/certs/serverTrustStore.jks diff --git a/flink-sql-connector-http/pom.xml b/flink-sql-connector-http/pom.xml new file mode 100644 index 00000000..3992cb4b --- /dev/null +++ b/flink-sql-connector-http/pom.xml @@ -0,0 +1,147 @@ + + + + + 4.0.0 + + org.apache.flink + flink-connector-http-parent + 1.0-SNAPSHOT + + + flink-sql-connector-http + Flink : Connectors : SQL : http + 1.0-SNAPSHOT + + jar + + + true + + + + + org.apache.flink + flink-connector-http + 1.0-SNAPSHOT + + + org.apache.flink + flink-test-utils + ${flink.version} + test + + + org.xerial.snappy + snappy-java + + + org.apache.commons + commons-lang3 + + + + + org.junit.jupiter + junit-jupiter-api + ${junit5.version} + test + + + org.apache.flink + flink-table-common + test + + + + + + + org.apache.maven.plugins + maven-shade-plugin + + + shade-flink + package + + shade + + + + + org.apache.flink:flink-connector-http + io.vavr:* + io.github.resilience4j:* + + + org.apache.flink:force-shading + io.netty:netty + + + + + + + org.apache.flink:flink-connector-http + io.vavr:* + io.github.resilience4j:* + + + + + + + + + + io.vavr + org.apache.flink.http.shaded.io.vavr + + + io.github.resilience4j + org.apache.flink.http.shaded.io.github.resilience4j + + + + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + analyze-deps + + analyze + + verify + + org.apache.flink:flink-connector-http + + + + + + + + \ No newline at end of file diff --git a/flink-sql-connector-http/src/main/resources/META-INF/NOTICE b/flink-sql-connector-http/src/main/resources/META-INF/NOTICE new file mode 100644 index 00000000..8edd69a6 --- /dev/null +++ b/flink-sql-connector-http/src/main/resources/META-INF/NOTICE @@ -0,0 +1,12 @@ +flink-sql-connector-http +Copyright 2025 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
+ +This project bundles the following dependencies under the Apache Software License 2.0. (http://www.apache.org/licenses/LICENSE-2.0.txt) + +- io.vavr:vavr-match:0.10.2 +- io.vavr:vavr:0.10.2 +- io.github.resilience4j:resilience4j-retry:1.7.1 +- io.github.resilience4j:resilience4j-core:1.7.1 diff --git a/flink-sql-connector-http/src/test/java/org/apache/flink/connector/http/PackagingITCase.java b/flink-sql-connector-http/src/test/java/org/apache/flink/connector/http/PackagingITCase.java new file mode 100644 index 00000000..24fcd621 --- /dev/null +++ b/flink-sql-connector-http/src/test/java/org/apache/flink/connector/http/PackagingITCase.java @@ -0,0 +1,42 @@ +/* + * + * * Licensed to the Apache Software Foundation (ASF) under one + * * or more contributor license agreements. See the NOTICE file + * * distributed with this work for additional information + * * regarding copyright ownership. The ASF licenses this file + * * to you under the Apache License, Version 2.0 (the + * * "License"); you may not use this file except in compliance + * * with the License. You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. 
+ * + */ + +package org.apache.flink.connector.http; + +import org.apache.flink.packaging.PackagingTestUtils; +import org.apache.flink.table.factories.Factory; +import org.apache.flink.test.resources.ResourceTestUtils; + +import org.junit.jupiter.api.Test; + +import java.nio.file.Path; +import java.util.Arrays; + +class PackagingITCase { + + @Test + void testPackaging() throws Exception { + final Path jar = ResourceTestUtils.getResource(".*/flink-sql-connector-http[^/]*\\.jar"); + + PackagingTestUtils.assertJarContainsOnlyFilesMatching( + jar, Arrays.asList("org/apache/flink/", "META-INF/")); + PackagingTestUtils.assertJarContainsServiceEntry(jar, Factory.class); + } +} diff --git a/pom.xml b/pom.xml index 8e32e4ef..29624cbb 100644 --- a/pom.xml +++ b/pom.xml @@ -1,3 +1,4 @@ + - 4.0.0 - - com.getindata - flink-http-connector - 0.20.0-SNAPSHOT - jar - - flink-http-connector - The HTTP TableLookup connector that allows for pulling data from external system via HTTP GET method and HTTP Sink that allows for sending data to external system via HTTP requests. The goal for HTTP TableLookup connector was to use it in Flink SQL statement as a standard table that can be later joined with other stream using pure SQL Flink. 
- https://github.com/getindata/flink-http-connector - - - - The Apache License, Version 2.0 - http://www.apache.org/licenses/LICENSE-2.0.txt - - - - - - GetInData - office@getindata.com - GetInData - https://getindata.com - - - - - scm:git:git://github.com/getindata/flink-http-connector - scm:git:ssh://github.com/getindata/flink-http-connector - https://github.com/getindata/flink-http-connector/tree/main - - - - - ossrh - https://s01.oss.sonatype.org/content/repositories/snapshots - - - ossrh - https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/ - - - - - UTF-8 - - - - 1.18.1 - - 11 - 2.12 - ${target.java.version} - ${target.java.version} - 1.18.22 - 2.18.1 - 5.10.1 - ${junit5.version} - 3.21.0 - 4.0.0 - 2.27.2 - 0.8.12 - 3.1.1 - 4.6.1 - 1.7.1 - 2.0.17 - - - - - apache.snapshots - Apache Development Snapshot Repository - https://repository.apache.org/content/repositories/snapshots/ - - false - - - true - - - - - - - - - org.apache.flink - flink-java - ${flink.version} - provided - - - org.apache.flink - flink-clients - ${flink.version} - provided - - - - org.slf4j - slf4j-api - ${slf4j.version} - - - - org.slf4j - slf4j-simple - ${slf4j.version} - test - - - - org.apache.flink - flink-table-api-java-bridge - ${flink.version} - provided - - - - org.apache.flink - flink-connector-base - ${flink.version} - provided - - - - org.apache.flink - flink-format-common - ${flink.version} - provided - - - - org.apache.flink - flink-json - ${flink.version} - provided - - - - com.fasterxml.jackson.core - jackson-core - ${jackson.version} - - - - com.fasterxml.jackson.core - jackson-databind - ${jackson.version} - - - - org.projectlombok - lombok - ${lombok.version} - provided - - - - io.github.resilience4j - resilience4j-retry - ${resilence4j.version} - - - - - org.apache.httpcomponents - httpclient - 4.5.13 - test - - - - org.apache.flink - flink-test-utils - ${flink.version} - test - - - - org.apache.flink - flink-table-test-utils + 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + + + org.apache.flink + flink-connector-parent + 1.0.0 + + + flink-connector-http + flink-sql-connector-http + + + 4.0.0 + + flink-connector-http-parent + Flink : Connectors : http : Parent + 1.0-SNAPSHOT + pom + + https://github.com/apache/flink-connector-http + + + + The Apache License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + + + + 2025 + + + https://github.com/apache/flink-connector-http + git@github.com:apache/flink-connector-http.git + + scm:git:https://gitbox.apache.org/repos/asf/flink-connector-http.git + + + + + + ossrh + https://s01.oss.sonatype.org/content/repositories/snapshots + + + ossrh + https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/ + + + + + UTF-8 + + + + 1.20.0 + 20.0 + 2.18.2 + 11 + 2.12 + 2.12.17 + ${target.java.version} + ${target.java.version} + 1.18.38 + 2.18.1 + 5.10.1 + ${junit5.version} + 3.23.1 + flink-connector-http-parent + + 5.2.0 + 5.2.0 + + 1.14.17 + 2.27.2 + 3.1.1 + 1.7.1 + false + 1.17.0 + + 1.7.36 + + false + 2.12.19 + 2.12.19 + + + -XX:+UseG1GC -Xms256m -XX:+IgnoreUnrecognizedVMOptions + ${flink.connector.module.config} + + + + + + apache.snapshots + Apache Development Snapshot Repository + https://repository.apache.org/content/repositories/snapshots/ + + false + + + true + + + + + + + + + + + org.apache.flink + flink-java ${flink.version} + + + + com.esotericsoftware.kryo + kryo + + + + + + org.apache.flink + flink-annotations + ${flink.version} + + + + org.apache.flink + flink-metrics-core + ${flink.version} + + + + org.apache.flink + flink-table-planner_${scala.binary.version} + ${flink.version} + + + + org.scala-lang + scala-library + ${scala-library.version} + + + + org.apache.flink + flink-clients + ${flink.version} + + + + org.apache.flink + flink-core + ${flink.version} + + + + org.apache.commons + commons-lang3 + + + + + + 
org.slf4j + slf4j-api + ${slf4j.version} + + + + org.apache.flink + flink-table-runtime + ${flink.version} + + + + org.apache.flink + flink-json + ${flink.version} + + + + org.apache.flink + flink-core-api + ${flink.version} + + + + + + + org.apache.flink + flink-table-api-java-bridge + ${flink.version} + + + + org.apache.flink + flink-table-api-java + ${flink.version} + + + + org.apache.flink + flink-table-common + ${flink.version} + + + + org.projectlombok + lombok + ${lombok.version} + + + + com.google.code.findbugs + jsr305 + 1.3.9 + + + + + io.vavr + vavr + 0.10.2 + compile + + + + io.github.resilience4j + resilience4j-retry + + + + org.slf4j + slf4j-api + + + ${resilence4j.version} + + + + io.github.resilience4j + resilience4j-core + + + org.slf4j + slf4j-api + + + ${resilence4j.version} + + + + + + + + org.apache.flink + flink-connector-base + ${flink.version} + provided + + + org.apache.flink + flink-connector-base + ${flink.version} + test-jar test - - - - org.apache.flink - flink-table-common - ${flink.version} - test-jar - test - - - - org.apache.flink - flink-connector-base - ${flink.version} - test-jar - test - - - - org.apache.flink - flink-test-utils - ${flink.version} - test - - - - org.apache.flink - flink-streaming-java - ${flink.version} - test - - - - org.apache.flink - flink-runtime - ${flink.version} - test - - - - org.apache.flink - flink-runtime-web - ${flink.version} - test - - - - org.junit.jupiter - junit-jupiter-api - ${junit.jupiter.version} - test - - - - org.junit.vintage - junit-vintage-engine - ${junit5.version} - test - - - - org.assertj - assertj-core - ${assertj.core.version} - test - - - - org.mockito - mockito-junit-jupiter - ${mockito.version} - test - - - - com.github.tomakehurst - wiremock - ${wiremock.version} - test - - - - org.mockito - mockito-inline - ${mockito-inline.version} - test - - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.8.1 - - ${target.java.version} - ${target.java.version} - - - - - 
org.apache.maven.plugins - maven-surefire-plugin - 3.0.0-M5 - - - - org.apache.maven.plugins - maven-checkstyle-plugin - 3.1.2 - - - checkstyle - validate - - check - - - dev/checkstyle.xml - UTF-8 - true - true - true - true - - - - - - - org.jacoco - jacoco-maven-plugin - ${jacoco.plugin.version} - - - **/HttpLookupConnectorOptions.class - **/Slf4jHttpPostRequestCallback.class - **/SelfSignedTrustManager.class - org/apache/calcite** - org/apache/flink** - - - - - jacoco-initialize - - prepare-agent - - - - jacoco-report - test - - report - - - - jacoco-check - - check - - test - - - - - - LINE - COVEREDRATIO - 90% - - - BRANCH - COVEREDRATIO - 90% - - - METHOD - COVEREDRATIO - 80% - - - CLASS - MISSEDCOUNT - 10% - - - - - - - - - - - - - org.apache.maven.plugins - maven-shade-plugin - ${maven.shade.plugin.version} - - - - package - - shade - - - - - org.apache.flink:force-shading - com.google.code.findbugs:jsr305 - org.slf4j:* - org.apache.logging.log4j:* - - - - - - *:* - - META-INF/*.SF - META-INF/*.DSA - META-INF/*.RSA - - - - - - - - - - org.codehaus.mojo - build-helper-maven-plugin - 3.3.0 - - - parse-version - - parse-version - - - - - - - - - - - - org.eclipse.m2e - lifecycle-mapping - 1.0.0 - - - - - - org.apache.maven.plugins - maven-shade-plugin - [3.1.1,) - - shade - - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - [3.1,) - - testCompile - compile - - - - - - - - - - - - - - - - - release - - - - org.apache.maven.plugins - maven-javadoc-plugin - 3.6.3 - - - https://nightlies.apache.org/flink/flink-docs-release-1.15/api/java/ - - - - - attach-javadocs - - jar - - - - - - - org.apache.maven.plugins - maven-source-plugin - 3.3.0 - - - attach-sources - - jar-no-fork - - - - - - - org.sonatype.plugins - nexus-staging-maven-plugin - 1.6.13 - true - - ossrh - https://s01.oss.sonatype.org/ - true - - - - - org.apache.maven.plugins - maven-gpg-plugin - 3.1.0 - - - --pinentry-mode - loopback - - - - - sign-artifacts - verify - - sign - - - - - 
- - - - - bump-patch - - - bumpPatch - - - - - - org.codehaus.mojo - versions-maven-plugin - - - - - set - - validate - - ${parsedVersion.majorVersion}.${parsedVersion.minorVersion}.${parsedVersion.nextIncrementalVersion}-SNAPSHOT - - - - - - - - - bump-minor - - - bumpMinor - - - - - - org.codehaus.mojo - versions-maven-plugin - - - - - set - - validate - - ${parsedVersion.majorVersion}.${parsedVersion.nextMinorVersion}.0-SNAPSHOT - - - - - - - - - bump-major - - - bumpMajor - - - + + + + org.apache.flink + flink-table-common + ${flink.version} + test-jar + test + + + + + org.apache.flink + flink-test-utils-junit + ${flink.version} + + + + com.fasterxml.jackson.core + jackson-annotations + + + org.slf4j + slf4j-api + + + org.apache.commons + commons-lang3 + + + org.apache.commons + commons-compress + + + + net.bytebuddy + byte-buddy + + + + + + org.apache.flink + flink-streaming-java + ${flink.version} + provided + + + + org.assertj + assertj-core + ${assertj.core.version} + + + + net.bytebuddy + byte-buddy + + + + + + org.junit.jupiter + junit-jupiter-api + ${junit5.version} + + + + org.junit.jupiter + junit-jupiter-params + ${junit5.version} + + + + com.google.guava + guava + 27.0-jre + + + + net.minidev + json-smart + 2.5.2 + + + + net.bytebuddy + byte-buddy + ${bytebuddy.version} + + + + org.mockito + mockito-junit-jupiter + ${mockito.version} + + + + org.junit.jupiter + junit-jupiter-api + + + net.bytebuddy + byte-buddy + + + + + + org.mockito + mockito-core + ${mockito.version} + + + + org.objenesis + objenesis + + + + + + org.mockito + mockito-inline + ${mockito-inline.version} + + + + com.github.tomakehurst + wiremock + ${wiremock.version} + + + + com.google.guava + guava + + + commons-io + commons + + + commons-io + commons-io + + + org.slf4j + slf4j-api + + + net.minidev + json-smart + + + org.apache.commons + commons-lang3 + + + org.ow2.asm + asm + + + + + + + + - - org.codehaus.mojo - versions-maven-plugin - - - - - set - - validate + + 
org.codehaus.mojo + exec-maven-plugin + false + + + org.apache.flink + flink-ci-tools + ${flink.version} + + + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.0.0-M5 - ${parsedVersion.nextMajorVersion}.0.0-SNAPSHOT + + + false + + 0${surefire.forkNumber} + + $${surefire.forkNumber} + US + en + ${project.basedir} + true + + -Xms256m -Xmx2048m -XX:+UseG1GC - - - + + + + default-test + test + + test + + + + ${test.unit.pattern} + + + ${additionalExcludes} + + + 1 + ${flink.surefire.baseArgLine} -Xmx${flink.XmxUnitTest} + + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + + io.github.zentol.japicmp + japicmp-maven-plugin + + + + org.apache.rat + apache-rat-plugin + false + + + org.apache.maven.plugins + maven-checkstyle-plugin + + + com.diffplug.spotless + spotless-maven-plugin + + + org.apache.maven.plugins + maven-compiler-plugin + + + org.apache.maven.plugins + maven-shade-plugin + + + + org.commonjava.maven.plugins + directory-maven-plugin + + + + org.apache.maven.plugins + maven-dependency-plugin + 3.7.1 + + + analyze-deps + + analyze + + verify + + true + true + + + org.mockito:mockito-inline:jar:4.6.1 + + + + + + + + org.apache.maven.plugins + maven-enforcer-plugin + + + enforce-banned-deps + + enforce + + + + + + com.google.guava:guava + + org.powermock + + + com.google.guava:guava:*:*:test + + + + true + + + + - - - + diff --git a/src/main/java/com/getindata/connectors/http/HttpPostRequestCallback.java b/src/main/java/com/getindata/connectors/http/HttpPostRequestCallback.java deleted file mode 100644 index 3f9975ef..00000000 --- a/src/main/java/com/getindata/connectors/http/HttpPostRequestCallback.java +++ /dev/null @@ -1,29 +0,0 @@ -package com.getindata.connectors.http; - -import java.io.Serializable; -import java.net.http.HttpResponse; -import java.util.Map; - -/** - * An interface for post request callback action, processing a response and its respective request. - * - *

One can customize the behaviour of such a callback by implementing both - * {@link HttpPostRequestCallback} and {@link HttpPostRequestCallbackFactory}. - * - * @param type of the HTTP request wrapper - */ -public interface HttpPostRequestCallback extends Serializable { - /** - * Process HTTP request and the matching response. - * @param response HTTP response - * @param requestEntry request's payload - * @param endpointUrl the URL of the endpoint - * @param headerMap mapping of header names to header values - */ - void call( - HttpResponse response, - RequestT requestEntry, - String endpointUrl, - Map headerMap - ); -} diff --git a/src/main/java/com/getindata/connectors/http/HttpPostRequestCallbackFactory.java b/src/main/java/com/getindata/connectors/http/HttpPostRequestCallbackFactory.java deleted file mode 100644 index 7e722c20..00000000 --- a/src/main/java/com/getindata/connectors/http/HttpPostRequestCallbackFactory.java +++ /dev/null @@ -1,67 +0,0 @@ -package com.getindata.connectors.http; - -import org.apache.flink.table.factories.Factory; - -import com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSource; -import com.getindata.connectors.http.internal.table.sink.HttpDynamicSink; - -/** - * The {@link Factory} that dynamically creates and injects {@link HttpPostRequestCallback} to - * {@link HttpDynamicSink} and {@link HttpLookupTableSource}. - * - *

Custom implementations of {@link HttpPostRequestCallbackFactory} can be registered along - * other factories in - *

resources/META-INF/services/org.apache.flink.table.factories.Factory
- * file and then referenced by their identifiers in: - *
    - *
  • - * The HttpSink DDL property field gid.connector.http.sink.request-callback - * for HTTP sink. - *
  • - *
  • - * The Http lookup DDL property field gid.connector.http.source.lookup.request-callback - * for HTTP lookup. - *
  • - *
- *
- * - *

The following example shows the minimum Table API example to create a {@link HttpDynamicSink} - * that uses a custom callback created by a factory that returns my-callback as its - * identifier. - * - *

{@code
- * CREATE TABLE http (
- *   id bigint,
- *   some_field string
- * ) with (
- *   'connector' = 'http-sink'
- *   'url' = 'http://example.com/myendpoint'
- *   'format' = 'json',
- *   'gid.connector.http.sink.request-callback' = 'my-callback'
- * )
- * }
- * - *

The following example shows the minimum Table API example to create a - * {@link HttpLookupTableSource} that uses a custom callback created by a factory that - * returns my-callback as its identifier. - * - *

{@code
- * CREATE TABLE httplookup (
- *   id bigint
- * ) with (
- *   'connector' = 'rest-lookup',
- *   'url' = 'http://example.com/myendpoint',
- *   'format' = 'json',
- *   'gid.connector.http.source.lookup.request-callback' = 'my-callback'
- * )
- * }
- * - * @param type of the HTTP request wrapper - */ - -public interface HttpPostRequestCallbackFactory extends Factory { - /** - * @return {@link HttpPostRequestCallback} custom request callback instance - */ - HttpPostRequestCallback createHttpPostRequestCallback(); -} diff --git a/src/main/java/com/getindata/connectors/http/HttpSink.java b/src/main/java/com/getindata/connectors/http/HttpSink.java deleted file mode 100644 index 23faf100..00000000 --- a/src/main/java/com/getindata/connectors/http/HttpSink.java +++ /dev/null @@ -1,74 +0,0 @@ -package com.getindata.connectors.http; - -import java.util.Properties; - -import org.apache.flink.annotation.PublicEvolving; -import org.apache.flink.connector.base.sink.writer.ElementConverter; - -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.SinkHttpClientBuilder; -import com.getindata.connectors.http.internal.sink.HttpSinkInternal; -import com.getindata.connectors.http.internal.sink.HttpSinkRequestEntry; -import com.getindata.connectors.http.internal.sink.httpclient.HttpRequest; - -/** - * A public implementation for {@code HttpSink} that performs async requests against a specified - * HTTP endpoint using the buffering protocol specified in - * {@link org.apache.flink.connector.base.sink.AsyncSinkBase}. - * - *

- * To create a new instance of this class use {@link HttpSinkBuilder}. An example would be: - *

{@code
- * HttpSink httpSink =
- *     HttpSink.builder()
- *             .setEndpointUrl("http://example.com/myendpoint")
- *             .setElementConverter(
- *                 (s, _context) -> new HttpSinkRequestEntry("POST", "text/plain",
- *                 s.getBytes(StandardCharsets.UTF_8)))
- *             .build();
- * }
- * - * @param type of the elements that should be sent through HTTP request. - */ -@PublicEvolving -public class HttpSink extends HttpSinkInternal { - - HttpSink( - ElementConverter elementConverter, - int maxBatchSize, - int maxInFlightRequests, - int maxBufferedRequests, - long maxBatchSizeInBytes, - long maxTimeInBufferMS, - long maxRecordSizeInBytes, - String endpointUrl, - HttpPostRequestCallback httpPostRequestCallback, - HeaderPreprocessor headerPreprocessor, - SinkHttpClientBuilder sinkHttpClientBuilder, - Properties properties) { - - super(elementConverter, - maxBatchSize, - maxInFlightRequests, - maxBufferedRequests, - maxBatchSizeInBytes, - maxTimeInBufferMS, - maxRecordSizeInBytes, - endpointUrl, - httpPostRequestCallback, - headerPreprocessor, - sinkHttpClientBuilder, - properties - ); - } - - /** - * Create a {@link HttpSinkBuilder} constructing a new {@link HttpSink}. - * - * @param type of the elements that should be sent through HTTP request - * @return {@link HttpSinkBuilder} - */ - public static HttpSinkBuilder builder() { - return new HttpSinkBuilder<>(); - } -} diff --git a/src/main/java/com/getindata/connectors/http/HttpStatusCodeValidationFailedException.java b/src/main/java/com/getindata/connectors/http/HttpStatusCodeValidationFailedException.java deleted file mode 100644 index cad25b29..00000000 --- a/src/main/java/com/getindata/connectors/http/HttpStatusCodeValidationFailedException.java +++ /dev/null @@ -1,15 +0,0 @@ -package com.getindata.connectors.http; - -import java.net.http.HttpResponse; - -import lombok.Getter; - -@Getter -public class HttpStatusCodeValidationFailedException extends Exception { - private final HttpResponse response; - - public HttpStatusCodeValidationFailedException(String message, HttpResponse response) { - super(message); - this.response = response; - } -} diff --git a/src/main/java/com/getindata/connectors/http/LookupArg.java b/src/main/java/com/getindata/connectors/http/LookupArg.java deleted file mode 100644 
index 3dc7c66a..00000000 --- a/src/main/java/com/getindata/connectors/http/LookupArg.java +++ /dev/null @@ -1,22 +0,0 @@ -package com.getindata.connectors.http; - -import lombok.Data; -import lombok.RequiredArgsConstructor; - -/** - * Transfer object that contains single lookup argument (column name) and its value. - */ -@Data -@RequiredArgsConstructor -public class LookupArg { - - /** - * Lookup argument name. - */ - private final String argName; - - /** - * Lookup argument value. - */ - private final String argValue; -} diff --git a/src/main/java/com/getindata/connectors/http/LookupQueryCreator.java b/src/main/java/com/getindata/connectors/http/LookupQueryCreator.java deleted file mode 100644 index 1da57896..00000000 --- a/src/main/java/com/getindata/connectors/http/LookupQueryCreator.java +++ /dev/null @@ -1,27 +0,0 @@ -package com.getindata.connectors.http; - -import java.io.Serializable; - -import org.apache.flink.table.data.RowData; - -import com.getindata.connectors.http.internal.table.lookup.LookupQueryInfo; - -/** - * An interface for a creator of a lookup query in the Http Lookup Source (e.g., the query that - * gets appended as query parameters to the URI in GET request or supplied as the payload of a - * body-based request along with optional query parameters). - * - *

One can customize how those queries are built by implementing {@link LookupQueryCreator} and - * {@link LookupQueryCreatorFactory}. - */ -public interface LookupQueryCreator extends Serializable { - - /** - * Create a lookup query (like the query appended to path in GET request) - * out of the provided arguments. - * - * @param lookupDataRow a {@link RowData} containing request parameters. - * @return a lookup query. - */ - LookupQueryInfo createLookupQuery(RowData lookupDataRow); -} diff --git a/src/main/java/com/getindata/connectors/http/LookupQueryCreatorFactory.java b/src/main/java/com/getindata/connectors/http/LookupQueryCreatorFactory.java deleted file mode 100644 index bade9351..00000000 --- a/src/main/java/com/getindata/connectors/http/LookupQueryCreatorFactory.java +++ /dev/null @@ -1,49 +0,0 @@ -package com.getindata.connectors.http; - -import java.io.Serializable; - -import org.apache.flink.configuration.ReadableConfig; -import org.apache.flink.table.factories.DynamicTableFactory; -import org.apache.flink.table.factories.Factory; - -import com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSource; -import com.getindata.connectors.http.internal.table.lookup.LookupRow; - -/** - * The {@link Factory} that dynamically creates and injects {@link LookupQueryCreator} to - * {@link HttpLookupTableSource}. - * - *

Custom implementations of {@link LookupQueryCreatorFactory} can be registered along other - * factories in

resources/META-INF.services/org.apache.flink.table.factories.Factory
- * file and then referenced by their identifiers in the HttpLookupSource DDL property field - * gid.connector.http.source.lookup.query-creator. - * - *

The following example shows the minimum Table API example to create a - * {@link HttpLookupTableSource} that uses a custom query creator created by a factory that returns - * my-query-creator as its identifier. - * - *

{@code
- * CREATE TABLE http (
- *   id bigint,
- *   some_field string
- * ) WITH (
- *   'connector' = 'rest-lookup',
- *   'format' = 'json',
- *   'url' = 'http://example.com/myendpoint',
- *   'gid.connector.http.source.lookup.query-creator' = 'my-query-creator'
- * )
- * }
- */ -public interface LookupQueryCreatorFactory extends Factory, Serializable { - - /** - * @param readableConfig readable config - * @param lookupRow lookup row - * @param dynamicTableFactoryContext context - * @return {@link LookupQueryCreator} custom lookup query creator instance - */ - LookupQueryCreator createLookupQueryCreator( - ReadableConfig readableConfig, - LookupRow lookupRow, - DynamicTableFactory.Context dynamicTableFactoryContext); -} diff --git a/src/main/java/com/getindata/connectors/http/internal/ComposeHeaderPreprocessor.java b/src/main/java/com/getindata/connectors/http/internal/ComposeHeaderPreprocessor.java deleted file mode 100644 index 8dd62023..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/ComposeHeaderPreprocessor.java +++ /dev/null @@ -1,44 +0,0 @@ -package com.getindata.connectors.http.internal; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -/** - * This implementation of {@link HeaderPreprocessor} acts as a registry for all {@link - * HeaderValuePreprocessor} that should be applied on HTTP request. - */ -public class ComposeHeaderPreprocessor implements HeaderPreprocessor { - - /** - * Default, pass through header value preprocessor used whenever dedicated preprocessor for a - * given header does not exist. - */ - private static final HeaderValuePreprocessor DEFAULT_VALUE_PREPROCESSOR = rawValue -> rawValue; - - /** - * Map with {@link HeaderValuePreprocessor} to apply. - */ - private final Map valuePreprocessors; - - /** - * Creates a new instance of ComposeHeaderPreprocessor for provided {@link - * HeaderValuePreprocessor} map. - * - * @param valuePreprocessors map of {@link HeaderValuePreprocessor} that should be used for this - * processor. If null, then default, pass through header value - * processor will be used for every header. - */ - public ComposeHeaderPreprocessor(Map valuePreprocessors) { - this.valuePreprocessors = (valuePreprocessors == null) - ? 
Collections.emptyMap() - : new HashMap<>(valuePreprocessors); - } - - @Override - public String preprocessValueForHeader(String headerName, String headerRawValue) { - return valuePreprocessors - .getOrDefault(headerName, DEFAULT_VALUE_PREPROCESSOR) - .preprocessHeaderValue(headerRawValue); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/HeaderPreprocessor.java b/src/main/java/com/getindata/connectors/http/internal/HeaderPreprocessor.java deleted file mode 100644 index 24fcbf0e..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/HeaderPreprocessor.java +++ /dev/null @@ -1,17 +0,0 @@ -package com.getindata.connectors.http.internal; - -import java.io.Serializable; - -/** - * Interface for header preprocessing - */ -public interface HeaderPreprocessor extends Serializable { - - /** - * Preprocess value of a header.Preprocessing can change or validate header value. - * @param headerName header name which value should be preprocessed. - * @param headerRawValue header value to process. - * @return preprocessed header value. - */ - String preprocessValueForHeader(String headerName, String headerRawValue); -} diff --git a/src/main/java/com/getindata/connectors/http/internal/HeaderValuePreprocessor.java b/src/main/java/com/getindata/connectors/http/internal/HeaderValuePreprocessor.java deleted file mode 100644 index 5704dddf..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/HeaderValuePreprocessor.java +++ /dev/null @@ -1,18 +0,0 @@ -package com.getindata.connectors.http.internal; - -import java.io.Serializable; - -/** - * Processor interface which modifies header value based on implemented logic. - * An example would be calculation of Value of Authorization header. - */ -public interface HeaderValuePreprocessor extends Serializable { - - /** - * Modifies header rawValue according to the implemented logic. - * @param rawValue header original value to modify - * @return modified header value. 
- */ - String preprocessHeaderValue(String rawValue); - -} diff --git a/src/main/java/com/getindata/connectors/http/internal/OIDCAuthHeaderValuePreprocessor.java b/src/main/java/com/getindata/connectors/http/internal/OIDCAuthHeaderValuePreprocessor.java deleted file mode 100644 index 945f3ba1..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/OIDCAuthHeaderValuePreprocessor.java +++ /dev/null @@ -1,50 +0,0 @@ -package com.getindata.connectors.http.internal; - -import java.net.http.HttpClient; -import java.time.Duration; -import java.util.Optional; - -import lombok.extern.slf4j.Slf4j; - -import com.getindata.connectors.http.internal.auth.OidcAccessTokenManager; - -/** - * Header processor for HTTP OIDC Authentication mechanism. - */ -@Slf4j -public class OIDCAuthHeaderValuePreprocessor implements HeaderValuePreprocessor { - - - private final String oidcAuthURL; - private final String oidcTokenRequest; - private Duration oidcExpiryReduction = Duration.ofSeconds(1); - /** - * Add the access token to the request using OidcAuth authenticate method that - * gives us a valid access token. - * @param oidcAuthURL OIDC token endpoint - * @param oidcTokenRequest OIDC Token Request - * @param oidcExpiryReduction OIDC token expiry reduction - */ - - public OIDCAuthHeaderValuePreprocessor(String oidcAuthURL, - String oidcTokenRequest, - Optional oidcExpiryReduction) { - this.oidcAuthURL = oidcAuthURL; - this.oidcTokenRequest = oidcTokenRequest; - if (oidcExpiryReduction.isPresent()) { - this.oidcExpiryReduction = oidcExpiryReduction.get(); - } - } - - @Override - public String preprocessHeaderValue(String rawValue) { - OidcAccessTokenManager auth = new OidcAccessTokenManager( - HttpClient.newBuilder().build(), - oidcTokenRequest, - oidcAuthURL, - oidcExpiryReduction - ); - // apply the OIDC authentication by adding the dynamically calculated header value. 
- return "BEARER " + auth.authenticate(); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/PollingClient.java b/src/main/java/com/getindata/connectors/http/internal/PollingClient.java deleted file mode 100644 index 925e10b6..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/PollingClient.java +++ /dev/null @@ -1,25 +0,0 @@ -package com.getindata.connectors.http.internal; - -import java.util.Collection; - -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.functions.FunctionContext; - -/** - * A client that is used to get enrichment data from external component. - */ -public interface PollingClient { - - /** - * Gets enrichment data from external component using provided lookup arguments. - * @param lookupRow A {@link RowData} containing request parameters. - * @return an optional result of data lookup. - */ - Collection pull(RowData lookupRow); - - /** - * Initialize the client. - * @param ctx function context - */ - void open(FunctionContext ctx); -} diff --git a/src/main/java/com/getindata/connectors/http/internal/PollingClientFactory.java b/src/main/java/com/getindata/connectors/http/internal/PollingClientFactory.java deleted file mode 100644 index 7981558d..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/PollingClientFactory.java +++ /dev/null @@ -1,16 +0,0 @@ -package com.getindata.connectors.http.internal; - -import java.io.Serializable; - -import org.apache.flink.api.common.serialization.DeserializationSchema; -import org.apache.flink.util.ConfigurationException; - -import com.getindata.connectors.http.internal.table.lookup.HttpLookupConfig; - -public interface PollingClientFactory extends Serializable { - - PollingClient createPollClient( - HttpLookupConfig options, - DeserializationSchema schemaDecoder - ) throws ConfigurationException; -} diff --git a/src/main/java/com/getindata/connectors/http/internal/SinkHttpClient.java 
b/src/main/java/com/getindata/connectors/http/internal/SinkHttpClient.java deleted file mode 100644 index d2922f4a..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/SinkHttpClient.java +++ /dev/null @@ -1,28 +0,0 @@ -package com.getindata.connectors.http.internal; - -import java.util.List; -import java.util.concurrent.CompletableFuture; - -import com.getindata.connectors.http.internal.sink.HttpSinkInternal; -import com.getindata.connectors.http.internal.sink.HttpSinkRequestEntry; -import com.getindata.connectors.http.internal.sink.HttpSinkWriter; - -/** - * An HTTP client that is used by {@link HttpSinkWriter} to send HTTP requests processed by {@link - * HttpSinkInternal}. - */ -public interface SinkHttpClient { - - /** - * Sends HTTP requests to an external web service. - * - * @param requestEntries a set of request entries that should be sent to the destination - * @param endpointUrl the URL of the endpoint - * @return the new {@link CompletableFuture} wrapping {@link SinkHttpClientResponse} that - * completes when all requests have been sent and returned their statuses - */ - CompletableFuture putRequests( - List requestEntries, - String endpointUrl - ); -} diff --git a/src/main/java/com/getindata/connectors/http/internal/SinkHttpClientBuilder.java b/src/main/java/com/getindata/connectors/http/internal/SinkHttpClientBuilder.java deleted file mode 100644 index 7422bc2e..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/SinkHttpClientBuilder.java +++ /dev/null @@ -1,27 +0,0 @@ -package com.getindata.connectors.http.internal; - -import java.io.Serializable; -import java.util.Properties; - -import org.apache.flink.annotation.PublicEvolving; - -import com.getindata.connectors.http.HttpPostRequestCallback; -import com.getindata.connectors.http.internal.sink.httpclient.HttpRequest; -import com.getindata.connectors.http.internal.sink.httpclient.RequestSubmitterFactory; - -/** - * Builder building {@link SinkHttpClient}. 
- */ -@PublicEvolving -public interface SinkHttpClientBuilder extends Serializable { - - // TODO Consider moving HttpPostRequestCallback and HeaderPreprocessor, RequestSubmitter to be a - // SinkHttpClientBuilder fields. This method is getting more and more arguments. - SinkHttpClient build( - Properties properties, - HttpPostRequestCallback httpPostRequestCallback, - HeaderPreprocessor headerPreprocessor, - RequestSubmitterFactory requestSubmitterFactory - - ); -} diff --git a/src/main/java/com/getindata/connectors/http/internal/SinkHttpClientResponse.java b/src/main/java/com/getindata/connectors/http/internal/SinkHttpClientResponse.java deleted file mode 100644 index b5637377..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/SinkHttpClientResponse.java +++ /dev/null @@ -1,31 +0,0 @@ -package com.getindata.connectors.http.internal; - -import java.util.List; - -import lombok.Data; -import lombok.NonNull; -import lombok.ToString; - -import com.getindata.connectors.http.internal.sink.HttpSinkRequestEntry; -import com.getindata.connectors.http.internal.sink.httpclient.HttpRequest; - -/** - * Data class holding {@link HttpSinkRequestEntry} instances that {@link SinkHttpClient} attempted - * to write, divided into two lists — successful and failed ones. - */ -@Data -@ToString -public class SinkHttpClientResponse { - - /** - * A list of successfully written requests. - */ - @NonNull - private final List successfulRequests; - - /** - * A list of requests that {@link SinkHttpClient} failed to write. 
- */ - @NonNull - private final List failedRequests; -} diff --git a/src/main/java/com/getindata/connectors/http/internal/config/ConfigException.java b/src/main/java/com/getindata/connectors/http/internal/config/ConfigException.java deleted file mode 100644 index 0a387ad8..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/config/ConfigException.java +++ /dev/null @@ -1,30 +0,0 @@ -package com.getindata.connectors.http.internal.config; - -/** - * A Runtime exception throw when there is any issue with configuration properties for Http - * Connector. - */ -public class ConfigException extends RuntimeException { - - private static final long serialVersionUID = 1L; - - public ConfigException(String message) { - super(message); - } - - public ConfigException(String message, Throwable t) { - super(message, t); - } - - /** - * Creates an exception object using predefined exception message template: - * {@code Invalid value + (value) + for configuration + (property name) + (additional message) } - * @param name configuration property name. - * @param value configuration property value. - * @param message custom message appended to the end of exception message. - */ - public ConfigException(String name, Object value, String message) { - super("Invalid value " + value + " for configuration " + name + (message == null ? 
"" - : ": " + message)); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/config/HttpConnectorConfigConstants.java b/src/main/java/com/getindata/connectors/http/internal/config/HttpConnectorConfigConstants.java deleted file mode 100644 index fa47529f..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/config/HttpConnectorConfigConstants.java +++ /dev/null @@ -1,143 +0,0 @@ -package com.getindata.connectors.http.internal.config; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; -import lombok.experimental.UtilityClass; - -/** - * A dictionary class containing properties or properties prefixes for Http connector. - */ -@UtilityClass -@NoArgsConstructor(access = AccessLevel.NONE) -// TODO Change this name to HttpConnectorConfigProperties -public final class HttpConnectorConfigConstants { - - public static final String PROP_DELIM = ","; - - /** - * A property prefix for http connector. - */ - public static final String GID_CONNECTOR_HTTP = "gid.connector.http."; - private static final String SOURCE_LOOKUP_PREFIX = GID_CONNECTOR_HTTP + "source.lookup."; - - /** - * A property prefix for http connector header properties - */ - public static final String SINK_HEADER_PREFIX = GID_CONNECTOR_HTTP + "sink.header."; - - public static final String LOOKUP_SOURCE_HEADER_PREFIX = SOURCE_LOOKUP_PREFIX + "header."; - - public static final String OIDC_AUTH_TOKEN_REQUEST = GID_CONNECTOR_HTTP - + "security.oidc.token.request"; - - public static final String OIDC_AUTH_TOKEN_ENDPOINT_URL = GID_CONNECTOR_HTTP - + "security.oidc.token.endpoint.url"; - - public static final String OIDC_AUTH_TOKEN_EXPIRY_REDUCTION = GID_CONNECTOR_HTTP - + "security.oidc.token.expiry.reduction"; - /** - * Whether to use the raw value of the Authorization header. If set, it prevents - * the special treatment of the header for Basic Authentication, thus preserving the passed - * raw value. Defaults to false. 
- */ - public static final String LOOKUP_SOURCE_HEADER_USE_RAW = SOURCE_LOOKUP_PREFIX + "use-raw-authorization-header"; - - public static final String RESULT_TYPE = SOURCE_LOOKUP_PREFIX + "result-type"; - - // --------- Error code handling configuration --------- - public static final String HTTP_ERROR_SINK_CODE_WHITE_LIST = GID_CONNECTOR_HTTP + "sink.error.code.exclude"; - - public static final String HTTP_ERROR_SINK_CODES_LIST = GID_CONNECTOR_HTTP + "sink.error.code"; - // ----------------------------------------------------- - - public static final String SOURCE_LOOKUP_REQUEST_CALLBACK_IDENTIFIER = - SOURCE_LOOKUP_PREFIX + "request-callback"; - - public static final String SINK_REQUEST_CALLBACK_IDENTIFIER = - GID_CONNECTOR_HTTP + "sink.request-callback"; - - public static final String SOURCE_LOOKUP_QUERY_CREATOR_IDENTIFIER = - SOURCE_LOOKUP_PREFIX + "query-creator"; - - // -------------- HTTPS security settings -------------- - public static final String ALLOW_SELF_SIGNED = - GID_CONNECTOR_HTTP + "security.cert.server.allowSelfSigned"; - - public static final String SERVER_TRUSTED_CERT = GID_CONNECTOR_HTTP + "security.cert.server"; - - public static final String CLIENT_CERT = GID_CONNECTOR_HTTP + "security.cert.client"; - - public static final String CLIENT_PRIVATE_KEY = GID_CONNECTOR_HTTP + "security.key.client"; - - public static final String KEY_STORE_PATH = GID_CONNECTOR_HTTP - + "security.keystore.path"; - - public static final String KEY_STORE_PASSWORD = GID_CONNECTOR_HTTP - + "security.keystore.password"; - - public static final String KEY_STORE_TYPE = GID_CONNECTOR_HTTP - + "security.keystore.type"; - - // ----------------------------------------------------- - - // ------ HTTPS timeouts and thread pool settings ------ - - public static final String LOOKUP_HTTP_TIMEOUT_SECONDS = - SOURCE_LOOKUP_PREFIX + "request.timeout"; - - public static final String SOURCE_CONNECTION_TIMEOUT = - SOURCE_LOOKUP_PREFIX + "connection.timeout"; - - public static final 
String SOURCE_PROXY_HOST = - SOURCE_LOOKUP_PREFIX + "proxy.host"; - - public static final String SOURCE_PROXY_PORT = - SOURCE_LOOKUP_PREFIX + "proxy.port"; - - public static final String SOURCE_PROXY_USERNAME = - SOURCE_LOOKUP_PREFIX + "proxy.username"; - - public static final String SOURCE_PROXY_PASSWORD = - SOURCE_LOOKUP_PREFIX + "proxy.password"; - - public static final String SINK_HTTP_TIMEOUT_SECONDS = - GID_CONNECTOR_HTTP + "sink.request.timeout"; - - public static final String LOOKUP_HTTP_PULING_THREAD_POOL_SIZE = - SOURCE_LOOKUP_PREFIX + "request.thread-pool.size"; - - public static final String LOOKUP_HTTP_RESPONSE_THREAD_POOL_SIZE = - SOURCE_LOOKUP_PREFIX + "response.thread-pool.size"; - - public static final String SINK_HTTP_WRITER_THREAD_POOL_SIZE = - GID_CONNECTOR_HTTP + "sink.writer.thread-pool.size"; - - // ----------------------------------------------------- - - - // ------ Sink request submitter settings ------ - public static final String SINK_HTTP_REQUEST_MODE = - GID_CONNECTOR_HTTP + "sink.writer.request.mode"; - - public static final String SINK_HTTP_BATCH_REQUEST_SIZE = - GID_CONNECTOR_HTTP + "sink.request.batch.size"; - - // --------------------------------------------- - public static final String SOURCE_RETRY_SUCCESS_CODES = SOURCE_LOOKUP_PREFIX + "success-codes"; - public static final String SOURCE_RETRY_RETRY_CODES = SOURCE_LOOKUP_PREFIX + "retry-codes"; - public static final String SOURCE_IGNORE_RESPONSE_CODES = SOURCE_LOOKUP_PREFIX + "ignored-response-codes"; - - public static final String SOURCE_RETRY_STRATEGY_PREFIX = SOURCE_LOOKUP_PREFIX + "retry-strategy."; - public static final String SOURCE_RETRY_STRATEGY_TYPE = SOURCE_RETRY_STRATEGY_PREFIX + "type"; - - private static final String SOURCE_RETRY_FIXED_DELAY_PREFIX = SOURCE_RETRY_STRATEGY_PREFIX + "fixed-delay."; - public static final String SOURCE_RETRY_FIXED_DELAY_DELAY = SOURCE_RETRY_FIXED_DELAY_PREFIX + "delay"; - - private static final String SOURCE_RETRY_EXP_DELAY_PREFIX = 
SOURCE_RETRY_STRATEGY_PREFIX + "exponential-delay."; - public static final String SOURCE_RETRY_EXP_DELAY_INITIAL_BACKOFF = - SOURCE_RETRY_EXP_DELAY_PREFIX + "initial-backoff"; - public static final String SOURCE_RETRY_EXP_DELAY_MAX_BACKOFF = - SOURCE_RETRY_EXP_DELAY_PREFIX + "max-backoff"; - public static final String SOURCE_RETRY_EXP_DELAY_MULTIPLIER = - SOURCE_RETRY_EXP_DELAY_PREFIX + "backoff-multiplier"; -} diff --git a/src/main/java/com/getindata/connectors/http/internal/config/SinkRequestSubmitMode.java b/src/main/java/com/getindata/connectors/http/internal/config/SinkRequestSubmitMode.java deleted file mode 100644 index 9b1b86a3..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/config/SinkRequestSubmitMode.java +++ /dev/null @@ -1,17 +0,0 @@ -package com.getindata.connectors.http.internal.config; - -public enum SinkRequestSubmitMode { - - SINGLE("single"), - BATCH("batch"); - - private final String mode; - - SinkRequestSubmitMode(String mode) { - this.mode = mode; - } - - public String getMode() { - return mode; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/retry/HttpClientWithRetry.java b/src/main/java/com/getindata/connectors/http/internal/retry/HttpClientWithRetry.java deleted file mode 100644 index bdb424aa..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/retry/HttpClientWithRetry.java +++ /dev/null @@ -1,71 +0,0 @@ -package com.getindata.connectors.http.internal.retry; - -import java.io.IOException; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.util.function.Supplier; - -import io.github.resilience4j.retry.Retry; -import io.github.resilience4j.retry.RetryConfig; -import lombok.Builder; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; -import org.apache.flink.metrics.MetricGroup; - -import com.getindata.connectors.http.HttpStatusCodeValidationFailedException; -import 
com.getindata.connectors.http.internal.status.HttpResponseChecker; - -@Slf4j -public class HttpClientWithRetry { - - private final HttpClient httpClient; - @Getter - private final HttpResponseChecker responseChecker; - private final Retry retry; - - @Builder - HttpClientWithRetry(HttpClient httpClient, - RetryConfig retryConfig, - HttpResponseChecker responseChecker) { - this.httpClient = httpClient; - this.responseChecker = responseChecker; - var adjustedRetryConfig = RetryConfig.from(retryConfig) - .retryExceptions(IOException.class) - .retryOnResult(this::isTemporalError) - .build(); - this.retry = Retry.of("http-lookup-connector", adjustedRetryConfig); - } - - public void registerMetrics(MetricGroup metrics){ - var group = metrics.addGroup("http_lookup_connector"); - group.gauge("successfulCallsWithRetryAttempt", - () -> retry.getMetrics().getNumberOfSuccessfulCallsWithRetryAttempt()); - group.gauge("successfulCallsWithoutRetryAttempt", - () -> retry.getMetrics().getNumberOfSuccessfulCallsWithoutRetryAttempt()); - } - - public HttpResponse send( - Supplier requestSupplier, - HttpResponse.BodyHandler responseBodyHandler - ) throws IOException, InterruptedException, HttpStatusCodeValidationFailedException { - try { - var response = Retry.decorateCheckedSupplier(retry, - () -> httpClient.send(requestSupplier.get(), responseBodyHandler)).apply(); - if (!responseChecker.isSuccessful(response)) { - throw new HttpStatusCodeValidationFailedException( - "Incorrect response code: " + response.statusCode(), response); - } - return response; - } catch (IOException | InterruptedException | HttpStatusCodeValidationFailedException e) { - throw e; //re-throw without wrapping - } catch (Throwable t) { - throw new RuntimeException("Unexpected exception", t); - } - } - - private boolean isTemporalError(Object response) { - return responseChecker.isTemporalError((HttpResponse) response); - } -} - diff --git 
a/src/main/java/com/getindata/connectors/http/internal/retry/RetryStrategyType.java b/src/main/java/com/getindata/connectors/http/internal/retry/RetryStrategyType.java deleted file mode 100644 index b9c8876d..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/retry/RetryStrategyType.java +++ /dev/null @@ -1,27 +0,0 @@ -package com.getindata.connectors.http.internal.retry; - -import lombok.AccessLevel; -import lombok.Getter; -import lombok.RequiredArgsConstructor; - -@Getter -@RequiredArgsConstructor(access = AccessLevel.PRIVATE) -public enum RetryStrategyType { - FIXED_DELAY("fixed-delay"), - EXPONENTIAL_DELAY("exponential-delay"), - ; - - private final String code; - - public static RetryStrategyType fromCode(String code) { - if (code == null) { - throw new NullPointerException("Code is null"); - } - for (var strategy : RetryStrategyType.values()) { - if (strategy.getCode().equalsIgnoreCase(code)) { - return strategy; - } - } - throw new IllegalArgumentException("No enum constant for " + code); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkInternal.java b/src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkInternal.java deleted file mode 100644 index de37faac..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkInternal.java +++ /dev/null @@ -1,188 +0,0 @@ -package com.getindata.connectors.http.internal.sink; - -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; -import java.util.Properties; - -import org.apache.flink.connector.base.sink.AsyncSinkBase; -import org.apache.flink.connector.base.sink.writer.BufferedRequestState; -import org.apache.flink.connector.base.sink.writer.ElementConverter; -import org.apache.flink.core.io.SimpleVersionedSerializer; -import org.apache.flink.util.Preconditions; -import org.apache.flink.util.StringUtils; - -import com.getindata.connectors.http.HttpPostRequestCallback; -import 
com.getindata.connectors.http.SchemaLifecycleAwareElementConverter; -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.SinkHttpClientBuilder; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.config.SinkRequestSubmitMode; -import com.getindata.connectors.http.internal.sink.httpclient.BatchRequestSubmitterFactory; -import com.getindata.connectors.http.internal.sink.httpclient.HttpRequest; -import com.getindata.connectors.http.internal.sink.httpclient.PerRequestRequestSubmitterFactory; -import com.getindata.connectors.http.internal.sink.httpclient.RequestSubmitterFactory; - -/** - * An internal implementation of HTTP Sink that performs async requests against a specified HTTP - * endpoint using the buffering protocol specified in {@link AsyncSinkBase}. - *

- * API of this class can change without any concerns as long as it does not have any influence on - * methods defined in {@link com.getindata.connectors.http.HttpSink} and {@link - * com.getindata.connectors.http.HttpSinkBuilder} classes. - * - *

The behaviour of the buffering may be specified by providing configuration during the sink - * build time. - * - *

    - *
  • {@code maxBatchSize}: the maximum size of a batch of entries that may be sent to the HTTP - * endpoint;
  • - *
  • {@code maxInFlightRequests}: the maximum number of in flight requests that may exist, if - * any more in flight requests need to be initiated once the maximum has been reached, then it - * will be blocked until some have completed;
  • - *
  • {@code maxBufferedRequests}: the maximum number of elements held in the buffer, requests to - * add elements will be blocked while the number of elements in the buffer is at the - * maximum;
  • - *
  • {@code maxBatchSizeInBytes}: the maximum size of a batch of entries that may be sent to - * the HTTP endpoint measured in bytes;
  • - *
  • {@code maxTimeInBufferMS}: the maximum amount of time an entry is allowed to live in the - * buffer, if any element reaches this age, the entire buffer will be flushed - * immediately;
  • - *
  • {@code maxRecordSizeInBytes}: the maximum size of a record the sink will accept into the - * buffer, a record of size larger than this will be rejected when passed to the sink.
  • - *
  • {@code httpPostRequestCallback}: the {@link HttpPostRequestCallback} implementation - * for processing of requests and responses;
  • - *
  • {@code properties}: properties related to the Http Sink.
  • - *
- * - * @param type of the elements that should be sent through HTTP request. - */ -public class HttpSinkInternal extends AsyncSinkBase { - - private final String endpointUrl; - - // having Builder instead of an instance of `SinkHttpClient` - // makes it possible to serialize `HttpSink` - private final SinkHttpClientBuilder sinkHttpClientBuilder; - - private final HttpPostRequestCallback httpPostRequestCallback; - - private final HeaderPreprocessor headerPreprocessor; - - private final Properties properties; - - protected HttpSinkInternal( - ElementConverter elementConverter, - int maxBatchSize, - int maxInFlightRequests, - int maxBufferedRequests, - long maxBatchSizeInBytes, - long maxTimeInBufferMS, - long maxRecordSizeInBytes, - String endpointUrl, - HttpPostRequestCallback httpPostRequestCallback, - HeaderPreprocessor headerPreprocessor, - SinkHttpClientBuilder sinkHttpClientBuilder, - Properties properties) { - - super( - elementConverter, - maxBatchSize, - maxInFlightRequests, - maxBufferedRequests, - maxBatchSizeInBytes, - maxTimeInBufferMS, - maxRecordSizeInBytes - ); - - Preconditions.checkArgument(!StringUtils.isNullOrWhitespaceOnly(endpointUrl), - "The endpoint URL must be set when initializing HTTP Sink."); - this.endpointUrl = endpointUrl; - this.httpPostRequestCallback = - Preconditions.checkNotNull( - httpPostRequestCallback, - "Post request callback must be set when initializing HTTP Sink." - ); - this.headerPreprocessor = Preconditions.checkNotNull( - headerPreprocessor, - "Header Preprocessor must be set when initializing HTTP Sink." 
- ); - this.sinkHttpClientBuilder = - Preconditions.checkNotNull(sinkHttpClientBuilder, - "The HTTP client builder must not be null when initializing HTTP Sink."); - this.properties = properties; - } - - @Override - public StatefulSinkWriter> createWriter( - InitContext context) throws IOException { - - ElementConverter elementConverter = getElementConverter(); - if (elementConverter instanceof SchemaLifecycleAwareElementConverter) { - // This cast is needed for Flink 1.15.3 build - ((SchemaLifecycleAwareElementConverter) elementConverter).open(context); - } - - return new HttpSinkWriter<>( - elementConverter, - context, - getMaxBatchSize(), - getMaxInFlightRequests(), - getMaxBufferedRequests(), - getMaxBatchSizeInBytes(), - getMaxTimeInBufferMS(), - getMaxRecordSizeInBytes(), - endpointUrl, - sinkHttpClientBuilder.build( - properties, - httpPostRequestCallback, - headerPreprocessor, - getRequestSubmitterFactory() - ), - Collections.emptyList(), - properties - ); - } - - @Override - public StatefulSinkWriter> restoreWriter( - InitContext context, - Collection> recoveredState) - throws IOException { - - return new HttpSinkWriter<>( - getElementConverter(), - context, - getMaxBatchSize(), - getMaxInFlightRequests(), - getMaxBufferedRequests(), - getMaxBatchSizeInBytes(), - getMaxTimeInBufferMS(), - getMaxRecordSizeInBytes(), - endpointUrl, - sinkHttpClientBuilder.build( - properties, - httpPostRequestCallback, - headerPreprocessor, - getRequestSubmitterFactory() - ), - recoveredState, - properties - ); - } - - @Override - public SimpleVersionedSerializer> - getWriterStateSerializer() { - return new HttpSinkWriterStateSerializer(); - } - - private RequestSubmitterFactory getRequestSubmitterFactory() { - - if (SinkRequestSubmitMode.SINGLE.getMode().equalsIgnoreCase( - properties.getProperty(HttpConnectorConfigConstants.SINK_HTTP_REQUEST_MODE))) { - return new PerRequestRequestSubmitterFactory(); - } - return new BatchRequestSubmitterFactory(getMaxBatchSize()); - } -} 
diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkRequestEntry.java b/src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkRequestEntry.java deleted file mode 100644 index b2596282..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkRequestEntry.java +++ /dev/null @@ -1,36 +0,0 @@ -package com.getindata.connectors.http.internal.sink; - -import java.io.Serializable; - -import lombok.EqualsAndHashCode; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.ToString; - -/** - * Represents a single {@link com.getindata.connectors.http.HttpSink} request. Contains the HTTP - * method name, Content-Type header value, and byte representation of the body of the request. - */ -@RequiredArgsConstructor -@EqualsAndHashCode -@ToString -public final class HttpSinkRequestEntry implements Serializable { - - /** - * HTTP method name to use when sending the request. - */ - @NonNull - public final String method; - - /** - * Body of the request, encoded as byte array. 
- */ - public final byte[] element; - - /** - * @return the size of the {@link HttpSinkRequestEntry#element} - */ - public long getSizeInBytes() { - return element.length; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkWriter.java b/src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkWriter.java deleted file mode 100644 index d17e9213..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkWriter.java +++ /dev/null @@ -1,131 +0,0 @@ -package com.getindata.connectors.http.internal.sink; - -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Properties; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.function.Consumer; - -import lombok.extern.slf4j.Slf4j; -import org.apache.flink.api.connector.sink2.Sink; -import org.apache.flink.connector.base.sink.writer.AsyncSinkWriter; -import org.apache.flink.connector.base.sink.writer.BufferedRequestState; -import org.apache.flink.connector.base.sink.writer.ElementConverter; -import org.apache.flink.metrics.Counter; -import org.apache.flink.util.concurrent.ExecutorThreadFactory; - -import com.getindata.connectors.http.internal.SinkHttpClient; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.utils.ThreadUtils; - -/** - * Sink writer created by {@link com.getindata.connectors.http.HttpSink} to write to an HTTP - * endpoint. - * - *

More details on the internals of this sink writer may be found in {@link AsyncSinkWriter} - * documentation. - * - * @param type of the elements that should be sent through HTTP request. - */ -@Slf4j -public class HttpSinkWriter extends AsyncSinkWriter { - - private static final String HTTP_SINK_WRITER_THREAD_POOL_SIZE = "4"; - - /** - * Thread pool to handle HTTP response from HTTP client. - */ - private final ExecutorService sinkWriterThreadPool; - - private final String endpointUrl; - - private final SinkHttpClient sinkHttpClient; - - private final Counter numRecordsSendErrorsCounter; - - public HttpSinkWriter( - ElementConverter elementConverter, - Sink.InitContext context, - int maxBatchSize, - int maxInFlightRequests, - int maxBufferedRequests, - long maxBatchSizeInBytes, - long maxTimeInBufferMS, - long maxRecordSizeInBytes, - String endpointUrl, - SinkHttpClient sinkHttpClient, - Collection> bufferedRequestStates, - Properties properties) { - - super(elementConverter, context, maxBatchSize, maxInFlightRequests, maxBufferedRequests, - maxBatchSizeInBytes, maxTimeInBufferMS, maxRecordSizeInBytes, bufferedRequestStates); - this.endpointUrl = endpointUrl; - this.sinkHttpClient = sinkHttpClient; - - var metrics = context.metricGroup(); - this.numRecordsSendErrorsCounter = metrics.getNumRecordsSendErrorsCounter(); - - int sinkWriterThreadPollSize = Integer.parseInt(properties.getProperty( - HttpConnectorConfigConstants.SINK_HTTP_WRITER_THREAD_POOL_SIZE, - HTTP_SINK_WRITER_THREAD_POOL_SIZE - )); - - this.sinkWriterThreadPool = - Executors.newFixedThreadPool( - sinkWriterThreadPollSize, - new ExecutorThreadFactory( - "http-sink-writer-worker", ThreadUtils.LOGGING_EXCEPTION_HANDLER)); - } - - // TODO: Reintroduce retries by adding backoff policy - @Override - protected void submitRequestEntries( - List requestEntries, - Consumer> requestResult) { - var future = sinkHttpClient.putRequests(requestEntries, endpointUrl); - future.whenCompleteAsync((response, err) -> { - 
if (err != null) { - int failedRequestsNumber = requestEntries.size(); - log.error( - "Http Sink fatally failed to write all {} requests", - failedRequestsNumber); - numRecordsSendErrorsCounter.inc(failedRequestsNumber); - - // TODO: Make `HttpSinkInternal` retry the failed requests. - // Currently, it does not retry those at all, only adds their count - // to the `numRecordsSendErrors` metric. It is due to the fact we do not have - // a clear image how we want to do it, so it would be both efficient and correct. - //requestResult.accept(requestEntries); - } else if (response.getFailedRequests().size() > 0) { - int failedRequestsNumber = response.getFailedRequests().size(); - log.error("Http Sink failed to write and will retry {} requests", - failedRequestsNumber); - numRecordsSendErrorsCounter.inc(failedRequestsNumber); - - // TODO: Make `HttpSinkInternal` retry the failed requests. Currently, - // it does not retry those at all, only adds their count to the - // `numRecordsSendErrors` metric. It is due to the fact we do not have - // a clear image how we want to do it, so it would be both efficient and correct. 
- - //requestResult.accept(response.getFailedRequests()); - //} else { - //requestResult.accept(Collections.emptyList()); - //} - } - requestResult.accept(Collections.emptyList()); - }, sinkWriterThreadPool); - } - - @Override - protected long getSizeInBytes(HttpSinkRequestEntry s) { - return s.getSizeInBytes(); - } - - @Override - public void close() { - sinkWriterThreadPool.shutdownNow(); - super.close(); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkWriterStateSerializer.java b/src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkWriterStateSerializer.java deleted file mode 100644 index 667629e4..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/sink/HttpSinkWriterStateSerializer.java +++ /dev/null @@ -1,36 +0,0 @@ -package com.getindata.connectors.http.internal.sink; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; - -import org.apache.flink.connector.base.sink.writer.AsyncSinkWriterStateSerializer; - -/** - * An implementation of {@link AsyncSinkWriterStateSerializer} for {@link HttpSinkInternal} and its - * {@link HttpSinkWriter}. 
- */ -public class HttpSinkWriterStateSerializer - extends AsyncSinkWriterStateSerializer { - - @Override - protected void serializeRequestToStream(HttpSinkRequestEntry s, DataOutputStream out) - throws IOException { - out.writeUTF(s.method); - out.write(s.element); - } - - @Override - protected HttpSinkRequestEntry deserializeRequestFromStream(long requestSize, - DataInputStream in) throws IOException { - var method = in.readUTF(); - var bytes = new byte[(int) requestSize]; - in.read(bytes); - return new HttpSinkRequestEntry(method, bytes); - } - - @Override - public int getVersion() { - return 1; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/AbstractRequestSubmitter.java b/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/AbstractRequestSubmitter.java deleted file mode 100644 index 55117dbe..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/AbstractRequestSubmitter.java +++ /dev/null @@ -1,49 +0,0 @@ -package com.getindata.connectors.http.internal.sink.httpclient; - -import java.net.http.HttpClient; -import java.util.Properties; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import org.apache.flink.util.concurrent.ExecutorThreadFactory; - -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.utils.ThreadUtils; - -public abstract class AbstractRequestSubmitter implements RequestSubmitter { - - protected static final int HTTP_CLIENT_PUBLISHING_THREAD_POOL_SIZE = 1; - - protected static final String DEFAULT_REQUEST_TIMEOUT_SECONDS = "30"; - - /** - * Thread pool to handle HTTP response from HTTP client. 
- */ - protected final ExecutorService publishingThreadPool; - - protected final int httpRequestTimeOutSeconds; - - protected final String[] headersAndValues; - - protected final HttpClient httpClient; - - public AbstractRequestSubmitter( - Properties properties, - String[] headersAndValues, - HttpClient httpClient) { - - this.headersAndValues = headersAndValues; - this.publishingThreadPool = - Executors.newFixedThreadPool( - HTTP_CLIENT_PUBLISHING_THREAD_POOL_SIZE, - new ExecutorThreadFactory( - "http-sink-client-response-worker", ThreadUtils.LOGGING_EXCEPTION_HANDLER)); - - this.httpRequestTimeOutSeconds = Integer.parseInt( - properties.getProperty(HttpConnectorConfigConstants.SINK_HTTP_TIMEOUT_SECONDS, - DEFAULT_REQUEST_TIMEOUT_SECONDS) - ); - - this.httpClient = httpClient; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitterFactory.java b/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitterFactory.java deleted file mode 100644 index dba22c8c..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitterFactory.java +++ /dev/null @@ -1,76 +0,0 @@ -package com.getindata.connectors.http.internal.sink.httpclient; - -import java.util.Properties; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import org.apache.flink.util.StringUtils; -import org.apache.flink.util.concurrent.ExecutorThreadFactory; - -import com.getindata.connectors.http.internal.config.ConfigException; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.utils.JavaNetHttpClientFactory; -import com.getindata.connectors.http.internal.utils.ThreadUtils; - -public class BatchRequestSubmitterFactory implements RequestSubmitterFactory { - - // TODO Add this property to config. 
Make sure to add note in README.md that will describe that - // any value greater than one will break order of messages. - int HTTP_CLIENT_THREAD_POOL_SIZE = 1; - - private final String maxBatchSize; - - public BatchRequestSubmitterFactory(int maxBatchSize) { - if (maxBatchSize < 1) { - throw new IllegalArgumentException( - "Batch Request submitter batch size must be greater than zero."); - } - this.maxBatchSize = String.valueOf(maxBatchSize); - } - - @Override - public BatchRequestSubmitter createSubmitter(Properties properties, String[] headersAndValues) { - String batchRequestSize = - properties.getProperty(HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE); - if (StringUtils.isNullOrWhitespaceOnly(batchRequestSize)) { - properties.setProperty( - HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, - maxBatchSize - ); - } else { - try { - // TODO Create property validator someday. - int batchSize = Integer.parseInt(batchRequestSize); - if (batchSize < 1) { - throw new ConfigException( - String.format("Property %s must be greater than 0 but was: %s", - HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, - batchRequestSize) - ); - } - } catch (NumberFormatException e) { - // TODO Create property validator someday. 
- throw new ConfigException( - String.format("Property %s must be an integer but was: %s", - HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, - batchRequestSize), - e - ); - } - } - - ExecutorService httpClientExecutor = - Executors.newFixedThreadPool( - HTTP_CLIENT_THREAD_POOL_SIZE, - new ExecutorThreadFactory( - "http-sink-client-batch-request-worker", - ThreadUtils.LOGGING_EXCEPTION_HANDLER) - ); - - return new BatchRequestSubmitter( - properties, - headersAndValues, - JavaNetHttpClientFactory.createClient(properties, httpClientExecutor) - ); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/HttpRequest.java b/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/HttpRequest.java deleted file mode 100644 index 72adeacb..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/HttpRequest.java +++ /dev/null @@ -1,16 +0,0 @@ -package com.getindata.connectors.http.internal.sink.httpclient; - -import java.util.List; - -import lombok.Data; - -@Data -public class HttpRequest { - - public final java.net.http.HttpRequest httpRequest; - - public final List elements; - - public final String method; - -} diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetHttpResponseWrapper.java b/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetHttpResponseWrapper.java deleted file mode 100644 index 5d7a6607..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetHttpResponseWrapper.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.getindata.connectors.http.internal.sink.httpclient; - -import java.net.http.HttpResponse; -import java.util.Optional; - -import lombok.Data; -import lombok.NonNull; - -import com.getindata.connectors.http.internal.sink.HttpSinkRequestEntry; - -/** - * A wrapper structure around an HTTP response, keeping a reference to a particular {@link - * HttpSinkRequestEntry}. 
Used internally by the {@code HttpSinkWriter} to pass {@code - * HttpSinkRequestEntry} along some other element that it is logically connected with. - */ -@Data -final class JavaNetHttpResponseWrapper { - - /** - * A representation of a single {@link com.getindata.connectors.http.HttpSink} request. - */ - @NonNull - private final HttpRequest httpRequest; - - /** - * A response to an HTTP request based on {@link HttpSinkRequestEntry}. - */ - private final HttpResponse response; - - public Optional> getResponse() { - return Optional.ofNullable(response); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetSinkHttpClient.java b/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetSinkHttpClient.java deleted file mode 100644 index 7e4c19ff..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetSinkHttpClient.java +++ /dev/null @@ -1,121 +0,0 @@ -package com.getindata.connectors.http.internal.sink.httpclient; - -import java.net.http.HttpClient; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.CompletableFuture; -import java.util.stream.Collectors; - -import lombok.extern.slf4j.Slf4j; -import org.apache.flink.annotation.VisibleForTesting; - -import com.getindata.connectors.http.HttpPostRequestCallback; -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.SinkHttpClient; -import com.getindata.connectors.http.internal.SinkHttpClientResponse; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.sink.HttpSinkRequestEntry; -import com.getindata.connectors.http.internal.status.ComposeHttpStatusCodeChecker; -import com.getindata.connectors.http.internal.status.ComposeHttpStatusCodeChecker.ComposeHttpStatusCodeCheckerConfig; 
-import com.getindata.connectors.http.internal.status.HttpStatusCodeChecker; -import com.getindata.connectors.http.internal.utils.HttpHeaderUtils; - -/** - * An implementation of {@link SinkHttpClient} that uses Java 11's {@link HttpClient}. This - * implementation supports HTTP traffic only. - */ -@Slf4j -public class JavaNetSinkHttpClient implements SinkHttpClient { - - private final String[] headersAndValues; - - private final Map headerMap; - - private final HttpStatusCodeChecker statusCodeChecker; - - private final HttpPostRequestCallback httpPostRequestCallback; - - private final RequestSubmitter requestSubmitter; - - public JavaNetSinkHttpClient( - Properties properties, - HttpPostRequestCallback httpPostRequestCallback, - HeaderPreprocessor headerPreprocessor, - RequestSubmitterFactory requestSubmitterFactory) { - - this.httpPostRequestCallback = httpPostRequestCallback; - this.headerMap = HttpHeaderUtils.prepareHeaderMap( - HttpConnectorConfigConstants.SINK_HEADER_PREFIX, - properties, - headerPreprocessor - ); - - // TODO Inject this via constructor when implementing a response processor. - // Processor will be injected and it will wrap statusChecker implementation. 
- ComposeHttpStatusCodeCheckerConfig checkerConfig = - ComposeHttpStatusCodeCheckerConfig.builder() - .properties(properties) - .whiteListPrefix(HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODE_WHITE_LIST) - .errorCodePrefix(HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST) - .build(); - - this.statusCodeChecker = new ComposeHttpStatusCodeChecker(checkerConfig); - - this.headersAndValues = HttpHeaderUtils.toHeaderAndValueArray(this.headerMap); - this.requestSubmitter = requestSubmitterFactory.createSubmitter( - properties, - headersAndValues - ); - } - - @Override - public CompletableFuture putRequests( - List requestEntries, - String endpointUrl) { - return submitRequests(requestEntries, endpointUrl) - .thenApply(responses -> prepareSinkHttpClientResponse(responses, endpointUrl)); - } - - private CompletableFuture> submitRequests( - List requestEntries, - String endpointUrl) { - - var responseFutures = requestSubmitter.submit(endpointUrl, requestEntries); - var allFutures = CompletableFuture.allOf(responseFutures.toArray(new CompletableFuture[0])); - return allFutures.thenApply(_void -> responseFutures.stream().map(CompletableFuture::join) - .collect(Collectors.toList())); - } - - private SinkHttpClientResponse prepareSinkHttpClientResponse( - List responses, - String endpointUrl) { - var successfulResponses = new ArrayList(); - var failedResponses = new ArrayList(); - - for (var response : responses) { - var sinkRequestEntry = response.getHttpRequest(); - var optResponse = response.getResponse(); - - httpPostRequestCallback.call( - optResponse.orElse(null), sinkRequestEntry, endpointUrl, headerMap); - - // TODO Add response processor here and orchestrate it with statusCodeChecker. 
- if (optResponse.isEmpty() || - statusCodeChecker.isErrorCode(optResponse.get().statusCode())) { - failedResponses.add(sinkRequestEntry); - } else { - successfulResponses.add(sinkRequestEntry); - } - } - - return new SinkHttpClientResponse(successfulResponses, failedResponses); - } - - @VisibleForTesting - String[] getHeadersAndValues() { - return Arrays.copyOf(headersAndValues, headersAndValues.length); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/PerRequestRequestSubmitterFactory.java b/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/PerRequestRequestSubmitterFactory.java deleted file mode 100644 index 25a71f4f..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/PerRequestRequestSubmitterFactory.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.getindata.connectors.http.internal.sink.httpclient; - -import java.util.Properties; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import org.apache.flink.util.concurrent.ExecutorThreadFactory; - -import com.getindata.connectors.http.internal.utils.JavaNetHttpClientFactory; -import com.getindata.connectors.http.internal.utils.ThreadUtils; - -public class PerRequestRequestSubmitterFactory implements RequestSubmitterFactory { - - // TODO Add this property to config. Make sure to add note in README.md that will describe that - // any value greater than one will break order of messages. 
- int HTTP_CLIENT_THREAD_POOL_SIZE = 1; - - @Override - public RequestSubmitter createSubmitter(Properties properties, String[] headersAndValues) { - - ExecutorService httpClientExecutor = - Executors.newFixedThreadPool( - HTTP_CLIENT_THREAD_POOL_SIZE, - new ExecutorThreadFactory( - "http-sink-client-per-request-worker", ThreadUtils.LOGGING_EXCEPTION_HANDLER)); - - return new PerRequestSubmitter( - properties, - headersAndValues, - JavaNetHttpClientFactory.createClient(properties, httpClientExecutor) - ); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/PerRequestSubmitter.java b/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/PerRequestSubmitter.java deleted file mode 100644 index 02b637c1..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/PerRequestSubmitter.java +++ /dev/null @@ -1,80 +0,0 @@ -package com.getindata.connectors.http.internal.sink.httpclient; - -import java.net.URI; -import java.net.http.HttpClient; -import java.net.http.HttpClient.Version; -import java.net.http.HttpRequest.BodyPublishers; -import java.net.http.HttpRequest.Builder; -import java.net.http.HttpResponse; -import java.time.Duration; -import java.util.ArrayList; -import java.util.List; -import java.util.Properties; -import java.util.concurrent.CompletableFuture; - -import lombok.extern.slf4j.Slf4j; - -import com.getindata.connectors.http.internal.sink.HttpSinkRequestEntry; - -/** - * This implementation creates HTTP requests for every processed event. 
- */ -@Slf4j -public class PerRequestSubmitter extends AbstractRequestSubmitter { - - public PerRequestSubmitter( - Properties properties, - String[] headersAndValues, - HttpClient httpClient) { - - super(properties, headersAndValues, httpClient); - } - - @Override - public List> submit( - String endpointUrl, - List requestToSubmit) { - - var endpointUri = URI.create(endpointUrl); - var responseFutures = new ArrayList>(); - - for (var entry : requestToSubmit) { - HttpRequest httpRequest = buildHttpRequest(entry, endpointUri); - var response = httpClient - .sendAsync( - httpRequest.getHttpRequest(), - HttpResponse.BodyHandlers.ofString()) - .exceptionally(ex -> { - // TODO This will be executed on a ForkJoinPool Thread... refactor this someday. - log.error("Request fatally failed because of an exception", ex); - return null; - }) - .thenApplyAsync( - res -> new JavaNetHttpResponseWrapper(httpRequest, res), - publishingThreadPool - ); - responseFutures.add(response); - } - return responseFutures; - } - - private HttpRequest buildHttpRequest(HttpSinkRequestEntry requestEntry, URI endpointUri) { - Builder requestBuilder = java.net.http.HttpRequest - .newBuilder() - .uri(endpointUri) - .version(Version.HTTP_1_1) - .timeout(Duration.ofSeconds(httpRequestTimeOutSeconds)) - .method(requestEntry.method, - BodyPublishers.ofByteArray(requestEntry.element)); - - if (headersAndValues.length != 0) { - requestBuilder.headers(headersAndValues); - } - - return new HttpRequest( - requestBuilder.build(), - List.of(requestEntry.element), - requestEntry.method - ); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/RequestSubmitter.java b/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/RequestSubmitter.java deleted file mode 100644 index d678c7f6..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/RequestSubmitter.java +++ /dev/null @@ -1,16 +0,0 @@ -package 
com.getindata.connectors.http.internal.sink.httpclient; - -import java.util.List; -import java.util.concurrent.CompletableFuture; - -import com.getindata.connectors.http.internal.sink.HttpSinkRequestEntry; - -/** - * Submits request via HTTP. - */ -public interface RequestSubmitter { - - List> submit( - String endpointUrl, - List requestToSubmit); -} diff --git a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/RequestSubmitterFactory.java b/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/RequestSubmitterFactory.java deleted file mode 100644 index 39dbe909..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/sink/httpclient/RequestSubmitterFactory.java +++ /dev/null @@ -1,8 +0,0 @@ -package com.getindata.connectors.http.internal.sink.httpclient; - -import java.util.Properties; - -public interface RequestSubmitterFactory { - - RequestSubmitter createSubmitter(Properties properties, String[] headersAndValues); -} diff --git a/src/main/java/com/getindata/connectors/http/internal/status/ComposeHttpStatusCodeChecker.java b/src/main/java/com/getindata/connectors/http/internal/status/ComposeHttpStatusCodeChecker.java deleted file mode 100644 index 015c068c..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/status/ComposeHttpStatusCodeChecker.java +++ /dev/null @@ -1,161 +0,0 @@ -package com.getindata.connectors.http.internal.status; - -import java.util.Arrays; -import java.util.HashSet; -import java.util.Properties; -import java.util.Set; -import java.util.stream.Collectors; - -import lombok.AccessLevel; -import lombok.Builder; -import lombok.Data; -import lombok.RequiredArgsConstructor; -import org.apache.flink.util.Preconditions; -import org.apache.flink.util.StringUtils; - -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; - -/** - * An implementation of {@link HttpStatusCodeChecker} that checks Http Status code against - * white list, concrete value or {@link 
HttpResponseCodeType} - */ -public class ComposeHttpStatusCodeChecker implements HttpStatusCodeChecker { - - private static final Set DEFAULT_ERROR_CODES = - Set.of( - new TypeStatusCodeChecker(HttpResponseCodeType.CLIENT_ERROR), - new TypeStatusCodeChecker(HttpResponseCodeType.SERVER_ERROR) - ); - - private static final int MIN_HTTP_STATUS_CODE = 100; - - /** - * Set of {@link HttpStatusCodeChecker} for white listed status codes. - */ - private final Set excludedCodes; - - /** - * Set of {@link HttpStatusCodeChecker} that check status code againts value match or {@link - * HttpResponseCodeType} match. - */ - private final Set errorCodes; - - public ComposeHttpStatusCodeChecker(ComposeHttpStatusCodeCheckerConfig config) { - excludedCodes = prepareWhiteList(config); - errorCodes = prepareErrorCodes(config); - } - - /** - * Checks whether given status code is considered as a error code. - * This implementation checks if status code matches any single value mask like "404" - * or http type mask such as "4XX". Code that matches one of those masks and is not on a - * white list will be considered as error code. - * @param statusCode http status code to assess. - * @return true if status code is considered as error or false if not. 
- */ - public boolean isErrorCode(int statusCode) { - - Preconditions.checkArgument( - statusCode >= MIN_HTTP_STATUS_CODE, - String.format( - "Provided invalid Http status code %s," - + " status code should be equal or bigger than %d.", - statusCode, - MIN_HTTP_STATUS_CODE) - ); - - boolean isWhiteListed = excludedCodes.stream() - .anyMatch(check -> check.isWhiteListed(statusCode)); - - return !isWhiteListed - && errorCodes.stream() - .anyMatch(httpStatusCodeChecker -> httpStatusCodeChecker.isErrorCode(statusCode)); - } - - private Set prepareErrorCodes( - ComposeHttpStatusCodeCheckerConfig config) { - - Properties properties = config.getProperties(); - String errorCodePrefix = config.getErrorCodePrefix(); - - String errorCodes = - properties.getProperty(errorCodePrefix, ""); - - if (StringUtils.isNullOrWhitespaceOnly(errorCodes)) { - return DEFAULT_ERROR_CODES; - } else { - String[] splitCodes = errorCodes.split(HttpConnectorConfigConstants.PROP_DELIM); - return prepareErrorCodes(splitCodes); - } - } - - /** - * Process given array of status codes and assign them to - * {@link SingleValueHttpStatusCodeChecker} for full codes such as 100, 404 etc. or to - * {@link TypeStatusCodeChecker} for codes that were constructed with "XX" mask - */ - private Set prepareErrorCodes(String[] statusCodes) { - - Set errorCodes = new HashSet<>(); - for (String sCode : statusCodes) { - if (!StringUtils.isNullOrWhitespaceOnly(sCode)) { - String trimCode = sCode.toUpperCase().trim(); - Preconditions.checkArgument( - trimCode.length() == 3, - "Status code should contain three characters. Provided [%s]", - trimCode); - - // at this point we have trim, upper case 3 character status code. 
- if (isTypeCode(trimCode)) { - int code = Integer.parseInt(trimCode.replace("X", "")); - errorCodes.add(new TypeStatusCodeChecker(HttpResponseCodeType.getByCode(code))); - } else { - errorCodes.add( - new SingleValueHttpStatusCodeChecker(Integer.parseInt(trimCode)) - ); - } - } - } - return (errorCodes.isEmpty()) ? DEFAULT_ERROR_CODES : errorCodes; - } - - private Set prepareWhiteList( - ComposeHttpStatusCodeCheckerConfig config) { - - Properties properties = config.getProperties(); - String whiteListPrefix = config.getWhiteListPrefix(); - - return Arrays.stream( - properties.getProperty(whiteListPrefix, "") - .split(HttpConnectorConfigConstants.PROP_DELIM)) - .filter(sCode -> !StringUtils.isNullOrWhitespaceOnly(sCode)) - .map(String::trim) - .mapToInt(Integer::parseInt) - .mapToObj(WhiteListHttpStatusCodeChecker::new) - .collect(Collectors.toSet()); - } - - /** - * This method checks if "code" param matches "digit + XX" mask. This method expects that - * provided string will be 3 elements long, trim and upper case. - * - * @param code to check if it contains XX on second ant third position. Parameter is expected to - * be 3 characters long, trim and uppercase. - * @return true if string matches "anything + XX" and false if not. 
- */ - private boolean isTypeCode(final String code) { - return code.charAt(1) == 'X' && code.charAt(2) == 'X'; - } - - @Data - @Builder - @RequiredArgsConstructor(access = AccessLevel.PRIVATE) - public static class ComposeHttpStatusCodeCheckerConfig { - - private final String whiteListPrefix; - - private final String errorCodePrefix; - - private final Properties properties; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/status/HttpCodesParser.java b/src/main/java/com/getindata/connectors/http/internal/status/HttpCodesParser.java deleted file mode 100644 index 1f7a52cd..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/status/HttpCodesParser.java +++ /dev/null @@ -1,63 +0,0 @@ -package com.getindata.connectors.http.internal.status; - -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; -import java.util.regex.Pattern; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import static java.lang.String.format; - -import lombok.experimental.UtilityClass; -import org.apache.flink.util.ConfigurationException; - -@UtilityClass -public class HttpCodesParser { - - private final Pattern CODE_GROUP_EXPRESSION = Pattern.compile("[1-5][xX]{2}"); - private final String DELIMITER = Pattern.quote(","); - private final int HTTP_CODE_MIN = 100; - private final int HTTP_CODE_MAX = 599; - - public Set parse(String codesExpression) throws ConfigurationException { - var whitelist = new HashSet(); - var blacklist = new HashSet(); - for (var rawCode : codesExpression.split(DELIMITER)) { - var code = rawCode.trim(); - if (code.isEmpty()) { - continue; - } - if (code.startsWith("!")) { - try { - blacklist.add(parseHttpCode(code.substring(1))); - continue; - } catch (NumberFormatException e) { - throw new ConfigurationException("Can not parse code " + code); - } - } - try { - whitelist.add(parseHttpCode(code)); - } catch (NumberFormatException e) { - if 
(CODE_GROUP_EXPRESSION.matcher(code).matches()) { - var firstGroupCode = Integer.parseInt(code.substring(0, 1)) * 100; - var groupCodes = IntStream.range(firstGroupCode, firstGroupCode + 100) - .boxed().collect(Collectors.toList()); - whitelist.addAll(groupCodes); - } else { - throw new ConfigurationException("Can not parse code " + code); - } - } - } - - whitelist.removeAll(blacklist); - return Collections.unmodifiableSet(whitelist); - } - - private Integer parseHttpCode(String str) throws ConfigurationException { - var parsed = Integer.parseInt(str); - if (parsed < HTTP_CODE_MIN || parsed > HTTP_CODE_MAX) { - throw new ConfigurationException(format("Http code out of the range [%s]", parsed)); - } - return parsed; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/status/HttpResponseCodeType.java b/src/main/java/com/getindata/connectors/http/internal/status/HttpResponseCodeType.java deleted file mode 100644 index 71f174eb..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/status/HttpResponseCodeType.java +++ /dev/null @@ -1,49 +0,0 @@ -package com.getindata.connectors.http.internal.status; - -import java.util.HashMap; -import java.util.Map; - -/** - * This enum represents HTTP response code types, grouped by "hundreds" digit. - */ -public enum HttpResponseCodeType { - - INFO(1), - SUCCESS(2), - REDIRECTION(3), - CLIENT_ERROR(4), - SERVER_ERROR(5); - - private static final Map map; - - static { - map = new HashMap<>(); - for (HttpResponseCodeType httpResponseCodeType : HttpResponseCodeType.values()) { - map.put(httpResponseCodeType.httpTypeCode, httpResponseCodeType); - } - } - - private final int httpTypeCode; - - HttpResponseCodeType(int httpTypeCode) { - this.httpTypeCode = httpTypeCode; - } - - /** - * @param statusCode Http status code to get the {@link HttpResponseCodeType} instance for. 
- * @return a {@link HttpResponseCodeType} instance based on http type code, for example {@code - * HttpResponseCodeType.getByCode(1)} will return {@link HttpResponseCodeType#INFO} type. - */ - public static HttpResponseCodeType getByCode(int statusCode) { - return map.get(statusCode); - } - - /** - * @return a "hundreds" digit that represents given {@link HttpResponseCodeType} instance. - * For example {@code HttpResponseCodeType.INFO.getHttpTypeCode()} will return 1 since HTTP - * information repossess have status codes in range 100 - 199. - */ - public int getHttpTypeCode() { - return this.httpTypeCode; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/status/HttpStatusCodeChecker.java b/src/main/java/com/getindata/connectors/http/internal/status/HttpStatusCodeChecker.java deleted file mode 100644 index 6af0344c..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/status/HttpStatusCodeChecker.java +++ /dev/null @@ -1,16 +0,0 @@ -package com.getindata.connectors.http.internal.status; - -/** - * Base interface for all classes that would validate HTTP status - * code whether it is an error or not. - */ -public interface HttpStatusCodeChecker { - - /** - * Validates http status code wheter it is considered as error code. The logic for - * what status codes are considered as "errors" depends on the concreted implementation - * @param statusCode http status code to assess. - * @return true if statusCode is considered as Error and false if not. 
- */ - boolean isErrorCode(int statusCode); -} diff --git a/src/main/java/com/getindata/connectors/http/internal/status/SingleValueHttpStatusCodeChecker.java b/src/main/java/com/getindata/connectors/http/internal/status/SingleValueHttpStatusCodeChecker.java deleted file mode 100644 index b52951ed..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/status/SingleValueHttpStatusCodeChecker.java +++ /dev/null @@ -1,28 +0,0 @@ -package com.getindata.connectors.http.internal.status; - -import lombok.EqualsAndHashCode; -import lombok.RequiredArgsConstructor; - -/** - * An implementation of {@link HttpStatusCodeChecker} that validates status code against - * constant value. - */ -@RequiredArgsConstructor -@EqualsAndHashCode -public class SingleValueHttpStatusCodeChecker implements HttpStatusCodeChecker { - - /** - * A reference http status code to compare with. - */ - private final int errorCode; - - /** - * Validates given statusCode against constant value. - * @param statusCode http status code to assess. - * @return true if status code is considered as error or false if not. - */ - @Override - public boolean isErrorCode(int statusCode) { - return errorCode == statusCode; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/status/TypeStatusCodeChecker.java b/src/main/java/com/getindata/connectors/http/internal/status/TypeStatusCodeChecker.java deleted file mode 100644 index df942879..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/status/TypeStatusCodeChecker.java +++ /dev/null @@ -1,43 +0,0 @@ -package com.getindata.connectors.http.internal.status; - -import lombok.EqualsAndHashCode; - -/** - * Implementation of {@link HttpStatusCodeChecker} that verifies if given Http status code - * belongs to specific HTTP code type family. For example if it any of 100's 200's or 500's code. 
- */ -@EqualsAndHashCode -public class TypeStatusCodeChecker implements HttpStatusCodeChecker { - - /** - * First digit from HTTP status code that describes a type of code, - * for example 1 for all 100's, 5 for all 500's. - */ - private final int httpTypeCode; - - /** - * Creates TypeStatusCodeChecker for given {@link HttpResponseCodeType} - * - * @param httpResponseCodeType {@link HttpResponseCodeType} for this {@link - * TypeStatusCodeChecker} instance. - */ - public TypeStatusCodeChecker(HttpResponseCodeType httpResponseCodeType) { - this.httpTypeCode = httpResponseCodeType.getHttpTypeCode(); - } - - /** - * Checks whether given status code belongs to Http code status type. - * For example: - *

{@code
-     *    TypeStatusCodeChecker checker =  new TypeStatusCodeChecker(5);
-     *    checker.isErrorCode(505); <- will return true.
-     *    }
-     * 
- * @param statusCode http status code to assess. - * @return true if status code is considered as error or false if not. - */ - @Override - public boolean isErrorCode(int statusCode) { - return statusCode / 100 == httpTypeCode; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/status/WhiteListHttpStatusCodeChecker.java b/src/main/java/com/getindata/connectors/http/internal/status/WhiteListHttpStatusCodeChecker.java deleted file mode 100644 index 2aa65c65..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/status/WhiteListHttpStatusCodeChecker.java +++ /dev/null @@ -1,23 +0,0 @@ -package com.getindata.connectors.http.internal.status; - -import lombok.EqualsAndHashCode; -import lombok.RequiredArgsConstructor; - -/** - * Class that implements logic of a "white list" against single constant value. - */ -@RequiredArgsConstructor -@EqualsAndHashCode -public class WhiteListHttpStatusCodeChecker { - - private final int whiteListCode; - - /** - * Checks if given statusCode is considered as "white listed" - * @param statusCode status code to check. - * @return true if given statusCode is white listed and false if not. 
- */ - public boolean isWhiteListed(int statusCode) { - return whiteListCode == statusCode; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/SerializationSchemaElementConverter.java b/src/main/java/com/getindata/connectors/http/internal/table/SerializationSchemaElementConverter.java deleted file mode 100644 index 5fb00719..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/SerializationSchemaElementConverter.java +++ /dev/null @@ -1,47 +0,0 @@ -package com.getindata.connectors.http.internal.table; - -import org.apache.flink.api.common.serialization.SerializationSchema; -import org.apache.flink.api.connector.sink2.Sink.InitContext; -import org.apache.flink.api.connector.sink2.SinkWriter.Context; -import org.apache.flink.table.data.RowData; -import org.apache.flink.util.FlinkRuntimeException; - -import com.getindata.connectors.http.SchemaLifecycleAwareElementConverter; -import com.getindata.connectors.http.internal.sink.HttpSinkRequestEntry; - -public class SerializationSchemaElementConverter - implements SchemaLifecycleAwareElementConverter { - - private final String insertMethod; - - private final SerializationSchema serializationSchema; - - private boolean schemaOpened = false; - - public SerializationSchemaElementConverter( - String insertMethod, - SerializationSchema serializationSchema) { - - this.insertMethod = insertMethod; - this.serializationSchema = serializationSchema; - } - - @Override - public void open(InitContext context) { - if (!schemaOpened) { - try { - serializationSchema.open(context.asSerializationSchemaInitializationContext()); - schemaOpened = true; - } catch (Exception e) { - throw new FlinkRuntimeException("Failed to initialize serialization schema.", e); - } - } - } - - @Override - public HttpSinkRequestEntry apply(RowData rowData, Context context) { - return new HttpSinkRequestEntry( - insertMethod, - serializationSchema.serialize(rowData)); - } -} diff --git 
a/src/main/java/com/getindata/connectors/http/internal/table/lookup/AsyncHttpTableLookupFunction.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/AsyncHttpTableLookupFunction.java deleted file mode 100644 index 720186ad..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/AsyncHttpTableLookupFunction.java +++ /dev/null @@ -1,111 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.util.Collection; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.functions.AsyncLookupFunction; -import org.apache.flink.table.functions.FunctionContext; -import org.apache.flink.util.concurrent.ExecutorThreadFactory; - -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.utils.ThreadUtils; - -@Slf4j -@RequiredArgsConstructor -public class AsyncHttpTableLookupFunction extends AsyncLookupFunction { - - private static final String PULLING_THREAD_POOL_SIZE = "8"; - - private static final String PUBLISHING_THREAD_POOL_SIZE = "4"; - - /** - * The {@link org.apache.flink.table.functions.TableFunction} we want to decorate with - * async framework. - */ - private final HttpTableLookupFunction decorate; - - /** - * Thread pool for polling data from Http endpoint. - */ - private transient ExecutorService pullingThreadPool; - - /** - * Thread pool for publishing data to Flink. 
- */ - private transient ExecutorService publishingThreadPool; - - @Override - public void open(FunctionContext context) throws Exception { - super.open(context); - decorate.open(context); - - int pullingThreadPoolSize = Integer.parseInt( - decorate.getOptions().getProperties().getProperty( - HttpConnectorConfigConstants.LOOKUP_HTTP_PULING_THREAD_POOL_SIZE, - PULLING_THREAD_POOL_SIZE) - ); - - int publishingThreadPoolSize = Integer.parseInt( - decorate.getOptions().getProperties().getProperty( - HttpConnectorConfigConstants.LOOKUP_HTTP_RESPONSE_THREAD_POOL_SIZE, - PUBLISHING_THREAD_POOL_SIZE) - ); - - pullingThreadPool = - Executors.newFixedThreadPool( - pullingThreadPoolSize, - new ExecutorThreadFactory( - "http-async-lookup-worker", ThreadUtils.LOGGING_EXCEPTION_HANDLER) - ); - - publishingThreadPool = - Executors.newFixedThreadPool( - publishingThreadPoolSize, - new ExecutorThreadFactory( - "http-async-publishing-worker", ThreadUtils.LOGGING_EXCEPTION_HANDLER) - ); - } - - @Override - public CompletableFuture> asyncLookup(RowData keyRow) { - CompletableFuture> future = new CompletableFuture<>(); - future.completeAsync(() -> decorate.lookup(keyRow), pullingThreadPool); - - // We don't want to use ForkJoinPool at all. We are using a different thread pool - // for publishing here intentionally to avoid thread starvation. 
- CompletableFuture> resultFuture = new CompletableFuture<>(); - future.whenCompleteAsync( - (result, throwable) -> { - if (throwable != null) { - log.error("Exception while processing Http Async request", throwable); - resultFuture.completeExceptionally( - new RuntimeException("Exception while processing Http Async request", - throwable)); - } else { - resultFuture.complete(result); - } - }, - publishingThreadPool); - return resultFuture; - } - - public LookupRow getLookupRow() { - return decorate.getLookupRow(); - } - - public HttpLookupConfig getOptions() { - return decorate.getOptions(); - } - - @Override - public void close() throws Exception { - this.publishingThreadPool.shutdownNow(); - this.pullingThreadPool.shutdownNow(); - super.close(); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/BodyBasedRequestFactory.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/BodyBasedRequestFactory.java deleted file mode 100644 index b005931d..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/BodyBasedRequestFactory.java +++ /dev/null @@ -1,71 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.net.URI; -import java.net.URISyntaxException; -import java.net.http.HttpRequest; -import java.net.http.HttpRequest.BodyPublishers; -import java.net.http.HttpRequest.Builder; -import java.time.Duration; - -import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; - -import com.getindata.connectors.http.LookupQueryCreator; -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.utils.uri.URIBuilder; - -/** - * Implementation of {@link HttpRequestFactory} for REST calls that sends their parameters using - * request body or in the path. 
- */ -@Slf4j -public class BodyBasedRequestFactory extends RequestFactoryBase { - - private final String methodName; - - public BodyBasedRequestFactory( - String methodName, - LookupQueryCreator lookupQueryCreator, - HeaderPreprocessor headerPreprocessor, - HttpLookupConfig options) { - - super(lookupQueryCreator, headerPreprocessor, options); - this.methodName = methodName.toUpperCase(); - } - - /** - * Method for preparing {@link HttpRequest.Builder} for REST request that sends their parameters - * in request body, for example PUT or POST methods - * - * @param lookupQueryInfo lookup query info used for request body. - * @return {@link HttpRequest.Builder} for given lookupQuery. - */ - @Override - protected Builder setUpRequestMethod(LookupQueryInfo lookupQueryInfo) { - return HttpRequest.newBuilder() - .uri(constructUri(lookupQueryInfo)) - .method(methodName, BodyPublishers.ofString(lookupQueryInfo.getLookupQuery())) - .timeout(Duration.ofSeconds(this.httpRequestTimeOutSeconds)); - } - - @Override - protected Logger getLogger() { - return log; - } - - URI constructUri(LookupQueryInfo lookupQueryInfo) { - StringBuilder resolvedUrl = new StringBuilder(baseUrl); - if (lookupQueryInfo.hasBodyBasedUrlQueryParameters()) { - resolvedUrl.append(baseUrl.contains("?") ? 
"&" : "?") - .append(lookupQueryInfo.getBodyBasedUrlQueryParameters()); - } - resolvedUrl = resolvePathParameters(lookupQueryInfo, resolvedUrl); - - try { - return new URIBuilder(resolvedUrl.toString()).build(); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } - -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/GetRequestFactory.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/GetRequestFactory.java deleted file mode 100644 index d5f2811c..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/GetRequestFactory.java +++ /dev/null @@ -1,70 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.net.URI; -import java.net.URISyntaxException; -import java.net.http.HttpRequest; -import java.net.http.HttpRequest.Builder; -import java.time.Duration; - -import lombok.extern.slf4j.Slf4j; -import org.slf4j.Logger; - -import com.getindata.connectors.http.LookupQueryCreator; -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.utils.uri.URIBuilder; - -/** - * Implementation of {@link HttpRequestFactory} for GET REST calls. - */ -@Slf4j -public class GetRequestFactory extends RequestFactoryBase { - - public GetRequestFactory( - LookupQueryCreator lookupQueryCreator, - HeaderPreprocessor headerPreprocessor, - HttpLookupConfig options) { - - super(lookupQueryCreator, headerPreprocessor, options); - } - - @Override - protected Logger getLogger() { - return log; - } - - /** - * Method for preparing {@link HttpRequest.Builder} for REST GET request, where lookupQueryInfo - * is used as query parameters for GET requests, for example: - *
-     *     http:localhost:8080/service?id=1
-     * 
- * or as payload for body-based requests with optional parameters, for example: - *
-     *     http:localhost:8080/service?id=1
-     *     body payload: { "uid": 2 }
-     * 
- * @param lookupQueryInfo lookup query info used for request query parameters. - * @return {@link HttpRequest.Builder} for given GET lookupQuery - */ - @Override - protected Builder setUpRequestMethod(LookupQueryInfo lookupQueryInfo) { - return HttpRequest.newBuilder() - .uri(constructGetUri(lookupQueryInfo)) - .GET() - .timeout(Duration.ofSeconds(this.httpRequestTimeOutSeconds)); - } - - URI constructGetUri(LookupQueryInfo lookupQueryInfo) { - StringBuilder resolvedUrl = new StringBuilder(baseUrl); - if (lookupQueryInfo.hasLookupQuery()) { - resolvedUrl.append(baseUrl.contains("?") ? "&" : "?") - .append(lookupQueryInfo.getLookupQuery()); - } - resolvedUrl = resolvePathParameters(lookupQueryInfo, resolvedUrl); - try { - return new URIBuilder(resolvedUrl.toString()).build(); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupConfig.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupConfig.java deleted file mode 100644 index cba4b26e..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupConfig.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.io.Serializable; -import java.util.Properties; - -import lombok.Builder; -import lombok.Data; -import lombok.RequiredArgsConstructor; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.configuration.ReadableConfig; - -import com.getindata.connectors.http.HttpPostRequestCallback; - -@Builder -@Data -@RequiredArgsConstructor -public class HttpLookupConfig implements Serializable { - - private final String lookupMethod; - - private final String url; - - @Builder.Default - private final boolean useAsync = false; - - @Builder.Default - private final Properties properties = new Properties(); - - @Builder.Default - private final ReadableConfig readableConfig = 
new Configuration(); - - private final HttpPostRequestCallback httpPostRequestCallback; -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupConnectorOptions.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupConnectorOptions.java deleted file mode 100644 index b21eba93..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupConnectorOptions.java +++ /dev/null @@ -1,161 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.time.Duration; - -import org.apache.flink.configuration.ConfigOption; -import org.apache.flink.configuration.ConfigOptions; - -import com.getindata.connectors.http.internal.retry.RetryStrategyType; -import static com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants.*; - -public class HttpLookupConnectorOptions { - - public static final ConfigOption URL = - ConfigOptions.key("url") - .stringType() - .noDefaultValue() - .withDescription("The HTTP endpoint URL."); - - public static final ConfigOption URL_ARGS = - ConfigOptions.key("url-args") - .stringType() - .noDefaultValue() - .withDescription("The arguments that should be used for HTTP GET Request."); - - public static final ConfigOption ASYNC_POLLING = - ConfigOptions.key("asyncPolling") - .booleanType() - .defaultValue(false) - .withDescription("Whether to use Sync and Async polling mechanism"); - - public static final ConfigOption LOOKUP_METHOD = - ConfigOptions.key("lookup-method") - .stringType() - .defaultValue("GET") - .withDescription("Method used for REST executed by lookup connector."); - - public static final ConfigOption LOOKUP_QUERY_CREATOR_IDENTIFIER = - ConfigOptions.key(SOURCE_LOOKUP_QUERY_CREATOR_IDENTIFIER) - .stringType() - .noDefaultValue(); - - public static final ConfigOption LOOKUP_REQUEST_FORMAT = - ConfigOptions.key("lookup-request.format") - .stringType() - .defaultValue("json"); - - public static final 
ConfigOption USE_RAW_AUTH_HEADER = - ConfigOptions.key(LOOKUP_SOURCE_HEADER_USE_RAW) - .booleanType() - .defaultValue(false) - .withDescription("Whether to use the raw value of Authorization header"); - - public static final ConfigOption REQUEST_CALLBACK_IDENTIFIER = - ConfigOptions.key(SOURCE_LOOKUP_REQUEST_CALLBACK_IDENTIFIER) - .stringType() - .defaultValue(Slf4jHttpLookupPostRequestCallbackFactory.IDENTIFIER); - - public static final ConfigOption SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL = - ConfigOptions.key(OIDC_AUTH_TOKEN_ENDPOINT_URL) - .stringType() - .noDefaultValue() - .withDescription("OIDC Token endpoint url."); - - public static final ConfigOption SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST = - ConfigOptions.key(OIDC_AUTH_TOKEN_REQUEST) - .stringType() - .noDefaultValue() - .withDescription("OIDC token request."); - - public static final ConfigOption SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION = - ConfigOptions.key(OIDC_AUTH_TOKEN_EXPIRY_REDUCTION) - .durationType() - .defaultValue(Duration.ofSeconds(1)) - .withDescription("OIDC authorization access token expiry" + - " reduction as a Duration." 
+ - " A new access token is obtained if the token" + - " is older than it's expiry time minus this value."); - - public static final ConfigOption SOURCE_LOOKUP_CONNECTION_TIMEOUT = - ConfigOptions.key(SOURCE_CONNECTION_TIMEOUT) - .durationType() - .noDefaultValue() - .withDescription("Http client connection timeout."); - - public static final ConfigOption SOURCE_LOOKUP_PROXY_HOST = - ConfigOptions.key(SOURCE_PROXY_HOST) - .stringType() - .noDefaultValue() - .withDescription("Http client proxy host."); - - public static final ConfigOption SOURCE_LOOKUP_PROXY_PORT = - ConfigOptions.key(SOURCE_PROXY_PORT) - .intType() - .noDefaultValue() - .withDescription("Http client proxy port."); - - public static final ConfigOption SOURCE_LOOKUP_PROXY_USERNAME = - ConfigOptions.key(SOURCE_PROXY_USERNAME) - .stringType() - .noDefaultValue() - .withDescription("Http client proxy username for authentication."); - - public static final ConfigOption SOURCE_LOOKUP_PROXY_PASSWORD = - ConfigOptions.key(SOURCE_PROXY_PASSWORD) - .stringType() - .noDefaultValue() - .withDescription("Http client proxy password for authentication."); - - public static final ConfigOption SOURCE_LOOKUP_RETRY_STRATEGY = - ConfigOptions.key(SOURCE_RETRY_STRATEGY_TYPE) - .stringType() - .defaultValue(RetryStrategyType.FIXED_DELAY.getCode()) - .withDescription("Auto retry strategy type: fixed-delay (default) or exponential-delay."); - - public static final ConfigOption SOURCE_LOOKUP_HTTP_SUCCESS_CODES = - ConfigOptions.key(SOURCE_RETRY_SUCCESS_CODES) - .stringType() - .defaultValue("2XX") - .withDescription("Comma separated http codes considered as success response. " + - "Use [1-5]XX for groups and '!' character for excluding."); - - public static final ConfigOption SOURCE_LOOKUP_HTTP_RETRY_CODES = - ConfigOptions.key(SOURCE_RETRY_RETRY_CODES) - .stringType() - .defaultValue("500,503,504") - .withDescription("Comma separated http codes considered as transient errors. " + - "Use [1-5]XX for groups and '!' 
character for excluding."); - - public static final ConfigOption SOURCE_LOOKUP_RETRY_FIXED_DELAY_DELAY = - ConfigOptions.key(SOURCE_RETRY_FIXED_DELAY_DELAY) - .durationType() - .defaultValue(Duration.ofSeconds(1)) - .withDescription("Fixed-delay interval between retries."); - - public static final ConfigOption SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_INITIAL_BACKOFF = - ConfigOptions.key(SOURCE_RETRY_EXP_DELAY_INITIAL_BACKOFF) - .durationType() - .defaultValue(Duration.ofSeconds(1)) - .withDescription("Exponential-delay initial delay."); - - public static final ConfigOption SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MAX_BACKOFF = - ConfigOptions.key(SOURCE_RETRY_EXP_DELAY_MAX_BACKOFF) - .durationType() - .defaultValue(Duration.ofMinutes(1)) - .withDescription("Exponential-delay maximum delay."); - - public static final ConfigOption SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MULTIPLIER = - ConfigOptions.key(SOURCE_RETRY_EXP_DELAY_MULTIPLIER) - .doubleType() - .defaultValue(1.5) - .withDescription("Exponential-delay multiplier."); - - public static final ConfigOption SOURCE_LOOKUP_HTTP_IGNORED_RESPONSE_CODES = - ConfigOptions.key(SOURCE_IGNORE_RESPONSE_CODES) - .stringType() - .defaultValue("") - .withDescription("Comma separated http codes. Content for these responses will be ignored. " + - "Use [1-5]XX for groups and '!' character for excluding. 
" + - "Ignored responses togater with `" + SOURCE_RETRY_SUCCESS_CODES - + "` are considered as successful."); -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupSourceRequestEntry.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupSourceRequestEntry.java deleted file mode 100644 index 62083dea..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupSourceRequestEntry.java +++ /dev/null @@ -1,32 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.net.http.HttpRequest; - -import lombok.Data; -import lombok.ToString; - -/** - * Wrapper class around {@link HttpRequest} that contains information about an actual lookup request - * body or request parameters. - */ -@Data -@ToString -public class HttpLookupSourceRequestEntry { - - /** - * Wrapped {@link HttpRequest} object. - */ - private final HttpRequest httpRequest; - - /** - * This field represents lookup query. Depending on used REST request method, this field can - * represent a request body, for example a Json string when PUT/POST requests method was used, - * or it can represent a query parameters if GET method was used. 
- */ - private final LookupQueryInfo lookupQueryInfo; - - public HttpLookupSourceRequestEntry(HttpRequest httpRequest, LookupQueryInfo lookupQueryInfo) { - this.httpRequest = httpRequest; - this.lookupQueryInfo = lookupQueryInfo; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceFactory.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceFactory.java deleted file mode 100644 index 7320a253..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceFactory.java +++ /dev/null @@ -1,194 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.util.List; -import java.util.Properties; -import java.util.Set; -import java.util.function.Predicate; -import java.util.stream.Collectors; -import javax.annotation.Nullable; - -import org.apache.flink.api.common.serialization.DeserializationSchema; -import org.apache.flink.configuration.ConfigOption; -import org.apache.flink.configuration.ReadableConfig; -import org.apache.flink.table.api.DataTypes; -import org.apache.flink.table.api.DataTypes.Field; -import org.apache.flink.table.catalog.Column; -import org.apache.flink.table.catalog.ResolvedSchema; -import org.apache.flink.table.connector.format.DecodingFormat; -import org.apache.flink.table.connector.source.DynamicTableSource; -import org.apache.flink.table.connector.source.lookup.LookupOptions; -import org.apache.flink.table.connector.source.lookup.cache.DefaultLookupCache; -import org.apache.flink.table.connector.source.lookup.cache.LookupCache; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.factories.DeserializationFormatFactory; -import org.apache.flink.table.factories.DynamicTableSourceFactory; -import org.apache.flink.table.factories.FactoryUtil; -import org.apache.flink.table.types.DataType; -import static org.apache.flink.table.api.DataTypes.FIELD; -import static 
org.apache.flink.table.types.utils.DataTypeUtils.removeTimeAttribute; - -import com.getindata.connectors.http.HttpPostRequestCallbackFactory; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.utils.ConfigUtils; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.*; - -public class HttpLookupTableSourceFactory implements DynamicTableSourceFactory { - - private static DataTypes.Field columnToField(Column column) { - return FIELD( - column.getName(), - // only a column in a schema should have a time attribute, - // a field should not propagate the attribute because it might be used in a - // completely different context - removeTimeAttribute(column.getDataType())); - } - - public static DataType row(List fields) { - return DataTypes.ROW(fields.toArray(new Field[0])); - } - - @Override - public DynamicTableSource createDynamicTableSource(Context dynamicTableContext) { - FactoryUtil.TableFactoryHelper helper = - FactoryUtil.createTableFactoryHelper(this, dynamicTableContext); - - ReadableConfig readable = helper.getOptions(); - helper.validateExcept( - // properties coming from org.apache.flink.table.api.config.ExecutionConfigOptions - "table.", - HttpConnectorConfigConstants.GID_CONNECTOR_HTTP, - LOOKUP_REQUEST_FORMAT.key() - ); - validateHttpLookupSourceOptions(readable); - - DecodingFormat> decodingFormat = - helper.discoverDecodingFormat( - DeserializationFormatFactory.class, - FactoryUtil.FORMAT - ); - - HttpLookupConfig lookupConfig = getHttpLookupOptions(dynamicTableContext, readable); - - ResolvedSchema resolvedSchema = dynamicTableContext.getCatalogTable().getResolvedSchema(); - - DataType physicalRowDataType = - toRowDataType(resolvedSchema.getColumns(), Column::isPhysical); - - return new HttpLookupTableSource( - physicalRowDataType, - lookupConfig, - decodingFormat, - dynamicTableContext, - getLookupCache(readable) - ); - } - - protected 
void validateHttpLookupSourceOptions(ReadableConfig tableOptions) - throws IllegalArgumentException { - // ensure that there is an OIDC token request if we have an OIDC token endpoint - tableOptions.getOptional(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL).ifPresent(url -> { - if (tableOptions.getOptional(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST).isEmpty()) { - throw new IllegalArgumentException("Config option " + - SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST.key() + " is required, if " + - SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL.key() + " is configured."); - } - }); - } - - @Override - public String factoryIdentifier() { - return "rest-lookup"; - } - - @Override - public Set> requiredOptions() { - return Set.of(URL, FactoryUtil.FORMAT); - } - - @Override - public Set> optionalOptions() { - return Set.of( - URL_ARGS, - ASYNC_POLLING, - LOOKUP_METHOD, - REQUEST_CALLBACK_IDENTIFIER, - - LookupOptions.CACHE_TYPE, - LookupOptions.PARTIAL_CACHE_EXPIRE_AFTER_ACCESS, - LookupOptions.PARTIAL_CACHE_EXPIRE_AFTER_WRITE, - LookupOptions.PARTIAL_CACHE_MAX_ROWS, - LookupOptions.PARTIAL_CACHE_CACHE_MISSING_KEY, - - SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION, - SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST, - SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL, - - LookupOptions.MAX_RETRIES, - SOURCE_LOOKUP_RETRY_STRATEGY, - SOURCE_LOOKUP_RETRY_FIXED_DELAY_DELAY, - SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_INITIAL_BACKOFF, - SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MULTIPLIER, - SOURCE_LOOKUP_RETRY_EXPONENTIAL_DELAY_MAX_BACKOFF, - - SOURCE_LOOKUP_HTTP_SUCCESS_CODES, - SOURCE_LOOKUP_HTTP_RETRY_CODES, - SOURCE_LOOKUP_HTTP_IGNORED_RESPONSE_CODES, - - SOURCE_LOOKUP_PROXY_HOST, - SOURCE_LOOKUP_PROXY_PORT, - SOURCE_LOOKUP_PROXY_USERNAME, - SOURCE_LOOKUP_PROXY_PASSWORD, - SOURCE_LOOKUP_CONNECTION_TIMEOUT // TODO: add request timeout from properties - ); - } - - private HttpLookupConfig getHttpLookupOptions(Context context, ReadableConfig readableConfig) { - - Properties httpConnectorProperties = - 
ConfigUtils.getHttpConnectorProperties(context.getCatalogTable().getOptions()); - - final HttpPostRequestCallbackFactory - postRequestCallbackFactory = - FactoryUtil.discoverFactory( - context.getClassLoader(), - HttpPostRequestCallbackFactory.class, - readableConfig.get(REQUEST_CALLBACK_IDENTIFIER) - ); - - return HttpLookupConfig.builder() - .lookupMethod(readableConfig.get(LOOKUP_METHOD)) - .url(readableConfig.get(URL)) - .useAsync(readableConfig.get(ASYNC_POLLING)) - .properties(httpConnectorProperties) - .readableConfig(readableConfig) - .httpPostRequestCallback(postRequestCallbackFactory.createHttpPostRequestCallback()) - .build(); - } - - @Nullable - private LookupCache getLookupCache(ReadableConfig tableOptions) { - LookupCache cache = null; - // Do not support legacy cache options - if (tableOptions - .get(LookupOptions.CACHE_TYPE) - .equals(LookupOptions.LookupCacheType.PARTIAL)) { - cache = DefaultLookupCache.fromConfig(tableOptions); - } - return cache; - } - - // TODO verify this since we are on 1.15 now. 
- // Backport from Flink 1.15-Master - private DataType toRowDataType(List columns, Predicate columnPredicate) { - return columns.stream() - .filter(columnPredicate) - .map(HttpLookupTableSourceFactory::columnToField) - .collect( - Collectors.collectingAndThen(Collectors.toList(), - HttpLookupTableSourceFactory::row)) - // the row should never be null - .notNull(); - } - - // Backport End -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpRequestFactory.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpRequestFactory.java deleted file mode 100644 index f0e06324..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/HttpRequestFactory.java +++ /dev/null @@ -1,20 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.io.Serializable; -import java.net.http.HttpRequest; - -import org.apache.flink.table.data.RowData; - -/** - * Factory for creating {@link HttpRequest} objects for Rest clients. - */ -public interface HttpRequestFactory extends Serializable { - - /** - * Creates a {@link HttpRequest} from given {@link RowData}. - * - * @param lookupRow {@link RowData} object used for building http request. 
- * @return {@link HttpRequest} created from {@link RowData} - */ - HttpLookupSourceRequestEntry buildLookupRequest(RowData lookupRow); -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientFactory.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientFactory.java deleted file mode 100644 index 61ffe21f..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientFactory.java +++ /dev/null @@ -1,34 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.net.http.HttpClient; - -import org.apache.flink.api.common.serialization.DeserializationSchema; -import org.apache.flink.table.data.RowData; -import org.apache.flink.util.ConfigurationException; - -import com.getindata.connectors.http.internal.PollingClientFactory; -import com.getindata.connectors.http.internal.utils.JavaNetHttpClientFactory; - -public class JavaNetHttpPollingClientFactory implements PollingClientFactory { - - private final HttpRequestFactory requestFactory; - - public JavaNetHttpPollingClientFactory(HttpRequestFactory requestFactory) { - this.requestFactory = requestFactory; - } - - @Override - public JavaNetHttpPollingClient createPollClient( - HttpLookupConfig options, - DeserializationSchema schemaDecoder) throws ConfigurationException { - - HttpClient httpClient = JavaNetHttpClientFactory.createClient(options); - - return new JavaNetHttpPollingClient( - httpClient, - schemaDecoder, - options, - requestFactory - ); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/LookupQueryInfo.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/LookupQueryInfo.java deleted file mode 100644 index f2c759d9..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/LookupQueryInfo.java +++ /dev/null @@ -1,73 +0,0 @@ -package 
com.getindata.connectors.http.internal.table.lookup; - -import java.io.Serializable; -import java.nio.charset.StandardCharsets; -import java.util.Collections; -import java.util.Map; -import java.util.stream.Collectors; - -import lombok.Getter; -import lombok.ToString; - -import com.getindata.connectors.http.internal.utils.uri.NameValuePair; -import com.getindata.connectors.http.internal.utils.uri.URLEncodedUtils; - - -/** - * Holds the lookup query for an HTTP request. - * The {@code lookupQuery} either contain the query parameters for a GET operation - * or the payload of a body-based request. - * The {@code bodyBasedUrlQueryParams} contains the optional query parameters of a - * body-based request in addition to its payload supplied with {@code lookupQuery}. - */ -@ToString -public class LookupQueryInfo implements Serializable { - @Getter - private final String lookupQuery; - - private final Map bodyBasedUrlQueryParams; - - private final Map pathBasedUrlParams; - - public LookupQueryInfo(String lookupQuery) { - this(lookupQuery, null, null); - } - - public LookupQueryInfo(String lookupQuery, Map bodyBasedUrlQueryParams, - Map pathBasedUrlParams) { - this.lookupQuery = - lookupQuery == null ? "" : lookupQuery; - this.bodyBasedUrlQueryParams = - bodyBasedUrlQueryParams == null ? Collections.emptyMap() : bodyBasedUrlQueryParams; - this.pathBasedUrlParams = - pathBasedUrlParams == null ? 
Collections.emptyMap() : pathBasedUrlParams; - } - - public String getBodyBasedUrlQueryParameters() { - return URLEncodedUtils.format( - bodyBasedUrlQueryParams - .entrySet() - .stream() - // sort the map by key to ensure there is a reliable order for unit tests - .sorted(Map.Entry.comparingByKey()) - .map(entry -> new NameValuePair(entry.getKey(), entry.getValue())) - .collect(Collectors.toList()), - StandardCharsets.UTF_8); - } - - public Map getPathBasedUrlParameters() { - return pathBasedUrlParams; - } - - public boolean hasLookupQuery() { - return !lookupQuery.isBlank(); - } - - public boolean hasBodyBasedUrlQueryParameters() { - return !bodyBasedUrlQueryParams.isEmpty(); - } - - public boolean hasPathBasedUrlParameters() { - return !pathBasedUrlParams.isEmpty(); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/LookupSchemaEntry.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/LookupSchemaEntry.java deleted file mode 100644 index d72cbcb4..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/LookupSchemaEntry.java +++ /dev/null @@ -1,28 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.io.Serializable; -import java.util.List; - -import com.getindata.connectors.http.LookupArg; - -/** - * Represents Lookup entry with its name and provides conversion method to collection of {@link - * LookupArg} elements. - * - * @param type of lookupKeyRow used for converting to {@link LookupArg}. - */ -public interface LookupSchemaEntry extends Serializable { - - /** - * @return lookup Field name. - */ - String getFieldName(); - - /** - * Creates a collection of {@link LookupArg} elements from provided T lookupKeyRow - * - * @param lookupKeyRow Element to get the values from for {@code LookupArg#getArgValue()}. - * @return Collection of {@link LookupArg} objects created from lookupKeyRow. 
- */ - List convertToLookupArg(T lookupKeyRow); -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/RowDataLookupSchemaEntryBase.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/RowDataLookupSchemaEntryBase.java deleted file mode 100644 index 492325a5..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/RowDataLookupSchemaEntryBase.java +++ /dev/null @@ -1,36 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import org.apache.flink.table.data.RowData; - -/** - * Base implementation of {@link LookupSchemaEntry} for {@link RowData} type. - */ -public abstract class RowDataLookupSchemaEntryBase implements LookupSchemaEntry { - - /** - * Lookup field name represented by this instance. - */ - protected final String fieldName; - - /** - * {@link RowData.FieldGetter} matching RowData type for field represented by this instance. - */ - protected final RowData.FieldGetter fieldGetter; - - /** - * Creates new instance. - * - * @param fieldName field name that this instance represents, matching {@link RowData} column - * name. - * @param fieldGetter {@link RowData.FieldGetter} for data type matching {@link RowData} column - * type that this instance represents. 
- */ - public RowDataLookupSchemaEntryBase(String fieldName, RowData.FieldGetter fieldGetter) { - this.fieldName = fieldName; - this.fieldGetter = fieldGetter; - } - - public String getFieldName() { - return this.fieldName; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/Slf4JHttpLookupPostRequestCallback.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/Slf4JHttpLookupPostRequestCallback.java deleted file mode 100644 index a1ae1eae..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/Slf4JHttpLookupPostRequestCallback.java +++ /dev/null @@ -1,70 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.net.http.HttpRequest; -import java.net.http.HttpResponse; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.StringJoiner; - -import lombok.extern.slf4j.Slf4j; - -import com.getindata.connectors.http.HttpPostRequestCallback; -import com.getindata.connectors.http.internal.utils.ConfigUtils; - -/** - * A {@link HttpPostRequestCallback} that logs pairs of request and response as INFO level - * logs using Slf4j. - * - *

Serving as a default implementation of {@link HttpPostRequestCallback} for - * the {@link HttpLookupTableSource}. - */ -@Slf4j -public class Slf4JHttpLookupPostRequestCallback - implements HttpPostRequestCallback { - - @Override - public void call( - HttpResponse response, - HttpLookupSourceRequestEntry requestEntry, - String endpointUrl, - Map headerMap) { - - HttpRequest httpRequest = requestEntry.getHttpRequest(); - StringJoiner headers = new StringJoiner(";"); - - for (Entry> reqHeaders : httpRequest.headers().map().entrySet()) { - StringJoiner values = new StringJoiner(";"); - for (String value : reqHeaders.getValue()) { - values.add(value); - } - String header = reqHeaders.getKey() + ": [" + values + "]"; - headers.add(header); - } - - if (response == null) { - log.warn("Null Http response for request " + httpRequest.uri().toString()); - - log.info( - "Got response for a request.\n Request:\n URL: {}\n " + - "Method: {}\n Headers: {}\n Params/Body: {}\nResponse: null", - httpRequest.uri().toString(), - httpRequest.method(), - headers, - requestEntry.getLookupQueryInfo() - ); - } else { - log.info( - "Got response for a request.\n Request:\n URL: {}\n " + - "Method: {}\n Headers: {}\n Params/Body: {}\nResponse: {}\n Body: {}", - httpRequest.uri().toString(), - httpRequest.method(), - headers, - requestEntry.getLookupQueryInfo(), - response, - response.body().replaceAll(ConfigUtils.UNIVERSAL_NEW_LINE_REGEXP, "") - ); - } - - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/Slf4jHttpLookupPostRequestCallbackFactory.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/Slf4jHttpLookupPostRequestCallbackFactory.java deleted file mode 100644 index 406a71b7..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/Slf4jHttpLookupPostRequestCallbackFactory.java +++ /dev/null @@ -1,38 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.util.HashSet; -import 
java.util.Set; - -import org.apache.flink.configuration.ConfigOption; - -import com.getindata.connectors.http.HttpPostRequestCallback; -import com.getindata.connectors.http.HttpPostRequestCallbackFactory; - -/** - * Factory for creating {@link Slf4JHttpLookupPostRequestCallback}. - */ -public class Slf4jHttpLookupPostRequestCallbackFactory - implements HttpPostRequestCallbackFactory { - - public static final String IDENTIFIER = "slf4j-lookup-logger"; - - @Override - public HttpPostRequestCallback createHttpPostRequestCallback() { - return new Slf4JHttpLookupPostRequestCallback(); - } - - @Override - public String factoryIdentifier() { - return IDENTIFIER; - } - - @Override - public Set> requiredOptions() { - return new HashSet<>(); - } - - @Override - public Set> optionalOptions() { - return new HashSet<>(); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ElasticSearchLiteQueryCreator.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ElasticSearchLiteQueryCreator.java deleted file mode 100644 index 068f699a..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ElasticSearchLiteQueryCreator.java +++ /dev/null @@ -1,50 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.util.Collection; -import java.util.stream.Collectors; - -import org.apache.flink.table.data.RowData; - -import com.getindata.connectors.http.LookupArg; -import com.getindata.connectors.http.LookupQueryCreator; -import com.getindata.connectors.http.internal.table.lookup.LookupQueryInfo; -import com.getindata.connectors.http.internal.table.lookup.LookupRow; - -/** - * A {@link LookupQueryCreator} that prepares q - * parameter GET query for ElasticSearch Search API using Lucene query string syntax (in - * first versions of the ElasticSearch called Search - * Lite). 
- */ -public class ElasticSearchLiteQueryCreator implements LookupQueryCreator { - - private static final String ENCODED_SPACE = "%20"; - private static final String ENCODED_QUOTATION_MARK = "%22"; - - private final LookupRow lookupRow; - - public ElasticSearchLiteQueryCreator(LookupRow lookupRow) { - this.lookupRow = lookupRow; - } - - private static String processLookupArg(LookupArg arg) { - return arg.getArgName() - + ":" - + ENCODED_QUOTATION_MARK - + arg.getArgValue() - + ENCODED_QUOTATION_MARK; - } - - @Override - public LookupQueryInfo createLookupQuery(RowData lookupDataRow) { - Collection lookupArgs = lookupRow.convertToLookupArgs(lookupDataRow); - - var luceneQuery = lookupArgs.stream() - .map(ElasticSearchLiteQueryCreator::processLookupArg) - .collect(Collectors.joining(ENCODED_SPACE + "AND" + ENCODED_SPACE)); - - String lookupQuery = luceneQuery.isEmpty() ? "" : ("q=" + luceneQuery); - - return new LookupQueryInfo(lookupQuery); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ElasticSearchLiteQueryCreatorFactory.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ElasticSearchLiteQueryCreatorFactory.java deleted file mode 100644 index 0cc66ccc..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ElasticSearchLiteQueryCreatorFactory.java +++ /dev/null @@ -1,44 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.util.Set; - -import org.apache.flink.configuration.ConfigOption; -import org.apache.flink.configuration.ReadableConfig; -import org.apache.flink.table.factories.DynamicTableFactory; - -import com.getindata.connectors.http.LookupQueryCreator; -import com.getindata.connectors.http.LookupQueryCreatorFactory; -import com.getindata.connectors.http.internal.table.lookup.LookupRow; - - -/** - * Factory for creating {@link ElasticSearchLiteQueryCreator}. 
- */ -public class ElasticSearchLiteQueryCreatorFactory implements LookupQueryCreatorFactory { - - public static final String IDENTIFIER = "elasticsearch-lite"; - - @Override - public LookupQueryCreator createLookupQueryCreator( - ReadableConfig readableConfig, - LookupRow lookupRow, - DynamicTableFactory.Context dynamicTableFactoryContext - ) { - return new ElasticSearchLiteQueryCreator(lookupRow); - } - - @Override - public String factoryIdentifier() { - return IDENTIFIER; - } - - @Override - public Set> requiredOptions() { - return Set.of(); - } - - @Override - public Set> optionalOptions() { - return Set.of(); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreator.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreator.java deleted file mode 100644 index bdcd05b3..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreator.java +++ /dev/null @@ -1,42 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.nio.charset.StandardCharsets; -import java.util.Collection; -import java.util.stream.Collectors; - -import org.apache.flink.table.data.RowData; - -import com.getindata.connectors.http.LookupArg; -import com.getindata.connectors.http.LookupQueryCreator; -import com.getindata.connectors.http.internal.table.lookup.LookupQueryInfo; -import com.getindata.connectors.http.internal.table.lookup.LookupRow; -import com.getindata.connectors.http.internal.utils.uri.NameValuePair; -import com.getindata.connectors.http.internal.utils.uri.URLEncodedUtils; - -/** - * A {@link LookupQueryCreator} that builds an "ordinary" GET query, i.e. adds - * joinColumn1=value1&joinColumn2=value2&... to the URI of the endpoint. 
- */ -public class GenericGetQueryCreator implements LookupQueryCreator { - - private final LookupRow lookupRow; - - public GenericGetQueryCreator(LookupRow lookupRow) { - this.lookupRow = lookupRow; - } - - @Override - public LookupQueryInfo createLookupQuery(RowData lookupDataRow) { - - Collection lookupArgs = lookupRow.convertToLookupArgs(lookupDataRow); - - String lookupQuery = - URLEncodedUtils.format( - lookupArgs.stream() - .map(arg -> new NameValuePair(arg.getArgName(), arg.getArgValue())) - .collect(Collectors.toList()), - StandardCharsets.UTF_8); - - return new LookupQueryInfo(lookupQuery); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreatorFactory.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreatorFactory.java deleted file mode 100644 index 6ed6ef75..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreatorFactory.java +++ /dev/null @@ -1,43 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.util.Set; - -import org.apache.flink.configuration.ConfigOption; -import org.apache.flink.configuration.ReadableConfig; -import org.apache.flink.table.factories.DynamicTableFactory; - -import com.getindata.connectors.http.LookupQueryCreator; -import com.getindata.connectors.http.LookupQueryCreatorFactory; -import com.getindata.connectors.http.internal.table.lookup.LookupRow; - - -/** - * Factory for creating {@link GenericGetQueryCreator}. 
- */ -public class GenericGetQueryCreatorFactory implements LookupQueryCreatorFactory { - - public static final String IDENTIFIER = "generic-get-query"; - - @Override - public LookupQueryCreator createLookupQueryCreator( - ReadableConfig readableConfig, - LookupRow lookupRow, - DynamicTableFactory.Context dynamicTableFactoryContext) { - return new GenericGetQueryCreator(lookupRow); - } - - @Override - public String factoryIdentifier() { - return IDENTIFIER; - } - - @Override - public Set> requiredOptions() { - return Set.of(); - } - - @Override - public Set> optionalOptions() { - return Set.of(); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactory.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactory.java deleted file mode 100644 index 1ed9dab1..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactory.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * © Copyright IBM Corp. 
2025 - */ - -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.util.*; - -import org.apache.flink.api.common.serialization.SerializationSchema; -import org.apache.flink.configuration.ConfigOption; -import org.apache.flink.configuration.ConfigOptions; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.configuration.ReadableConfig; -import org.apache.flink.table.connector.format.EncodingFormat; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.factories.DynamicTableFactory; -import org.apache.flink.table.factories.FactoryUtil; -import org.apache.flink.table.factories.SerializationFormatFactory; -import static org.apache.flink.configuration.ConfigOptions.key; - -import com.getindata.connectors.http.LookupQueryCreator; -import com.getindata.connectors.http.LookupQueryCreatorFactory; -import com.getindata.connectors.http.internal.table.lookup.LookupRow; -import com.getindata.connectors.http.internal.utils.SynchronizedSerializationSchema; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.ASYNC_POLLING; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.LOOKUP_METHOD; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.LOOKUP_REQUEST_FORMAT; - -/** - * Generic JSON and url query creator factory defined configuration to define the columns to be - *

    - *
  1. List of column names to be included in the query params
  2. - *
  3. List of column names to be included in the body (for PUT and POST)
  4. - *
  5. Map of templated uri segment names to column names
  6. - *
- */ -@SuppressWarnings({"checkstyle:RegexpSingleline", "checkstyle:LineLength"}) -public class GenericJsonAndUrlQueryCreatorFactory implements LookupQueryCreatorFactory { - private static final long serialVersionUID = 1L; - - public static final String ID = "generic-json-url"; - - public static final ConfigOption> REQUEST_QUERY_PARAM_FIELDS = - key("gid.connector.http.request.query-param-fields") - .stringType() - .asList() - .defaultValues() //default to empty list so we do not need to check for null - .withDescription( - "The names of the fields that will be mapped to query parameters." - + " The parameters are separated by semicolons," - + " such as 'param1;param2'."); - public static final ConfigOption> REQUEST_BODY_FIELDS = - key("gid.connector.http.request.body-fields") - .stringType() - .asList() - .defaultValues() //default to empty list so we do not need to check for null - .withDescription( - "The names of the fields that will be mapped to the body." - + " The parameters are separated by semicolons," - + " such as 'param1;param2'."); - public static final ConfigOption> REQUEST_URL_MAP = - ConfigOptions.key("gid.connector.http.request.url-map") - .mapType() - .noDefaultValue() - .withDescription("The map of insert names to column names used" - + "as url segments. Parses a string as a map of strings. " - + "
" - + "For example if there are table columns called customerId" - + " and orderId, then specifying value customerId:cid1,orderID:oid" - + " and a url of https://myendpoint/customers/{cid}/orders/{oid}" - + " will mean that the url used for the lookup query will" - + " dynamically pickup the values for customerId, orderId" - + " and use them in the url." - + "
Notes
" - + "The expected format of the map is:" - + "
" - + " key1:value1,key2:value2" - ); - - @Override - public LookupQueryCreator createLookupQueryCreator(final ReadableConfig readableConfig, - final LookupRow lookupRow, - final DynamicTableFactory.Context - dynamicTableFactoryContext) { - final String httpMethod = readableConfig.get(LOOKUP_METHOD); - final String formatIdentifier = readableConfig.get(LOOKUP_REQUEST_FORMAT); - // get the information from config - final List requestQueryParamsFields = - readableConfig.get(REQUEST_QUERY_PARAM_FIELDS); - final List requestBodyFields = - readableConfig.get(REQUEST_BODY_FIELDS); - Map requestUrlMap = readableConfig.get(REQUEST_URL_MAP); - - final SerializationFormatFactory jsonFormatFactory = - FactoryUtil.discoverFactory(Thread.currentThread().getContextClassLoader(), - SerializationFormatFactory.class, formatIdentifier); - QueryFormatAwareConfiguration queryFormatAwareConfiguration = - new QueryFormatAwareConfiguration( - LOOKUP_REQUEST_FORMAT.key() + "." + formatIdentifier, - (Configuration) readableConfig); - EncodingFormat> - encoder = jsonFormatFactory.createEncodingFormat( - dynamicTableFactoryContext, - queryFormatAwareConfiguration - ); - - final SerializationSchema jsonSerializationSchema; - if (readableConfig.get(ASYNC_POLLING)) { - jsonSerializationSchema = new SynchronizedSerializationSchema<>( - encoder.createRuntimeEncoder(null, - lookupRow.getLookupPhysicalRowDataType())); - } else { - jsonSerializationSchema = - encoder.createRuntimeEncoder(null, - lookupRow.getLookupPhysicalRowDataType()); - } - // create using config parameter values and specify serialization - // schema from json format. 
- return new GenericJsonAndUrlQueryCreator(httpMethod, - jsonSerializationSchema, - requestQueryParamsFields, - requestBodyFields, - requestUrlMap, - lookupRow); - } - - @Override - public String factoryIdentifier() { - return ID; - } - - @Override - public Set> requiredOptions() { - return Set.of(); - } - @Override - public Set> optionalOptions() { - return Set.of(REQUEST_QUERY_PARAM_FIELDS, - REQUEST_BODY_FIELDS, - REQUEST_URL_MAP); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreator.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreator.java deleted file mode 100644 index fb386a5e..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreator.java +++ /dev/null @@ -1,59 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.nio.charset.StandardCharsets; - -import org.apache.flink.api.common.serialization.SerializationSchema; -import org.apache.flink.table.data.RowData; -import org.apache.flink.util.FlinkRuntimeException; - -import com.getindata.connectors.http.LookupQueryCreator; -import com.getindata.connectors.http.internal.table.lookup.LookupQueryInfo; -import com.getindata.connectors.http.internal.utils.SerializationSchemaUtils; - -/** - * A {@link LookupQueryCreator} that builds Json based body for REST requests, i.e. adds - */ -public class GenericJsonQueryCreator implements LookupQueryCreator { - - /** - * The {@link SerializationSchema} to serialize {@link RowData} object. - */ - private final SerializationSchema jsonSerialization; - - private boolean schemaOpened = false; - - public GenericJsonQueryCreator(SerializationSchema jsonSerialization) { - - this.jsonSerialization = jsonSerialization; - } - - /** - * Creates a Jason string from given {@link RowData}. 
- * - * @param lookupDataRow {@link RowData} to serialize into Json string. - * @return Json string created from lookupDataRow argument. - */ - @Override - public LookupQueryInfo createLookupQuery(RowData lookupDataRow) { - checkOpened(); - String lookupQuery = - new String(jsonSerialization.serialize(lookupDataRow), StandardCharsets.UTF_8); - - return new LookupQueryInfo(lookupQuery); - } - - private void checkOpened() { - if (!schemaOpened) { - try { - jsonSerialization.open( - SerializationSchemaUtils - .createSerializationInitContext(GenericJsonQueryCreator.class)); - } catch (Exception e) { - throw new FlinkRuntimeException( - "Failed to initialize serialization schema for GenericJsonQueryCreatorFactory.", - e); - } - schemaOpened = true; - } - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreatorFactory.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreatorFactory.java deleted file mode 100644 index fbf5401b..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreatorFactory.java +++ /dev/null @@ -1,78 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.util.Set; - -import org.apache.flink.api.common.serialization.SerializationSchema; -import org.apache.flink.configuration.ConfigOption; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.configuration.ReadableConfig; -import org.apache.flink.table.connector.format.EncodingFormat; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.factories.DynamicTableFactory; -import org.apache.flink.table.factories.FactoryUtil; -import org.apache.flink.table.factories.SerializationFormatFactory; - -import com.getindata.connectors.http.LookupQueryCreator; -import com.getindata.connectors.http.LookupQueryCreatorFactory; -import 
com.getindata.connectors.http.internal.table.lookup.LookupRow; -import com.getindata.connectors.http.internal.utils.SynchronizedSerializationSchema; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.ASYNC_POLLING; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.LOOKUP_REQUEST_FORMAT; - -/** - * Factory for creating {@link GenericJsonQueryCreatorFactory}. - */ -public class GenericJsonQueryCreatorFactory implements LookupQueryCreatorFactory { - - public static final String IDENTIFIER = "generic-json-query"; - - @Override - public LookupQueryCreator createLookupQueryCreator( - ReadableConfig readableConfig, - LookupRow lookupRow, - DynamicTableFactory.Context dynamicTableFactoryContext) { - - String formatIdentifier = readableConfig.get(LOOKUP_REQUEST_FORMAT); - SerializationFormatFactory jsonFormatFactory = - FactoryUtil.discoverFactory( - dynamicTableFactoryContext.getClassLoader(), - SerializationFormatFactory.class, - formatIdentifier - ); - QueryFormatAwareConfiguration queryFormatAwareConfiguration = - new QueryFormatAwareConfiguration( - LOOKUP_REQUEST_FORMAT.key() + "." 
+ formatIdentifier, - (Configuration) readableConfig); - EncodingFormat> - encoder = jsonFormatFactory.createEncodingFormat( - dynamicTableFactoryContext, - queryFormatAwareConfiguration - ); - - final SerializationSchema serializationSchema; - if (readableConfig.get(ASYNC_POLLING)) { - serializationSchema = new SynchronizedSerializationSchema<>( - encoder.createRuntimeEncoder(null, lookupRow.getLookupPhysicalRowDataType())); - } else { - serializationSchema = - encoder.createRuntimeEncoder(null, lookupRow.getLookupPhysicalRowDataType()); - } - - return new GenericJsonQueryCreator(serializationSchema); - } - - @Override - public String factoryIdentifier() { - return IDENTIFIER; - } - - @Override - public Set> requiredOptions() { - return Set.of(); - } - - @Override - public Set> optionalOptions() { - return Set.of(); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ObjectMapperAdapter.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ObjectMapperAdapter.java deleted file mode 100644 index d8d8aa34..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ObjectMapperAdapter.java +++ /dev/null @@ -1,28 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.DeserializationFeature; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.MapperFeature; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.SerializationFeature; -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.json.JsonMapper; - -/** - * Centralizes the use of {@link ObjectMapper}. 
- */ -public class ObjectMapperAdapter { - private static final ObjectMapper MAPPER = initialize(); - - private static ObjectMapper initialize() { - final ObjectMapper mapper = JsonMapper.builder() - .configure(MapperFeature.USE_STD_BEAN_NAMING, - false).build(); - mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); - mapper.disable(SerializationFeature.WRITE_DATES_WITH_ZONE_ID); - mapper.disable(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE); - return mapper; - } - - public static ObjectMapper instance() { - return MAPPER; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/PrefixedConfigOption.java b/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/PrefixedConfigOption.java deleted file mode 100644 index d7ccda91..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/lookup/querycreators/PrefixedConfigOption.java +++ /dev/null @@ -1,85 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.lang.reflect.Constructor; -import java.lang.reflect.Field; -import java.lang.reflect.InvocationTargetException; -import java.util.ArrayList; -import java.util.List; - -import org.apache.flink.configuration.ConfigOption; -import org.apache.flink.configuration.FallbackKey; - -/** - * This is a ConfigOption that has an associated config option and prefix. - * - * Note that this Class used to extend ConfigOption, - * but at Flink 1.16, there was a new way of doing class loaders - * for custom content, so we could no longer extend ConfigOption. - */ -public class PrefixedConfigOption { - /** - * configOption to decorate - */ - private ConfigOption configOption; - - public ConfigOption getConfigOption() { - return configOption; - } - - /** - * This constructor creates a new clone of the supplied option 'other' with - * the prefix prefixing the key. 
We create a new object, because we do - * not want to mutate a Flink object that we did not create. - * - * @param keyPrefix prefix that will be added to decorate the {@link ConfigOption} key. - * @param other original {@link ConfigOption} to clone and decorate. - */ - public PrefixedConfigOption(String keyPrefix, ConfigOption other) { - String prefixedKey = keyPrefix + other.key(); - Class clazz; - boolean isList; - - try { - // get clazz - Field field = other.getClass().getDeclaredField("clazz"); - field.setAccessible(true); - clazz = (Class) field.get(other); - - // get isList - field = other.getClass().getDeclaredField("isList"); - field.setAccessible(true); - isList = (Boolean) field.get(other); - - /* - * Create a new ConfigOption based on other, but with a prefixed key. - * At 1.16 we cannot access the protected fields / constructor in the supplied - * configOption as this object is loaded using a different classloader. - * Without changing Flink to make the constructor, methods and fields public, we need - * to use reflection to access and create the new prefixed ConfigOption. It is not - * great practise to use reflection, but getting round this classloader issue - * necessitates it's use. 
- */ - Constructor constructor = other.getClass().getDeclaredConstructors()[0]; - constructor.setAccessible(true); - configOption = (ConfigOption) constructor.newInstance(prefixedKey, - clazz, - other.description(), - other.defaultValue(), - isList, - getFallbackKeys(other)); - } catch (InstantiationException | - IllegalAccessException | - InvocationTargetException | - NoSuchFieldException e) { - throw new RuntimeException(e); - } - } - - private static FallbackKey[] getFallbackKeys(ConfigOption other) { - List fallbackKeys = new ArrayList<>(); - for (FallbackKey fallbackKey : other.fallbackKeys()) { - fallbackKeys.add(fallbackKey); - } - return fallbackKeys.toArray(new FallbackKey[0]); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicSinkConnectorOptions.java b/src/main/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicSinkConnectorOptions.java deleted file mode 100644 index b87b6eb7..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicSinkConnectorOptions.java +++ /dev/null @@ -1,27 +0,0 @@ -package com.getindata.connectors.http.internal.table.sink; - -import org.apache.flink.configuration.ConfigOption; -import org.apache.flink.configuration.ConfigOptions; - -import static com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants.SINK_REQUEST_CALLBACK_IDENTIFIER; - -/** - * Table API options for {@link HttpDynamicSink}. 
- */ -public class HttpDynamicSinkConnectorOptions { - - public static final ConfigOption URL = - ConfigOptions.key("url").stringType().noDefaultValue() - .withDescription("The HTTP endpoint URL."); - - public static final ConfigOption INSERT_METHOD = - ConfigOptions.key("insert-method") - .stringType() - .defaultValue("POST") - .withDescription("Method used for requests built from SQL's INSERT."); - - public static final ConfigOption REQUEST_CALLBACK_IDENTIFIER = - ConfigOptions.key(SINK_REQUEST_CALLBACK_IDENTIFIER) - .stringType() - .defaultValue(Slf4jHttpPostRequestCallbackFactory.IDENTIFIER); -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicTableSinkFactory.java b/src/main/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicTableSinkFactory.java deleted file mode 100644 index 549ff650..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicTableSinkFactory.java +++ /dev/null @@ -1,100 +0,0 @@ -package com.getindata.connectors.http.internal.table.sink; - -import java.util.Properties; -import java.util.Set; - -import org.apache.flink.configuration.ConfigOption; -import org.apache.flink.configuration.ReadableConfig; -import org.apache.flink.connector.base.table.AsyncDynamicTableSinkFactory; -import org.apache.flink.connector.base.table.sink.options.AsyncSinkConfigurationValidator; -import org.apache.flink.table.connector.sink.DynamicTableSink; -import org.apache.flink.table.factories.FactoryUtil; - -import com.getindata.connectors.http.HttpPostRequestCallbackFactory; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.sink.httpclient.HttpRequest; -import com.getindata.connectors.http.internal.utils.ConfigUtils; -import static com.getindata.connectors.http.internal.table.sink.HttpDynamicSinkConnectorOptions.*; - -/** - * Factory for creating {@link HttpDynamicSink}. 
- */ -public class HttpDynamicTableSinkFactory extends AsyncDynamicTableSinkFactory { - - public static final String IDENTIFIER = "http-sink"; - - @Override - public DynamicTableSink createDynamicTableSink(Context context) { - final AsyncDynamicSinkContext factoryContext = new AsyncDynamicSinkContext(this, context); - - // This is actually same as calling helper.getOptions(); - ReadableConfig tableOptions = factoryContext.getTableOptions(); - - // Validate configuration - FactoryUtil.createTableFactoryHelper(this, context) - .validateExcept( - // properties coming from org.apache.flink.table.api.config.ExecutionConfigOptions - "table.", - HttpConnectorConfigConstants.GID_CONNECTOR_HTTP - ); - validateHttpSinkOptions(tableOptions); - - Properties asyncSinkProperties = - new AsyncSinkConfigurationValidator(tableOptions).getValidatedConfigurations(); - - // generics type erasure, so we have to do an unchecked cast - final HttpPostRequestCallbackFactory postRequestCallbackFactory = - FactoryUtil.discoverFactory( - context.getClassLoader(), - HttpPostRequestCallbackFactory.class, // generics type erasure - tableOptions.get(REQUEST_CALLBACK_IDENTIFIER) - ); - - Properties httpConnectorProperties = - ConfigUtils.getHttpConnectorProperties(context.getCatalogTable().getOptions()); - - HttpDynamicSink.HttpDynamicTableSinkBuilder builder = - new HttpDynamicSink.HttpDynamicTableSinkBuilder() - .setTableOptions(tableOptions) - .setEncodingFormat(factoryContext.getEncodingFormat()) - .setHttpPostRequestCallback( - postRequestCallbackFactory.createHttpPostRequestCallback() - ) - .setConsumedDataType(factoryContext.getPhysicalDataType()) - .setProperties(httpConnectorProperties); - addAsyncOptionsToBuilder(asyncSinkProperties, builder); - - return builder.build(); - } - - @Override - public String factoryIdentifier() { - return IDENTIFIER; - } - - @Override - public Set> requiredOptions() { - return Set.of(URL, FactoryUtil.FORMAT); - } - - @Override - public Set> optionalOptions() 
{ - var options = super.optionalOptions(); - options.add(INSERT_METHOD); - options.add(REQUEST_CALLBACK_IDENTIFIER); - return options; - } - - private void validateHttpSinkOptions(ReadableConfig tableOptions) - throws IllegalArgumentException { - tableOptions.getOptional(INSERT_METHOD).ifPresent(insertMethod -> { - if (!Set.of("POST", "PUT").contains(insertMethod)) { - throw new IllegalArgumentException( - String.format( - "Invalid option '%s'. It is expected to be either 'POST' or 'PUT'.", - INSERT_METHOD.key() - )); - } - }); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/sink/Slf4jHttpPostRequestCallback.java b/src/main/java/com/getindata/connectors/http/internal/table/sink/Slf4jHttpPostRequestCallback.java deleted file mode 100644 index b5f2049d..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/sink/Slf4jHttpPostRequestCallback.java +++ /dev/null @@ -1,53 +0,0 @@ -package com.getindata.connectors.http.internal.table.sink; - -import java.net.http.HttpResponse; -import java.nio.charset.StandardCharsets; -import java.util.Map; -import java.util.stream.Collectors; - -import lombok.extern.slf4j.Slf4j; - -import com.getindata.connectors.http.HttpPostRequestCallback; -import com.getindata.connectors.http.internal.sink.httpclient.HttpRequest; -import com.getindata.connectors.http.internal.utils.ConfigUtils; - -/** - * A {@link HttpPostRequestCallback} that logs pairs of request and response as INFO level - * logs using Slf4j. - * - *

Serving as a default implementation of {@link HttpPostRequestCallback} for - * the {@link HttpDynamicSink}. - */ -@Slf4j -public class Slf4jHttpPostRequestCallback implements HttpPostRequestCallback { - - @Override - public void call( - HttpResponse response, - HttpRequest requestEntry, - String endpointUrl, - Map headerMap) { - - String requestBody = requestEntry.getElements().stream() - .map(element -> new String(element, StandardCharsets.UTF_8)) - .collect(Collectors.joining()); - - if (response == null) { - log.info( - "Got response for a request.\n Request:\n " + - "Method: {}\n Body: {}\n Response: null", - requestEntry.getMethod(), - requestBody - ); - } else { - log.info( - "Got response for a request.\n Request:\n " + - "Method: {}\n Body: {}\n Response: {}\n Body: {}", - requestEntry.method, - requestBody, - response, - response.body().replaceAll(ConfigUtils.UNIVERSAL_NEW_LINE_REGEXP, "") - ); - } - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/table/sink/Slf4jHttpPostRequestCallbackFactory.java b/src/main/java/com/getindata/connectors/http/internal/table/sink/Slf4jHttpPostRequestCallbackFactory.java deleted file mode 100644 index 4573c20e..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/table/sink/Slf4jHttpPostRequestCallbackFactory.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.getindata.connectors.http.internal.table.sink; - -import java.util.HashSet; -import java.util.Set; - -import org.apache.flink.configuration.ConfigOption; - -import com.getindata.connectors.http.HttpPostRequestCallback; -import com.getindata.connectors.http.HttpPostRequestCallbackFactory; -import com.getindata.connectors.http.internal.sink.httpclient.HttpRequest; - -/** - * Factory for creating {@link Slf4jHttpPostRequestCallback}. 
- */ -public class Slf4jHttpPostRequestCallbackFactory - implements HttpPostRequestCallbackFactory { - - public static final String IDENTIFIER = "slf4j-logger"; - - @Override - public HttpPostRequestCallback createHttpPostRequestCallback() { - return new Slf4jHttpPostRequestCallback(); - } - - @Override - public String factoryIdentifier() { - return IDENTIFIER; - } - - @Override - public Set> requiredOptions() { - return new HashSet<>(); - } - - @Override - public Set> optionalOptions() { - return new HashSet<>(); - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/ExceptionUtils.java b/src/main/java/com/getindata/connectors/http/internal/utils/ExceptionUtils.java deleted file mode 100644 index 24c65e21..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/utils/ExceptionUtils.java +++ /dev/null @@ -1,26 +0,0 @@ -package com.getindata.connectors.http.internal.utils; - -import java.io.IOException; -import java.io.PrintWriter; -import java.io.StringWriter; -import java.io.UncheckedIOException; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; - -@NoArgsConstructor(access = AccessLevel.NONE) -public final class ExceptionUtils { - - public static String stringifyException(Throwable e) { - try (StringWriter stm = new StringWriter(); - PrintWriter wrt = new PrintWriter(stm)) { - - e.printStackTrace(wrt); - wrt.close(); - return stm.toString(); - - } catch (IOException ioException) { - throw new UncheckedIOException(ioException); - } - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/HttpHeaderUtils.java b/src/main/java/com/getindata/connectors/http/internal/utils/HttpHeaderUtils.java deleted file mode 100644 index 94f74915..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/utils/HttpHeaderUtils.java +++ /dev/null @@ -1,137 +0,0 @@ -package com.getindata.connectors.http.internal.utils; - -import java.time.Duration; -import java.util.*; -import java.util.Map.Entry; -import 
java.util.stream.Stream; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.apache.flink.configuration.ReadableConfig; - -import com.getindata.connectors.http.internal.BasicAuthHeaderValuePreprocessor; -import com.getindata.connectors.http.internal.ComposeHeaderPreprocessor; -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.OIDCAuthHeaderValuePreprocessor; -import com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.*; - - - -@NoArgsConstructor(access = AccessLevel.NONE) -@Slf4j -public final class HttpHeaderUtils { - - public static final String AUTHORIZATION = "Authorization"; - - public static Map prepareHeaderMap( - String headerKeyPrefix, - Properties properties, - HeaderPreprocessor headerPreprocessor) { - - // at this stage headerMap keys are full property paths not only header names. - Map propertyHeaderMap = - ConfigUtils.propertiesToMap(properties, headerKeyPrefix, String.class); - - // Map with keys pointing to the headerName. - Map headerMap = new HashMap<>(); - - for (Entry headerAndValue : propertyHeaderMap.entrySet()) { - String propertyName = headerAndValue.getKey(); - String headerValue = headerAndValue.getValue(); - log.info("prepareHeaderMap propertyName=" + propertyName - + ",headerValue" + headerValue); - String headerName = ConfigUtils.extractPropertyLastElement(propertyName); - String preProcessedHeader = - headerPreprocessor.preprocessValueForHeader(headerName, headerValue); - log.info("prepareHeaderMap preProcessedHeader=" - + preProcessedHeader); - headerMap.put( - headerName, - preProcessedHeader - ); - } - return headerMap; - } - - /** - * Flat map a given Map of header name and header value map to an array containing both header - * names and values. For example, header map of - *

{@code
-     *     Map.of(
-     *     header1, val1,
-     *     header2, val2
-     *     )
-     * }
- * will be converter to an array of: - *
{@code
-     *      String[] headers = {"header1", "val1", "header2", "val2"};
-     * }
- * - * @param headerMap mapping of header names to header values - * @return an array containing both header names and values - */ - public static String[] toHeaderAndValueArray(Map headerMap) { - return headerMap - .entrySet() - .stream() - .flatMap(entry -> Stream.of(entry.getKey(), entry.getValue())) - .toArray(String[]::new); - } - - public static HeaderPreprocessor createBasicAuthorizationHeaderPreprocessor() { - return createBasicAuthorizationHeaderPreprocessor(false); - } - - public static HeaderPreprocessor createBasicAuthorizationHeaderPreprocessor( - boolean useRawAuthHeader) { - return new ComposeHeaderPreprocessor( - Collections.singletonMap( - AUTHORIZATION, new BasicAuthHeaderValuePreprocessor(useRawAuthHeader)) - ); - } - - public static HeaderPreprocessor createOIDCAuthorizationHeaderPreprocessor( - String oidcAuthURL, - String oidcTokenRequest, - Optional oidcExpiryReduction - ) { - return new ComposeHeaderPreprocessor( - Collections.singletonMap( - AUTHORIZATION, new OIDCAuthHeaderValuePreprocessor(oidcAuthURL, - oidcTokenRequest, oidcExpiryReduction)) - ); - } - - public static HeaderPreprocessor createHeaderPreprocessor(ReadableConfig readableConfig) { - boolean useRawAuthHeader = - readableConfig.get(HttpLookupConnectorOptions.USE_RAW_AUTH_HEADER); - HeaderPreprocessor headerPreprocessor = - HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor( - useRawAuthHeader); - log.info("created HeaderPreprocessor for basic useRawAuthHeader=" + useRawAuthHeader); - log.info("returning HeaderPreprocessor " + headerPreprocessor); - return headerPreprocessor; - } - public static HeaderPreprocessor createOIDCHeaderPreprocessor(ReadableConfig readableConfig) { - HeaderPreprocessor headerPreprocessor = null; - Optional oidcAuthURL = readableConfig - .getOptional(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL); - - if(oidcAuthURL.isPresent()) { - Optional oidcTokenRequest = readableConfig - .getOptional(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST); - - Optional 
oidcExpiryReduction = readableConfig - .getOptional(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION); - headerPreprocessor = HttpHeaderUtils.createOIDCAuthorizationHeaderPreprocessor( - oidcAuthURL.get(), oidcTokenRequest.get(), oidcExpiryReduction); - log.info("created HeaderPreprocessor " + headerPreprocessor - + " for OIDC oidcAuthURL=" + oidcAuthURL - + ", oidcTokenRequest=" + oidcTokenRequest - + ", oidcExpiryReduction=" + oidcExpiryReduction); - } - return headerPreprocessor; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/ProxyConfig.java b/src/main/java/com/getindata/connectors/http/internal/utils/ProxyConfig.java deleted file mode 100644 index e90f62ae..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/utils/ProxyConfig.java +++ /dev/null @@ -1,40 +0,0 @@ -package com.getindata.connectors.http.internal.utils; - -import java.net.Authenticator; -import java.net.PasswordAuthentication; -import java.util.Optional; - -import lombok.Getter; - -@Getter -public class ProxyConfig { - - private final String host; - - private final int port; - - private final Optional authenticator; - - public ProxyConfig(String host, int port, Optional proxyUsername, Optional proxyPassword) { - this.host = host; - this.port = port; - - if(proxyUsername.isPresent() && proxyPassword.isPresent()){ - this.authenticator = Optional.of(new Authenticator() { - @Override - protected PasswordAuthentication getPasswordAuthentication() { - if (getRequestorType().equals(RequestorType.PROXY) && getRequestingHost().equalsIgnoreCase(host)) { - return new PasswordAuthentication(proxyUsername.get(), - proxyPassword.get().toCharArray()); - } else { - return null; - } - } - }); - } else { - this.authenticator = Optional.empty(); - } - - } - -} diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/SerializationSchemaUtils.java b/src/main/java/com/getindata/connectors/http/internal/utils/SerializationSchemaUtils.java deleted file mode 
100644 index 7f0cf37e..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/utils/SerializationSchemaUtils.java +++ /dev/null @@ -1,50 +0,0 @@ -package com.getindata.connectors.http.internal.utils; - -import org.apache.flink.metrics.MetricGroup; -import org.apache.flink.metrics.groups.UnregisteredMetricsGroup; -import org.apache.flink.util.SimpleUserCodeClassLoader; -import org.apache.flink.util.UserCodeClassLoader; - -public final class SerializationSchemaUtils { - - private SerializationSchemaUtils() { - - } - - public static org.apache.flink.api.common.serialization.SerializationSchema - .InitializationContext createSerializationInitContext(Class classForClassLoader) { - - return new org.apache.flink.api.common.serialization.SerializationSchema - .InitializationContext() { - - @Override - public MetricGroup getMetricGroup() { - return new UnregisteredMetricsGroup(); - } - - @Override - public UserCodeClassLoader getUserCodeClassLoader() { - return SimpleUserCodeClassLoader.create(classForClassLoader.getClassLoader()); - } - }; - } - - public static org.apache.flink.api.common.serialization.DeserializationSchema - .InitializationContext createDeserializationInitContext(Class classForClassLoader) { - - return new org.apache.flink.api.common.serialization.DeserializationSchema - .InitializationContext() { - - @Override - public MetricGroup getMetricGroup() { - return new UnregisteredMetricsGroup(); - } - - @Override - public UserCodeClassLoader getUserCodeClassLoader() { - return SimpleUserCodeClassLoader.create(classForClassLoader.getClassLoader()); - } - }; - } - -} diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/ThreadUtils.java b/src/main/java/com/getindata/connectors/http/internal/utils/ThreadUtils.java deleted file mode 100644 index f3e1f772..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/utils/ThreadUtils.java +++ /dev/null @@ -1,17 +0,0 @@ -package com.getindata.connectors.http.internal.utils; - 
-import java.lang.Thread.UncaughtExceptionHandler; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import static com.getindata.connectors.http.internal.utils.ExceptionUtils.stringifyException; - -@Slf4j -@NoArgsConstructor(access = AccessLevel.NONE) -public final class ThreadUtils { - - public static final UncaughtExceptionHandler LOGGING_EXCEPTION_HANDLER = - (t, e) -> log.warn("Thread:" + t + " exited with Exception:" + stringifyException(e)); -} diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/uri/NameValuePair.java b/src/main/java/com/getindata/connectors/http/internal/utils/uri/NameValuePair.java deleted file mode 100644 index cc462afc..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/utils/uri/NameValuePair.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * ==================================================================== - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - * ==================================================================== - * - * This software consists of voluntary contributions made by many - * individuals on behalf of the Apache Software Foundation. 
For more - * information on the Apache Software Foundation, please see - * . - * - * ============================= NOTE ================================= - * This code has been copied from - * https://github.com/apache/httpcomponents-client/tree/rel/v4.5.13 - * and it was changed to use in this project. - * ==================================================================== - */ - -package com.getindata.connectors.http.internal.utils.uri; - -import lombok.Data; -import org.apache.flink.util.Preconditions; - -@Data -public class NameValuePair { - - private final String name; - - private final String value; - - /** - * Default Constructor taking a name and a value. The value may be null. - * - * @param name The name. - * @param value The value. - */ - public NameValuePair(final String name, final String value) { - super(); - this.name = Preconditions.checkNotNull(name, "Name may not be null"); - this.value = value; - } -} diff --git a/src/main/java/com/getindata/connectors/http/internal/utils/uri/ParserCursor.java b/src/main/java/com/getindata/connectors/http/internal/utils/uri/ParserCursor.java deleted file mode 100644 index f92dcfa2..00000000 --- a/src/main/java/com/getindata/connectors/http/internal/utils/uri/ParserCursor.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * ==================================================================== - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - * ==================================================================== - * - * This software consists of voluntary contributions made by many - * individuals on behalf of the Apache Software Foundation. For more - * information on the Apache Software Foundation, please see - * . - * - * ============================= NOTE ================================= - * This code has been copied from - * https://github.com/apache/httpcomponents-client/tree/rel/v4.5.13 - * and it was changed to use in this project. - * ==================================================================== - */ - -package com.getindata.connectors.http.internal.utils.uri; - -import lombok.Getter; - -/** - * This class represents a context of a parsing operation: - *
    - *
  • the current position the parsing operation is expected to start at
  • - *
  • the bounds limiting the scope of the parsing operation
  • - *
- */ -@Getter -class ParserCursor { - - private final int lowerBound; - - private final int upperBound; - - private int pos; - - ParserCursor(final int lowerBound, final int upperBound) { - super(); - if (lowerBound < 0) { - throw new IndexOutOfBoundsException("Lower bound cannot be negative"); - } - if (lowerBound > upperBound) { - throw new IndexOutOfBoundsException("Lower bound cannot be greater then upper bound"); - } - this.lowerBound = lowerBound; - this.upperBound = upperBound; - this.pos = lowerBound; - } - - void updatePos(final int pos) { - if (pos < this.lowerBound) { - throw new IndexOutOfBoundsException( - "pos: " + pos + " < lowerBound: " + this.lowerBound); - } - if (pos > this.upperBound) { - throw new IndexOutOfBoundsException( - "pos: " + pos + " > upperBound: " + this.upperBound); - } - this.pos = pos; - } - - boolean atEnd() { - return this.pos >= this.upperBound; - } - - @Override - public String toString() { - return "[" - + this.lowerBound - + '>' - + this.pos - + '>' - + this.upperBound - + ']'; - } -} diff --git a/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory b/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory deleted file mode 100644 index 943d0b8c..00000000 --- a/src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory +++ /dev/null @@ -1,8 +0,0 @@ -com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSourceFactory -com.getindata.connectors.http.internal.table.lookup.querycreators.ElasticSearchLiteQueryCreatorFactory -com.getindata.connectors.http.internal.table.lookup.querycreators.GenericGetQueryCreatorFactory -com.getindata.connectors.http.internal.table.lookup.querycreators.GenericJsonQueryCreatorFactory -com.getindata.connectors.http.internal.table.lookup.Slf4jHttpLookupPostRequestCallbackFactory -com.getindata.connectors.http.internal.table.sink.HttpDynamicTableSinkFactory 
-com.getindata.connectors.http.internal.table.sink.Slf4jHttpPostRequestCallbackFactory -com.getindata.connectors.http.internal.table.lookup.querycreators.GenericJsonAndUrlQueryCreatorFactory \ No newline at end of file diff --git a/src/test/java/com/getindata/StreamTableJob.java b/src/test/java/com/getindata/StreamTableJob.java deleted file mode 100644 index 95fea0f5..00000000 --- a/src/test/java/com/getindata/StreamTableJob.java +++ /dev/null @@ -1,56 +0,0 @@ -package com.getindata; - -import org.apache.flink.api.common.restartstrategy.RestartStrategies; -import org.apache.flink.api.java.utils.ParameterTool; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.Table; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; - -public class StreamTableJob { - - public static void main(String[] args) { - - ParameterTool parameters = ParameterTool.fromSystemProperties(); - parameters = parameters.mergeWith(ParameterTool.fromArgs(args)); - - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - // env.enableCheckpointing(5000); - env.setRestartStrategy(RestartStrategies.fixedDelayRestart(1000, 1000)); - env.setParallelism(1); - env.disableOperatorChaining(); - env.getConfig().setGlobalJobParameters(parameters); - - StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env); - - tableEnv.executeSql( - "CREATE TABLE Orders (id STRING, id2 STRING, proc_time AS PROCTIME())" - + " WITH (" - + "'connector' = 'datagen', 'rows-per-second' = '1', 'fields.id.kind' = 'sequence'," - + " 'fields.id.start' = '1', 'fields.id.end' = '120'," - + " 'fields.id2.kind' = 'sequence', 'fields.id2.start' = '2'," - + " 'fields.id2.end' = '120')" - ); - tableEnv.executeSql( - "CREATE TABLE Customers (id STRING, id2 STRING, msg STRING, uuid STRING, isActive STRING, balance STRING) WITH ('connector' = 'rest-lookup', 'url' = 'http://localhost:8080/client', " - + "'asyncPolling' = 
'true', " - + "'field.isActive.path' = '$.details.isActive', " - + "'field.balance.path' = '$.details.nestedDetails.balance')"); - - Table resultTable = - tableEnv.sqlQuery( - "SELECT o.id, o.id2, c.msg, c.uuid, c.isActive, c.balance FROM Orders AS o " - + "JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c " - + "ON o.id = c.id AND o.id2 = c.id2"); - - /* DataStream rowDataStream = tableEnv.toDataStream(resultTable); - rowDataStream.print();*/ - - // Table result = tableEnv.sqlQuery("SELECT * FROM Orders"); - // Table result = tableEnv.sqlQuery("SELECT * FROM Customers"); - // Table result = tableEnv.sqlQuery("SELECT * FROM T WHERE T.id > 10"); - - resultTable.execute().print(); - - // env.execute(); - } -} diff --git a/src/test/java/com/getindata/connectors/http/ExceptionUtilsTest.java b/src/test/java/com/getindata/connectors/http/ExceptionUtilsTest.java deleted file mode 100644 index f124a232..00000000 --- a/src/test/java/com/getindata/connectors/http/ExceptionUtilsTest.java +++ /dev/null @@ -1,18 +0,0 @@ -package com.getindata.connectors.http; - -import lombok.extern.slf4j.Slf4j; -import org.junit.jupiter.api.Test; -import static org.assertj.core.api.Assertions.assertThat; - -import com.getindata.connectors.http.internal.utils.ExceptionUtils; - -@Slf4j -class ExceptionUtilsTest { - - @Test - void shouldConvertStackTrace() { - String stringifyException = - ExceptionUtils.stringifyException(new RuntimeException("Test Exception")); - assertThat(stringifyException).contains("java.lang.RuntimeException: Test Exception"); - } -} diff --git a/src/test/java/com/getindata/connectors/http/HttpPostRequestCallbackFactoryTest.java b/src/test/java/com/getindata/connectors/http/HttpPostRequestCallbackFactoryTest.java deleted file mode 100644 index 7d503f99..00000000 --- a/src/test/java/com/getindata/connectors/http/HttpPostRequestCallbackFactoryTest.java +++ /dev/null @@ -1,172 +0,0 @@ -package com.getindata.connectors.http; - -import java.net.http.HttpResponse; -import 
java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Map; -import java.util.concurrent.ExecutionException; -import java.util.stream.Collectors; - -import com.github.tomakehurst.wiremock.WireMockServer; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.TableResult; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; -import static com.github.tomakehurst.wiremock.client.WireMock.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import com.getindata.connectors.http.internal.sink.httpclient.HttpRequest; -import com.getindata.connectors.http.internal.table.lookup.HttpLookupSourceRequestEntry; -import com.getindata.connectors.http.internal.table.sink.HttpDynamicTableSinkFactory; -import static com.getindata.connectors.http.TestLookupPostRequestCallbackFactory.TEST_LOOKUP_POST_REQUEST_CALLBACK_IDENT; -import static com.getindata.connectors.http.TestPostRequestCallbackFactory.TEST_POST_REQUEST_CALLBACK_IDENT; - -public class HttpPostRequestCallbackFactoryTest { - private static final int SERVER_PORT = 9090; - - private WireMockServer wireMockServer; - protected StreamExecutionEnvironment env; - protected StreamTableEnvironment tEnv; - - private static final ArrayList requestEntries = new ArrayList<>(); - - private static final ArrayList - lookupRequestEntries = new ArrayList<>(); - - private static final ArrayList> responses = new ArrayList<>(); - - @BeforeEach - public void setup() { - wireMockServer = new WireMockServer(SERVER_PORT); - wireMockServer.start(); - - env = StreamExecutionEnvironment.getExecutionEnvironment(); - tEnv = StreamTableEnvironment.create(env); - - 
requestEntries.clear(); - responses.clear(); - } - - @AfterEach - public void tearDown() { - wireMockServer.stop(); - } - - @ParameterizedTest - @CsvSource(value = {"single, {\"id\":1}", "batch, [{\"id\":1}]"}) - public void httpPostRequestCallbackFactoryTest(String mode, String expectedRequest) - throws ExecutionException, InterruptedException { - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); - - final String createTable = - String.format( - "CREATE TABLE http (\n" - + " id bigint\n" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'format' = 'json',\n" - + " 'gid.connector.http.sink.request-callback' = '%s',\n" - + " 'gid.connector.http.sink.writer.request.mode' = '%s',\n" - + " 'gid.connector.http.sink.header.Content-Type' = 'application/json'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "http://localhost:" + SERVER_PORT + "/myendpoint", - TEST_POST_REQUEST_CALLBACK_IDENT, - mode - ); - tEnv.executeSql(createTable); - - final String insert = "INSERT INTO http VALUES (1)"; - tEnv.executeSql(insert).await(); - - assertEquals(1, requestEntries.size()); - assertEquals(1, responses.size()); - - String actualRequest = requestEntries.get(0).getElements().stream() - .map(element -> new String(element, StandardCharsets.UTF_8)) - .collect(Collectors.joining()); - - Assertions.assertThat(actualRequest).isEqualToIgnoringNewLines(expectedRequest); - } - - @Test - public void httpLookupPostRequestCallbackFactoryTest() - throws ExecutionException, InterruptedException { - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn( - aResponse().withStatus(200).withBody("{\"customerId\": 1}") - )); - - final String createTable1 = - "CREATE TABLE Orders (\n" + - " proc_time AS PROCTIME(),\n" + - " orderId INT\n" + - ") WITH (\n" + - " 'connector' = 'datagen',\n" + - " 'fields.orderId.kind' = 'sequence',\n" + - " 'fields.orderId.start' = '1',\n" + - " 'fields.orderId.end' = '1'\n" + - ");"; - 
tEnv.executeSql(createTable1); - - final String createTable2 = - String.format( - "CREATE TABLE Customers (\n" - + " `customerId` INT\n" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'format' = 'json',\n" - + " 'gid.connector.http.source.lookup.request-callback' = '%s'\n" - + ")", - "rest-lookup", - "http://localhost:" + SERVER_PORT + "/myendpoint", - TEST_LOOKUP_POST_REQUEST_CALLBACK_IDENT - ); - tEnv.executeSql(createTable2); - - final String joinTable = - "SELECT o.`orderId`, c.`customerId`\n" + - " FROM Orders AS o\n" + - " JOIN Customers FOR SYSTEM_TIME AS OF o.`proc_time` AS c\n" + - " ON o.`orderId` = c.`customerId`;"; - - final TableResult resultTable = tEnv.sqlQuery(joinTable).execute(); - resultTable.await(); - - assertEquals(1, lookupRequestEntries.size()); - assertEquals(1, responses.size()); - } - - public static class TestPostRequestCallback implements HttpPostRequestCallback { - @Override - public void call( - HttpResponse response, - HttpRequest requestEntry, - String endpointUrl, - Map headerMap - ) { - requestEntries.add(requestEntry); - responses.add(response); - } - } - - public static class TestLookupPostRequestCallback - implements HttpPostRequestCallback { - @Override - public void call( - HttpResponse response, - HttpLookupSourceRequestEntry requestEntry, - String endpointUrl, - Map headerMap - ) { - lookupRequestEntries.add(requestEntry); - responses.add(response); - } - } -} diff --git a/src/test/java/com/getindata/connectors/http/TestHelper.java b/src/test/java/com/getindata/connectors/http/TestHelper.java deleted file mode 100644 index 5875110f..00000000 --- a/src/test/java/com/getindata/connectors/http/TestHelper.java +++ /dev/null @@ -1,48 +0,0 @@ -package com.getindata.connectors.http; - -import java.net.URI; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Arrays; -import java.util.Objects; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; -import 
org.junit.jupiter.api.Assertions; -import static org.assertj.core.api.Assertions.assertThat; - -@NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class TestHelper { - - private static final TestHelper INSTANCE = new TestHelper(); - - public static String readTestFile(String pathToFile) { - try { - URI uri = Objects.requireNonNull(INSTANCE.getClass().getResource(pathToFile)).toURI(); - return Files.readString(Path.of(uri)); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - public static void assertPropertyArray( - String[] headerArray, - String propertyName, - String expectedValue) { - // important thing is that we have property followed by its value. - for (int i = 0; i < headerArray.length; i++) { - if (headerArray[i].equals(propertyName)) { - assertThat(headerArray[i + 1]) - .withFailMessage("Property Array does not contain property name, value pairs.") - .isEqualTo(expectedValue); - return; - } - } - Assertions.fail( - String.format( - "Missing property name [%s] in header array %s.", - propertyName, - Arrays.toString(headerArray)) - ); - } -} diff --git a/src/test/java/com/getindata/connectors/http/TestLookupPostRequestCallbackFactory.java b/src/test/java/com/getindata/connectors/http/TestLookupPostRequestCallbackFactory.java deleted file mode 100644 index 3b7df1c4..00000000 --- a/src/test/java/com/getindata/connectors/http/TestLookupPostRequestCallbackFactory.java +++ /dev/null @@ -1,29 +0,0 @@ -package com.getindata.connectors.http; - -import java.util.HashSet; -import java.util.Set; - -import org.apache.flink.configuration.ConfigOption; - -import com.getindata.connectors.http.internal.table.lookup.HttpLookupSourceRequestEntry; - -public class TestLookupPostRequestCallbackFactory - implements HttpPostRequestCallbackFactory { - - public static final String TEST_LOOKUP_POST_REQUEST_CALLBACK_IDENT = - "test-lookup-request-callback"; - - @Override - public HttpPostRequestCallback createHttpPostRequestCallback() { - return new 
HttpPostRequestCallbackFactoryTest.TestLookupPostRequestCallback(); - } - - @Override - public String factoryIdentifier() { return TEST_LOOKUP_POST_REQUEST_CALLBACK_IDENT; } - - @Override - public Set> requiredOptions() { return new HashSet<>(); } - - @Override - public Set> optionalOptions() { return new HashSet<>(); } -} diff --git a/src/test/java/com/getindata/connectors/http/TestPostRequestCallbackFactory.java b/src/test/java/com/getindata/connectors/http/TestPostRequestCallbackFactory.java deleted file mode 100644 index f3ceddb7..00000000 --- a/src/test/java/com/getindata/connectors/http/TestPostRequestCallbackFactory.java +++ /dev/null @@ -1,27 +0,0 @@ -package com.getindata.connectors.http; - -import java.util.HashSet; -import java.util.Set; - -import org.apache.flink.configuration.ConfigOption; - -import com.getindata.connectors.http.internal.sink.httpclient.HttpRequest; - -public class TestPostRequestCallbackFactory implements HttpPostRequestCallbackFactory { - - public static final String TEST_POST_REQUEST_CALLBACK_IDENT = "test-request-callback"; - - @Override - public HttpPostRequestCallback createHttpPostRequestCallback() { - return new HttpPostRequestCallbackFactoryTest.TestPostRequestCallback(); - } - - @Override - public String factoryIdentifier() { return TEST_POST_REQUEST_CALLBACK_IDENT; } - - @Override - public Set> requiredOptions() { return new HashSet<>(); } - - @Override - public Set> optionalOptions() { return new HashSet<>(); } -} diff --git a/src/test/java/com/getindata/connectors/http/app/HttpStubApp.java b/src/test/java/com/getindata/connectors/http/app/HttpStubApp.java deleted file mode 100644 index d029fbd0..00000000 --- a/src/test/java/com/getindata/connectors/http/app/HttpStubApp.java +++ /dev/null @@ -1,35 +0,0 @@ -package com.getindata.connectors.http.app; - -import com.github.tomakehurst.wiremock.WireMockServer; -import com.github.tomakehurst.wiremock.core.WireMockConfiguration; -import 
com.github.tomakehurst.wiremock.stubbing.StubMapping; -import lombok.extern.slf4j.Slf4j; -import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; -import static com.github.tomakehurst.wiremock.client.WireMock.get; -import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo; - -@Slf4j -public class HttpStubApp { - - private static final String URL = "/client"; - - private static WireMockServer wireMockServer; - - @SuppressWarnings("unchecked") - public static void main(String[] args) { - wireMockServer = - new WireMockServer( - WireMockConfiguration.wireMockConfig().port(8080).extensions(JsonTransform.class)); - wireMockServer.start(); - - wireMockServer.addStubMapping(setupServerStub()); - } - - private static StubMapping setupServerStub() { - return wireMockServer.stubFor( - get(urlPathEqualTo(URL)) - .willReturn( - aResponse() - .withTransformers(JsonTransform.NAME))); - } -} diff --git a/src/test/java/com/getindata/connectors/http/app/JsonTransform.java b/src/test/java/com/getindata/connectors/http/app/JsonTransform.java deleted file mode 100644 index 7d646cf4..00000000 --- a/src/test/java/com/getindata/connectors/http/app/JsonTransform.java +++ /dev/null @@ -1,108 +0,0 @@ -package com.getindata.connectors.http.app; - -import java.util.UUID; -import java.util.concurrent.atomic.AtomicInteger; - -import com.github.tomakehurst.wiremock.common.FileSource; -import com.github.tomakehurst.wiremock.extension.Parameters; -import com.github.tomakehurst.wiremock.extension.ResponseTransformer; -import com.github.tomakehurst.wiremock.http.Request; -import com.github.tomakehurst.wiremock.http.Response; - -/** - * Wiremock Extension that prepares HTTP REST endpoint response body. This extension is stateful, - * every next response will have values like id == counter, id2 == counter + 1 - * and uuid = randomValue value in its response, - * where counter is incremented for every subsequent request. 
- * - * This class is used for AppDemo Wiremock. - */ -public class JsonTransform extends ResponseTransformer { - - public static final String NAME = "JsonTransform"; - - private static final String RESULT_JSON = - "{\n" - + "\t\"id\": \"&COUNTER&\",\n" - + "\t\"id2\": \"&COUNTER_2&\",\n" - + "\t\"uuid\": \"&UUID&\",\n" - + "\t\"picture\": \"http://placehold.it/32x32\",\n" - + "\t\"msg\": \"&PARAM&, cnt: &COUNTER&\",\n" - + "\t\"age\": 30,\n" - + "\t\"eyeColor\": \"green\",\n" - + "\t\"name\": \"Marva Fischer\",\n" - + "\t\"gender\": \"female\",\n" - + "\t\"company\": \"SILODYNE\",\n" - + "\t\"email\": \"marvafischer@silodyne.com\",\n" - + "\t\"phone\": \"+1 (990) 562-2120\",\n" - + "\t\"address\": \"601 Auburn Place, Bynum, New York, 7057\",\n" - + "\t\"about\": \"Proident Lorem et duis nisi tempor elit occaecat laboris" - + " dolore magna Lorem consequat. Deserunt velit minim nisi consectetur duis " - + "amet labore cupidatat. Pariatur sunt occaecat qui reprehenderit ipsum ex culpa " - + "ullamco ex duis adipisicing commodo sunt. Ad cupidatat magna ad in officia " - + "irure aute duis culpa et. Magna esse adipisicing consequat occaecat. 
Excepteur amet " - + "dolore occaecat sit officia dolore elit in cupidatat non anim.\\r\\n\",\n" - + "\t\"registered\": \"2020-07-11T11:13:32 -02:00\",\n" - + "\t\"latitude\": -35.237843,\n" - + "\t\"longitude\": 60.386104,\n" - + "\t\"tags\": [\n" - + "\t\t\"officia\",\n" - + "\t\t\"eiusmod\",\n" - + "\t\t\"labore\",\n" - + "\t\t\"ex\",\n" - + "\t\t\"aliqua\",\n" - + "\t\t\"consectetur\",\n" - + "\t\t\"excepteur\"\n" - + "\t],\n" - + "\t\"friends\": [\n" - + "\t\t{\n" - + "\t\t\t\"id\": 0,\n" - + "\t\t\t\"name\": \"Kemp Newman\"\n" - + "\t\t},\n" - + "\t\t{\n" - + "\t\t\t\"id\": 1,\n" - + "\t\t\t\"name\": \"Sears Blackburn\"\n" - + "\t\t},\n" - + "\t\t{\n" - + "\t\t\t\"id\": 2,\n" - + "\t\t\t\"name\": \"Lula Rogers\"\n" - + "\t\t}\n" - + "\t],\n" - + "\t\"details\": {\n" - + "\t\t\"isActive\": true,\n" - + "\t\t\"nestedDetails\": {\n" - + "\t\t\t\"index\": 0,\n" - + "\t\t\t\"guid\": \"d81fc542-6b49-4d59-8fb9-d57430d4871d\",\n" - + "\t\t\t\"balance\": \"$1,729.34\"\n" - + "\t\t}\n" - + "\t},\n" - + "\t\"greeting\": \"Hello, Marva Fischer! 
You have 7 unread messages.\",\n" - + "\t\"favoriteFruit\": \"banana\"\n" - + "}"; - private final AtomicInteger counter = new AtomicInteger(0); - - @Override - public Response transform( - Request request, Response response, FileSource files, Parameters parameters) { - int cnt = counter.getAndIncrement(); - - return Response.response() - .body(generateResponse(request.getUrl(), cnt)) - .status(response.getStatus()) - .statusMessage(response.getStatusMessage()) - .build(); - } - - @Override - public String getName() { - return NAME; - } - - private String generateResponse(String param, int counter) { - return RESULT_JSON - .replaceAll("&PARAM&", param) - .replaceAll("&COUNTER&", String.valueOf(counter)) - .replaceAll("&COUNTER_2&", String.valueOf(counter + 1)) - .replaceAll("&UUID&", UUID.randomUUID().toString()); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/BasicAuthHeaderValuePreprocessorTest.java b/src/test/java/com/getindata/connectors/http/internal/BasicAuthHeaderValuePreprocessorTest.java deleted file mode 100644 index 9f755e30..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/BasicAuthHeaderValuePreprocessorTest.java +++ /dev/null @@ -1,26 +0,0 @@ -package com.getindata.connectors.http.internal; - -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; -import static org.assertj.core.api.Assertions.assertThat; - -class BasicAuthHeaderValuePreprocessorTest { - - @ParameterizedTest - @CsvSource({ - "user:password, Basic dXNlcjpwYXNzd29yZA==, false", - "Basic dXNlcjpwYXNzd29yZA==, Basic dXNlcjpwYXNzd29yZA==, false", - "abc123, abc123, true", - "Basic dXNlcjpwYXNzd29yZA==, Basic dXNlcjpwYXNzd29yZA==, true", - "Bearer dXNlcjpwYXNzd29yZA==, Bearer dXNlcjpwYXNzd29yZA==, true" - }) - public void testAuthorizationHeaderPreprocess( - String headerRawValue, - String expectedHeaderValue, - boolean useRawAuthHeader) { - BasicAuthHeaderValuePreprocessor preprocessor = - new 
BasicAuthHeaderValuePreprocessor(useRawAuthHeader); - String headerValue = preprocessor.preprocessHeaderValue(headerRawValue); - assertThat(headerValue).isEqualTo(expectedHeaderValue); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/ComposeHeaderPreprocessorTest.java b/src/test/java/com/getindata/connectors/http/internal/ComposeHeaderPreprocessorTest.java deleted file mode 100644 index 02a86d50..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/ComposeHeaderPreprocessorTest.java +++ /dev/null @@ -1,21 +0,0 @@ -package com.getindata.connectors.http.internal; - -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; -import static org.assertj.core.api.Assertions.assertThat; - -public class ComposeHeaderPreprocessorTest { - @ParameterizedTest - @CsvSource({ - "a, a", - "a123, a123", - "user:password, user:password", - "Basic dXNlcjpwYXNzd29yZA==, Basic dXNlcjpwYXNzd29yZA==" - }) - public void testNoPreprocessors(String rawValue, String expectedValue) { - var noPreprocessorHeaderPreprocessor = new ComposeHeaderPreprocessor(null); - var obtainedValue = noPreprocessorHeaderPreprocessor - .preprocessValueForHeader("someHeader", rawValue); - assertThat(obtainedValue).isEqualTo(expectedValue); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/HttpsConnectionTestBase.java b/src/test/java/com/getindata/connectors/http/internal/HttpsConnectionTestBase.java deleted file mode 100644 index c597aa56..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/HttpsConnectionTestBase.java +++ /dev/null @@ -1,47 +0,0 @@ -package com.getindata.connectors.http.internal; - -import java.util.Properties; - -import com.github.tomakehurst.wiremock.WireMockServer; - -import com.getindata.connectors.http.HttpPostRequestCallback; -import com.getindata.connectors.http.internal.sink.httpclient.HttpRequest; -import 
com.getindata.connectors.http.internal.table.sink.Slf4jHttpPostRequestCallback; -import com.getindata.connectors.http.internal.utils.HttpHeaderUtils; - -public abstract class HttpsConnectionTestBase { - - public static final int SERVER_PORT = 9090; - - public static final int HTTPS_SERVER_PORT = 8443; - - protected static final String ENDPOINT = "/myendpoint"; - - protected static final String CERTS_PATH = "src/test/resources/security/certs/"; - - protected static final String SERVER_KEYSTORE_PATH = - "src/test/resources/security/certs/serverKeyStore.jks"; - - protected static final String SERVER_TRUSTSTORE_PATH = - "src/test/resources/security/certs/serverTrustStore.jks"; - - protected WireMockServer wireMockServer; - - protected Properties properties; - - protected HeaderPreprocessor headerPreprocessor; - - protected HttpPostRequestCallback postRequestCallback = - new Slf4jHttpPostRequestCallback(); - - public void setUp() { - this.properties = new Properties(); - this.headerPreprocessor = HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor(); - } - - public void tearDown() { - if (wireMockServer != null) { - wireMockServer.stop(); - } - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/config/ConfigExceptionTest.java b/src/test/java/com/getindata/connectors/http/internal/config/ConfigExceptionTest.java deleted file mode 100644 index a661af9f..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/config/ConfigExceptionTest.java +++ /dev/null @@ -1,20 +0,0 @@ -package com.getindata.connectors.http.internal.config; - -import org.junit.jupiter.api.Test; -import static org.assertj.core.api.Assertions.assertThat; - -class ConfigExceptionTest { - - @Test - public void testTemplateMessageWithNull() { - ConfigException exception = new ConfigException("myProp", -1, null); - assertThat(exception.getMessage()).isEqualTo("Invalid value -1 for configuration myProp"); - } - - @Test - public void testTemplateMessage() { - 
ConfigException exception = new ConfigException("myProp", -1, "Invalid test value."); - assertThat(exception.getMessage()) - .isEqualTo("Invalid value -1 for configuration myProp: Invalid test value."); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/retry/RetryConfigProviderTest.java b/src/test/java/com/getindata/connectors/http/internal/retry/RetryConfigProviderTest.java deleted file mode 100644 index db0fa8a9..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/retry/RetryConfigProviderTest.java +++ /dev/null @@ -1,63 +0,0 @@ -package com.getindata.connectors.http.internal.retry; - -import java.util.stream.IntStream; - -import org.apache.flink.configuration.Configuration; -import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.mockStatic; - -class RetryConfigProviderTest { - - @Test - void verifyFixedDelayRetryConfig() { - var config = new Configuration(); - config.setString("gid.connector.http.source.lookup.retry-strategy.type", "fixed-delay"); - config.setString("gid.connector.http.source.lookup.retry-strategy.fixed-delay.delay", "10s"); - config.setInteger("lookup.max-retries", 12); - - var retryConfig = RetryConfigProvider.create(config); - - assertEquals(13, retryConfig.getMaxAttempts()); - IntStream.range(1, 12).forEach(attempt -> - assertEquals(10000, retryConfig.getIntervalFunction().apply(attempt)) - ); - } - - @Test - void verifyExponentialDelayConfig() { - var config = new Configuration(); - config.setString("gid.connector.http.source.lookup.retry-strategy.type", "exponential-delay"); - config.setString("gid.connector.http.source.lookup.retry-strategy.exponential-delay.initial-backoff", "15ms"); - config.setString("gid.connector.http.source.lookup.retry-strategy.exponential-delay.max-backoff", "120ms"); - 
config.setInteger("gid.connector.http.source.lookup.retry-strategy.exponential-delay.backoff-multiplier", 2); - config.setInteger("lookup.max-retries", 6); - - var retryConfig = RetryConfigProvider.create(config); - var intervalFunction = retryConfig.getIntervalFunction(); - - assertEquals(7, retryConfig.getMaxAttempts()); - assertEquals(15, intervalFunction.apply(1)); - assertEquals(30, intervalFunction.apply(2)); - assertEquals(60, intervalFunction.apply(3)); - assertEquals(120, intervalFunction.apply(4)); - assertEquals(120, intervalFunction.apply(5)); - assertEquals(120, intervalFunction.apply(6)); - } - - @Test - void failWhenStrategyIsUnsupported() { - var config = new Configuration(); - config.setString("gid.connector.http.source.lookup.retry-strategy.type", "dummy"); - - try (var mockedStatic = mockStatic(RetryStrategyType.class)) { - var dummyStrategy = mock(RetryStrategyType.class); - mockedStatic.when(() -> RetryStrategyType.fromCode("dummy")).thenReturn(dummyStrategy); - - assertThrows(IllegalArgumentException.class, - () -> RetryConfigProvider.create(config)); - } - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkBuilderTest.java b/src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkBuilderTest.java deleted file mode 100644 index 21a2c4ee..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkBuilderTest.java +++ /dev/null @@ -1,74 +0,0 @@ -package com.getindata.connectors.http.internal.sink; - -import java.nio.charset.StandardCharsets; -import java.util.List; -import java.util.concurrent.CompletableFuture; - -import org.apache.flink.connector.base.sink.writer.ElementConverter; -import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.getindata.connectors.http.HttpSink; -import com.getindata.connectors.http.internal.SinkHttpClient; -import com.getindata.connectors.http.internal.SinkHttpClientResponse; - -public class 
HttpSinkBuilderTest { - - private static final ElementConverter ELEMENT_CONVERTER = - (s, context) -> new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8)); - - @Test - public void testEmptyUrl() { - assertThrows( - IllegalArgumentException.class, - () -> HttpSink.builder().setElementConverter(ELEMENT_CONVERTER) - .setSinkHttpClientBuilder( - ( - properties, - httpPostRequestCallback, - headerPreprocessor, - requestSubmitterFactory) -> new MockHttpClient()) - .setEndpointUrl("") - .build() - ); - } - - @Test - public void testNullUrl() { - assertThrows( - IllegalArgumentException.class, - () -> HttpSink.builder() - .setElementConverter(ELEMENT_CONVERTER) - .setSinkHttpClientBuilder( - ( - properties, - httpPostRequestCallback, - headerPreprocessor, - requestSubmitterFactory) -> new MockHttpClient()) - .build() - ); - } - - @Test - public void testNullHttpClient() { - assertThrows( - NullPointerException.class, - () -> HttpSink.builder() - .setElementConverter(ELEMENT_CONVERTER) - .setSinkHttpClientBuilder(null) - .setEndpointUrl("localhost:8000") - .build() - ); - } - - private static class MockHttpClient implements SinkHttpClient { - - MockHttpClient() {} - - @Override - public CompletableFuture putRequests( - List requestEntries, String endpointUrl) { - throw new RuntimeException("Mock implementation of HttpClient"); - } - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkConnectionTest.java b/src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkConnectionTest.java deleted file mode 100644 index cbf33c2f..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkConnectionTest.java +++ /dev/null @@ -1,325 +0,0 @@ -package com.getindata.connectors.http.internal.sink; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.*; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.IntStream; - -import 
com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.github.tomakehurst.wiremock.WireMockServer; -import com.github.tomakehurst.wiremock.http.Fault; -import com.github.tomakehurst.wiremock.stubbing.ServeEvent; -import org.apache.flink.configuration.ConfigConstants; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.configuration.MetricOptions; -import org.apache.flink.metrics.Counter; -import org.apache.flink.metrics.Metric; -import org.apache.flink.metrics.MetricConfig; -import org.apache.flink.metrics.MetricGroup; -import org.apache.flink.metrics.reporter.MetricReporter; -import org.apache.flink.metrics.reporter.MetricReporterFactory; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.testutils.junit.extensions.ContextClassLoaderExtension; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.RegisterExtension; -import static com.github.tomakehurst.wiremock.client.WireMock.*; -import static com.github.tomakehurst.wiremock.stubbing.Scenario.STARTED; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import com.getindata.connectors.http.HttpSink; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.config.SinkRequestSubmitMode; -import com.getindata.connectors.http.internal.sink.httpclient.JavaNetSinkHttpClient; - -public class HttpSinkConnectionTest { - - @RegisterExtension - static final ContextClassLoaderExtension CONTEXT_CLASS_LOADER_EXTENSION = - ContextClassLoaderExtension.builder() - .withServiceEntry( - MetricReporterFactory.class, - SendErrorsTestReporterFactory.class.getName()) - .build(); - - - private static final int SERVER_PORT = 9090; - - private static final 
int HTTPS_SERVER_PORT = 8443; - - private static final Set messageIds = IntStream.range(0, 50) - .boxed() - .collect(Collectors.toSet()); - - private static final List messages = messageIds.stream() - .map(i -> "{\"http-sink-id\":" + i + "}") - .collect(Collectors.toList()); - - private StreamExecutionEnvironment env; - - private WireMockServer wireMockServer; - - @BeforeEach - public void setUp() { - SendErrorsTestReporterFactory.reset(); - - env = StreamExecutionEnvironment.getExecutionEnvironment(new Configuration() { - { - setString( - ConfigConstants.METRICS_REPORTER_PREFIX - + "test." - + MetricOptions.REPORTER_FACTORY_CLASS.key(), - SendErrorsTestReporterFactory.class.getName()); - } - }); - - wireMockServer = new WireMockServer(SERVER_PORT, HTTPS_SERVER_PORT); - wireMockServer.start(); - } - - @AfterEach - public void tearDown() { - wireMockServer.stop(); - } - - @Test - public void testConnection_singleRequestMode() throws Exception { - - @SuppressWarnings("unchecked") - Function> responseMapper = response -> { - try { - return new ObjectMapper().readValue(response.getRequest().getBody(), HashMap.class); - } catch (IOException e) { - throw new RuntimeException(e); - } - }; - - List> responses = - testConnection(SinkRequestSubmitMode.SINGLE, responseMapper); - - var idsSet = new HashSet<>(messageIds); - for (var request : responses) { - var el = (Integer) request.get("http-sink-id"); - assertTrue(idsSet.contains(el)); - idsSet.remove(el); - } - - // check that we hot responses for all requests. 
- assertTrue(idsSet.isEmpty()); - } - - @Test - public void testConnection_batchRequestMode() throws Exception { - - Function>> responseMapper = response -> { - try { - return new ObjectMapper().readValue(response.getRequest().getBody(), - new TypeReference>>(){}); - } catch (IOException e) { - throw new RuntimeException(e); - } - }; - - List>> responses = - testConnection(SinkRequestSubmitMode.BATCH, responseMapper); - - var idsSet = new HashSet<>(messageIds); - for (var requests : responses) { - for (var request : requests) { - var el = (Integer) request.get("http-sink-id"); - assertTrue(idsSet.contains(el)); - idsSet.remove(el); - } - } - - // check that we hot responses for all requests. - assertTrue(idsSet.isEmpty()); - } - - public List testConnection( - SinkRequestSubmitMode mode, - Function responseMapper) throws Exception { - - String endpoint = "/myendpoint"; - String contentTypeHeader = "application/json"; - - wireMockServer.stubFor(any(urlPathEqualTo(endpoint)) - .withHeader("Content-Type", equalTo(contentTypeHeader)) - .willReturn( - aResponse().withHeader("Content-Type", contentTypeHeader) - .withStatus(200) - .withBody("{}"))); - - var source = env.fromCollection(messages); - var httpSink = HttpSink.builder() - .setEndpointUrl("http://localhost:" + SERVER_PORT + endpoint) - .setElementConverter( - (s, _context) -> - new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8))) - .setSinkHttpClientBuilder(JavaNetSinkHttpClient::new) - .setProperty( - HttpConnectorConfigConstants.SINK_HEADER_PREFIX + "Content-Type", - contentTypeHeader) - .setProperty( - HttpConnectorConfigConstants.SINK_HTTP_REQUEST_MODE, - mode.getMode() - ) - .build(); - source.sinkTo(httpSink); - env.execute("Http Sink test connection"); - - var responses = wireMockServer.getAllServeEvents(); - assertTrue(responses.stream() - .allMatch(response -> Objects.equals(response.getRequest().getUrl(), endpoint))); - assertTrue( - responses.stream().allMatch(response -> 
response.getResponse().getStatus() == 200)); - assertTrue(responses.stream() - .allMatch(response -> Objects.equals(response.getRequest().getUrl(), endpoint))); - assertTrue( - responses.stream().allMatch(response -> response.getResponse().getStatus() == 200)); - - List collect = responses.stream().map(responseMapper).collect(Collectors.toList()); - assertTrue(collect.stream().allMatch(Objects::nonNull)); - return collect; - } - - @Test - public void testServerErrorConnection() throws Exception { - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")) - .withHeader("Content-Type", equalTo("application/json")) - .inScenario("Retry Scenario") - .whenScenarioStateIs(STARTED) - .willReturn(serverError()) - .willSetStateTo("Cause Success")); - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")) - .withHeader("Content-Type", equalTo("application/json")) - .inScenario("Retry Scenario") - .whenScenarioStateIs("Cause Success") - .willReturn(aResponse().withStatus(200)) - .willSetStateTo("Cause Success")); - - var source = env.fromCollection(List.of(messages.get(0))); - var httpSink = HttpSink.builder() - .setEndpointUrl("http://localhost:" + SERVER_PORT + "/myendpoint") - .setElementConverter( - (s, _context) -> - new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8))) - .setSinkHttpClientBuilder(JavaNetSinkHttpClient::new) - .build(); - source.sinkTo(httpSink); - env.execute("Http Sink test failed connection"); - - assertEquals(1, SendErrorsTestReporterFactory.getCount()); - // TODO: reintroduce along with the retries - // var postedRequests = wireMockServer - // .findAll(postRequestedFor(urlPathEqualTo("/myendpoint"))); - // assertEquals(2, postedRequests.size()); - // assertEquals(postedRequests.get(0).getBodyAsString(), - // postedRequests.get(1).getBodyAsString()); - } - - @Test - public void testFailedConnection() throws Exception { - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")) - .withHeader("Content-Type", 
equalTo("application/json")) - .inScenario("Retry Scenario") - .whenScenarioStateIs(STARTED) - .willReturn(aResponse().withFault(Fault.EMPTY_RESPONSE)) - .willSetStateTo("Cause Success")); - - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")) - .withHeader("Content-Type", equalTo("application/json")) - .inScenario("Retry Scenario") - .whenScenarioStateIs("Cause Success") - .willReturn(aResponse().withStatus(200)) - .willSetStateTo("Cause Success")); - - var source = env.fromCollection(List.of(messages.get(0))); - var httpSink = HttpSink.builder() - .setEndpointUrl("http://localhost:" + SERVER_PORT + "/myendpoint") - .setElementConverter( - (s, _context) -> - new HttpSinkRequestEntry("POST", - s.getBytes(StandardCharsets.UTF_8))) - .setSinkHttpClientBuilder(JavaNetSinkHttpClient::new) - .build(); - source.sinkTo(httpSink); - env.execute("Http Sink test failed connection"); - - assertEquals(1, SendErrorsTestReporterFactory.getCount()); - // var postedRequests = wireMockServer - // .findAll(postRequestedFor(urlPathEqualTo("/myendpoint"))); - // assertEquals(2, postedRequests.size()); - // assertEquals(postedRequests.get(0).getBodyAsString(), - // postedRequests.get(1).getBodyAsString()); - } - - @Test - public void testFailedConnection404OnWhiteList() throws Exception { - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")) - .withHeader("Content-Type", equalTo("application/json")) - .willReturn(aResponse().withBody("404 body").withStatus(404))); - - var source = env.fromCollection(List.of(messages.get(0))); - var httpSink = HttpSink.builder() - .setEndpointUrl("http://localhost:" + SERVER_PORT + "/myendpoint") - .setElementConverter( - (s, _context) -> - new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8))) - .setSinkHttpClientBuilder(JavaNetSinkHttpClient::new) - .setProperty("gid.connector.http.sink.error.code.exclude", "404, 405") - .setProperty("gid.connector.http.sink.error.code", "4XX") - .build(); - source.sinkTo(httpSink); - 
env.execute("Http Sink test failed connection"); - - assertEquals(0, SendErrorsTestReporterFactory.getCount()); - } - - // must be public because of the reflection - public static class SendErrorsTestReporterFactory - implements MetricReporter, MetricReporterFactory { - static volatile List numRecordsSendErrors = null; - - public static long getCount() { - return numRecordsSendErrors.stream().map(Counter::getCount).reduce(0L, Long::sum); - } - - public static void reset() { - numRecordsSendErrors = new ArrayList<>(); - } - - @Override - public void open(MetricConfig metricConfig) { - } - - @Override - public void close() { - } - - @Override - public void notifyOfAddedMetric( - Metric metric, - String s, - MetricGroup metricGroup) { - - if ("numRecordsSendErrors".equals(s)) { - numRecordsSendErrors.add((Counter) metric); - } - } - - @Override - public void notifyOfRemovedMetric(Metric metric, String s, MetricGroup metricGroup) { - } - - @Override - public MetricReporter createMetricReporter(Properties properties) { - return new SendErrorsTestReporterFactory(); - } - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkWriterStateSerializerTest.java b/src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkWriterStateSerializerTest.java deleted file mode 100644 index 7e143547..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/sink/HttpSinkWriterStateSerializerTest.java +++ /dev/null @@ -1,30 +0,0 @@ -package com.getindata.connectors.http.internal.sink; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; - -import org.apache.flink.connector.base.sink.writer.BufferedRequestState; -import org.apache.flink.connector.base.sink.writer.ElementConverter; -import org.junit.jupiter.api.Test; -import static org.apache.flink.connector.base.sink.writer.AsyncSinkWriterTestUtils.assertThatBufferStatesAreEqual; -import static 
org.apache.flink.connector.base.sink.writer.AsyncSinkWriterTestUtils.getTestState; - -public class HttpSinkWriterStateSerializerTest { - - private static final ElementConverter ELEMENT_CONVERTER = - (s, _context) -> - new HttpSinkRequestEntry("POST", s.getBytes(StandardCharsets.UTF_8)); - - @Test - public void testSerializeAndDeserialize() throws IOException { - BufferedRequestState expectedState = - getTestState(ELEMENT_CONVERTER, - httpSinkRequestEntry -> Math.toIntExact(httpSinkRequestEntry.getSizeInBytes())); - - HttpSinkWriterStateSerializer serializer = new HttpSinkWriterStateSerializer(); - BufferedRequestState actualState = - serializer.deserialize(1, serializer.serialize(expectedState)); - - assertThatBufferStatesAreEqual(actualState, expectedState); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitterFactoryTest.java b/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitterFactoryTest.java deleted file mode 100644 index 3a9865a4..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitterFactoryTest.java +++ /dev/null @@ -1,86 +0,0 @@ -package com.getindata.connectors.http.internal.sink.httpclient; - -import java.util.Properties; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.getindata.connectors.http.internal.config.ConfigException; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; - -class BatchRequestSubmitterFactoryTest { - - @ParameterizedTest - @ValueSource(ints = {0, -1}) - public void shouldThrowIfInvalidDefaultSize(int invalidArgument) { - assertThrows( - IllegalArgumentException.class, - () -> new BatchRequestSubmitterFactory(invalidArgument) - ); 
- } - - @Test - public void shouldCreateSubmitterWithDefaultBatchSize() { - - int defaultBatchSize = 10; - BatchRequestSubmitter submitter = new BatchRequestSubmitterFactory(defaultBatchSize) - .createSubmitter(new Properties(), new String[0]); - - assertThat(submitter.getBatchSize()).isEqualTo(defaultBatchSize); - } - - @ParameterizedTest - @ValueSource(strings = {"1", "2"}) - public void shouldCreateSubmitterWithCustomBatchSize(String batchSize) { - - Properties properties = new Properties(); - properties.setProperty( - HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, - batchSize - ); - - BatchRequestSubmitter submitter = new BatchRequestSubmitterFactory(10) - .createSubmitter(properties, new String[0]); - - assertThat(submitter.getBatchSize()).isEqualTo(Integer.valueOf(batchSize)); - } - - @ParameterizedTest - @ValueSource(strings = {"0", "-1"}) - public void shouldThrowIfBatchSizeToSmall(String invalidBatchSize) { - - Properties properties = new Properties(); - properties.setProperty( - HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, - invalidBatchSize - ); - - BatchRequestSubmitterFactory factory = new BatchRequestSubmitterFactory(10); - - assertThrows( - ConfigException.class, - () -> factory.createSubmitter(properties, new String[0]) - ); - } - - @ParameterizedTest - @ValueSource(strings = {"1.1", "2,2", "hello"}) - public void shouldThrowIfInvalidBatchSize(String invalidBatchSize) { - - Properties properties = new Properties(); - properties.setProperty( - HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, - invalidBatchSize - ); - - BatchRequestSubmitterFactory factory = new BatchRequestSubmitterFactory(10); - - assertThrows( - ConfigException.class, - () -> factory.createSubmitter(properties, new String[0]) - ); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitterTest.java 
b/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitterTest.java deleted file mode 100644 index c6581255..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/BatchRequestSubmitterTest.java +++ /dev/null @@ -1,96 +0,0 @@ -package com.getindata.connectors.http.internal.sink.httpclient; - -import java.net.http.HttpClient; -import java.util.List; -import java.util.Properties; -import java.util.concurrent.CompletableFuture; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import java.util.stream.Stream; - -import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.CsvSource; -import org.junit.jupiter.params.provider.MethodSource; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.sink.HttpSinkRequestEntry; - -@ExtendWith(MockitoExtension.class) -class BatchRequestSubmitterTest { - - @Mock - private HttpClient mockHttpClient; - - @ParameterizedTest - @CsvSource(value = {"50, 1", "5, 1", "3, 2", "2, 3", "1, 5"}) - public void submitBatches(int batchSize, int expectedNumberOfBatchRequests) { - - Properties properties = new Properties(); - properties.setProperty( - HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, - String.valueOf(batchSize) - ); - - when(mockHttpClient.sendAsync(any(), any())).thenReturn(new CompletableFuture<>()); - - BatchRequestSubmitter submitter = new BatchRequestSubmitter( - properties, - new String[0], - mockHttpClient - ); - - submitter.submit( - "http://hello.pl", - 
IntStream.range(0, 5) - .mapToObj(val -> new HttpSinkRequestEntry("PUT", new byte[0])) - .collect(Collectors.toList()) - ); - - verify(mockHttpClient, times(expectedNumberOfBatchRequests)).sendAsync(any(), any()); - } - - private static Stream httpRequestMethods() { - return Stream.of( - Arguments.of(List.of("PUT", "PUT", "PUT", "PUT", "POST"), 2), - Arguments.of(List.of("PUT", "PUT", "PUT", "POST", "PUT"), 3), - Arguments.of(List.of("POST", "PUT", "POST", "POST", "PUT"), 4) - ); - } - @ParameterizedTest - @MethodSource("httpRequestMethods") - public void shouldSplitBatchPerHttpMethod( - List httpMethods, - int expectedNumberOfBatchRequests) { - - Properties properties = new Properties(); - properties.setProperty( - HttpConnectorConfigConstants.SINK_HTTP_BATCH_REQUEST_SIZE, - String.valueOf(50) - ); - - when(mockHttpClient.sendAsync(any(), any())).thenReturn(new CompletableFuture<>()); - - BatchRequestSubmitter submitter = new BatchRequestSubmitter( - properties, - new String[0], - mockHttpClient - ); - - submitter.submit( - "http://hello.pl", - httpMethods.stream() - .map(method -> new HttpSinkRequestEntry(method, new byte[0])) - .collect(Collectors.toList()) - ); - - verify(mockHttpClient, times(expectedNumberOfBatchRequests)).sendAsync(any(), any()); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetSinkHttpClientConnectionTest.java b/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetSinkHttpClientConnectionTest.java deleted file mode 100644 index 345687b5..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/JavaNetSinkHttpClientConnectionTest.java +++ /dev/null @@ -1,369 +0,0 @@ -package com.getindata.connectors.http.internal.sink.httpclient; - -import java.io.File; -import java.util.Collections; -import java.util.Properties; - -import com.github.tomakehurst.wiremock.WireMockServer; -import org.junit.jupiter.api.AfterEach; -import 
org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; -import org.junit.jupiter.params.provider.ValueSource; -import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; -import static com.github.tomakehurst.wiremock.client.WireMock.any; -import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo; -import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertAll; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.getindata.connectors.http.internal.HttpsConnectionTestBase; -import com.getindata.connectors.http.internal.SinkHttpClientResponse; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.sink.HttpSinkRequestEntry; - -class JavaNetSinkHttpClientConnectionTest extends HttpsConnectionTestBase { - - private RequestSubmitterFactory perRequestSubmitterFactory; - - private RequestSubmitterFactory batchRequestSubmitterFactory; - - @BeforeEach - public void setUp() { - super.setUp(); - this.perRequestSubmitterFactory = new PerRequestRequestSubmitterFactory(); - this.batchRequestSubmitterFactory = new BatchRequestSubmitterFactory(50); - } - - @AfterEach - public void tearDown() { - super.tearDown(); - } - - @Test - public void testHttpConnection() { - - wireMockServer = new WireMockServer(SERVER_PORT); - wireMockServer.start(); - mockEndPoint(wireMockServer); - - testSinkClientForConnection( - new Properties(), - "http://localhost:", - SERVER_PORT, - perRequestSubmitterFactory); - - testSinkClientForConnection( - new Properties(), - "http://localhost:", - SERVER_PORT, - batchRequestSubmitterFactory); - } - - @Test - public void testHttpsConnectionWithSelfSignedCert() { - - File 
keyStoreFile = new File(SERVER_KEYSTORE_PATH); - - wireMockServer = new WireMockServer(options() - .httpsPort(HTTPS_SERVER_PORT) - .httpDisabled(true) - .keystorePath(keyStoreFile.getAbsolutePath()) - .keystorePassword("password") - .keyManagerPassword("password") - ); - - wireMockServer.start(); - mockEndPoint(wireMockServer); - - properties.setProperty(HttpConnectorConfigConstants.ALLOW_SELF_SIGNED, "true"); - - testSinkClientForConnection( - properties, - "https://localhost:", - HTTPS_SERVER_PORT, - perRequestSubmitterFactory); - - testSinkClientForConnection( - properties, - "https://localhost:", - HTTPS_SERVER_PORT, - batchRequestSubmitterFactory); - } - - @ParameterizedTest - @ValueSource(strings = {"ca.crt", "server.crt", "ca_server_bundle.cert.pem"}) - public void testHttpsConnectionWithAddedCerts(String certName) { - - File keyStoreFile = new File(SERVER_KEYSTORE_PATH); - File trustedCert = new File(CERTS_PATH + certName); - - wireMockServer = new WireMockServer(options() - .httpsPort(HTTPS_SERVER_PORT) - .httpDisabled(true) - .keystorePath(keyStoreFile.getAbsolutePath()) - .keystorePassword("password") - .keyManagerPassword("password") - ); - - wireMockServer.start(); - mockEndPoint(wireMockServer); - - properties.setProperty( - HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, - trustedCert.getAbsolutePath() - ); - - testSinkClientForConnection( - properties, - "https://localhost:", - HTTPS_SERVER_PORT, - perRequestSubmitterFactory - ); - - testSinkClientForConnection( - properties, - "https://localhost:", - HTTPS_SERVER_PORT, - batchRequestSubmitterFactory - ); - } - - @ParameterizedTest - @ValueSource(strings = {"clientPrivateKey.pem", "clientPrivateKey.der"}) - public void testMTlsConnection(String clientPrivateKeyName) { - - File keyStoreFile = new File(SERVER_KEYSTORE_PATH); - File trustStoreFile = new File(SERVER_TRUSTSTORE_PATH); - File serverTrustedCert = new File(CERTS_PATH + "ca.crt"); - - File clientCert = new File(CERTS_PATH + "client.crt"); 
- File clientPrivateKey = new File(CERTS_PATH + clientPrivateKeyName); - - this.wireMockServer = new WireMockServer(options() - .httpDisabled(true) - .httpsPort(HTTPS_SERVER_PORT) - .keystorePath(keyStoreFile.getAbsolutePath()) - .keystorePassword("password") - .keyManagerPassword("password") - .needClientAuth(true) - .trustStorePath(trustStoreFile.getAbsolutePath()) - .trustStorePassword("password") - ); - - wireMockServer.start(); - mockEndPoint(wireMockServer); - - properties.setProperty( - HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, - serverTrustedCert.getAbsolutePath() - ); - properties.setProperty( - HttpConnectorConfigConstants.CLIENT_CERT, - clientCert.getAbsolutePath() - ); - properties.setProperty( - HttpConnectorConfigConstants.CLIENT_PRIVATE_KEY, - clientPrivateKey.getAbsolutePath() - ); - - testSinkClientForConnection( - properties, - "https://localhost:", - HTTPS_SERVER_PORT, - perRequestSubmitterFactory - ); - - testSinkClientForConnection( - properties, - "https://localhost:", - HTTPS_SERVER_PORT, - batchRequestSubmitterFactory - ); - } - - @Test - public void testMTlsConnectionUsingKeyStore() { - String password = "password"; - - String clientKeyStoreName = "client_keyStore.p12"; - String serverKeyStoreName = "serverKeyStore.jks"; - String serverTrustStoreName = "serverTrustStore.jks"; - - File clientKeyStoreFile = new File(CERTS_PATH + clientKeyStoreName); - File serverKeyStoreFile = new File(CERTS_PATH + serverKeyStoreName); - File serverTrustStoreFile = new File(CERTS_PATH + serverTrustStoreName); - File serverTrustedCert = new File(CERTS_PATH + "ca_server_bundle.cert.pem"); - - this.wireMockServer = new WireMockServer(options() - .httpDisabled(true) - .httpsPort(HTTPS_SERVER_PORT) - .keystorePath(serverKeyStoreFile.getAbsolutePath()) - .keystorePassword(password) - .keyManagerPassword(password) - .needClientAuth(true) - .trustStorePath(serverTrustStoreFile.getAbsolutePath()) - .trustStorePassword(password) - ); - - wireMockServer.start(); 
- mockEndPoint(wireMockServer); - - properties.setProperty( - HttpConnectorConfigConstants.KEY_STORE_PASSWORD, - password - ); - properties.setProperty( - HttpConnectorConfigConstants.KEY_STORE_PATH, - clientKeyStoreFile.getAbsolutePath() - ); - properties.setProperty( - HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, - serverTrustedCert.getAbsolutePath() - ); - - testSinkClientForConnection( - properties, - "https://localhost:", - HTTPS_SERVER_PORT, - perRequestSubmitterFactory - ); - - testSinkClientForConnection(properties, - "https://localhost:", - HTTPS_SERVER_PORT, - batchRequestSubmitterFactory - ); - } - - - @ParameterizedTest - @CsvSource(value = { - "invalid.crt, client.crt, clientPrivateKey.pem", - "ca.crt, invalid.crt, clientPrivateKey.pem", - "ca.crt, client.crt, invalid.pem" - }) - public void shouldThrowOnInvalidPath( - String serverCertName, - String clientCertName, - String clientKeyName) { - - File serverTrustedCert = new File(CERTS_PATH + serverCertName); - File clientCert = new File(CERTS_PATH + clientCertName); - File clientPrivateKey = new File(CERTS_PATH + clientKeyName); - - properties.setProperty( - HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, - serverTrustedCert.getAbsolutePath() - ); - properties.setProperty( - HttpConnectorConfigConstants.CLIENT_CERT, - clientCert.getAbsolutePath() - ); - properties.setProperty( - HttpConnectorConfigConstants.CLIENT_PRIVATE_KEY, - clientPrivateKey.getAbsolutePath() - ); - - assertAll(() -> { - assertThrows( - RuntimeException.class, - () -> new JavaNetSinkHttpClient( - properties, - postRequestCallback, - headerPreprocessor, - perRequestSubmitterFactory - ) - ); - assertThrows( - RuntimeException.class, - () -> new JavaNetSinkHttpClient( - properties, - postRequestCallback, - headerPreprocessor, - batchRequestSubmitterFactory - ) - ); - }); - } - - @ParameterizedTest - @ValueSource(strings = { - "user:password", - "Basic dXNlcjpwYXNzd29yZA==" - }) - public void shouldConnectWithBasicAuth(String 
authorizationHeaderValue) { - - wireMockServer = new WireMockServer(SERVER_PORT); - wireMockServer.start(); - mockEndPointWithBasicAuth(wireMockServer); - - properties.setProperty( - HttpConnectorConfigConstants.SINK_HEADER_PREFIX + "Authorization", - authorizationHeaderValue - ); - - testSinkClientForConnection( - properties, - "http://localhost:", - SERVER_PORT, - perRequestSubmitterFactory - ); - - testSinkClientForConnection( - properties, - "http://localhost:", - SERVER_PORT, - batchRequestSubmitterFactory - ); - } - - private void testSinkClientForConnection( - Properties properties, - String endpointUrl, - int httpsServerPort, - RequestSubmitterFactory requestSubmitterFactory) { - - try { - JavaNetSinkHttpClient client = - new JavaNetSinkHttpClient( - properties, - postRequestCallback, - headerPreprocessor, - requestSubmitterFactory); - HttpSinkRequestEntry requestEntry = new HttpSinkRequestEntry("GET", new byte[0]); - SinkHttpClientResponse response = - client.putRequests( - Collections.singletonList(requestEntry), - endpointUrl + httpsServerPort + ENDPOINT - ).get(); - - assertThat(response.getSuccessfulRequests()).isNotEmpty(); - assertThat(response.getFailedRequests()).isEmpty(); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - private void mockEndPoint(WireMockServer wireMockServer) { - wireMockServer.stubFor(any(urlPathEqualTo(ENDPOINT)) - .willReturn( - aResponse() - .withStatus(200) - .withBody("{}")) - ); - } - - private void mockEndPointWithBasicAuth(WireMockServer wireMockServer) { - - wireMockServer.stubFor(any(urlPathEqualTo(ENDPOINT)) - .withBasicAuth("user", "password") - .willReturn( - aResponse() - .withStatus(200) - .withBody("{}")) - ); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/status/ComposeHttpStatusCodeCheckerTest.java b/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/status/ComposeHttpStatusCodeCheckerTest.java deleted file mode 100644 index 
23baaead..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/sink/httpclient/status/ComposeHttpStatusCodeCheckerTest.java +++ /dev/null @@ -1,171 +0,0 @@ -package com.getindata.connectors.http.internal.sink.httpclient.status; - -import java.util.Arrays; -import java.util.List; -import java.util.Properties; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import org.apache.flink.util.StringUtils; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; -import org.junit.jupiter.params.provider.ValueSource; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertAll; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.status.ComposeHttpStatusCodeChecker; -import com.getindata.connectors.http.internal.status.ComposeHttpStatusCodeChecker.ComposeHttpStatusCodeCheckerConfig; - -class ComposeHttpStatusCodeCheckerTest { - - private static final String STRING_CODES = "403, 100,200, 300, , 303 ,200"; - - private static final List CODES = - Arrays.stream(STRING_CODES.split(HttpConnectorConfigConstants.PROP_DELIM)) - .filter(code -> !StringUtils.isNullOrWhitespaceOnly(code)) - .map(String::trim) - .mapToInt(Integer::parseInt) - .boxed() - .collect(Collectors.toList()); - - private ComposeHttpStatusCodeChecker codeChecker; - - @BeforeAll - public static void beforeAll() { - assertThat(CODES).isNotEmpty(); - } - - private static Stream propertiesArguments() { - return Stream.of( - Arguments.of(new Properties()), - Arguments.of(prepareErrorCodeProperties("", "")), - Arguments.of(prepareErrorCodeProperties(" ", " ")), - Arguments.of(prepareErrorCodeProperties(",,,", 
",,,,")) - ); - } - - @ParameterizedTest - @MethodSource("propertiesArguments") - public void shouldPassOnDefault(Properties properties) { - - ComposeHttpStatusCodeCheckerConfig checkerConfig = prepareCheckerConfig(properties); - - codeChecker = new ComposeHttpStatusCodeChecker(checkerConfig); - - assertAll(() -> { - assertThat(codeChecker.isErrorCode(100)).isFalse(); - assertThat(codeChecker.isErrorCode(200)).isFalse(); - assertThat(codeChecker.isErrorCode(500)).isTrue(); - assertThat(codeChecker.isErrorCode(501)).isTrue(); - assertThat(codeChecker.isErrorCode(400)).isTrue(); - assertThat(codeChecker.isErrorCode(404)).isTrue(); - }); - } - - @Test - public void shouldParseWhiteList() { - - Properties properties = new Properties(); - properties.setProperty( - HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODE_WHITE_LIST, - STRING_CODES); - properties.setProperty( - HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST, - "1XX, 2XX, 3XX, 4XX, 5XX" - ); - - ComposeHttpStatusCodeCheckerConfig checkerConfig = prepareCheckerConfig(properties); - - codeChecker = new ComposeHttpStatusCodeChecker(checkerConfig); - - assertAll(() -> { - CODES.forEach(code -> assertThat(codeChecker.isErrorCode(code)).isFalse()); - - assertThat(codeChecker.isErrorCode(301)) - .withFailMessage( - "Not on a white list but matches 3XX range. 
" - + "Should be considered as error code.") - .isTrue(); - }); - } - - @Test - public void shouldParseErrorCodeList() { - - Properties properties = new Properties(); - properties.setProperty( - HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST, - STRING_CODES); - - ComposeHttpStatusCodeCheckerConfig checkerConfig = prepareCheckerConfig(properties); - - codeChecker = new ComposeHttpStatusCodeChecker(checkerConfig); - - assertAll(() -> CODES.forEach(code -> assertThat(codeChecker.isErrorCode(code)).isTrue())); - } - - @Test - public void shouldParseErrorCodeRange() { - - Properties properties = new Properties(); - properties.setProperty( - HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST, "1xx, 2XX "); - - List codes = List.of(100, 110, 200, 220); - - ComposeHttpStatusCodeCheckerConfig checkerConfig = prepareCheckerConfig(properties); - - codeChecker = new ComposeHttpStatusCodeChecker(checkerConfig); - - assertAll(() -> { - codes.forEach(code -> assertThat(codeChecker.isErrorCode(code)).isTrue()); - - assertThat(codeChecker.isErrorCode(303)) - .withFailMessage( - "Out ot Error code type range therefore should be not marked as error code.") - .isFalse(); - }); - } - - @ParameterizedTest - @ValueSource(strings = {"X", "XXX", " X X", "1X1", "XX1", "XX1XX", "XX1 XX"}) - public void shouldThrowOnInvalidCodeRange(String listCode) { - - Properties properties = new Properties(); - properties.setProperty( - HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST, listCode); - - ComposeHttpStatusCodeCheckerConfig checkerConfig = prepareCheckerConfig(properties); - - assertThrows( - Exception.class, - () -> new ComposeHttpStatusCodeChecker(checkerConfig) - ); - } - - private static Properties prepareErrorCodeProperties(String errorCodeList, String whiteList) { - Properties properties = new Properties(); - properties.setProperty( - HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODE_WHITE_LIST, - whiteList - ); - properties.setProperty( - 
HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST, - errorCodeList - ); - return properties; - } - - private ComposeHttpStatusCodeCheckerConfig prepareCheckerConfig(Properties properties) { - return ComposeHttpStatusCodeCheckerConfig.builder() - .properties(properties) - .whiteListPrefix(HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODE_WHITE_LIST) - .errorCodePrefix(HttpConnectorConfigConstants.HTTP_ERROR_SINK_CODES_LIST) - .build(); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceFactoryTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceFactoryTest.java deleted file mode 100644 index 82811004..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceFactoryTest.java +++ /dev/null @@ -1,110 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.util.*; - -import org.apache.flink.table.api.DataTypes; -import org.apache.flink.table.api.TableConfig; -import org.apache.flink.table.api.ValidationException; -import org.apache.flink.table.catalog.Column; -import org.apache.flink.table.catalog.ResolvedSchema; -import org.apache.flink.table.catalog.UniqueConstraint; -import org.apache.flink.table.connector.source.DynamicTableSource; -import org.junit.jupiter.api.Test; -import static org.apache.flink.table.factories.utils.FactoryMocks.createTableSource; -import static org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatExceptionOfType; -import static org.junit.jupiter.api.Assertions.assertFalse; - -public class HttpLookupTableSourceFactoryTest { - - private static final ResolvedSchema SCHEMA = - new ResolvedSchema( - Arrays.asList( - Column.physical("id", DataTypes.STRING().notNull()), - Column.physical("msg", DataTypes.STRING().notNull()), - Column.physical("uuid", DataTypes.STRING().notNull()), - Column.physical("details", DataTypes.ROW( - 
DataTypes.FIELD("isActive", DataTypes.BOOLEAN()), - DataTypes.FIELD("nestedDetails", DataTypes.ROW( - DataTypes.FIELD("balance", DataTypes.STRING()) - ) - ) - ).notNull()) - ), - Collections.emptyList(), - UniqueConstraint.primaryKey("id", List.of("id")) - ); - - @Test - void validateHttpLookupSourceOptions() { - - HttpLookupTableSourceFactory httpLookupTableSourceFactory - = new HttpLookupTableSourceFactory(); - TableConfig tableConfig = new TableConfig(); - httpLookupTableSourceFactory.validateHttpLookupSourceOptions(tableConfig); - tableConfig.set(HttpLookupConnectorOptions - .SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL.key(), "aaa"); - - try { - httpLookupTableSourceFactory.validateHttpLookupSourceOptions(tableConfig); - assertFalse(true, "Expected an error."); - } catch (IllegalArgumentException e) { - // expected - } - // should now work. - tableConfig.set(HttpLookupConnectorOptions - .SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST.key(), "bbb"); - - httpLookupTableSourceFactory.validateHttpLookupSourceOptions(tableConfig); - } - - @Test - void shouldCreateForMandatoryFields() { - Map options = getMandatoryOptions(); - DynamicTableSource source = createTableSource(SCHEMA, options); - assertThat(source).isNotNull(); - assertThat(source).isInstanceOf(HttpLookupTableSource.class); - } - - @Test - void shouldThrowIfMissingUrl() { - Map options = Collections.singletonMap("connector", "rest-lookup"); - assertThatExceptionOfType(ValidationException.class) - .isThrownBy(() -> createTableSource(SCHEMA, options)); - } - - @Test - void shouldAcceptWithUrlArgs() { - Map options = getOptions(Map.of("url-args", "id;msg")); - DynamicTableSource source = createTableSource(SCHEMA, options); - assertThat(source).isNotNull(); - assertThat(source).isInstanceOf(HttpLookupTableSource.class); - } - - @Test - void shouldHandleEmptyUrlArgs() { - Map options = getOptions(Collections.emptyMap()); - DynamicTableSource source = createTableSource(SCHEMA, options); - assertThat(source).isNotNull(); 
- assertThat(source).isInstanceOf(HttpLookupTableSource.class); - } - - private Map getMandatoryOptions() { - return Map.of( - "connector", "rest-lookup", - "url", "http://localhost:8080/service", - "format", "json"); - } - - private Map getOptions(Map optionalOptions) { - if (optionalOptions.isEmpty()) { - return getMandatoryOptions(); - } - - Map allOptions = new HashMap<>(); - allOptions.putAll(getMandatoryOptions()); - allOptions.putAll(optionalOptions); - - return allOptions; - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceITCaseTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceITCaseTest.java deleted file mode 100644 index aa099087..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceITCaseTest.java +++ /dev/null @@ -1,1071 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.io.File; -import java.util.Collection; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.SortedSet; -import java.util.TreeSet; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; - -import com.github.tomakehurst.wiremock.WireMockServer; -import com.github.tomakehurst.wiremock.client.MappingBuilder; -import com.github.tomakehurst.wiremock.core.WireMockConfiguration; -import com.github.tomakehurst.wiremock.matching.StringValuePattern; -import com.github.tomakehurst.wiremock.stubbing.Scenario; -import com.github.tomakehurst.wiremock.stubbing.StubMapping; -import lombok.extern.slf4j.Slf4j; -import org.apache.flink.api.common.RuntimeExecutionMode; -import org.apache.flink.api.common.restartstrategy.RestartStrategies; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.configuration.ExecutionOptions; -import org.apache.flink.streaming.api.CheckpointingMode; -import 
org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.TableResult; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.apache.flink.table.connector.source.lookup.cache.LookupCache; -import org.apache.flink.table.data.GenericRowData; -import org.apache.flink.table.data.binary.BinaryStringData; -import org.apache.flink.table.runtime.functions.table.lookup.LookupCacheManager; -import org.apache.flink.table.test.lookup.cache.LookupCacheAssert; -import org.apache.flink.types.Row; -import org.apache.flink.util.CloseableIterator; -import org.apache.flink.util.StringUtils; -import org.jetbrains.annotations.NotNull; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; -import org.junit.jupiter.params.provider.ValueSource; -import static com.github.tomakehurst.wiremock.client.WireMock.*; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertAll; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertThrows; - -@Slf4j -class HttpLookupTableSourceITCaseTest { - - private static final int SERVER_PORT = 9090; - - private static final int HTTPS_SERVER_PORT = 8443; - - private static final String CERTS_PATH = "src/test/resources/security/certs/"; - - private static final String SERVER_KEYSTORE_PATH = - "src/test/resources/security/certs/serverKeyStore.jks"; - - private static final String SERVER_TRUSTSTORE_PATH = - "src/test/resources/security/certs/serverTrustStore.jks"; - - private static final String ENDPOINT = "/client"; - - /** - * Comparator for Flink SQL result. 
- */ - private static final Comparator ROW_COMPARATOR = (row1, row2) -> { - String row1Id = (String) Objects.requireNonNull(row1.getField("id")); - String row2Id = (String) Objects.requireNonNull(row2.getField("id")); - - return row1Id.compareTo(row2Id); - }; - - private StreamTableEnvironment tEnv; - - private WireMockServer wireMockServer; - - @SuppressWarnings("unchecked") - @BeforeEach - void setup() { - - File keyStoreFile = new File(SERVER_KEYSTORE_PATH); - File trustStoreFile = new File(SERVER_TRUSTSTORE_PATH); - - wireMockServer = new WireMockServer( - WireMockConfiguration.wireMockConfig() - .port(SERVER_PORT) - .httpsPort(HTTPS_SERVER_PORT) - .keystorePath(keyStoreFile.getAbsolutePath()) - .keystorePassword("password") - .keyManagerPassword("password") - .needClientAuth(true) - .trustStorePath(trustStoreFile.getAbsolutePath()) - .trustStorePassword("password") - .extensions(JsonTransform.class) - ); - wireMockServer.start(); - - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setRestartStrategy(RestartStrategies.noRestart()); - Configuration config = new Configuration(); - config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.STREAMING); - env.configure(config, getClass().getClassLoader()); - env.enableCheckpointing(1000, CheckpointingMode.EXACTLY_ONCE); - env.setParallelism(1); // wire mock server has problem with scenario state during parallel execution - - tEnv = StreamTableEnvironment.create(env); - } - - @AfterEach - void tearDown() { - wireMockServer.stop(); - } - - @ParameterizedTest - @ValueSource(strings = {"", "GET", "POST", "PUT"}) - void testHttpLookupJoin(String methodName) throws Exception { - - // GIVEN - if (StringUtils.isNullOrWhitespaceOnly(methodName) || methodName.equalsIgnoreCase("GET")) { - setupServerStub(wireMockServer); - } else { - setUpServerBodyStub( - methodName, - wireMockServer, - List.of(matchingJsonPath("$.id"), matchingJsonPath("$.id2")) - ); - } - - String lookupTable 
= - "CREATE TABLE Customers (" - + "id STRING," - + "id2 STRING," - + "msg STRING," - + "uuid STRING," - + "details ROW<" - + "isActive BOOLEAN," - + "nestedDetails ROW<" - + "balance STRING" - + ">" - + ">" - + ") WITH (" - + "'format' = 'json'," - + "'connector' = 'rest-lookup'," - + ((StringUtils.isNullOrWhitespaceOnly(methodName)) ? - "" : - "'lookup-method' = '" + methodName + "',") - + "'url' = 'http://localhost:9090/client'," - + "'gid.connector.http.source.lookup.header.Content-Type' = 'application/json'," - + "'asyncPolling' = 'true'," - + "'table.exec.async-lookup.buffer-capacity' = '50'," - + "'table.exec.async-lookup.timeout' = '120s'" - + ")"; - - // WHEN - SortedSet rows = testLookupJoin(lookupTable, 4); - - // THEN - assertEnrichedRows(rows); - } - - @Test - void testHttpLookupJoinNoDataFromEndpoint() { - - // GIVEN - setupServerStubEmptyResponse(wireMockServer); - - String lookupTable = - "CREATE TABLE Customers (" - + "id STRING," - + "id2 STRING," - + "msg STRING," - + "uuid STRING," - + "details ROW<" - + "isActive BOOLEAN," - + "nestedDetails ROW<" - + "balance STRING" - + ">" - + ">" - + ") WITH (" - + "'format' = 'json'," - + "'connector' = 'rest-lookup'," - + "'url' = 'http://localhost:9090/client'," - + "'gid.connector.http.source.lookup.header.Content-Type' = 'application/json'," - + "'asyncPolling' = 'true'" - + ")"; - - // WHEN/THEN - assertThrows(TimeoutException.class, () -> testLookupJoin(lookupTable, 4)); - } - - @Test - void testLookupWithRetry() throws Exception { - wireMockServer.stubFor(get(urlPathEqualTo(ENDPOINT)) - .inScenario("retry") - .whenScenarioStateIs(Scenario.STARTED) - .withHeader("Content-Type", equalTo("application/json")) - .withQueryParam("id", matching("[0-9]+")) - .withQueryParam("id2", matching("[0-9]+")) - .willReturn(aResponse().withBody(new byte[0]).withStatus(501)) - .willSetStateTo("temporal_issue_gone") - ); - wireMockServer.stubFor(get(urlPathEqualTo(ENDPOINT)) - .inScenario("retry") - 
.whenScenarioStateIs("temporal_issue_gone") - .withHeader("Content-Type", equalTo("application/json")) - .withQueryParam("id", matching("[0-9]+")) - .withQueryParam("id2", matching("[0-9]+")) - .willReturn(aResponse().withTransformers(JsonTransform.NAME).withStatus(200)) - ); - - var lookupTable = - "CREATE TABLE Customers (" - + "id STRING," - + "id2 STRING," - + "msg STRING," - + "uuid STRING," - + "details ROW<" - + "isActive BOOLEAN," - + "nestedDetails ROW<" - + "balance STRING" - + ">" - + ">" - + ") WITH (" - + "'format' = 'json'," - + "'connector' = 'rest-lookup'," - + "'url' = 'http://localhost:9090/client'," - + "'lookup.max-retries' = '3'," - + "'gid.connector.http.source.lookup.header.Content-Type' = 'application/json'," - + "'gid.connector.http.source.lookup.retry-strategy.type' = 'fixed-delay'," - + "'gid.connector.http.source.lookup.retry-strategy.fixed-delay.delay' = '1ms'," - + "'gid.connector.http.source.lookup.success-codes' = '2XX'," - + "'gid.connector.http.source.lookup.retry-codes' = '501'" - + ")"; - - var result = testLookupJoin(lookupTable, 1); - - assertEquals(1, result.size()); - wireMockServer.verify(2, getRequestedFor(urlPathEqualTo(ENDPOINT))); - } - - @Test - void testLookupIgnoreResponse() throws Exception { - wireMockServer.stubFor(get(urlPathEqualTo(ENDPOINT)) - .inScenario("404_on_first") - .whenScenarioStateIs(Scenario.STARTED) - .withHeader("Content-Type", equalTo("application/json")) - .withQueryParam("id", matching("[0-9]+")) - .withQueryParam("id2", matching("[0-9]+")) - .willReturn(aResponse().withBody(JsonTransform.NAME).withStatus(404)) - .willSetStateTo("second_request") - ); - wireMockServer.stubFor(get(urlPathEqualTo(ENDPOINT)) - .inScenario("404_on_first") - .whenScenarioStateIs("second_request") - .withHeader("Content-Type", equalTo("application/json")) - .withQueryParam("id", matching("[0-9]+")) - .withQueryParam("id2", matching("[0-9]+")) - 
.willReturn(aResponse().withTransformers(JsonTransform.NAME).withStatus(200)) - ); - - var lookupTable = - "CREATE TABLE Customers (" - + "id STRING," - + "id2 STRING," - + "msg STRING," - + "uuid STRING," - + "details ROW<" - + "isActive BOOLEAN," - + "nestedDetails ROW<" - + "balance STRING" - + ">" - + ">" - + ") WITH (" - + "'format' = 'json'," - + "'connector' = 'rest-lookup'," - + "'url' = 'http://localhost:9090/client'," - + "'gid.connector.http.source.lookup.header.Content-Type' = 'application/json'," - + "'gid.connector.http.source.lookup.success-codes' = '2XX,404'," - + "'gid.connector.http.source.lookup.ignored-response-codes' = '404'" - + ")"; - - var result = testLookupJoin(lookupTable, 3); - - assertEquals(2, result.size()); - wireMockServer.verify(3, getRequestedFor(urlPathEqualTo(ENDPOINT))); - } - - @Test - void testHttpsMTlsLookupJoin() throws Exception { - - // GIVEN - File serverTrustedCert = new File(CERTS_PATH + "ca.crt"); - File clientCert = new File(CERTS_PATH + "client.crt"); - File clientPrivateKey = new File(CERTS_PATH + "clientPrivateKey.pem"); - - setupServerStub(wireMockServer); - - String lookupTable = - String.format("CREATE TABLE Customers (" - + "id STRING," - + "id2 STRING," - + "msg STRING," - + "uuid STRING," - + "details ROW<" - + "isActive BOOLEAN," - + "nestedDetails ROW<" - + "balance STRING" - + ">" - + ">" - + ") WITH (" - + "'format' = 'json'," - + "'connector' = 'rest-lookup'," - + "'url' = 'https://localhost:" + HTTPS_SERVER_PORT + "/client'," - + "'gid.connector.http.source.lookup.header.Content-Type' = 'application/json'," - + "'asyncPolling' = 'true'," - + "'gid.connector.http.security.cert.server' = '%s'," - + "'gid.connector.http.security.cert.client' = '%s'," - + "'gid.connector.http.security.key.client' = '%s'" - + ")", - serverTrustedCert.getAbsolutePath(), - clientCert.getAbsolutePath(), - clientPrivateKey.getAbsolutePath() - ); - - // WHEN - SortedSet rows = testLookupJoin(lookupTable, 4); - - // THEN - 
assertEnrichedRows(rows); - } - - @Test - void testLookupJoinProjectionPushDown() throws Exception { - - // GIVEN - setUpServerBodyStub( - "POST", - wireMockServer, - List.of( - matchingJsonPath("$.row.aStringColumn"), - matchingJsonPath("$.row.anIntColumn"), - matchingJsonPath("$.row.aFloatColumn") - ) - ); - - String fields = - "`row` ROW<`aStringColumn` STRING, `anIntColumn` INT, `aFloatColumn` FLOAT>\n"; - - String sourceTable = - "CREATE TABLE Orders (\n" - + " proc_time AS PROCTIME(),\n" - + " id STRING,\n" - + fields - + ") WITH (" - + "'connector' = 'datagen'," - + "'rows-per-second' = '1'," - + "'fields.id.kind' = 'sequence'," - + "'fields.id.start' = '1'," - + "'fields.id.end' = '5'" - + ")"; - - String lookupTable = - "CREATE TABLE Customers (\n" + - " `enrichedInt` INT,\n" + - " `enrichedString` STRING,\n" + - " \n" - + fields - + ") WITH (" - + "'format' = 'json'," - + "'lookup-request.format' = 'json'," - + "'lookup-request.format.json.fail-on-missing-field' = 'true'," - + "'connector' = 'rest-lookup'," - + "'lookup-method' = 'POST'," - + "'url' = 'http://localhost:9090/client'," - + "'gid.connector.http.source.lookup.header.Content-Type' = 'application/json'," - + "'asyncPolling' = 'true'" - + ")"; - - tEnv.executeSql(sourceTable); - tEnv.executeSql(lookupTable); - - // WHEN - // SQL query that performs JOIN on both tables. 
- String joinQuery = - "CREATE TEMPORARY VIEW lookupResult AS " + - "SELECT o.id, o.`row`, c.enrichedInt, c.enrichedString FROM Orders AS o" - + " JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c" - + " ON (\n" - + " o.`row` = c.`row`\n" - + ")"; - - tEnv.executeSql(joinQuery); - - // SQL query that performs a projection pushdown to limit the number of columns - String lastQuery = - "SELECT r.id, r.enrichedInt FROM lookupResult r;"; - - TableResult result = tEnv.executeSql(lastQuery); - result.await(15, TimeUnit.SECONDS); - - // THEN - SortedSet collectedRows = getCollectedRows(result); - - collectedRows.stream().forEach(row -> assertThat(row.getArity()).isEqualTo(2)); - - assertThat(collectedRows.size()).isEqualTo(5); - } - - @Test - void testLookupJoinProjectionPushDownNested() throws Exception { - - // GIVEN - setUpServerBodyStub( - "POST", - wireMockServer, - List.of( - matchingJsonPath("$.row.aStringColumn"), - matchingJsonPath("$.row.anIntColumn"), - matchingJsonPath("$.row.aFloatColumn") - ) - ); - - String fields = - "`row` ROW<`aStringColumn` STRING, `anIntColumn` INT, `aFloatColumn` FLOAT>\n"; - - String sourceTable = - "CREATE TABLE Orders (\n" - + " proc_time AS PROCTIME(),\n" - + " id STRING,\n" - + fields - + ") WITH (" - + "'connector' = 'datagen'," - + "'rows-per-second' = '1'," - + "'fields.id.kind' = 'sequence'," - + "'fields.id.start' = '1'," - + "'fields.id.end' = '5'" - + ")"; - - String lookupTable = - "CREATE TABLE Customers (\n" + - " `enrichedInt` INT,\n" + - " `enrichedString` STRING,\n" + - " \n" - + fields - + ") WITH (" - + "'format' = 'json'," - + "'lookup-request.format' = 'json'," - + "'lookup-request.format.json.fail-on-missing-field' = 'true'," - + "'connector' = 'rest-lookup'," - + "'lookup-method' = 'POST'," - + "'url' = 'http://localhost:9090/client'," - + "'gid.connector.http.source.lookup.header.Content-Type' = 'application/json'," - + "'asyncPolling' = 'true'" - + ")"; - - tEnv.executeSql(sourceTable); - 
tEnv.executeSql(lookupTable); - - // WHEN - // SQL query that performs JOIN on both tables. - String joinQuery = - "CREATE TEMPORARY VIEW lookupResult AS " + - "SELECT o.id, o.`row`, c.enrichedInt, c.enrichedString FROM Orders AS o" - + " JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c" - + " ON (\n" - + " o.`row` = c.`row`\n" - + ")"; - - tEnv.executeSql(joinQuery); - - // SQL query that performs a project pushdown to take a subset of columns with nested value - String lastQuery = - "SELECT r.id, r.enrichedInt, r.`row`.aStringColumn FROM lookupResult r;"; - - TableResult result = tEnv.executeSql(lastQuery); - result.await(15, TimeUnit.SECONDS); - - // THEN - SortedSet collectedRows = getCollectedRows(result); - - collectedRows.stream().forEach(row -> assertThat(row.getArity()).isEqualTo(3)); - - assertThat(collectedRows.size()).isEqualTo(5); - } - - @Test - void testLookupJoinOnRowType() throws Exception { - - // GIVEN - setUpServerBodyStub( - "POST", - wireMockServer, - List.of( - matchingJsonPath("$.row.aStringColumn"), - matchingJsonPath("$.row.anIntColumn"), - matchingJsonPath("$.row.aFloatColumn") - ) - ); - - String fields = - "`row` ROW<`aStringColumn` STRING, `anIntColumn` INT, `aFloatColumn` FLOAT>\n"; - - String sourceTable = - "CREATE TABLE Orders (\n" - + " proc_time AS PROCTIME(),\n" - + " id STRING,\n" - + fields - + ") WITH (" - + "'connector' = 'datagen'," - + "'rows-per-second' = '1'," - + "'fields.id.kind' = 'sequence'," - + "'fields.id.start' = '1'," - + "'fields.id.end' = '5'" - + ")"; - - String lookupTable = - "CREATE TABLE Customers (\n" + - " `enrichedInt` INT,\n" + - " `enrichedString` STRING,\n" + - " \n" - + fields - + ") WITH (" - + "'format' = 'json'," - + "'lookup-request.format' = 'json'," - + "'lookup-request.format.json.fail-on-missing-field' = 'true'," - + "'connector' = 'rest-lookup'," - + "'lookup-method' = 'POST'," - + "'url' = 'http://localhost:9090/client'," - + "'gid.connector.http.source.lookup.header.Content-Type' = 
'application/json'," - + "'asyncPolling' = 'true'" - + ")"; - - tEnv.executeSql(sourceTable); - tEnv.executeSql(lookupTable); - - // WHEN - // SQL query that performs JOIN on both tables. - String joinQuery = - "SELECT o.id, o.`row`, c.enrichedInt, c.enrichedString FROM Orders AS o" - + " JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c" - + " ON (\n" - + " o.`row` = c.`row`\n" - + ")"; - - TableResult result = tEnv.executeSql(joinQuery); - result.await(15, TimeUnit.SECONDS); - - // THEN - SortedSet collectedRows = getCollectedRows(result); - - // TODO add assert on values - assertThat(collectedRows.size()).isEqualTo(5); - } - - @Test - void testLookupJoinOnRowTypeAndRootColumn() throws Exception { - - // GIVEN - setUpServerBodyStub( - "POST", - wireMockServer, - List.of( - matchingJsonPath("$.enrichedString"), - matchingJsonPath("$.row.aStringColumn"), - matchingJsonPath("$.row.anIntColumn"), - matchingJsonPath("$.row.aFloatColumn") - ) - ); - - String fields = - "`row` ROW<`aStringColumn` STRING, `anIntColumn` INT, `aFloatColumn` FLOAT>\n"; - - String sourceTable = - "CREATE TABLE Orders (\n" - + " proc_time AS PROCTIME(),\n" - + " id STRING,\n" - + fields - + ") WITH (" - + "'connector' = 'datagen'," - + "'rows-per-second' = '1'," - + "'fields.id.kind' = 'sequence'," - + "'fields.id.start' = '1'," - + "'fields.id.end' = '5'" - + ")"; - - String lookupTable = - "CREATE TABLE Customers (\n" + - " `enrichedInt` INT,\n" + - " `enrichedString` STRING,\n" + - " \n" - + fields - + ") WITH (" - + "'format' = 'json'," - + "'lookup-request.format' = 'json'," - + "'lookup-request.format.json.fail-on-missing-field' = 'true'," - + "'connector' = 'rest-lookup'," - + "'lookup-method' = 'POST'," - + "'url' = 'http://localhost:9090/client'," - + "'gid.connector.http.source.lookup.header.Content-Type' = 'application/json'," - + "'asyncPolling' = 'true'" - + ")"; - - tEnv.executeSql(sourceTable); - tEnv.executeSql(lookupTable); - - // WHEN - // SQL query that performs JOIN on 
both tables. - String joinQuery = - "SELECT o.id, o.`row`, c.enrichedInt, c.enrichedString FROM Orders AS o" - + " JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c" - + " ON (\n" - + " o.id = c.enrichedString AND\n" - + " o.`row` = c.`row`\n" - + ")"; - - TableResult result = tEnv.executeSql(joinQuery); - result.await(15, TimeUnit.SECONDS); - - // THEN - SortedSet collectedRows = getCollectedRows(result); - - // TODO add assert on values - assertThat(collectedRows.size()).isEqualTo(5); - } - - @Test - void testLookupJoinOnRowWithRowType() throws Exception { - testLookupJoinOnRowWithRowTypeImpl(); - } - - @ParameterizedTest - @CsvSource({ - "user:password, Basic dXNlcjpwYXNzd29yZA==, false", - "Basic dXNlcjpwYXNzd29yZA==, Basic dXNlcjpwYXNzd29yZA==, false", - "abc123, abc123, true", - "Basic dXNlcjpwYXNzd29yZA==, Basic dXNlcjpwYXNzd29yZA==, true", - "Bearer dXNlcjpwYXNzd29yZA==, Bearer dXNlcjpwYXNzd29yZA==, true" - }) - void testLookupWithUseRawAuthHeader( - String authHeaderRawValue, - String expectedAuthHeaderValue, - boolean useRawAuthHeader) throws Exception { - - // Test with gid.connector.http.source.lookup.use-raw-authorization-header set to either - // true or false, and asserting Authorization header is processed as expected, either with - // transformation for Basic Auth, or kept as-is when it is not used for Basic Auth. 
- testLookupJoinOnRowWithRowTypeImpl( - authHeaderRawValue, expectedAuthHeaderValue, useRawAuthHeader); - } - - private void testLookupJoinOnRowWithRowTypeImpl() throws Exception { - testLookupJoinOnRowWithRowTypeImpl(null, null, false); - } - - private void testLookupJoinOnRowWithRowTypeImpl( - String authHeaderRawValue, - String expectedAuthHeaderValue, - boolean useRawAuthHeader) throws Exception { - - // GIVEN - setUpServerBodyStub( - "POST", - wireMockServer, - List.of( - matchingJsonPath("$.nestedRow.aStringColumn"), - matchingJsonPath("$.nestedRow.anIntColumn"), - matchingJsonPath("$.nestedRow.aRow.anotherStringColumn"), - matchingJsonPath("$.nestedRow.aRow.anotherIntColumn") - ), - // For testing the gid.connector.http.source.lookup.use-raw-authorization-header - // configuration parameter: - expectedAuthHeaderValue != null ? "Authorization" : null, - expectedAuthHeaderValue // expected value of extra header - ); - - String fields = - " `nestedRow` ROW<" + - " `aStringColumn` STRING," + - " `anIntColumn` INT," + - " `aRow` ROW<`anotherStringColumn` STRING, `anotherIntColumn` INT>" + - " >\n"; - - String sourceTable = - "CREATE TABLE Orders (\n" - + " proc_time AS PROCTIME(),\n" - + " id STRING,\n" - + fields - + ") WITH (" - + "'connector' = 'datagen'," - + "'rows-per-second' = '1'," - + "'fields.id.kind' = 'sequence'," - + "'fields.id.start' = '1'," - + "'fields.id.end' = '5'" - + ")"; - - String useRawAuthHeaderString = useRawAuthHeader ? "'true'" : "'false'"; - - String lookupTable = - "CREATE TABLE Customers (\n" + - " `enrichedInt` INT,\n" + - " `enrichedString` STRING,\n" + - " \n" - + fields - + ") WITH (" - + "'format' = 'json'," - + "'connector' = 'rest-lookup'," - + "'lookup-method' = 'POST'," - + "'url' = 'http://localhost:9090/client'," - + "'gid.connector.http.source.lookup.header.Content-Type' = 'application/json'," - + (authHeaderRawValue != null ? 
- ("'gid.connector.http.source.lookup.use-raw-authorization-header' = " - + useRawAuthHeaderString + "," - + "'gid.connector.http.source.lookup.header.Authorization' = '" - + authHeaderRawValue + "',") - : "") - + "'asyncPolling' = 'true'" - + ")"; - - tEnv.executeSql(sourceTable); - tEnv.executeSql(lookupTable); - - // SQL query that performs JOIN on both tables. - String joinQuery = - "SELECT o.id, o.`nestedRow`, c.enrichedInt, c.enrichedString FROM Orders AS o" - + " JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c" - + " ON (\n" - + " o.`nestedRow` = c.`nestedRow`\n" - + ")"; - - TableResult result = tEnv.executeSql(joinQuery); - result.await(15, TimeUnit.SECONDS); - - // THEN - SortedSet collectedRows = getCollectedRows(result); - - // TODO add assert on values - assertThat(collectedRows.size()).isEqualTo(5); - } - - @Test - void testNestedLookupJoinWithoutCast() throws Exception { - - // TODO ADD MORE ASSERTS - // GIVEN - setUpServerBodyStub( - "POST", - wireMockServer, - List.of( - matchingJsonPath("$.bool"), - matchingJsonPath("$.tinyint"), - matchingJsonPath("$.smallint"), - matchingJsonPath("$.map"), - matchingJsonPath("$.doubles"), - matchingJsonPath("$.multiSet"), - matchingJsonPath("$.time"), - matchingJsonPath("$.map2map") - ) - ); - - String fields = - " `bool` BOOLEAN,\n" + - " `tinyint` TINYINT,\n" + - " `smallint` SMALLINT,\n" + - " `idInt` INT,\n" + - " `bigint` BIGINT,\n" + - " `float` FLOAT,\n" + - " `name` STRING,\n" + - " `decimal` DECIMAL(9, 6),\n" + - " `doubles` ARRAY,\n" + - " `date` DATE,\n" + - " `time` TIME(0),\n" + - " `timestamp3` TIMESTAMP(3),\n" + - " `timestamp9` TIMESTAMP(9),\n" + - " `timestampWithLocalZone` TIMESTAMP_LTZ(9),\n" + - " `map` MAP,\n" + - " `multiSet` MULTISET,\n" + - " `map2map` MAP>,\n" + - " `row` ROW<`aStringColumn` STRING, `anIntColumn` INT, `aFloatColumn` FLOAT>,\n" + - " `nestedRow` ROW<" + - " `aStringColumn` STRING," + - " `anIntColumn` INT," + - " `aRow` ROW<`anotherStringColumn` STRING, 
`anotherIntColumn` INT>" + - " >,\n" + - " `aTable` ARRAY>\n"; - - String sourceTable = - "CREATE TABLE Orders (\n" - + "id STRING," - + " proc_time AS PROCTIME(),\n" - + fields - + ") WITH (" - + "'connector' = 'datagen'," - + "'rows-per-second' = '1'," - + "'fields.id.kind' = 'sequence'," - + "'fields.id.start' = '1'," - + "'fields.id.end' = '5'" - + ")"; - - String lookupTable = - "CREATE TABLE Customers (\n" + - " `enrichedInt` INT,\n" + - " `enrichedString` STRING,\n" + - " \n" - + fields - + ") WITH (" - + "'format' = 'json'," - + "'lookup-request.format' = 'json'," - + "'lookup-request.format.json.fail-on-missing-field' = 'true'," - + "'lookup-method' = 'POST'," - + "'connector' = 'rest-lookup'," - + "'url' = 'http://localhost:9090/client'," - + "'gid.connector.http.source.lookup.header.Content-Type' = 'application/json'," - + "'asyncPolling' = 'true'" - + ")"; - - tEnv.executeSql(sourceTable); - tEnv.executeSql(lookupTable); - - // SQL query that performs JOIN on both tables. - String joinQuery = - "SELECT o.id, o.name, c.enrichedInt, c.enrichedString FROM Orders AS o" - + " JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c" - + " ON (\n" - + " o.`bool` = c.`bool` AND\n" - + " o.`tinyint` = c.`tinyint` AND\n" - + " o.`smallint` = c.`smallint` AND\n" - + " o.idInt = c.idInt AND\n" - + " o.`bigint` = c.`bigint` AND\n" - + " o.`float` = c.`float` AND\n" - + " o.name = c.name AND\n" - + " o.`decimal` = c.`decimal` AND\n" - + " o.doubles = c.doubles AND\n" - + " o.`date` = c.`date` AND\n" - + " o.`time` = c.`time` AND\n" - + " o.timestamp3 = c.timestamp3 AND\n" - + " o.timestamp9 = c.timestamp9 AND\n" - + " o.timestampWithLocalZone = c.timestampWithLocalZone AND\n" - + " o.`map` = c.`map` AND\n" - + " o.`multiSet` = c.`multiSet` AND\n" - + " o.map2map = c.map2map AND\n" - + " o.`row` = c.`row` AND\n" - + " o.nestedRow = c.nestedRow AND\n" - + " o.aTable = c.aTable\n" - + ")"; - - TableResult result = tEnv.executeSql(joinQuery); - result.await(15, 
TimeUnit.SECONDS); - - // THEN - SortedSet collectedRows = getCollectedRows(result); - - // TODO add assert on values - assertThat(collectedRows.size()).isEqualTo(5); - } - - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void testHttpLookupJoinWithCache(boolean isAsync) throws Exception { - // GIVEN - LookupCacheManager.keepCacheOnRelease(true); - - setupServerStub(wireMockServer); - - String lookupTable = - "CREATE TABLE Customers (" - + "id STRING," - + "id2 STRING," - + "msg STRING," - + "uuid STRING," - + "details ROW<" - + "isActive BOOLEAN," - + "nestedDetails ROW<" - + "balance STRING" - + ">" - + ">" - + ") WITH (" - + "'format' = 'json'," - + "'connector' = 'rest-lookup'," - + "'lookup-method' = 'GET'," - + "'url' = 'http://localhost:9090/client'," - + "'gid.connector.http.source.lookup.header.Content-Type' = 'application/json'," - + (isAsync ? "'asyncPolling' = 'true'," : "") - + "'lookup.cache' = 'partial'," - + "'lookup.partial-cache.max-rows' = '100'" - + ")"; - - // WHEN - SortedSet rows = testLookupJoin(lookupTable, 4); - - // THEN - try { - assertEnrichedRows(rows); - - LookupCacheAssert.assertThat(getCache()).hasSize(4) - .containsKey(GenericRowData.of( - BinaryStringData.fromString("3"), BinaryStringData.fromString("4"))) - .containsKey(GenericRowData.of( - BinaryStringData.fromString("4"), BinaryStringData.fromString("5"))) - .containsKey(GenericRowData.of( - BinaryStringData.fromString("1"), BinaryStringData.fromString("2"))) - .containsKey(GenericRowData.of( - BinaryStringData.fromString("2"), BinaryStringData.fromString("3"))); - } finally { - LookupCacheManager.getInstance().checkAllReleased(); - LookupCacheManager.getInstance().clear(); - LookupCacheManager.keepCacheOnRelease(false); - } - } - - private LookupCache getCache() { - Map managedCaches = - LookupCacheManager.getInstance().getManagedCaches(); - assertThat(managedCaches).as("There should be only 1 shared cache registered").hasSize(1); - return 
managedCaches.get(managedCaches.keySet().iterator().next()).getCache(); - } - - private @NotNull SortedSet testLookupJoin(String lookupTable, int maxRows) throws Exception { - - String sourceTable = - "CREATE TABLE Orders (" - + "id STRING," - + " id2 STRING," - + " proc_time AS PROCTIME()" - + ") WITH (" - + "'connector' = 'datagen'," - + "'rows-per-second' = '1'," - + "'fields.id.kind' = 'sequence'," - + "'fields.id.start' = '1'," - + "'fields.id.end' = '" + maxRows + "'," - + "'fields.id2.kind' = 'sequence'," - + "'fields.id2.start' = '2'," - + "'fields.id2.end' = '" + (maxRows + 1) + "'" - + ")"; - - tEnv.executeSql(sourceTable); - tEnv.executeSql(lookupTable); - - // WHEN - // SQL query that performs JOIN on both tables. - String joinQuery = - "SELECT o.id, o.id2, c.msg, c.uuid, c.isActive, c.balance FROM Orders AS o " - + "JOIN Customers FOR SYSTEM_TIME AS OF o.proc_time AS c " - + "ON o.id = c.id " - + "AND o.id2 = c.id2"; - - TableResult result = tEnv.executeSql(joinQuery); - result.await(15, TimeUnit.SECONDS); - - // THEN - return getCollectedRows(result); - } - - private void assertEnrichedRows(Collection collectedRows) { - // validate every row and its column. - assertAll(() -> { - assertThat(collectedRows.size()).isEqualTo(4); - int intElement = 0; - for (Row row : collectedRows) { - intElement++; - assertThat(row.getArity()).isEqualTo(6); - - // "id" nad "id2" columns should be different for every row. - assertThat(row.getField("id")).isEqualTo(String.valueOf(intElement)); - assertThat(row.getField("id2")).isEqualTo(String.valueOf(intElement + 1)); - - assertThat(row.getField("uuid")) - .isEqualTo("fbb68a46-80a9-46da-9d40-314b5287079c"); - assertThat(row.getField("isActive")).isEqualTo(true); - assertThat(row.getField("balance")).isEqualTo("$1,729.34"); - } - } - ); - } - - @NotNull - private SortedSet getCollectedRows(TableResult result) throws Exception { - - // We want to sort the result by "id" to make validation easier. 
- SortedSet collectedRows = new TreeSet<>(ROW_COMPARATOR); - try (CloseableIterator joinResult = result.collect()) { - while (joinResult.hasNext()) { - Row row = joinResult.next(); - log.info("Collected row " + row); - collectedRows.add(row); - } - } - return collectedRows; - } - - private void setupServerStub(WireMockServer wireMockServer) { - StubMapping stubMapping = wireMockServer.stubFor( - get(urlPathEqualTo(ENDPOINT)) - .withHeader("Content-Type", equalTo("application/json")) - .withQueryParam("id", matching("[0-9]+")) - .withQueryParam("id2", matching("[0-9]+")) - .willReturn( - aResponse() - .withTransformers(JsonTransform.NAME) - ) - ); - - wireMockServer.addStubMapping(stubMapping); - } - - private void setupServerStubEmptyResponse(WireMockServer wireMockServer) { - StubMapping stubMapping = wireMockServer.stubFor( - get(urlPathEqualTo(ENDPOINT)) - .withHeader("Content-Type", equalTo("application/json")) - .withQueryParam("id", matching("[0-9]+")) - .withQueryParam("id2", matching("[0-9]+")) - .willReturn( - aResponse() - .withBody(new byte[0]) - ) - ); - - wireMockServer.addStubMapping(stubMapping); - } - - private void setUpServerBodyStub( - String methodName, - WireMockServer wireMockServer, - List matchingJsonPaths) { - setUpServerBodyStub(methodName, wireMockServer, matchingJsonPaths, null, null); - } - - private void setUpServerBodyStub( - String methodName, - WireMockServer wireMockServer, - List matchingJsonPaths, - String extraHeader, - String expectedExtraHeaderValue) { - - MappingBuilder methodStub = (methodName.equalsIgnoreCase("PUT") ? - put(urlEqualTo(ENDPOINT)) : - post(urlEqualTo(ENDPOINT)) - ); - - methodStub - .withHeader("Content-Type", equalTo("application/json")); - - if (extraHeader != null && expectedExtraHeaderValue != null) { - methodStub - .withHeader(extraHeader, equalTo(expectedExtraHeaderValue)); - } - - // TODO think about writing custom matcher that will check node values against regexp - // or real values. 
Currently we check only if JsonPath exists. Also, we should check if - // there are no extra fields. - for (StringValuePattern pattern : matchingJsonPaths) { - methodStub.withRequestBody(pattern); - } - - methodStub - .willReturn( - aResponse() - .withTransformers(JsonTransform.NAME)); - - StubMapping stubMapping = wireMockServer.stubFor(methodStub); - - wireMockServer.addStubMapping(stubMapping); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceTest.java deleted file mode 100644 index 86e81e32..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/HttpLookupTableSourceTest.java +++ /dev/null @@ -1,253 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.flink.configuration.ConfigOptions; -import org.apache.flink.metrics.groups.CacheMetricGroup; -import org.apache.flink.table.api.DataTypes; -import org.apache.flink.table.catalog.Column; -import org.apache.flink.table.catalog.ResolvedSchema; -import org.apache.flink.table.catalog.UniqueConstraint; -import org.apache.flink.table.connector.source.LookupTableSource; -import org.apache.flink.table.connector.source.lookup.AsyncLookupFunctionProvider; -import org.apache.flink.table.connector.source.lookup.LookupFunctionProvider; -import org.apache.flink.table.connector.source.lookup.PartialCachingAsyncLookupProvider; -import org.apache.flink.table.connector.source.lookup.PartialCachingLookupProvider; -import org.apache.flink.table.connector.source.lookup.cache.LookupCache; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.runtime.connector.source.LookupRuntimeProviderContext; -import 
org.apache.flink.table.types.DataType; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.MethodSource; -import org.testcontainers.shaded.com.google.common.collect.ImmutableList; -import static org.apache.flink.table.factories.utils.FactoryMocks.createTableSource; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSourceFactory.row; - - -class HttpLookupTableSourceTest { - - public static final DataType PHYSICAL_ROW_DATA_TYPE = - row(List.of(DataTypes.FIELD("id", DataTypes.STRING().notNull()))); - - private static final ResolvedSchema SCHEMA = - new ResolvedSchema( - Arrays.asList( - Column.physical("id", DataTypes.STRING().notNull()), - Column.physical("msg", DataTypes.STRING().notNull()), - Column.physical("uuid", DataTypes.STRING().notNull()), - Column.physical("details", DataTypes.ROW( - DataTypes.FIELD("isActive", DataTypes.BOOLEAN()), - DataTypes.FIELD("nestedDetails", DataTypes.ROW( - DataTypes.FIELD("balance", DataTypes.STRING()) - ) - ) - ).notNull()) - ), - Collections.emptyList(), - UniqueConstraint.primaryKey("id", List.of("id")) - ); - - // lookupKey index {{0}} means first column. 
- private final int[][] lookupKey = {{0}}; - - @BeforeEach - public void setUp() { - - LookupRow expectedLookupRow = new LookupRow(); - expectedLookupRow.addLookupEntry( - new RowDataSingleValueLookupSchemaEntry( - "id", - RowData.createFieldGetter(DataTypes.STRING().notNull().getLogicalType(), 0) - ) - ); - expectedLookupRow.setLookupPhysicalRowDataType(PHYSICAL_ROW_DATA_TYPE); - } - - @Test - @SuppressWarnings("unchecked") - void shouldCreateTableSourceWithParams() { - HttpLookupTableSource tableSource = - (HttpLookupTableSource) createTableSource(SCHEMA, getOptions()); - - LookupTableSource.LookupRuntimeProvider lookupProvider = - tableSource.getLookupRuntimeProvider(new LookupRuntimeProviderContext(lookupKey)); - HttpTableLookupFunction tableFunction = (HttpTableLookupFunction) - ((LookupFunctionProvider) lookupProvider).createLookupFunction(); - - LookupRow actualLookupRow = tableFunction.getLookupRow(); - assertThat(actualLookupRow).isNotNull(); - assertThat(actualLookupRow.getLookupEntries()).isNotEmpty(); - assertThat(actualLookupRow.getLookupPhysicalRowDataType()) - .isEqualTo(PHYSICAL_ROW_DATA_TYPE); - - HttpLookupConfig actualLookupConfig = tableFunction.getOptions(); - assertThat(actualLookupConfig).isNotNull(); - assertThat( - actualLookupConfig.getReadableConfig().get( - ConfigOptions.key("connector").stringType().noDefaultValue()) - ) - .withFailMessage( - "Readable config probably was not passed from Table Factory or it is empty.") - .isNotNull(); - } - - @Test - @SuppressWarnings("unchecked") - void shouldCreateAsyncTableSourceWithParams() { - Map options = getOptionsWithAsync(); - - HttpLookupTableSource tableSource = - (HttpLookupTableSource) createTableSource(SCHEMA, options); - - AsyncLookupFunctionProvider lookupProvider = - (AsyncLookupFunctionProvider) - tableSource.getLookupRuntimeProvider( - new LookupRuntimeProviderContext(lookupKey)); - - AsyncHttpTableLookupFunction tableFunction = - (AsyncHttpTableLookupFunction) 
lookupProvider.createAsyncLookupFunction(); - - LookupRow actualLookupRow = tableFunction.getLookupRow(); - assertThat(actualLookupRow).isNotNull(); - assertThat(actualLookupRow.getLookupEntries()).isNotEmpty(); - assertThat(actualLookupRow.getLookupPhysicalRowDataType()) - .isEqualTo(PHYSICAL_ROW_DATA_TYPE); - - HttpLookupConfig actualLookupConfig = tableFunction.getOptions(); - assertThat(actualLookupConfig).isNotNull(); - assertThat(actualLookupConfig.isUseAsync()).isTrue(); - assertThat( - actualLookupConfig.getReadableConfig().get(HttpLookupConnectorOptions.ASYNC_POLLING) - ) - .withFailMessage( - "Readable config probably was not passed" + - " from Table Factory or it is empty.") - .isTrue(); - } - - @ParameterizedTest - @MethodSource("configProvider") - void testGetLookupRuntimeProvider(TestSpec testSpec) { - LookupCache cache = new LookupCache() { - @Override - public void open(CacheMetricGroup cacheMetricGroup) { - - } - - @Nullable - @Override - public Collection getIfPresent(RowData rowData) { - return null; - } - - @Override - public Collection put(RowData rowData, Collection collection) { - return null; - } - - @Override - public void invalidate(RowData rowData) { - - } - - @Override - public long size() { - return 0; - } - - @Override - public void close() throws Exception { - - } - }; - - HttpLookupConfig options = HttpLookupConfig.builder() - .useAsync(testSpec.isAsync) - .build(); - LookupTableSource.LookupRuntimeProvider lookupRuntimeProvider = - getLookupRuntimeProvider(testSpec.hasCache ? 
cache : null, options); - assertTrue(testSpec.expected.isInstance(lookupRuntimeProvider)); - - } - - private static class TestSpec { - - boolean hasCache; - boolean isAsync; - - Class expected; - - private TestSpec(boolean hasCache, - boolean isAsync, - Class expected) { - this.hasCache = hasCache; - this.isAsync = isAsync; - this.expected = expected; - } - - @Override - public String toString() { - return "TestSpec{" - + "hasCache=" - + hasCache - + ", isAsync=" - + isAsync - + ", expected=" - + expected - + '}'; - } - } - - static Collection configProvider() { - return ImmutableList.builder() - .addAll(getTestSpecs()) - .build(); - } - - @NotNull - private static ImmutableList getTestSpecs() { - return ImmutableList.of( - new TestSpec(false, false, LookupFunctionProvider.class), - new TestSpec(true, false, PartialCachingLookupProvider.class), - new TestSpec(false, true, AsyncLookupFunctionProvider.class), - new TestSpec(true, true, PartialCachingAsyncLookupProvider.class) - ); - } - - private static LookupTableSource.LookupRuntimeProvider - getLookupRuntimeProvider(LookupCache cache, HttpLookupConfig options) { - HttpLookupTableSource tableSource = new HttpLookupTableSource( - null, options, - null, null, cache); - int[][] lookupKeys = {{1, 2}}; - LookupTableSource.LookupContext lookupContext = - new LookupRuntimeProviderContext(lookupKeys); - return tableSource.getLookupRuntimeProvider(null, null, null); - } - - private Map getOptionsWithAsync() { - Map options = getOptions(); - options = new HashMap<>(options); - options.put("asyncPolling", "true"); - return options; - } - - private Map getOptions() { - return Map.of( - "connector", "rest-lookup", - "url", "http://localhost:8080/service", - "format", "json"); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientFactoryTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientFactoryTest.java deleted file mode 
100644 index eaa8ab16..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientFactoryTest.java +++ /dev/null @@ -1,30 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import org.apache.flink.api.common.serialization.DeserializationSchema; -import org.apache.flink.table.data.RowData; -import org.apache.flink.util.ConfigurationException; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.mock; - -class JavaNetHttpPollingClientFactoryTest { - - private JavaNetHttpPollingClientFactory factory; - - @BeforeEach - public void setUp() { - factory = new JavaNetHttpPollingClientFactory(mock(GetRequestFactory.class)); - } - - @Test - @SuppressWarnings("unchecked") - void shouldCreateClient() throws ConfigurationException { - - assertThat( - factory.createPollClient( - HttpLookupConfig.builder().build(), - (DeserializationSchema) mock(DeserializationSchema.class)) - ).isInstanceOf(JavaNetHttpPollingClient.class); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientHttpsConnectionTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientHttpsConnectionTest.java deleted file mode 100644 index 69fb7fd5..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientHttpsConnectionTest.java +++ /dev/null @@ -1,356 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.io.File; -import java.time.Duration; -import java.util.Collection; -import java.util.List; -import java.util.Optional; -import java.util.Properties; - -import com.github.tomakehurst.wiremock.WireMockServer; -import org.apache.flink.api.common.serialization.DeserializationSchema; -import org.apache.flink.configuration.Configuration; -import 
org.apache.flink.formats.json.JsonFormatFactory; -import org.apache.flink.table.api.DataTypes; -import org.apache.flink.table.connector.source.DynamicTableSource; -import org.apache.flink.table.data.GenericRowData; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.data.StringData; -import org.apache.flink.table.factories.DynamicTableFactory.Context; -import org.apache.flink.table.runtime.connector.source.LookupRuntimeProviderContext; -import org.apache.flink.table.types.DataType; -import org.apache.flink.util.ConfigurationException; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; -import org.junit.jupiter.params.provider.ValueSource; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; -import static com.github.tomakehurst.wiremock.client.WireMock.get; -import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; -import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.HttpsConnectionTestBase; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.table.lookup.querycreators.GenericGetQueryCreator; -import com.getindata.connectors.http.internal.utils.HttpHeaderUtils; -import com.getindata.connectors.http.internal.utils.SerializationSchemaUtils; -import static com.getindata.connectors.http.TestHelper.readTestFile; -import static 
com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSourceFactory.row; - -@ExtendWith(MockitoExtension.class) -public class JavaNetHttpPollingClientHttpsConnectionTest extends HttpsConnectionTestBase { - - private static final String SAMPLES_FOLDER = "/http/"; - - private static final String ENDPOINT = "/service"; - - @Mock - private Context dynamicTableFactoryContext; - - private DynamicTableSource.Context dynamicTableSourceContext; - - private JavaNetHttpPollingClientFactory pollingClientFactory; - - private RowData lookupRowData; - - private DataType lookupPhysicalDataType; - - @BeforeEach - public void setUp() { - super.setUp(); - int[][] lookupKey = {{0, 1}}; - this.dynamicTableSourceContext = new LookupRuntimeProviderContext(lookupKey); - - this.lookupRowData = GenericRowData.of( - StringData.fromString("1"), - StringData.fromString("2") - ); - - this.lookupPhysicalDataType = row(List.of( - DataTypes.FIELD("id", DataTypes.STRING()), - DataTypes.FIELD("uuid", DataTypes.STRING()) - ) - ); - } - - @AfterEach - public void tearDown() { - super.tearDown(); - } - - @Test - public void testHttpsConnectionWithSelfSignedCert() throws ConfigurationException { - - File keyStoreFile = new File(SERVER_KEYSTORE_PATH); - - wireMockServer = new WireMockServer(options() - .httpsPort(HTTPS_SERVER_PORT) - .httpDisabled(true) - .keystorePath(keyStoreFile.getAbsolutePath()) - .keystorePassword("password") - .keyManagerPassword("password") - ); - - wireMockServer.start(); - setupServerStub(); - properties.setProperty(HttpConnectorConfigConstants.ALLOW_SELF_SIGNED, "true"); - - setupAndTestConnection(); - } - - @ParameterizedTest - @ValueSource(strings = {"ca.crt", "server.crt"}) - public void testHttpsConnectionWithAddedCerts(String certName) throws ConfigurationException { - - File keyStoreFile = new File(SERVER_KEYSTORE_PATH); - File trustedCert = new File(CERTS_PATH + certName); - - wireMockServer = new WireMockServer(options() - .httpsPort(HTTPS_SERVER_PORT) - 
.httpDisabled(true) - .keystorePath(keyStoreFile.getAbsolutePath()) - .keystorePassword("password") - .keyManagerPassword("password") - ); - - wireMockServer.start(); - setupServerStub(); - properties.setProperty( - HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, - trustedCert.getAbsolutePath() - ); - setupAndTestConnection(); - } - - @ParameterizedTest - @ValueSource(strings = {"clientPrivateKey.pem", "clientPrivateKey.der"}) - public void testMTlsConnection(String clientPrivateKeyName) throws ConfigurationException { - - File keyStoreFile = new File(SERVER_KEYSTORE_PATH); - File trustStoreFile = new File(SERVER_TRUSTSTORE_PATH); - File serverTrustedCert = new File(CERTS_PATH + "ca.crt"); - - File clientCert = new File(CERTS_PATH + "client.crt"); - File clientPrivateKey = new File(CERTS_PATH + clientPrivateKeyName); - - this.wireMockServer = new WireMockServer(options() - .httpDisabled(true) - .httpsPort(HTTPS_SERVER_PORT) - .keystorePath(keyStoreFile.getAbsolutePath()) - .keystorePassword("password") - .keyManagerPassword("password") - .needClientAuth(true) - .trustStorePath(trustStoreFile.getAbsolutePath()) - .trustStorePassword("password") - ); - - wireMockServer.start(); - setupServerStub(); - properties.setProperty( - HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, - serverTrustedCert.getAbsolutePath() - ); - properties.setProperty( - HttpConnectorConfigConstants.CLIENT_CERT, - clientCert.getAbsolutePath() - ); - properties.setProperty( - HttpConnectorConfigConstants.CLIENT_PRIVATE_KEY, - clientPrivateKey.getAbsolutePath() - ); - setupAndTestConnection(); - } - - @Test - public void testMTlsConnectionUsingKeyStore() throws ConfigurationException { - String password = "password"; - - String clientKeyStoreName = "client_keyStore.p12"; - String serverKeyStoreName = "serverKeyStore.jks"; - String serverTrustStoreName = "serverTrustStore.jks"; - - File clientKeyStoreFile = new File(CERTS_PATH + clientKeyStoreName); - File serverKeyStoreFile = new 
File(CERTS_PATH + serverKeyStoreName); - File serverTrustStoreFile = new File(CERTS_PATH + serverTrustStoreName); - File serverTrustedCert = new File(CERTS_PATH + "ca_server_bundle.cert.pem"); - - this.wireMockServer = new WireMockServer(options() - .httpDisabled(true) - .httpsPort(HTTPS_SERVER_PORT) - .keystorePath(serverKeyStoreFile.getAbsolutePath()) - .keystorePassword("password") - .keyManagerPassword("password") - .needClientAuth(true) - .trustStorePath(serverTrustStoreFile.getAbsolutePath()) - .trustStorePassword("password") - ); - - wireMockServer.start(); - setupServerStub(); - properties.setProperty( - HttpConnectorConfigConstants.KEY_STORE_PASSWORD, - password - ); - properties.setProperty( - HttpConnectorConfigConstants.KEY_STORE_PATH, - clientKeyStoreFile.getAbsolutePath() - ); - properties.setProperty( - HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, - serverTrustedCert.getAbsolutePath() - ); - setupAndTestConnection(); - } - - private void setupAndTestConnection() throws ConfigurationException { - // test with basic auth - setupAndTestConnectionWithAuth( - HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor()); - // test with OIDC auth - setupAndTestConnectionWithAuth( - HttpHeaderUtils.createOIDCAuthorizationHeaderPreprocessor( - "http://abc", - "aaa", - Optional.of(Duration.ofSeconds(5)) - ) - ); - } - - private void setupAndTestConnectionWithAuth(HeaderPreprocessor headerPreprocessor) throws ConfigurationException { - setUpPollingClientFactory(wireMockServer.baseUrl(), - headerPreprocessor); - testPollingClientConnection(); - } - - @ParameterizedTest - @CsvSource(value = { - "invalid.crt, client.crt, clientPrivateKey.pem", - "ca.crt, invalid.crt, clientPrivateKey.pem", - "ca.crt, client.crt, invalid.pem" - }) - public void shouldThrowOnInvalidPath( - String serverCertName, - String clientCertName, - String clientKeyName) { - - File serverTrustedCert = new File(CERTS_PATH + serverCertName); - File clientCert = new File(CERTS_PATH + 
clientCertName); - File clientPrivateKey = new File(CERTS_PATH + clientKeyName); - - properties.setProperty( - HttpConnectorConfigConstants.SERVER_TRUSTED_CERT, - serverTrustedCert.getAbsolutePath() - ); - properties.setProperty( - HttpConnectorConfigConstants.CLIENT_CERT, - clientCert.getAbsolutePath() - ); - properties.setProperty( - HttpConnectorConfigConstants.CLIENT_PRIVATE_KEY, - clientPrivateKey.getAbsolutePath() - ); - - assertThrows(RuntimeException.class, () -> setUpPollingClient(properties)); - } - - private void testPollingClientConnection() throws ConfigurationException { - JavaNetHttpPollingClient pollingClient = setUpPollingClient(properties); - Collection result = pollingClient.pull(lookupRowData); - - assertResult(result); - } - - private JavaNetHttpPollingClient setUpPollingClient(Properties properties) throws ConfigurationException { - - HttpLookupConfig lookupConfig = HttpLookupConfig.builder() - .url("https://localhost:" + HTTPS_SERVER_PORT + ENDPOINT) - .properties(properties) - .httpPostRequestCallback(new Slf4JHttpLookupPostRequestCallback()) - .build(); - - DataType physicalDataType = DataTypes.ROW( - DataTypes.FIELD("id", DataTypes.STRING()), - DataTypes.FIELD("msg", DataTypes.STRING()), - DataTypes.FIELD("uuid", DataTypes.STRING()), - DataTypes.FIELD("details", DataTypes.ROW( - DataTypes.FIELD("isActive", DataTypes.BOOLEAN()), - DataTypes.FIELD("nestedDetails", DataTypes.ROW( - DataTypes.FIELD("balance", DataTypes.STRING()) - )) - )) - ); - - DeserializationSchema schemaDecoder = - new JsonFormatFactory() - .createDecodingFormat(dynamicTableFactoryContext, new Configuration()) - .createRuntimeDecoder(dynamicTableSourceContext, physicalDataType); - - try { - schemaDecoder.open( - SerializationSchemaUtils.createDeserializationInitContext( - JavaNetHttpPollingClientConnectionTest.class)); - } catch (Exception e) { - throw new RuntimeException("Unable to open schema decoder: " + e.getMessage(), e); - } - - return 
pollingClientFactory.createPollClient(lookupConfig, schemaDecoder); - } - - private void setupServerStub() { - wireMockServer.stubFor( - get(urlEqualTo("/service?id=1&uuid=2")) - .willReturn( - aResponse() - .withStatus(200) - .withBody(readTestFile(SAMPLES_FOLDER + "HttpResult.json")))); - } - - private void setUpPollingClientFactory(String baseUrl, HeaderPreprocessor headerPreprocessor) { - - LookupRow lookupRow = new LookupRow() - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry("id", - RowData.createFieldGetter( - DataTypes.STRING().getLogicalType(), - 0))) - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry("uuid", - RowData.createFieldGetter( - DataTypes.STRING().getLogicalType(), - 1)) - ); - lookupRow.setLookupPhysicalRowDataType(lookupPhysicalDataType); - - GetRequestFactory requestFactory = new GetRequestFactory( - new GenericGetQueryCreator(lookupRow), - headerPreprocessor, - HttpLookupConfig.builder() - .url(baseUrl + ENDPOINT) - .build() - ); - this.pollingClientFactory = new JavaNetHttpPollingClientFactory(requestFactory); - } - - private void assertResult(Collection results) { - assertThat(results).hasSize(1); - RowData result = results.iterator().next(); - assertThat(result.getArity()).isEqualTo(4); - assertThat(result.getString(1) - .toString()).isEqualTo("Returned HTTP message for parameter PARAM, COUNTER"); - - RowData detailsRow = result.getRow(3, 2); - assertThat(detailsRow.getBoolean(0)).isEqualTo(true); - - RowData nestedDetailsRow = detailsRow.getRow(1, 1); - assertThat(nestedDetailsRow.getString(0).toString()).isEqualTo("$1,729.34"); - } - -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientTest.java deleted file mode 100644 index 78490aad..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientTest.java +++ /dev/null 
@@ -1,227 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.net.URI; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.util.*; - -import org.apache.flink.api.common.serialization.DeserializationSchema; -import org.apache.flink.api.common.serialization.SerializationSchema; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.formats.json.JsonFormatFactory; -import org.apache.flink.table.api.DataTypes; -import org.apache.flink.table.data.GenericRowData; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.data.StringData; -import org.apache.flink.table.factories.DynamicTableFactory; -import org.apache.flink.table.types.DataType; -import org.apache.flink.util.ConfigurationException; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import static org.assertj.core.api.Assertions.assertThat; - -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants; -import com.getindata.connectors.http.internal.table.lookup.querycreators.GenericGetQueryCreator; -import com.getindata.connectors.http.internal.table.lookup.querycreators.GenericJsonQueryCreator; -import com.getindata.connectors.http.internal.utils.HttpHeaderUtils; -import static com.getindata.connectors.http.TestHelper.assertPropertyArray; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSourceFactory.row; - -@ExtendWith(MockitoExtension.class) -public class JavaNetHttpPollingClientTest { - - @Mock - private HttpClient httpClient; - - @Mock - private DeserializationSchema decoder; - - @Mock - private LookupRow lookupRow; - - @Mock - private DynamicTableFactory.Context dynamicTableFactoryContext; - private HeaderPreprocessor 
headerPreprocessor; - - private HttpLookupConfig options; - - private static final String BASE_URL = "http://localhost.com"; - - @BeforeEach - public void setUp() { - this.headerPreprocessor = HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor(); - this.options = HttpLookupConfig.builder().url(BASE_URL).build(); - } - - @Test - public void shouldBuildClientWithoutHeaders() throws ConfigurationException { - - JavaNetHttpPollingClient client = new JavaNetHttpPollingClient( - httpClient, - decoder, - options, - new GetRequestFactory( - new GenericGetQueryCreator(lookupRow), - headerPreprocessor, - options - ) - ); - - assertThat( - ((GetRequestFactory) client.getRequestFactory()).getHeadersAndValues()) - .isEmpty(); - } - - @Test - public void shouldBuildGetClientUri() throws ConfigurationException { - // GIVEN - JavaNetHttpPollingClient client = new JavaNetHttpPollingClient( - httpClient, - decoder, - options, - new GetRequestFactory( - new GenericGetQueryCreator(lookupRow), - headerPreprocessor, - options - ) - ); - - DataType lookupPhysicalDataType = row(List.of( - DataTypes.FIELD("id", DataTypes.STRING()), - DataTypes.FIELD("uuid", DataTypes.STRING()) - )); - - RowData lookupRowData = GenericRowData.of( - StringData.fromString("1"), - StringData.fromString("2") - ); - - LookupRow lookupRow = new LookupRow() - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry("id", - RowData.createFieldGetter( - DataTypes.STRING().getLogicalType(), - 0))) - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry("uuid", - RowData.createFieldGetter( - DataTypes.STRING().getLogicalType(), - 1)) - ); - lookupRow.setLookupPhysicalRowDataType(lookupPhysicalDataType); - - GenericGetQueryCreator queryCreator = new GenericGetQueryCreator(lookupRow); - LookupQueryInfo lookupQueryInfo = queryCreator.createLookupQuery(lookupRowData); - - // WHEN - URI uri = ((GetRequestFactory) client.getRequestFactory()).constructGetUri(lookupQueryInfo); - - // THEN - 
assertThat(uri.toString()).isEqualTo(BASE_URL + "?id=1&uuid=2"); - } - - @Test - public void shouldBuildBodyBasedClientUri() { - // GIVEN - DataType lookupPhysicalDataType = row(List.of( - DataTypes.FIELD("id", DataTypes.STRING()), - DataTypes.FIELD("uuid", DataTypes.STRING()) - )); - - LookupRow lookupRow = new LookupRow() - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry("id", - RowData.createFieldGetter( - DataTypes.STRING().getLogicalType(), - 0))) - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry("uuid", - RowData.createFieldGetter( - DataTypes.STRING().getLogicalType(), - 1)) - ); - - lookupRow.setLookupPhysicalRowDataType(lookupPhysicalDataType); - - SerializationSchema jsonSerializer = - new JsonFormatFactory() - .createEncodingFormat(dynamicTableFactoryContext, new Configuration()) - .createRuntimeEncoder(null, lookupPhysicalDataType); - - BodyBasedRequestFactory requestFactory = new BodyBasedRequestFactory( - "POST", - new GenericJsonQueryCreator(jsonSerializer), - HttpHeaderUtils.createBasicAuthorizationHeaderPreprocessor(), - HttpLookupConfig.builder() - .url(BASE_URL) - .build() - ); - - Map urlBodyBasedQueryParameters = new LinkedHashMap<>(); - urlBodyBasedQueryParameters.put("key1", "value1"); - urlBodyBasedQueryParameters.put("key2", "value2"); - - LookupQueryInfo lookupQueryInfo = new LookupQueryInfo("{}", - urlBodyBasedQueryParameters, null); - - // WHEN - HttpRequest httpRequest = requestFactory.setUpRequestMethod(lookupQueryInfo).build(); - - // THEN - assertThat(httpRequest.uri().toString()).isEqualTo(BASE_URL + "?key1=value1&key2=value2"); - } - - @Test - public void shouldBuildClientWithHeaders() throws ConfigurationException { - - // GIVEN - Properties properties = new Properties(); - properties.setProperty("property", "val1"); - properties.setProperty("my.property", "val2"); - properties.setProperty( - HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_PREFIX + "Origin", - "https://developer.mozilla.org"); - - 
properties.setProperty( - HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_PREFIX + "Cache-Control", - "no-cache, no-store, max-age=0, must-revalidate" - ); - properties.setProperty( - HttpConnectorConfigConstants.LOOKUP_SOURCE_HEADER_PREFIX - + "Access-Control-Allow-Origin", "*" - ); - - // WHEN - HttpLookupConfig lookupConfig = HttpLookupConfig.builder() - .properties(properties) - .build(); - - JavaNetHttpPollingClient client = new JavaNetHttpPollingClient( - httpClient, - decoder, - lookupConfig, - new GetRequestFactory( - new GenericGetQueryCreator(lookupRow), - headerPreprocessor, - lookupConfig - ) - ); - - String[] headersAndValues = - ((GetRequestFactory) client.getRequestFactory()).getHeadersAndValues(); - assertThat(headersAndValues).hasSize(6); - - // THEN - // assert that we have property followed by its value. - assertPropertyArray(headersAndValues, "Origin", "https://developer.mozilla.org"); - assertPropertyArray( - headersAndValues, - "Cache-Control", "no-cache, no-store, max-age=0, must-revalidate" - ); - assertPropertyArray(headersAndValues, "Access-Control-Allow-Origin", "*"); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientWithWireTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientWithWireTest.java deleted file mode 100644 index ae4997ab..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/JavaNetHttpPollingClientWithWireTest.java +++ /dev/null @@ -1,163 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.io.File; -import java.net.URI; -import java.net.http.HttpClient; -import java.net.http.HttpRequest; -import java.time.Duration; - -import com.github.tomakehurst.wiremock.WireMockServer; -import com.github.tomakehurst.wiremock.core.WireMockConfiguration; -import org.apache.flink.api.common.RuntimeExecutionMode; -import 
org.apache.flink.api.common.restartstrategy.RestartStrategies; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.configuration.ExecutionOptions; -import org.apache.flink.streaming.api.CheckpointingMode; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.util.ConfigurationException; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; -import static com.github.tomakehurst.wiremock.client.WireMock.equalTo; -import static com.github.tomakehurst.wiremock.client.WireMock.post; -import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.mock; - -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import com.getindata.connectors.http.internal.utils.HttpHeaderUtils; -import static com.getindata.connectors.http.TestHelper.readTestFile; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST; - -public class JavaNetHttpPollingClientWithWireTest { - private static final String BASE_URL = "http://localhost.com"; - - private static final String SAMPLES_FOLDER = "/auth/"; - private static final int SERVER_PORT = 9090; - - private static final int HTTPS_SERVER_PORT = 8443; - - private static final String SERVER_KEYSTORE_PATH = - "src/test/resources/security/certs/serverKeyStore.jks"; - - private static final String SERVER_TRUSTSTORE_PATH = - 
"src/test/resources/security/certs/serverTrustStore.jks"; - - private static final String ENDPOINT = "/auth"; - private static final String BEARER_REQUEST = "Bearer Dummy"; - - private WireMockServer wireMockServer; - - @SuppressWarnings("unchecked") - @BeforeEach - public void setup() { - - File keyStoreFile = new File(SERVER_KEYSTORE_PATH); - File trustStoreFile = new File(SERVER_TRUSTSTORE_PATH); - - wireMockServer = new WireMockServer( - WireMockConfiguration.wireMockConfig() - .port(SERVER_PORT) - .httpsPort(HTTPS_SERVER_PORT) - .keystorePath(keyStoreFile.getAbsolutePath()) - .keystorePassword("password") - .keyManagerPassword("password") - .needClientAuth(true) - .trustStorePath(trustStoreFile.getAbsolutePath()) - .trustStorePassword("password") - .extensions(JsonTransform.class) - ); - wireMockServer.start(); - - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setRestartStrategy(RestartStrategies.noRestart()); - Configuration config = new Configuration(); - config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.STREAMING); - env.configure(config, getClass().getClassLoader()); - env.enableCheckpointing(1000, CheckpointingMode.EXACTLY_ONCE); - } - - @AfterEach - public void tearDown() { - wireMockServer.stop(); - } - - - @Test - public void shouldUpdateHttpRequestIfRequiredGet() throws ConfigurationException { - HttpRequest httpRequest = HttpRequest.newBuilder() - .GET() - .uri(URI.create(BASE_URL)) - .timeout(Duration.ofSeconds(1)) - .setHeader("Origin", "*") - .setHeader("X-Content-Type-Options", "nosniff") - .setHeader("Content-Type", "application/json") - .build(); - shouldUpdateHttpRequestIfRequired(httpRequest); - } - - @Test - public void shouldUpdateHttpRequestIfRequiredPut() throws ConfigurationException { - HttpRequest httpRequest = HttpRequest.newBuilder() - .PUT(HttpRequest.BodyPublishers.ofString("foo")) - .uri(URI.create(BASE_URL)) - .timeout(Duration.ofSeconds(1)) - .setHeader("Origin", "*") - 
.setHeader("X-Content-Type-Options", "nosniff") - .setHeader("Content-Type", "application/json") - .build(); - shouldUpdateHttpRequestIfRequired(httpRequest); - } - - private void shouldUpdateHttpRequestIfRequired(HttpRequest httpRequest) throws ConfigurationException { - setUpServerBodyStub(); - JavaNetHttpPollingClient client = new JavaNetHttpPollingClient(mock(HttpClient.class), - null, - HttpLookupConfig.builder().url(BASE_URL).build(), - null); - LookupQueryInfo lookupQueryInfo = null; - HttpLookupSourceRequestEntry request = - new HttpLookupSourceRequestEntry(httpRequest, lookupQueryInfo); - - Configuration configuration = new Configuration(); - HeaderPreprocessor oidcHeaderPreProcessor = - HttpHeaderUtils.createOIDCHeaderPreprocessor(configuration); - HttpRequest newHttpRequest = client.updateHttpRequestIfRequired(request, - oidcHeaderPreProcessor); - assertThat(httpRequest).isEqualTo(newHttpRequest); - configuration.setString(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL.key(), "http://localhost:9090/auth"); - configuration.setString(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST, BEARER_REQUEST); - configuration.set(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION, - Duration.ofSeconds(1L)); - client = new JavaNetHttpPollingClient(mock(HttpClient.class), - null, - HttpLookupConfig.builder().url(BASE_URL).readableConfig(configuration).build(), - null); - oidcHeaderPreProcessor = - HttpHeaderUtils.createOIDCHeaderPreprocessor(configuration); - // change oidcHeaderPreProcessor to use the mock http client for the authentication flow - newHttpRequest = client.updateHttpRequestIfRequired(request, - oidcHeaderPreProcessor); - assertThat(httpRequest).isNotEqualTo(newHttpRequest); - assertThat(httpRequest.headers().map().keySet().size()).isEqualTo(3); - assertThat(newHttpRequest.headers().map().keySet().size()).isEqualTo(4); - assertThat(httpRequest.headers().map().get("Content-Type")) - .isEqualTo(newHttpRequest.headers().map().get("Content-Type")); - } - - private void 
setUpServerBodyStub() { - wireMockServer.stubFor( - post(urlEqualTo(ENDPOINT)) - .withHeader("Content-Type", equalTo("application/x-www-form-urlencoded")) - .withRequestBody(equalTo(BEARER_REQUEST)) - .willReturn( - aResponse() - .withStatus(200) - .withBody(readTestFile(SAMPLES_FOLDER + "AuthResult.json")) - ) - ); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/JsonTransform.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/JsonTransform.java deleted file mode 100644 index 6daf5031..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/JsonTransform.java +++ /dev/null @@ -1,108 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.util.concurrent.atomic.AtomicInteger; - -import com.github.tomakehurst.wiremock.common.FileSource; -import com.github.tomakehurst.wiremock.extension.Parameters; -import com.github.tomakehurst.wiremock.extension.ResponseTransformer; -import com.github.tomakehurst.wiremock.http.Request; -import com.github.tomakehurst.wiremock.http.Response; - -/** - * Wiremock Extension that prepares HTTP REST endpoint response body. This extension is stateful, - * every next response will have id == counter and id2 == counter + 1 value in its response, where - * counter is incremented for every subsequent request. 
- */ -public class JsonTransform extends ResponseTransformer { - - public static final String NAME = "JsonTransform"; - - private static final String RESULT_JSON = - "{\n" - + "\t\"id\": \"&COUNTER&\",\n" - + "\t\"id2\": \"&COUNTER_2&\",\n" - + "\t\"uuid\": \"fbb68a46-80a9-46da-9d40-314b5287079c\",\n" - + "\t\"picture\": \"http://placehold.it/32x32\",\n" - + "\t\"msg\": \"&PARAM&, cnt: &COUNTER&\",\n" - + "\t\"age\": 30,\n" - + "\t\"eyeColor\": \"green\",\n" - + "\t\"name\": \"Marva Fischer\",\n" - + "\t\"gender\": \"female\",\n" - + "\t\"company\": \"SILODYNE\",\n" - + "\t\"email\": \"marvafischer@silodyne.com\",\n" - + "\t\"phone\": \"+1 (990) 562-2120\",\n" - + "\t\"address\": \"601 Auburn Place, Bynum, New York, 7057\",\n" - + "\t\"about\": \"Proident Lorem et duis nisi tempor elit occaecat laboris" - + " dolore magna Lorem consequat. Deserunt velit minim nisi consectetur duis " - + "amet labore cupidatat. Pariatur sunt occaecat qui reprehenderit ipsum ex culpa " - + "ullamco ex duis adipisicing commodo sunt. Ad cupidatat magna ad in officia " - + "irure aute duis culpa et. Magna esse adipisicing consequat occaecat. 
Excepteur amet " - + "dolore occaecat sit officia dolore elit in cupidatat non anim.\\r\\n\",\n" - + "\t\"registered\": \"2020-07-11T11:13:32 -02:00\",\n" - + "\t\"latitude\": -35.237843,\n" - + "\t\"longitude\": 60.386104,\n" - + "\t\"tags\": [\n" - + "\t\t\"officia\",\n" - + "\t\t\"eiusmod\",\n" - + "\t\t\"labore\",\n" - + "\t\t\"ex\",\n" - + "\t\t\"aliqua\",\n" - + "\t\t\"consectetur\",\n" - + "\t\t\"excepteur\"\n" - + "\t],\n" - + "\t\"friends\": [\n" - + "\t\t{\n" - + "\t\t\t\"id\": 0,\n" - + "\t\t\t\"name\": \"Kemp Newman\"\n" - + "\t\t},\n" - + "\t\t{\n" - + "\t\t\t\"id\": 1,\n" - + "\t\t\t\"name\": \"Sears Blackburn\"\n" - + "\t\t},\n" - + "\t\t{\n" - + "\t\t\t\"id\": 2,\n" - + "\t\t\t\"name\": \"Lula Rogers\"\n" - + "\t\t}\n" - + "\t],\n" - + "\t\"details\": {\n" - + "\t\t\"isActive\": true,\n" - + "\t\t\"nestedDetails\": {\n" - + "\t\t\t\"index\": 0,\n" - + "\t\t\t\"guid\": \"d81fc542-6b49-4d59-8fb9-d57430d4871d\",\n" - + "\t\t\t\"balance\": \"$1,729.34\"\n" - + "\t\t}\n" - + "\t},\n" - + "\t\"greeting\": \"Hello, Marva Fischer! 
You have 7 unread messages.\",\n" - + "\t\"favoriteFruit\": \"banana\"\n" - + "}"; - private final AtomicInteger counter = new AtomicInteger(0); - - @Override - public Response transform( - Request request, Response response, FileSource files, Parameters parameters) { - int cnt = counter.getAndIncrement(); - - return Response.response() - .body(generateResponse(request.getUrl(), cnt)) - .status(response.getStatus()) - .statusMessage(response.getStatusMessage()) - .build(); - } - - @Override - public String getName() { - return NAME; - } - - private String generateResponse(String param, int counter) { - return RESULT_JSON - .replaceAll("&PARAM&", param) - .replaceAll("&COUNTER&", String.valueOf(counter)) - .replaceAll("&COUNTER_2&", String.valueOf(counter + 1)); - } - - @Override - public boolean applyGlobally() { - return false; - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/RowDataSingleValueLookupSchemaEntryTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/RowDataSingleValueLookupSchemaEntryTest.java deleted file mode 100644 index a30737d3..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/RowDataSingleValueLookupSchemaEntryTest.java +++ /dev/null @@ -1,28 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.util.List; - -import org.apache.flink.table.api.DataTypes; -import org.apache.flink.table.data.GenericRowData; -import org.apache.flink.table.data.RowData; -import org.junit.jupiter.api.Test; -import static org.assertj.core.api.Assertions.assertThat; - -import com.getindata.connectors.http.LookupArg; - -class RowDataSingleValueLookupSchemaEntryTest { - - // TODO Convert this to parametrized test and check all Flink types (Int, String etc). 
- @Test - public void shouldConvertFromSingleValue() { - - RowDataSingleValueLookupSchemaEntry entry = new RowDataSingleValueLookupSchemaEntry( - "col1", - RowData.createFieldGetter(DataTypes.BOOLEAN().getLogicalType(), 0) - ); - - List lookupArgs = entry.convertToLookupArg(GenericRowData.of(true)); - - assertThat(lookupArgs).containsExactly(new LookupArg("col1", "true")); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/RowTypeLookupSchemaEntryTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/RowTypeLookupSchemaEntryTest.java deleted file mode 100644 index b13712da..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/RowTypeLookupSchemaEntryTest.java +++ /dev/null @@ -1,144 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import java.util.List; - -import org.apache.flink.table.api.DataTypes; -import org.apache.flink.table.data.GenericRowData; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.data.StringData; -import org.junit.jupiter.api.Test; -import static org.assertj.core.api.Assertions.assertThat; - -import com.getindata.connectors.http.LookupArg; - -class RowTypeLookupSchemaEntryTest { - - @Test - public void testEmptyRow() { - // GIVEN - RowTypeLookupSchemaEntry lookupSchemaEntry = new RowTypeLookupSchemaEntry( - "aRow", - RowData.createFieldGetter( - DataTypes.ROW( - DataTypes.FIELD("col1", DataTypes.STRING())) - .getLogicalType(), - 0)) - .addLookupEntry(new RowDataSingleValueLookupSchemaEntry( - "col1", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 0)) - ); - - GenericRowData rowData = new GenericRowData(1); - rowData.setField(0, null); - - // WHEN - List lookupArgs = lookupSchemaEntry.convertToLookupArg(rowData); - - // THEN - assertThat(lookupArgs).isEmpty(); - } - - @Test - public void testRowWithMultipleSingleValues() { - - // GIVEN - RowTypeLookupSchemaEntry lookupSchemaEntry = new 
RowTypeLookupSchemaEntry( - "aRow", - RowData.createFieldGetter( - DataTypes.ROW( - DataTypes.FIELD("col1", DataTypes.STRING()), - DataTypes.FIELD("col2", DataTypes.STRING()), - DataTypes.FIELD("col3", DataTypes.STRING())) - .getLogicalType(), - 0)) - .addLookupEntry(new RowDataSingleValueLookupSchemaEntry( - "col1", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 0)) - ) - .addLookupEntry(new RowDataSingleValueLookupSchemaEntry( - "col2", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 1)) - ) - .addLookupEntry(new RowDataSingleValueLookupSchemaEntry( - "col3", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 2)) - ); - - GenericRowData rowData = - GenericRowData.of( - GenericRowData.of( - StringData.fromString("val1"), - StringData.fromString("val2"), - StringData.fromString("val3") - ) - ); - - // WHEN - List lookupArgs = lookupSchemaEntry.convertToLookupArg(rowData); - - // THEN - assertThat(lookupArgs).containsExactly( - new LookupArg("col1", "val1"), - new LookupArg("col2", "val2"), - new LookupArg("col3", "val3") - ); - } - - @Test - public void testRowWithNestedRowValues() { - - // GIVEN - RowTypeLookupSchemaEntry nestedRowLookupSchemaEntry = new RowTypeLookupSchemaEntry( - "aRow", - RowData.createFieldGetter( - DataTypes.FIELD("nestedRow", DataTypes.ROW( - DataTypes.FIELD("col1", DataTypes.STRING()), - DataTypes.FIELD("col2", DataTypes.STRING()) - ) - ).getDataType().getLogicalType(), - 0 - )) - .addLookupEntry(new RowDataSingleValueLookupSchemaEntry( - "col1", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 0)) - ) - .addLookupEntry(new RowDataSingleValueLookupSchemaEntry( - "col2", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 1)) - ); - - RowTypeLookupSchemaEntry rootSchemaEntry = new RowTypeLookupSchemaEntry( - "aRow", - RowData.createFieldGetter( - DataTypes.ROW( - DataTypes.ROW( - DataTypes.FIELD("nestedRow", DataTypes.ROW( - DataTypes.FIELD("col1", 
DataTypes.STRING()), - DataTypes.FIELD("col2", DataTypes.STRING()) - )) - ), - DataTypes.FIELD("col3", DataTypes.STRING()).getDataType() - ) - .getLogicalType(), - 0)).addLookupEntry(nestedRowLookupSchemaEntry); - - GenericRowData rowData = - GenericRowData.of( - GenericRowData.of( - GenericRowData.of( - StringData.fromString("val1"), - StringData.fromString("val2") - ) - ) - ); - - // WHEN - List lookupArgs = rootSchemaEntry.convertToLookupArg(rowData); - - // THEN - assertThat(lookupArgs).containsExactly( - new LookupArg("col1", "val1"), - new LookupArg("col2", "val2") - ); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/TableSourceHelperTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/TableSourceHelperTest.java deleted file mode 100644 index 186ceb12..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/TableSourceHelperTest.java +++ /dev/null @@ -1,34 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup; - -import org.apache.flink.table.types.DataType; -import org.apache.flink.table.types.logical.LogicalType; -import org.apache.flink.table.types.logical.LogicalTypeRoot; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; -import static org.assertj.core.api.Assertions.assertThat; -import static org.mockito.Mockito.when; - -@ExtendWith(MockitoExtension.class) -class TableSourceHelperTest { - - @Mock - private DataType dataType; - - @Mock - private LogicalType logicalType; - - @BeforeEach - public void setUp() { - when(dataType.getLogicalType()).thenReturn(logicalType); - } - - @Test - void testNotComposite() { - when(logicalType.getTypeRoot()).thenReturn(LogicalTypeRoot.BIGINT); - - assertThat(TableSourceHelper.getFieldNames(dataType.getLogicalType())).isEmpty(); - } -} diff --git 
a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ElasticSearchLiteQueryCreatorTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ElasticSearchLiteQueryCreatorTest.java deleted file mode 100644 index 9331ef5c..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/ElasticSearchLiteQueryCreatorTest.java +++ /dev/null @@ -1,139 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.math.BigDecimal; -import java.util.List; - -import org.apache.flink.table.api.DataTypes; -import org.apache.flink.table.data.DecimalData; -import org.apache.flink.table.data.GenericRowData; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.data.StringData; -import org.apache.flink.table.types.DataType; -import org.junit.jupiter.api.Test; -import static org.assertj.core.api.Assertions.assertThat; - -import com.getindata.connectors.http.internal.table.lookup.LookupRow; -import com.getindata.connectors.http.internal.table.lookup.RowDataSingleValueLookupSchemaEntry; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSourceFactory.row; - -public class ElasticSearchLiteQueryCreatorTest { - - - @Test - public void testWithEmptyLookupResult() { - - // GIVEN - LookupRow lookupRow = new LookupRow(); - lookupRow.setLookupPhysicalRowDataType(DataTypes.STRING()); - - GenericRowData lookupDataRow = GenericRowData.of(StringData.fromString("val1")); - - // WHEN - var queryCreator = new ElasticSearchLiteQueryCreator(lookupRow); - var createdQuery = queryCreator.createLookupQuery(lookupDataRow); - - // THEN - assertThat(createdQuery.getLookupQuery()).isEqualTo(""); - assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); - } - - @Test - public void testQueryCreationForSingleQueryStringParam() { - - // GIVEN - LookupRow lookupRow = new LookupRow() - .addLookupEntry( - new 
RowDataSingleValueLookupSchemaEntry("key1", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 0)) - ); - lookupRow.setLookupPhysicalRowDataType(DataTypes.STRING()); - - GenericRowData lookupDataRow = GenericRowData.of(StringData.fromString("val1")); - - // WHEN - var queryCreator = new ElasticSearchLiteQueryCreator(lookupRow); - var createdQuery = queryCreator.createLookupQuery(lookupDataRow); - - // THEN - assertThat(createdQuery.getLookupQuery()).isEqualTo("q=key1:%22val1%22"); - assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); - } - - @Test - public void testQueryCreationForSingleQueryIntParam() { - - // GIVEN - BigDecimal decimalValue = BigDecimal.valueOf(10); - DataType decimalValueType = DataTypes.DECIMAL( - decimalValue.precision(), - decimalValue.scale() - ); - - LookupRow lookupRow = new LookupRow() - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry("key1", - RowData.createFieldGetter( - decimalValueType.getLogicalType(), - 0) - ) - ); - lookupRow.setLookupPhysicalRowDataType(decimalValueType); - - GenericRowData lookupDataRow = GenericRowData.of( - DecimalData.fromBigDecimal(decimalValue, decimalValue.precision(), - decimalValue.scale())); - - // WHEN - var queryCreator = new ElasticSearchLiteQueryCreator(lookupRow); - var createdQuery = queryCreator.createLookupQuery(lookupDataRow); - - // THEN - assertThat(createdQuery.getLookupQuery()).isEqualTo("q=key1:%2210%22"); - assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); - } - - @Test - public void testGenericGetQueryCreationForMultipleQueryParam() { - - // GIVEN - LookupRow lookupRow = new LookupRow() - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry( - "key1", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 0) - )) - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry( - "key2", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 1) - )) - .addLookupEntry( - new 
RowDataSingleValueLookupSchemaEntry( - "key3", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 2) - )); - - lookupRow.setLookupPhysicalRowDataType( - row(List.of( - DataTypes.FIELD("key1", DataTypes.STRING()), - DataTypes.FIELD("key2", DataTypes.STRING()), - DataTypes.FIELD("key3", DataTypes.STRING()) - ))); - - GenericRowData lookupDataRow = GenericRowData.of( - StringData.fromString("val1"), - StringData.fromString("val2"), - StringData.fromString("3") - ); - - // WHEN - var queryCreator = new ElasticSearchLiteQueryCreator(lookupRow); - var createdQuery = queryCreator.createLookupQuery(lookupDataRow); - - // THEN - assertThat(createdQuery.getLookupQuery()) - .isEqualTo("q=key1:%22val1%22%20AND%20key2:%22val2%22%20AND%20key3:%223%22"); - assertThat(createdQuery.getBodyBasedUrlQueryParameters()) - .isEmpty(); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreatorTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreatorTest.java deleted file mode 100644 index b12fb9f5..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericGetQueryCreatorTest.java +++ /dev/null @@ -1,181 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.math.BigDecimal; -import java.util.List; - -import org.apache.flink.table.api.DataTypes; -import org.apache.flink.table.data.DecimalData; -import org.apache.flink.table.data.GenericRowData; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.data.StringData; -import org.apache.flink.table.types.DataType; -import org.junit.jupiter.api.Test; -import static org.assertj.core.api.Assertions.assertThat; - -import com.getindata.connectors.http.internal.table.lookup.LookupRow; -import com.getindata.connectors.http.internal.table.lookup.RowDataSingleValueLookupSchemaEntry; -import 
com.getindata.connectors.http.internal.table.lookup.RowTypeLookupSchemaEntry; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSourceFactory.row; - -public class GenericGetQueryCreatorTest { - - @Test - public void testQueryCreationForSingleQueryStringParam() { - - // GIVEN - LookupRow lookupRow = new LookupRow() - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry("key1", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 0)) - ); - lookupRow.setLookupPhysicalRowDataType(DataTypes.STRING()); - - GenericRowData lookupDataRow = GenericRowData.of(StringData.fromString("val1")); - - // WHEN - var queryCreator = new GenericGetQueryCreator(lookupRow); - var createdQuery = queryCreator.createLookupQuery(lookupDataRow); - - // THEN - assertThat(createdQuery.getLookupQuery()).isEqualTo("key1=val1"); - assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); - } - - @Test - public void testQueryCreationForSingleQueryIntParam() { - - // GIVEN - BigDecimal decimalValue = BigDecimal.valueOf(10); - DataType decimalValueType = DataTypes.DECIMAL( - decimalValue.precision(), - decimalValue.scale() - ); - - LookupRow lookupRow = new LookupRow() - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry("key1", - RowData.createFieldGetter( - decimalValueType.getLogicalType(), - 0) - ) - ); - lookupRow.setLookupPhysicalRowDataType(decimalValueType); - - GenericRowData lookupDataRow = GenericRowData.of( - DecimalData.fromBigDecimal(decimalValue, decimalValue.precision(), - decimalValue.scale())); - - // WHEN - var queryCreator = new GenericGetQueryCreator(lookupRow); - var createdQuery = queryCreator.createLookupQuery(lookupDataRow); - - // THEN - assertThat(createdQuery.getLookupQuery()).isEqualTo("key1=10"); - assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); - } - - @Test - public void testQueryCreationForMultipleQueryParam() { - - // GIVEN - LookupRow lookupRow = new LookupRow() - 
.addLookupEntry( - new RowDataSingleValueLookupSchemaEntry( - "key1", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 0) - )) - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry( - "key2", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 1) - )) - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry( - "key3", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 2) - )); - - lookupRow.setLookupPhysicalRowDataType( - row(List.of( - DataTypes.FIELD("key1", DataTypes.STRING()), - DataTypes.FIELD("key2", DataTypes.STRING()), - DataTypes.FIELD("key3", DataTypes.STRING()) - ))); - - GenericRowData lookupDataRow = GenericRowData.of( - StringData.fromString("val1"), - StringData.fromString("val2"), - StringData.fromString("3") - ); - - // WHEN - var queryCreator = new GenericGetQueryCreator(lookupRow); - var createdQuery = queryCreator.createLookupQuery(lookupDataRow); - - // THEN - assertThat(createdQuery.getLookupQuery()).isEqualTo("key1=val1&key2=val2&key3=3"); - assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); - } - - @Test - public void testQueryCreationForRowType() { - - // GIVEN - LookupRow lookupRow = new LookupRow() - .addLookupEntry(new RowDataSingleValueLookupSchemaEntry( - "col1", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 0) - )) - .addLookupEntry( - new RowTypeLookupSchemaEntry( - "aRow", - RowData.createFieldGetter( - DataTypes.FIELD("aRow", DataTypes.ROW( - DataTypes.FIELD("col2", DataTypes.STRING()), - DataTypes.FIELD("col3", DataTypes.STRING()) - ) - ).getDataType().getLogicalType(), - 1 - )) - .addLookupEntry(new RowDataSingleValueLookupSchemaEntry( - "col2", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 0)) - ) - .addLookupEntry(new RowDataSingleValueLookupSchemaEntry( - "col3", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 1)) - ) - ); - - // ROW> - lookupRow.setLookupPhysicalRowDataType( - row(List.of( - 
DataTypes.FIELD("col1", DataTypes.STRING()), - DataTypes.FIELD( - "aRow", - DataTypes.ROW( - DataTypes.FIELD("col2", DataTypes.STRING()), - DataTypes.FIELD("col3", DataTypes.STRING()) - ) - ) - ) - ) - ); - - GenericRowData lookupDataRow = GenericRowData.of( - StringData.fromString("val1"), - GenericRowData.of( - StringData.fromString("val2"), - StringData.fromString("val3") - ) - ); - - // WHEN - var queryCreator = new GenericGetQueryCreator(lookupRow); - var createdQuery = queryCreator.createLookupQuery(lookupDataRow); - - // THEN - assertThat(createdQuery.getLookupQuery()).isEqualTo("col1=val1&col2=val2&col3=val3"); - assertThat(createdQuery.getBodyBasedUrlQueryParameters()).isEmpty(); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactoryTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactoryTest.java deleted file mode 100644 index 83fa3826..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonAndUrlQueryCreatorFactoryTest.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * © Copyright IBM Corp. 
2025 - */ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.util.List; - -import org.apache.flink.configuration.Configuration; -import org.apache.flink.table.api.DataTypes; -import org.apache.flink.table.catalog.*; -import org.apache.flink.table.data.GenericRowData; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.data.StringData; -import org.apache.flink.table.factories.DynamicTableFactory; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertThrows; - -import com.getindata.connectors.http.LookupQueryCreator; -import com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions; -import com.getindata.connectors.http.internal.table.lookup.LookupQueryInfo; -import com.getindata.connectors.http.internal.table.lookup.LookupRow; -import com.getindata.connectors.http.internal.table.lookup.RowDataSingleValueLookupSchemaEntry; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSourceFactory.row; -import static com.getindata.connectors.http.internal.table.lookup.querycreators.GenericJsonAndUrlQueryCreatorFactory.*; -import static com.getindata.connectors.http.internal.table.lookup.querycreators.QueryCreatorUtils.getTableContext; - -class GenericJsonAndUrlQueryCreatorFactoryTest -{ - private Configuration config = new Configuration(); - - private DynamicTableFactory.Context tableContext; - - @BeforeEach - public void setUp() { - CustomJsonFormatFactory.requiredOptionsWereUsed = false; - this.tableContext = getTableContext(this.config, ResolvedSchema.of(Column.physical("key1", - DataTypes.STRING()))); - } - - @Test - public void lookupQueryInfoTestStr() { - assertThat(CustomJsonFormatFactory.requiredOptionsWereUsed) - .withFailMessage( - "CustomJsonFormat was not cleared, " - + "make sure 
`CustomJsonFormatFactory.requiredOptionsWereUsed" - + "= false` " - + "was called before this test execution.") - .isFalse(); - - this.config.setString("lookup-request.format", CustomJsonFormatFactory.IDENTIFIER); - this.config.setString( - String.format("lookup-request.format.%s.%s", CustomJsonFormatFactory.IDENTIFIER, - CustomJsonFormatFactory.REQUIRED_OPTION), "optionValue"); - this.config.set(REQUEST_QUERY_PARAM_FIELDS, List.of("key1")); - // with sync - createUsingFactory(false); - // with async - createUsingFactory(true); - } - - @Test - public void lookupQueryInfoTestRequiredConfig() { - GenericJsonAndUrlQueryCreatorFactory genericJsonAndUrlQueryCreatorFactory = - new GenericJsonAndUrlQueryCreatorFactory(); - assertThrows(RuntimeException.class, () -> { - genericJsonAndUrlQueryCreatorFactory.createLookupQueryCreator(config, - null, - null); - }); - // do not specify REQUEST_ARG_PATHS_CONFIG - assertThrows(RuntimeException.class, () -> { - genericJsonAndUrlQueryCreatorFactory.createLookupQueryCreator(config, - null, - null); - }); - } - - private void createUsingFactory(boolean async) { - this.config.setBoolean(HttpLookupConnectorOptions.ASYNC_POLLING, async); - LookupRow lookupRow= new LookupRow() - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry( - "key1", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 0) - )); - - lookupRow.setLookupPhysicalRowDataType( - row(List.of( - DataTypes.FIELD("key1", DataTypes.STRING()) - ))); - LookupQueryCreator lookupQueryCreator = new - GenericJsonAndUrlQueryCreatorFactory().createLookupQueryCreator( - config, - lookupRow, - tableContext - ); - GenericRowData lookupRowData = GenericRowData.of( - StringData.fromString("val1") - ); - - LookupQueryInfo lookupQueryInfo = lookupQueryCreator.createLookupQuery(lookupRowData); - assertThat(CustomJsonFormatFactory.requiredOptionsWereUsed).isTrue(); - assertThat(lookupQueryInfo.hasLookupQuery()).isTrue(); - 
assertThat(lookupQueryInfo.hasBodyBasedUrlQueryParameters()).isFalse(); - assertThat(lookupQueryInfo.hasPathBasedUrlParameters()).isFalse(); - } - @Test - void optionsTests() { - GenericJsonAndUrlQueryCreatorFactory factory = new GenericJsonAndUrlQueryCreatorFactory(); - assertThat(factory.requiredOptions()).isEmpty(); - assertThat(factory.optionalOptions()).contains(REQUEST_QUERY_PARAM_FIELDS); - assertThat(factory.optionalOptions()).contains(REQUEST_BODY_FIELDS); - assertThat(factory.optionalOptions()).contains(REQUEST_URL_MAP); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreatorFactoryTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreatorFactoryTest.java deleted file mode 100644 index ce3c87de..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/GenericJsonQueryCreatorFactoryTest.java +++ /dev/null @@ -1,88 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.util.Collections; -import java.util.List; - -import org.apache.flink.configuration.Configuration; -import org.apache.flink.table.api.DataTypes; -import org.apache.flink.table.api.Schema; -import org.apache.flink.table.catalog.*; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.factories.DynamicTableFactory; -import org.apache.flink.table.factories.FactoryUtil; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import static org.assertj.core.api.Assertions.assertThat; - -import com.getindata.connectors.http.internal.table.lookup.LookupRow; -import com.getindata.connectors.http.internal.table.lookup.RowDataSingleValueLookupSchemaEntry; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupTableSourceFactory.row; - -class GenericJsonQueryCreatorFactoryTest { - - private Configuration config; - private LookupRow 
lookupRow; - - private DynamicTableFactory.Context tableContext; - - @BeforeEach - public void setUp() { - this.config = new Configuration(); - this.lookupRow = new LookupRow(); - lookupRow = new LookupRow() - .addLookupEntry( - new RowDataSingleValueLookupSchemaEntry( - "key1", - RowData.createFieldGetter(DataTypes.STRING().getLogicalType(), 0) - )); - - lookupRow.setLookupPhysicalRowDataType( - row(List.of( - DataTypes.FIELD("key1", DataTypes.STRING()) - ))); - - CustomFormatFactory.requiredOptionsWereUsed = false; - - ResolvedSchema resolvedSchema = - ResolvedSchema.of(Column.physical("key1", DataTypes.STRING())); - - this.tableContext = new FactoryUtil.DefaultDynamicTableContext( - ObjectIdentifier.of("default", "default", "test"), - new ResolvedCatalogTable( - CatalogTable.of( - Schema.newBuilder().fromResolvedSchema(resolvedSchema).build(), - null, - Collections.emptyList(), - Collections.emptyMap()), - resolvedSchema), - Collections.emptyMap(), - config, - Thread.currentThread().getContextClassLoader(), - false - ); - - } - - @Test - public void shouldPassPropertiesToQueryCreatorFormat() { - assertThat(CustomFormatFactory.requiredOptionsWereUsed) - .withFailMessage( - "CustomFormatFactory was not cleared, " - + "make sure `CustomFormatFactory.requiredOptionsWereUsed = false` " - + "was called before this test execution.") - .isFalse(); - - this.config.setString("lookup-request.format", CustomFormatFactory.IDENTIFIER); - this.config.setString( - String.format("lookup-request.format.%s.%s", CustomFormatFactory.IDENTIFIER, - CustomFormatFactory.REQUIRED_OPTION), "optionValue"); - - new GenericJsonQueryCreatorFactory().createLookupQueryCreator( - config, - lookupRow, - tableContext - ); - - assertThat(CustomFormatFactory.requiredOptionsWereUsed).isTrue(); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/PathBean.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/PathBean.java 
deleted file mode 100644 index 40103ae2..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/PathBean.java +++ /dev/null @@ -1,11 +0,0 @@ -/* - * © Copyright IBM Corp. 2025 - */ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import lombok.Data; - -@Data -public class PathBean { - private String key1; -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/PersonBean.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/PersonBean.java deleted file mode 100644 index 5ca4a0e0..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/PersonBean.java +++ /dev/null @@ -1,12 +0,0 @@ -/* - * © Copyright IBM Corp. 2025 - */ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import lombok.Data; - -@Data -public class PersonBean { - private final String firstName; - private final String lastName; -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/QueryFormatAwareConfigurationTest.java b/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/QueryFormatAwareConfigurationTest.java deleted file mode 100644 index 9daaecb5..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/lookup/querycreators/QueryFormatAwareConfigurationTest.java +++ /dev/null @@ -1,38 +0,0 @@ -package com.getindata.connectors.http.internal.table.lookup.querycreators; - -import java.util.Collections; -import java.util.Optional; - -import org.apache.flink.configuration.ConfigOption; -import org.apache.flink.configuration.ConfigOptions; -import org.apache.flink.configuration.Configuration; -import org.junit.jupiter.api.Test; -import static org.assertj.core.api.Assertions.assertThat; - -class QueryFormatAwareConfigurationTest { - - private static final ConfigOption configOption = ConfigOptions.key("key") - 
.stringType() - .noDefaultValue(); - - @Test - public void testWithDot() { - QueryFormatAwareConfiguration queryConfig = new QueryFormatAwareConfiguration( - "prefix.", Configuration.fromMap(Collections.singletonMap("prefix.key", "val")) - ); - - Optional optional = queryConfig.getOptional(configOption); - assertThat(optional.get()).isEqualTo("val"); - } - - @Test - public void testWithoutDot() { - QueryFormatAwareConfiguration queryConfig = new QueryFormatAwareConfiguration( - "prefix", Configuration.fromMap(Collections.singletonMap("prefix.key", "val")) - ); - - Optional optional = queryConfig.getOptional(configOption); - assertThat(optional.get()).isEqualTo("val"); - } - -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/sink/BatchRequestHttpDynamicSinkInsertTest.java b/src/test/java/com/getindata/connectors/http/internal/table/sink/BatchRequestHttpDynamicSinkInsertTest.java deleted file mode 100644 index cdf21cbf..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/sink/BatchRequestHttpDynamicSinkInsertTest.java +++ /dev/null @@ -1,341 +0,0 @@ -package com.getindata.connectors.http.internal.table.sink; - -import java.io.File; -import java.util.Arrays; -import java.util.concurrent.ExecutionException; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.github.tomakehurst.wiremock.WireMockServer; -import com.github.tomakehurst.wiremock.http.RequestMethod; -import com.github.tomakehurst.wiremock.verification.LoggedRequest; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import 
org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; -import static com.github.tomakehurst.wiremock.client.WireMock.*; -import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import static com.getindata.connectors.http.TestHelper.readTestFile; - -public class BatchRequestHttpDynamicSinkInsertTest { - - private static final int SERVER_PORT = 9090; - - private static final int HTTPS_SERVER_PORT = 8443; - - private static final String CERTS_PATH = "src/test/resources/security/certs/"; - - private static final String SERVER_KEYSTORE_PATH = - "src/test/resources/security/certs/serverKeyStore.jks"; - - private static final String SERVER_TRUSTSTORE_PATH = - "src/test/resources/security/certs/serverTrustStore.jks"; - - protected StreamExecutionEnvironment env; - - protected StreamTableEnvironment tEnv; - - private WireMockServer wireMockServer; - - @BeforeEach - public void setup() { - File keyStoreFile = new File(SERVER_KEYSTORE_PATH); - File trustStoreFile = new File(SERVER_TRUSTSTORE_PATH); - - this.wireMockServer = new WireMockServer(options() - .port(SERVER_PORT) - .httpsPort(HTTPS_SERVER_PORT) - .keystorePath(keyStoreFile.getAbsolutePath()) - .keystorePassword("password") - .keyManagerPassword("password") - .needClientAuth(true) - .trustStorePath(trustStoreFile.getAbsolutePath()) - .trustStorePassword("password") - ); - - wireMockServer.start(); - - env = StreamExecutionEnvironment.getExecutionEnvironment(); - tEnv = StreamTableEnvironment.create(env); - } - - @AfterEach - public void tearDown() { - wireMockServer.stop(); - } - - private static Stream requestBatch() { - return Stream.of( - Arguments.of(50, "allInOneBatch.txt"), - Arguments.of(5, "allInOneBatch.txt"), - Arguments.of(3, "twoBatches.txt"), - Arguments.of(2, "threeBatches.txt"), - Arguments.of(1, 
"fourSingleEventBatches.txt") - ); - } - - @ParameterizedTest - @MethodSource("requestBatch") - public void testHttpDynamicSinkDefaultPost(int requestBatchSize, String expectedRequests) - throws Exception { - - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); - String contentTypeHeaderValue = "application/json"; - - final String createTable = - String.format( - "CREATE TABLE http (\n" - + " id bigint,\n" - + " first_name string,\n" - + " last_name string,\n" - + " gender string,\n" - + " stock string,\n" - + " currency string,\n" - + " tx_date timestamp(3)\n" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'format' = 'json',\n" - + " 'gid.connector.http.sink.request.batch.size' = '%s',\n" - + " 'gid.connector.http.sink.header.Content-Type' = '%s'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "http://localhost:" + SERVER_PORT + "/myendpoint", - requestBatchSize, - contentTypeHeaderValue - ); - - tEnv.executeSql(createTable); - - final String insert = "INSERT INTO http\n" - + "VALUES\n" - + " (1, 'Ninette', 'Clee', 'Female', 'CDZI', 'RUB', TIMESTAMP '2021-08-24 15:22:59'),\n" - + " (2, 'Rob', 'Zombie', 'Male', 'DGICA', 'GBP', TIMESTAMP '2021-10-25 20:53:54'), \n" - + " (3, 'Adam', 'Jones', 'Male', 'DGICA', 'PLN', TIMESTAMP '2021-10-26 20:53:54'), \n" - + " (4, 'Danny', 'Carey', 'Male', 'DGICA', 'USD', TIMESTAMP '2021-10-27 20:53:54'), \n" - + " (5, 'Bob', 'Dylan', 'Male', 'DGICA', 'USD', TIMESTAMP '2021-10-28 20:53:54')"; - tEnv.executeSql(insert).await(); - - verifyRequests(expectedRequests); - } - - @ParameterizedTest - @MethodSource("requestBatch") - public void testHttpDynamicSinkPut(int requestBatchSize, String expectedRequests) - throws Exception { - - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); - String contentTypeHeaderValue = "application/json"; - - final String createTable = - String.format( - "CREATE TABLE http (\n" - + " id bigint,\n" - + " first_name 
string,\n" - + " last_name string,\n" - + " gender string,\n" - + " stock string,\n" - + " currency string,\n" - + " tx_date timestamp(3)\n" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'insert-method' = 'PUT',\n" - + " 'format' = 'json',\n" - + " 'gid.connector.http.sink.request.batch.size' = '%s',\n" - + " 'gid.connector.http.sink.header.Content-Type' = '%s'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "http://localhost:" + SERVER_PORT + "/myendpoint", - requestBatchSize, - contentTypeHeaderValue - ); - - tEnv.executeSql(createTable); - - final String insert = "INSERT INTO http\n" - + "VALUES\n" - + " (1, 'Ninette', 'Clee', 'Female', 'CDZI', 'RUB', TIMESTAMP '2021-08-24 15:22:59'),\n" - + " (2, 'Rob', 'Zombie', 'Male', 'DGICA', 'GBP', TIMESTAMP '2021-10-25 20:53:54'), \n" - + " (3, 'Adam', 'Jones', 'Male', 'DGICA', 'PLN', TIMESTAMP '2021-10-26 20:53:54'), \n" - + " (4, 'Danny', 'Carey', 'Male', 'DGICA', 'USD', TIMESTAMP '2021-10-27 20:53:54'), \n" - + " (5, 'Bob', 'Dylan', 'Male', 'DGICA', 'USD', TIMESTAMP '2021-10-28 20:53:54')"; - tEnv.executeSql(insert).await(); - - verifyRequests(expectedRequests); - } - - private void verifyRequests(String expectedResponse) { - ObjectMapper mapper = new ObjectMapper(); - - var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))) - .stream() - .map(LoggedRequest::getBodyAsString) - .map(content -> getJsonSipleString(mapper, content)) - .collect(Collectors.toList()); - - var expectedResponses = - Arrays.stream(readTestFile("/json/sink/" + expectedResponse).split("#-----#")) - .map(content -> getJsonSipleString(mapper, content)).collect(Collectors.toList()); - - // TODO this ideally should use containsExactlyElementsOf however Wiremock uses multiple - // threads to add events to its internal journal which can brea the order of - // received events. Probably use WireMock Scenarios feature can help here and allow to - // verify the order. 
Or maybe there is some other solution for that. - assertThat(postedRequests).containsExactlyInAnyOrderElementsOf(expectedResponses); - } - - private static String getJsonSipleString(ObjectMapper mapper, String content) { - try { - return mapper.readTree(content).toString(); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - - @Test - public void testHttpDynamicSinkRawFormat() throws Exception { - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); - String contentTypeHeaderValue = "application/octet-stream"; - - final String createTable = - String.format( - "CREATE TABLE http (\n" - + " last_name string" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'format' = 'raw',\n" - + " 'gid.connector.http.sink.header.Content-Type' = '%s'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "http://localhost:" + SERVER_PORT + "/myendpoint", - contentTypeHeaderValue - ); - - tEnv.executeSql(createTable); - - final String insert = "INSERT INTO http VALUES ('Clee'), ('John')"; - tEnv.executeSql(insert).await(); - - var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); - assertEquals(1, postedRequests.size()); - - var request = postedRequests.get(0); - assertEquals("[Clee,John]", request.getBodyAsString()); - assertEquals(RequestMethod.POST, request.getMethod()); - assertEquals(contentTypeHeaderValue, request.getHeader("Content-Type")); - } - - @Test - public void testHttpRequestWithHeadersFromDdl() - throws ExecutionException, InterruptedException { - String originHeaderValue = "*"; - String xContentTypeOptionsHeaderValue = "nosniff"; - String contentTypeHeaderValue = "application/json"; - - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); - - final String createTable = - String.format( - "CREATE TABLE http (\n" - + " last_name string" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'format' = 
'raw',\n" - + " 'gid.connector.http.sink.header.Origin' = '%s',\n" - + " 'gid.connector.http.sink.header.X-Content-Type-Options' = '%s',\n" - + " 'gid.connector.http.sink.header.Content-Type' = '%s'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "http://localhost:" + SERVER_PORT + "/myendpoint", - originHeaderValue, - xContentTypeOptionsHeaderValue, - contentTypeHeaderValue - ); - - tEnv.executeSql(createTable); - - final String insert = "INSERT INTO http VALUES ('Clee'), ('John')"; - tEnv.executeSql(insert).await(); - - var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); - assertEquals(1, postedRequests.size()); - - var request = postedRequests.get(0); - assertEquals("[Clee,John]", request.getBodyAsString()); - assertEquals(RequestMethod.POST, request.getMethod()); - assertEquals(contentTypeHeaderValue, request.getHeader("Content-Type")); - assertEquals(originHeaderValue, request.getHeader("Origin")); - assertEquals(xContentTypeOptionsHeaderValue, request.getHeader("X-Content-Type-Options")); - } - - @Test - public void testHttpsWithMTls() throws Exception { - - File serverTrustedCert = new File(CERTS_PATH + "ca.crt"); - - File clientCert = new File(CERTS_PATH + "client.crt"); - File clientPrivateKey = new File(CERTS_PATH + "clientPrivateKey.pem"); - - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); - String contentTypeHeaderValue = "application/json"; - - final String createTable = - String.format( - "CREATE TABLE http (\n" - + " id bigint,\n" - + " first_name string,\n" - + " last_name string,\n" - + " gender string,\n" - + " stock string,\n" - + " currency string,\n" - + " tx_date timestamp(3)\n" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'format' = 'json',\n" - + " 'gid.connector.http.sink.header.Content-Type' = '%s',\n" - + " 'gid.connector.http.security.cert.server' = '%s',\n" - + " 'gid.connector.http.security.cert.client' = '%s',\n" - + " 
'gid.connector.http.security.key.client' = '%s'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "https://localhost:" + HTTPS_SERVER_PORT + "/myendpoint", - contentTypeHeaderValue, - serverTrustedCert.getAbsolutePath(), - clientCert.getAbsolutePath(), - clientPrivateKey.getAbsolutePath() - ); - - tEnv.executeSql(createTable); - - final String insert = "INSERT INTO http\n" - + "VALUES (1, 'Ninette', 'Clee', 'Female', 'CDZI', 'RUB', " - + "TIMESTAMP '2021-08-24 15:22:59')"; - tEnv.executeSql(insert).await(); - - var postedRequests = - wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); - assertEquals(1, postedRequests.size()); - - var request = postedRequests.get(0); - assertEquals( - "[{\"id\":1,\"first_name\":\"Ninette\",\"last_name\":\"Clee\"," - + "\"gender\":\"Female\",\"stock\":\"CDZI\",\"currency\":\"RUB\"," - + "\"tx_date\":\"2021-08-24 15:22:59\"}]", - request.getBodyAsString() - ); - - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicSinkTest.java b/src/test/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicSinkTest.java deleted file mode 100644 index faf0f0e8..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicSinkTest.java +++ /dev/null @@ -1,109 +0,0 @@ -package com.getindata.connectors.http.internal.table.sink; - -import org.apache.flink.configuration.Configuration; -import org.apache.flink.table.connector.ChangelogMode; -import org.apache.flink.table.factories.FactoryUtil; -import org.apache.flink.table.factories.TestFormatFactory; -import org.apache.flink.table.types.AtomicDataType; -import org.apache.flink.table.types.logical.BooleanType; -import org.junit.jupiter.api.Test; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotEquals; - -import 
com.getindata.connectors.http.internal.table.sink.HttpDynamicSink.HttpDynamicTableSinkBuilder; -import static com.getindata.connectors.http.internal.table.sink.HttpDynamicSinkConnectorOptions.INSERT_METHOD; -import static com.getindata.connectors.http.internal.table.sink.HttpDynamicSinkConnectorOptions.URL; - -public class HttpDynamicSinkTest { - - @Test - public void testAsSummaryString() { - var mockFormat = new TestFormatFactory.EncodingFormatMock(",", ChangelogMode.insertOnly()); - - HttpDynamicSink dynamicSink = new HttpDynamicTableSinkBuilder() - .setTableOptions(new Configuration()) - .setConsumedDataType( - new AtomicDataType(new BooleanType(false))) - .setEncodingFormat(mockFormat) - .setHttpPostRequestCallback(new Slf4jHttpPostRequestCallback()) - .build(); - - assertThat(dynamicSink.asSummaryString()).isEqualTo("HttpSink"); - } - - @Test - public void copyEqualityTest() { - var mockFormat = new TestFormatFactory.EncodingFormatMock(",", ChangelogMode.insertOnly()); - var sink = new HttpDynamicSink - .HttpDynamicTableSinkBuilder() - .setTableOptions( - new Configuration() { - { - this.set(URL, "localhost:8123"); - this.set(INSERT_METHOD, "POST"); - this.set(FactoryUtil.FORMAT, "json"); - } - } - ) - .setConsumedDataType( - new AtomicDataType(new BooleanType(false))) - .setEncodingFormat(mockFormat) - .setHttpPostRequestCallback(new Slf4jHttpPostRequestCallback()) - .build(); - - assertEquals(sink, sink.copy()); - assertEquals(sink.hashCode(), sink.copy().hashCode()); - } - - private HttpDynamicSink.HttpDynamicTableSinkBuilder getSinkBuilder() { - var mockFormat = new TestFormatFactory.EncodingFormatMock(",", ChangelogMode.insertOnly()); - var consumedDataType = new AtomicDataType(new BooleanType(false)); - - return new HttpDynamicSink.HttpDynamicTableSinkBuilder() - .setTableOptions( - new Configuration() { - { - this.set(URL, "localhost:8123"); - this.set(INSERT_METHOD, "POST"); - this.set(FactoryUtil.FORMAT, "json"); - } - } - ) - 
.setConsumedDataType(consumedDataType) - .setEncodingFormat(mockFormat) - .setHttpPostRequestCallback(new Slf4jHttpPostRequestCallback()) - .setMaxBatchSize(1); - } - - @Test - public void nonEqualsTest() { - var sink = getSinkBuilder().build(); - var sinkBatchSize = getSinkBuilder().setMaxBatchSize(10).build(); - var sinkSinkConfig = getSinkBuilder().setTableOptions( - new Configuration() { - { - this.set(URL, "localhost:8124"); - this.set(INSERT_METHOD, "POST"); - this.set(FactoryUtil.FORMAT, "json"); - } - } - ).build(); - var sinkDataType = - getSinkBuilder().setConsumedDataType(new AtomicDataType(new BooleanType(true))).build(); - var sinkFormat = getSinkBuilder().setEncodingFormat( - new TestFormatFactory.EncodingFormatMock(";", ChangelogMode.all())).build(); - var sinkHttpPostRequestCallback = - getSinkBuilder() - .setHttpPostRequestCallback(new Slf4jHttpPostRequestCallback()).build(); - - assertEquals(sink, sink); - assertNotEquals(null, sink); - assertNotEquals("test-string", sink); - assertNotEquals(sink, sinkBatchSize); - assertNotEquals(sink, sinkSinkConfig); - assertNotEquals(sink, sinkDataType); - assertNotEquals(sink, sinkFormat); - assertNotEquals(sink, sinkHttpPostRequestCallback); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicTableSinkFactoryTest.java b/src/test/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicTableSinkFactoryTest.java deleted file mode 100644 index 269ee87b..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/sink/HttpDynamicTableSinkFactoryTest.java +++ /dev/null @@ -1,94 +0,0 @@ -package com.getindata.connectors.http.internal.table.sink; - -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.ValidationException; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import static 
org.junit.jupiter.api.Assertions.assertThrows; - -// Unfortunately it seems that Flink is lazy with connector instantiation, -// so one has to call INSERT in order to test the Factory -public class HttpDynamicTableSinkFactoryTest { - - protected StreamExecutionEnvironment env; - protected StreamTableEnvironment tEnv; - - @BeforeEach - public void setup() { - env = StreamExecutionEnvironment.getExecutionEnvironment(); - tEnv = StreamTableEnvironment.create(env); - } - - @Test - public void requiredOptionsTest() { - final String noFormatOptionCreate = - String.format( - "CREATE TABLE formatHttp (\n" - + " id bigint\n" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "http://localhost/" - ); - tEnv.executeSql(noFormatOptionCreate); - assertThrows(ValidationException.class, - () -> tEnv.executeSql("INSERT INTO formatHttp VALUES (1)").await()); - - final String noUrlOptionCreate = - String.format( - "CREATE TABLE urlHttp (\n" - + " id bigint\n" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'format' = 'json'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER - ); - tEnv.executeSql(noUrlOptionCreate); - assertThrows(ValidationException.class, - () -> tEnv.executeSql("INSERT INTO urlHttp VALUES (1)").await()); - } - - @Test - public void validateHttpSinkOptionsTest() { - final String invalidInsertMethod = - String.format( - "CREATE TABLE http (\n" - + " id bigint\n" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'format' = 'json',\n" - + " 'insert-method' = 'GET'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "http://localhost/" - ); - tEnv.executeSql(invalidInsertMethod); - assertThrows(ValidationException.class, - () -> tEnv.executeSql("INSERT INTO http VALUES (1)").await()); - } - - @Test - public void nonexistentOptionsTest() { - final String invalidInsertMethod = - String.format( - "CREATE TABLE http (\n" - + " id bigint\n" - + ") with (\n" - + " 
'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'format' = 'json',\n" - + " 'some-random-totally-unexisting-option-!g*Av#' = '7123'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "http://localhost/" - ); - tEnv.executeSql(invalidInsertMethod); - assertThrows(ValidationException.class, - () -> tEnv.executeSql("INSERT INTO http VALUES (1)").await()); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/table/sink/PerRequestHttpDynamicSinkInsertTest.java b/src/test/java/com/getindata/connectors/http/internal/table/sink/PerRequestHttpDynamicSinkInsertTest.java deleted file mode 100644 index 9ba751b2..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/table/sink/PerRequestHttpDynamicSinkInsertTest.java +++ /dev/null @@ -1,309 +0,0 @@ -package com.getindata.connectors.http.internal.table.sink; - -import java.io.File; -import java.util.HashSet; -import java.util.Set; -import java.util.concurrent.ExecutionException; - -import com.github.tomakehurst.wiremock.WireMockServer; -import com.github.tomakehurst.wiremock.http.RequestMethod; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import static com.github.tomakehurst.wiremock.client.WireMock.*; -import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -public class PerRequestHttpDynamicSinkInsertTest { - - private static final int SERVER_PORT = 9090; - - private static final int HTTPS_SERVER_PORT = 8443; - - private static final String CERTS_PATH = "src/test/resources/security/certs/"; - - private static final String SERVER_KEYSTORE_PATH = - "src/test/resources/security/certs/serverKeyStore.jks"; - - private 
static final String SERVER_TRUSTSTORE_PATH = - "src/test/resources/security/certs/serverTrustStore.jks"; - - protected StreamExecutionEnvironment env; - - protected StreamTableEnvironment tEnv; - - private WireMockServer wireMockServer; - - @BeforeEach - public void setup() { - File keyStoreFile = new File(SERVER_KEYSTORE_PATH); - File trustStoreFile = new File(SERVER_TRUSTSTORE_PATH); - - this.wireMockServer = new WireMockServer(options() - .port(SERVER_PORT) - .httpsPort(HTTPS_SERVER_PORT) - .keystorePath(keyStoreFile.getAbsolutePath()) - .keystorePassword("password") - .keyManagerPassword("password") - .needClientAuth(true) - .trustStorePath(trustStoreFile.getAbsolutePath()) - .trustStorePassword("password") - ); - - wireMockServer.start(); - - env = StreamExecutionEnvironment.getExecutionEnvironment(); - tEnv = StreamTableEnvironment.create(env); - } - - @AfterEach - public void tearDown() { - wireMockServer.stop(); - } - - @Test - public void testHttpDynamicSinkDefaultPost() throws Exception { - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); - String contentTypeHeaderValue = "application/json"; - - final String createTable = - String.format( - "CREATE TABLE http (\n" - + " id bigint,\n" - + " first_name string,\n" - + " last_name string,\n" - + " gender string,\n" - + " stock string,\n" - + " currency string,\n" - + " tx_date timestamp(3)\n" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'format' = 'json',\n" - + " 'gid.connector.http.sink.writer.request.mode' = 'single',\n" - + " 'gid.connector.http.sink.header.Content-Type' = '%s'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "http://localhost:" + SERVER_PORT + "/myendpoint", - contentTypeHeaderValue - ); - - tEnv.executeSql(createTable); - - final String insert = "INSERT INTO http\n" - + "VALUES (1, 'Ninette', 'Clee', 'Female', 'CDZI', 'RUB', " - + "TIMESTAMP '2021-08-24 15:22:59')"; - tEnv.executeSql(insert).await(); - - var postedRequests 
= - wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); - assertEquals(1, postedRequests.size()); - - var request = postedRequests.get(0); - assertEquals( - "{\"id\":1,\"first_name\":\"Ninette\",\"last_name\":\"Clee\"," - + "\"gender\":\"Female\",\"stock\":\"CDZI\",\"currency\":\"RUB\"," - + "\"tx_date\":\"2021-08-24 15:22:59\"}", - request.getBodyAsString() - ); - assertEquals(RequestMethod.POST, request.getMethod()); - assertEquals(contentTypeHeaderValue, request.getHeader("Content-Type")); - } - - @Test - public void testHttpDynamicSinkPut() throws Exception { - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); - String contentTypeHeaderValue = "application/json"; - - final String createTable = - String.format( - "CREATE TABLE http (\n" - + " id bigint,\n" - + " first_name string,\n" - + " last_name string,\n" - + " gender string,\n" - + " stock string,\n" - + " currency string,\n" - + " tx_date timestamp(3)\n" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'insert-method' = 'PUT',\n" - + " 'format' = 'json',\n" - + " 'gid.connector.http.sink.writer.request.mode' = 'single',\n" - + " 'gid.connector.http.sink.header.Content-Type' = '%s'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "http://localhost:" + SERVER_PORT + "/myendpoint", - contentTypeHeaderValue - ); - - tEnv.executeSql(createTable); - - final String insert = "INSERT INTO http\n" - + "VALUES\n" - + " (1, 'Ninette', 'Clee', 'Female', 'CDZI', 'RUB', TIMESTAMP '2021-08-24 15:22:59'),\n" - + " (2, 'Hedy', 'Hedgecock', 'Female', 'DGICA', 'CNY', " - + "TIMESTAMP '2021-10-24 20:53:54')"; - tEnv.executeSql(insert).await(); - - var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); - assertEquals(2, postedRequests.size()); - - var jsonRequests = new HashSet<>(Set.of( - "{\"id\":1,\"first_name\":\"Ninette\",\"last_name\":\"Clee\",\"gender\":\"Female\"," - + 
"\"stock\":\"CDZI\",\"currency\":\"RUB\",\"tx_date\":\"2021-08-24 15:22:59\"}", - "{\"id\":2,\"first_name\":\"Hedy\",\"last_name\":\"Hedgecock\",\"gender\":\"Female\"," - + "\"stock\":\"DGICA\",\"currency\":\"CNY\",\"tx_date\":\"2021-10-24 20:53:54\"}" - )); - for (var request : postedRequests) { - assertEquals(RequestMethod.PUT, request.getMethod()); - assertEquals(contentTypeHeaderValue, request.getHeader("Content-Type")); - assertTrue(jsonRequests.contains(request.getBodyAsString())); - jsonRequests.remove(request.getBodyAsString()); - } - } - - @Test - public void testHttpDynamicSinkRawFormat() throws Exception { - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); - String contentTypeHeaderValue = "application/octet-stream"; - - final String createTable = - String.format( - "CREATE TABLE http (\n" - + " last_name string" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'format' = 'raw',\n" - + " 'gid.connector.http.sink.writer.request.mode' = 'single',\n" - + " 'gid.connector.http.sink.header.Content-Type' = '%s'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "http://localhost:" + SERVER_PORT + "/myendpoint", - contentTypeHeaderValue - ); - - tEnv.executeSql(createTable); - - final String insert = "INSERT INTO http VALUES ('Clee')"; - tEnv.executeSql(insert).await(); - - var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); - assertEquals(1, postedRequests.size()); - - var request = postedRequests.get(0); - assertEquals("Clee", request.getBodyAsString()); - assertEquals(RequestMethod.POST, request.getMethod()); - assertEquals(contentTypeHeaderValue, request.getHeader("Content-Type")); - } - - @Test - public void testHttpRequestWithHeadersFromDdl() - throws ExecutionException, InterruptedException { - String originHeaderValue = "*"; - String xContentTypeOptionsHeaderValue = "nosniff"; - String contentTypeHeaderValue = "application/json"; - - 
wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); - - final String createTable = - String.format( - "CREATE TABLE http (\n" - + " last_name string" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'format' = 'raw',\n" - + " 'gid.connector.http.sink.writer.request.mode' = 'single',\n" - + " 'gid.connector.http.sink.header.Origin' = '%s',\n" - + " 'gid.connector.http.sink.header.X-Content-Type-Options' = '%s',\n" - + " 'gid.connector.http.sink.header.Content-Type' = '%s'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "http://localhost:" + SERVER_PORT + "/myendpoint", - originHeaderValue, - xContentTypeOptionsHeaderValue, - contentTypeHeaderValue - ); - - tEnv.executeSql(createTable); - - final String insert = "INSERT INTO http VALUES ('Clee')"; - tEnv.executeSql(insert).await(); - - var postedRequests = wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); - assertEquals(1, postedRequests.size()); - - var request = postedRequests.get(0); - assertEquals("Clee", request.getBodyAsString()); - assertEquals(RequestMethod.POST, request.getMethod()); - assertEquals(contentTypeHeaderValue, request.getHeader("Content-Type")); - assertEquals(originHeaderValue, request.getHeader("Origin")); - assertEquals(xContentTypeOptionsHeaderValue, request.getHeader("X-Content-Type-Options")); - } - - @Test - public void testHttpsWithMTls() throws Exception { - - File serverTrustedCert = new File(CERTS_PATH + "ca.crt"); - - File clientCert = new File(CERTS_PATH + "client.crt"); - File clientPrivateKey = new File(CERTS_PATH + "clientPrivateKey.pem"); - - wireMockServer.stubFor(any(urlPathEqualTo("/myendpoint")).willReturn(ok())); - String contentTypeHeaderValue = "application/json"; - - final String createTable = - String.format( - "CREATE TABLE http (\n" - + " id bigint,\n" - + " first_name string,\n" - + " last_name string,\n" - + " gender string,\n" - + " stock string,\n" - + " currency string,\n" - + " 
tx_date timestamp(3)\n" - + ") with (\n" - + " 'connector' = '%s',\n" - + " 'url' = '%s',\n" - + " 'format' = 'json',\n" - + " 'gid.connector.http.sink.writer.request.mode' = 'single',\n" - + " 'gid.connector.http.sink.header.Content-Type' = '%s',\n" - + " 'gid.connector.http.security.cert.server' = '%s',\n" - + " 'gid.connector.http.security.cert.client' = '%s',\n" - + " 'gid.connector.http.security.key.client' = '%s'\n" - + ")", - HttpDynamicTableSinkFactory.IDENTIFIER, - "https://localhost:" + HTTPS_SERVER_PORT + "/myendpoint", - contentTypeHeaderValue, - serverTrustedCert.getAbsolutePath(), - clientCert.getAbsolutePath(), - clientPrivateKey.getAbsolutePath() - ); - - tEnv.executeSql(createTable); - - final String insert = "INSERT INTO http\n" - + "VALUES (1, 'Ninette', 'Clee', 'Female', 'CDZI', 'RUB', " - + "TIMESTAMP '2021-08-24 15:22:59')"; - tEnv.executeSql(insert).await(); - - var postedRequests = - wireMockServer.findAll(anyRequestedFor(urlPathEqualTo("/myendpoint"))); - assertEquals(1, postedRequests.size()); - - var request = postedRequests.get(0); - assertEquals( - "{\"id\":1,\"first_name\":\"Ninette\",\"last_name\":\"Clee\"," - + "\"gender\":\"Female\",\"stock\":\"CDZI\",\"currency\":\"RUB\"," - + "\"tx_date\":\"2021-08-24 15:22:59\"}", - request.getBodyAsString() - ); - - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/utils/ConfigUtilsTest.java b/src/test/java/com/getindata/connectors/http/internal/utils/ConfigUtilsTest.java deleted file mode 100644 index 4ca20351..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/utils/ConfigUtilsTest.java +++ /dev/null @@ -1,244 +0,0 @@ -package com.getindata.connectors.http.internal.utils; - -import java.net.*; -import java.util.Arrays; -import java.util.Map; -import java.util.Optional; -import java.util.Properties; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; -import static 
org.assertj.core.api.Assertions.assertThat; -import static org.assertj.core.api.Assertions.assertThatThrownBy; - -import com.getindata.connectors.http.internal.config.ConfigException; -import static com.getindata.connectors.http.TestHelper.assertPropertyArray; - -class ConfigUtilsTest { - - @Test - public void shouldExtractPropertiesToMap() { - Properties properties = new Properties(); - properties.setProperty("property", "val1"); - properties.setProperty("my.property", "val2"); - properties.setProperty("my.super.property", "val3"); - properties.setProperty("my.property.detail", "val4"); - properties.setProperty("my.property.extra", "val5"); - properties.setProperty("another.my.property.extra", "val6"); - - Map mappedProperties = - ConfigUtils.propertiesToMap(properties, "my.property", String.class); - - assertThat(mappedProperties).hasSize(3); - assertThat(mappedProperties) - .containsAllEntriesOf( - Map.of( - "my.property", "val2", - "my.property.detail", "val4", - "my.property.extra", "val5" - )); - } - - @Test - public void shouldConvertNoProperty() { - Properties properties = new Properties(); - properties.setProperty("property", "val1"); - properties.setProperty("my.property", "val2"); - properties.setProperty("my.super.property", "val3"); - - Map mappedProperties = - ConfigUtils.propertiesToMap(properties, "my.custom", String.class); - assertThat(mappedProperties).isEmpty(); - } - - @Test - public void shouldGetProxyConfigWithAuthenticator() throws UnknownHostException { - String proxyHost = "proxy"; - Integer proxyPort = 9090; - Optional proxyUsername = Optional.of("username"); - Optional proxyPassword = Optional.of("password"); - - ProxyConfig proxyConfig = new ProxyConfig(proxyHost, proxyPort, proxyUsername, proxyPassword ); - assertThat(proxyConfig.getHost().equals("proxy")); - assertThat(proxyConfig.getAuthenticator().isPresent()); - - PasswordAuthentication auth = proxyConfig.getAuthenticator().orElseGet(null) - .requestPasswordAuthenticationInstance( 
- "proxy", // host - InetAddress.getByName("127.0.0.1"), // address - 9090, // port - "http", // protocol - "Please authenticate", // prompt - "basic", // scheme - null, // URL - Authenticator.RequestorType.SERVER // Requestor type - ); - - PasswordAuthentication auth2 = proxyConfig.getAuthenticator().orElseGet(null) - .requestPasswordAuthenticationInstance( - "proxy", // host - InetAddress.getByName("127.0.0.1"), // address - 9090, // port - "http", // protocol - "Please authenticate", // prompt - "basic", // scheme - null, // URL - Authenticator.RequestorType.PROXY // Requestor type - ); - - assertThat(auth).isNull(); - assertThat(auth2).isNotNull(); - assertThat(auth2.getUserName().equals("username")).isTrue(); - assertThat(Arrays.equals(auth2.getPassword(), "password".toCharArray())).isTrue(); - } - - @Test - public void shouldGetProxyConfigWithAuthenticatorServer() throws UnknownHostException { - String proxyHost = "proxy"; - Integer proxyPort = 8080; - Optional proxyUsername = Optional.of("username"); - Optional proxyPassword = Optional.of("password"); - - ProxyConfig proxyConfig = new ProxyConfig(proxyHost, proxyPort, proxyUsername, proxyPassword ); - assertThat(proxyConfig.getHost().equals("proxy")).isTrue(); - assertThat(proxyConfig.getAuthenticator().isPresent()).isTrue(); - - PasswordAuthentication auth = proxyConfig.getAuthenticator().orElseGet(null) - .requestPasswordAuthenticationInstance( - "proxy", // host - InetAddress.getByName("127.0.0.1"), // address - 8080, // port - "http", // protocol - "Please authenticate", // prompt - "basic", // scheme - null, // URL - Authenticator.RequestorType.SERVER // Requestor type - ); - - PasswordAuthentication auth2 = proxyConfig.getAuthenticator().orElseGet(null) - .requestPasswordAuthenticationInstance( - "proxy", // host - InetAddress.getByName("127.0.0.1"), // address - 8080, // port - "http", // protocol - "Please authenticate", // prompt - "basic", // scheme - null, // URL - 
Authenticator.RequestorType.PROXY // Requestor type - ); - - assertThat(auth).isNull(); - assertThat(auth2).isNotNull(); - } - - @Test - public void shouldGetProxyConfigWithAuthenticatorWrongHost() throws UnknownHostException { - String proxyHost = "proxy"; - Integer proxyPort = 8080; - Optional proxyUsername = Optional.of("username"); - Optional proxyPassword = Optional.of("password"); - - ProxyConfig proxyConfig = new ProxyConfig(proxyHost, proxyPort, proxyUsername, proxyPassword ); - assertThat(proxyConfig.getHost().equals("proxy")).isTrue(); - assertThat(proxyConfig.getAuthenticator().isPresent()).isTrue(); - - PasswordAuthentication auth = proxyConfig.getAuthenticator().get() - .requestPasswordAuthenticationInstance( - "wrong", // host - InetAddress.getByName("127.0.0.1"), // address - 8080, // port - "http", // protocol - "Please authenticate", // prompt - "basic", // scheme - null, // URL - Authenticator.RequestorType.PROXY // Requestor type - ); - - PasswordAuthentication auth2 = proxyConfig.getAuthenticator().orElseGet(null) - .requestPasswordAuthenticationInstance( - "proxy", // host - InetAddress.getByName("127.0.0.1"), // address - 8080, // port - "http", // protocol - "Please authenticate", // prompt - "basic", // scheme - null, // URL - Authenticator.RequestorType.PROXY // Requestor type - ); - - assertThat(auth).isNull(); - assertThat(auth2).isNotNull(); - } - - @Test - public void shouldGetProxyConfigWithoutAuthenticator() throws MalformedURLException, UnknownHostException { - String proxyHost = "proxy"; - Optional proxyUsername = Optional.of("username"); - Optional proxyPassword = Optional.empty(); - - ProxyConfig proxyConfig = new ProxyConfig(proxyHost, 80, proxyUsername, proxyPassword ); - assertThat(proxyConfig.getHost().equals("proxy")).isTrue(); - assertThat(proxyConfig.getAuthenticator().isEmpty()).isTrue(); - } - - @Test - public void shouldHandleInvalidPropertyType() { - - Properties properties = new Properties(); - 
properties.put("a.property", 1); - - // Should ignore "invalid" property since does not match the prefix - Map mappedProperties = - ConfigUtils.propertiesToMap(properties, "my.custom", String.class); - assertThat(mappedProperties).isEmpty(); - - // should throw on invalid value, when name matches the prefix. - assertThatThrownBy( - () -> ConfigUtils.propertiesToMap(properties, "a.property", String.class)) - .isInstanceOf(ConfigException.class); - - // should throw on non String key regardless of key prefix. - Properties nonStringProperties = new Properties(); - nonStringProperties.put(new Object(), 1); - assertThatThrownBy( - () -> ConfigUtils.propertiesToMap(nonStringProperties, "a.property", String.class)) - .isInstanceOf(ConfigException.class); - } - - @ParameterizedTest(name = "Property full name - {0}") - @ValueSource(strings = {"property", "my.property", "my.super.property", ".my.super.property"}) - public void shouldGetPropertyName(String fullPropertyName) { - - String propertyLastElement = ConfigUtils.extractPropertyLastElement(fullPropertyName); - assertThat(propertyLastElement).isEqualTo("property"); - } - - @ParameterizedTest(name = "Property full name - {0}") - @ValueSource(strings = {"", " ", "my.super.property.", ".", "..."}) - public void shouldThrowOnInvalidProperty(String invalidProperty) { - - assertThatThrownBy( - () -> ConfigUtils.extractPropertyLastElement(invalidProperty)) - .isInstanceOf(ConfigException.class); - } - - @Test - public void flatMapPropertyMap() { - Map propertyMap = Map.of( - "propertyOne", "val1", - "propertyTwo", "val2", - "propertyThree", "val3" - ); - - String[] propertyArray = HttpHeaderUtils.toHeaderAndValueArray(propertyMap); - - // size is == propertyMap.key size + propertyMap.value.size - assertThat(propertyArray).hasSize(6); - - // assert that we have property followed by its value. 
- assertPropertyArray(propertyArray, "propertyOne", "val1"); - assertPropertyArray(propertyArray, "propertyTwo", "val2"); - assertPropertyArray(propertyArray, "propertyThree", "val3"); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/utils/HttpHeaderUtilsTest.java b/src/test/java/com/getindata/connectors/http/internal/utils/HttpHeaderUtilsTest.java deleted file mode 100644 index 47dee2f1..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/utils/HttpHeaderUtilsTest.java +++ /dev/null @@ -1,27 +0,0 @@ -package com.getindata.connectors.http.internal.utils; -import java.time.Duration; - -import org.apache.flink.configuration.Configuration; -import org.junit.jupiter.api.Test; -import static org.assertj.core.api.AssertionsForClassTypes.assertThat; - -import com.getindata.connectors.http.internal.HeaderPreprocessor; -import static com.getindata.connectors.http.internal.table.lookup.HttpLookupConnectorOptions.*; - - - -public class HttpHeaderUtilsTest { - @Test - void shouldCreateOIDCHeaderPreprocessorTest() { - Configuration configuration = new Configuration(); - HeaderPreprocessor headerPreprocessor - = HttpHeaderUtils.createOIDCHeaderPreprocessor(configuration); - assertThat(headerPreprocessor).isNull(); - configuration.setString(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_ENDPOINT_URL.key(), "http://aaa"); - configuration.setString(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_REQUEST.key(), "ccc"); - configuration.set(SOURCE_LOOKUP_OIDC_AUTH_TOKEN_EXPIRY_REDUCTION, Duration.ofSeconds(1)); - headerPreprocessor - = HttpHeaderUtils.createOIDCHeaderPreprocessor(configuration); - assertThat(headerPreprocessor).isNotNull(); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/utils/JavaNetHttpClientFactoryTest.java b/src/test/java/com/getindata/connectors/http/internal/utils/JavaNetHttpClientFactoryTest.java deleted file mode 100644 index 774b65b5..00000000 --- 
a/src/test/java/com/getindata/connectors/http/internal/utils/JavaNetHttpClientFactoryTest.java +++ /dev/null @@ -1,112 +0,0 @@ -package com.getindata.connectors.http.internal.utils; - -import java.net.Authenticator; -import java.net.InetAddress; -import java.net.PasswordAuthentication; -import java.net.UnknownHostException; -import java.net.http.HttpClient; -import java.util.Arrays; -import java.util.Properties; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import org.apache.flink.configuration.Configuration; -import org.apache.flink.util.concurrent.ExecutorThreadFactory; -import org.junit.jupiter.api.Test; -import static org.assertj.core.api.Assertions.assertThat; - -import com.getindata.connectors.http.internal.table.lookup.HttpLookupConfig; -import com.getindata.connectors.http.internal.table.lookup.Slf4JHttpLookupPostRequestCallback; -import static com.getindata.connectors.http.internal.config.HttpConnectorConfigConstants.*; - -class JavaNetHttpClientFactoryTest { - - @Test - public void shouldGetClientWithAuthenticator() throws UnknownHostException { - Properties properties = new Properties(); - Configuration configuration = new Configuration(); - configuration.setString(SOURCE_PROXY_HOST, "google"); - configuration.setString(SOURCE_PROXY_PORT, "8080"); - configuration.setString(SOURCE_PROXY_USERNAME, "username"); - configuration.setString(SOURCE_PROXY_PASSWORD, "password"); - - HttpLookupConfig lookupConfig = HttpLookupConfig.builder() - .url("https://google.com") - .readableConfig(configuration) - .properties(properties) - .httpPostRequestCallback(new Slf4JHttpLookupPostRequestCallback()) - .build(); - - HttpClient client = JavaNetHttpClientFactory.createClient(lookupConfig); - - assertThat(client.authenticator().isPresent()).isTrue(); - assertThat(client.proxy().isPresent()).isTrue(); - - PasswordAuthentication auth = client.authenticator().get().requestPasswordAuthenticationInstance( - "google", // host - 
InetAddress.getByName("127.0.0.1"), // address - 8080, // port - "http", // protocol - "Please authenticate", // prompt - "basic", // scheme - null, // URL - Authenticator.RequestorType.PROXY // Requestor type - ); - - assertThat(auth.getUserName().equals("username")).isTrue(); - assertThat(Arrays.equals(auth.getPassword(), "password".toCharArray())).isTrue(); - } - - @Test - public void shouldGetClientWithoutAuthenticator() throws UnknownHostException { - Properties properties = new Properties(); - Configuration configuration = new Configuration(); - configuration.setString(SOURCE_PROXY_HOST, "google"); - configuration.setString(SOURCE_PROXY_PORT, "8080"); - - HttpLookupConfig lookupConfig = HttpLookupConfig.builder() - .url("https://google.com") - .readableConfig(configuration) - .properties(properties) - .httpPostRequestCallback(new Slf4JHttpLookupPostRequestCallback()) - .build(); - - HttpClient client = JavaNetHttpClientFactory.createClient(lookupConfig); - - assertThat(client.authenticator().isEmpty()).isTrue(); - assertThat(client.proxy().isPresent()).isTrue(); - } - - @Test - public void shouldGetClientWithoutProxy() { - Properties properties = new Properties(); - Configuration configuration = new Configuration(); - - HttpLookupConfig lookupConfig = HttpLookupConfig.builder() - .url("https://google.com") - .readableConfig(configuration) - .properties(properties) - .httpPostRequestCallback(new Slf4JHttpLookupPostRequestCallback()) - .build(); - - HttpClient client = JavaNetHttpClientFactory.createClient(lookupConfig); - assertThat(client.authenticator().isEmpty()).isTrue(); - assertThat(client.proxy().isEmpty()).isTrue(); - } - - @Test - public void shouldGetClientWithExecutor() { - Properties properties = new Properties(); - ExecutorService httpClientExecutor = - Executors.newFixedThreadPool( - 1, - new ExecutorThreadFactory( - "http-sink-client-batch-request-worker", - ThreadUtils.LOGGING_EXCEPTION_HANDLER) - ); - - HttpClient client = 
JavaNetHttpClientFactory.createClient(properties, httpClientExecutor); - assertThat(client.followRedirects().equals(HttpClient.Redirect.NORMAL)).isTrue(); - } - -} diff --git a/src/test/java/com/getindata/connectors/http/internal/utils/uri/CharArrayBufferTest.java b/src/test/java/com/getindata/connectors/http/internal/utils/uri/CharArrayBufferTest.java deleted file mode 100644 index 23738bcf..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/utils/uri/CharArrayBufferTest.java +++ /dev/null @@ -1,73 +0,0 @@ -package com.getindata.connectors.http.internal.utils.uri; - -import java.util.stream.Stream; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; -import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.assertAll; -import static org.junit.jupiter.api.Assertions.assertThrows; - -class CharArrayBufferTest { - - @Test - public void testInvalidCapacity() { - assertThrows(IllegalArgumentException.class, () -> new CharArrayBuffer(0)); - } - - @Test - public void testExpandCapacity() { - String testText = "Hello My Friend"; - - CharArrayBuffer charArrayBuffer = new CharArrayBuffer(1); - charArrayBuffer.append(testText); - - assertThat(charArrayBuffer.length()).isEqualTo(testText.length()); - } - - @Test - public void testSubSequence() { - String testText = "Hello My Friend"; - - CharArrayBuffer charArrayBuffer = new CharArrayBuffer(1); - charArrayBuffer.append(testText); - - assertAll(() -> { - Assertions.assertThrows(IndexOutOfBoundsException.class, - () -> charArrayBuffer.subSequence(-1, 1)); - Assertions.assertThrows(IndexOutOfBoundsException.class, - () -> charArrayBuffer.subSequence(1, -1)); - Assertions.assertThrows(IndexOutOfBoundsException.class, - () -> charArrayBuffer.subSequence(2, 1)); - 
Assertions.assertThrows(IndexOutOfBoundsException.class, - () -> charArrayBuffer.subSequence(2, testText.length() + 5)); - assertThat(charArrayBuffer.subSequence(2, 10).toString()).isEqualTo("llo My Fri"); - } - ); - } - - private static Stream appendArgs() { - return Stream.of( - Arguments.of("", "baseString"), - Arguments.of(" ", "baseString "), - Arguments.of(null, "baseStringnull") - ); - } - - @ParameterizedTest - @MethodSource("appendArgs") - public void testAppend(String stringToAppend, String expected) { - CharArrayBuffer charArrayBuffer = new CharArrayBuffer(1); - charArrayBuffer.append("baseString"); - - assertAll(() -> { - assertThat(charArrayBuffer.toString()).isEqualTo("baseString"); - charArrayBuffer.append(stringToAppend); - assertThat(charArrayBuffer.toString()).isEqualTo(expected); - } - ); - } -} diff --git a/src/test/java/com/getindata/connectors/http/internal/utils/uri/ParserCursorTest.java b/src/test/java/com/getindata/connectors/http/internal/utils/uri/ParserCursorTest.java deleted file mode 100644 index 5d25bec4..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/utils/uri/ParserCursorTest.java +++ /dev/null @@ -1,29 +0,0 @@ -package com.getindata.connectors.http.internal.utils.uri; - -import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.assertAll; -import static org.junit.jupiter.api.Assertions.assertThrows; - -class ParserCursorTest { - - @Test - public void testBoundsValidation() { - - assertAll(() -> { - assertThrows(IndexOutOfBoundsException.class, () -> new ParserCursor(-1, 1)); - assertThrows(IndexOutOfBoundsException.class, () -> new ParserCursor(1, -1)); - } - ); - } - - @Test - public void testUpdatePostValidation() { - ParserCursor cursor = new ParserCursor(1, 2); - - assertAll(() -> { - assertThrows(IndexOutOfBoundsException.class, () -> cursor.updatePos(0)); - assertThrows(IndexOutOfBoundsException.class, () -> cursor.updatePos(3)); - } - ); - } -} diff --git 
a/src/test/java/com/getindata/connectors/http/internal/utils/uri/TokenParserTest.java b/src/test/java/com/getindata/connectors/http/internal/utils/uri/TokenParserTest.java deleted file mode 100644 index 4027740a..00000000 --- a/src/test/java/com/getindata/connectors/http/internal/utils/uri/TokenParserTest.java +++ /dev/null @@ -1,25 +0,0 @@ -package com.getindata.connectors.http.internal.utils.uri; - -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.CsvSource; -import static org.assertj.core.api.Assertions.assertThat; - -class TokenParserTest { - - @ParameterizedTest - @CsvSource({"a,a", "aa,aa", "a a,a a", "a ,a", " a,a"}) - public void testParse(String toParse, String expected) { - - CharArrayBuffer charBuff = new CharArrayBuffer(toParse.length()); - charBuff.append(toParse); - - TokenParser tokenParser = new TokenParser(); - String actual = tokenParser.parseToken( - charBuff, - new ParserCursor(0, toParse.length()), - null - ); - - assertThat(actual).isEqualTo(expected); - } -} diff --git a/src/test/resources/META-INF/services/org.apache.flink.table.factories.Factory b/src/test/resources/META-INF/services/org.apache.flink.table.factories.Factory deleted file mode 100644 index 065a168b..00000000 --- a/src/test/resources/META-INF/services/org.apache.flink.table.factories.Factory +++ /dev/null @@ -1,4 +0,0 @@ -com.getindata.connectors.http.TestPostRequestCallbackFactory -com.getindata.connectors.http.TestLookupPostRequestCallbackFactory -com.getindata.connectors.http.internal.table.lookup.querycreators.CustomFormatFactory -com.getindata.connectors.http.internal.table.lookup.querycreators.CustomJsonFormatFactory \ No newline at end of file diff --git a/tools/ci/log4j.properties b/tools/ci/log4j.properties new file mode 100644 index 00000000..04a78962 --- /dev/null +++ b/tools/ci/log4j.properties @@ -0,0 +1,72 @@ +################################################################################ +# Licensed to the Apache 
Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +################################################################################ + +rootLogger.level = INFO +rootLogger.appenderRef.out.ref = ConsoleAppender + +# ----------------------------------------------------------------------------- +# Console (use 'console') +# ----------------------------------------------------------------------------- + +appender.console.name = ConsoleAppender +appender.console.type = CONSOLE +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %d{HH:mm:ss,SSS} [%20t] %-5p %-60c %x - %m%n + +# This section is in the Kafka connector, but is creating an empty folder called ${sys:log.dir} so removing +# ----------------------------------------------------------------------------- +# File (use 'file') +# ----------------------------------------------------------------------------- +#appender.file.name = FileAppender +#appender.file.type = FILE +#appender.file.fileName = ${sys:log.dir}/mvn-${sys:mvn.forkNumber:-output}.log +#appender.file.layout.type = PatternLayout +#appender.file.layout.pattern = %d{HH:mm:ss,SSS} [%20t] %-5p %-60c %x - %m%n +#appender.file.createOnDemand = true + +# Reduce most flink logs except for connector specific loggers 
+logger.flink.name = org.apache.flink +logger.flink.level = WARN +logger.flinkconnector.name = org.apache.flink.connector +logger.flinkconnector.level = INFO + + +# suppress the irrelevant (wrong) warnings from the netty channel handler +logger.netty.name = org.jboss.netty.channel.DefaultChannelPipeline +logger.netty.level = ERROR + +# Logger configuration for containers, by default this is off +# If you want to investigate test failures, overwrite the level as above +logger.container.name = container +logger.container.level = OFF +logger.container.additivity = false # This prevents messages from being logged by the root logger +logger.container.appenderRef.containerappender.ref = ContainerLogger + +logger.flinkcontainer.name = container.flink +logger.flinkcontainer.level = WARN + +logger.flinkenv.name = org.apache.flink.connector.testframe.container.FlinkContainerTestEnvironment +logger.flinkenv.level = WARN +logger.flinkenv.additivity = false # This prevents messages from being logged by the root logger +logger.flinkenv.appenderRef.containerappender.ref = ContainerLogger + +appender.containerappender.name = ContainerLogger +appender.containerappender.type = CONSOLE +appender.containerappender.target = SYSTEM_ERR +appender.containerappender.layout.type = PatternLayout +appender.containerappender.layout.pattern = [%c{1}] %m%n diff --git a/tools/maven/checkstyle.xml b/tools/maven/checkstyle.xml new file mode 100644 index 00000000..e44bd6f6 --- /dev/null +++ b/tools/maven/checkstyle.xml @@ -0,0 +1,564 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tools/maven/suppressions.xml b/tools/maven/suppressions.xml new file mode 100644 index 00000000..bd0c2cc8 --- /dev/null +++ b/tools/maven/suppressions.xml @@ -0,0 +1,27 @@ + + + + + + +