diff --git a/spring-batch-bigquery/pom.xml b/spring-batch-bigquery/pom.xml index 95fb210..0ab1b36 100644 --- a/spring-batch-bigquery/pom.xml +++ b/spring-batch-bigquery/pom.xml @@ -137,6 +137,7 @@ + org.apache.maven.plugins @@ -149,6 +150,7 @@ + org.codehaus.mojo @@ -177,6 +179,21 @@ + + + + io.spring.javaformat + spring-javaformat-maven-plugin + + + validate + true + + validate + + + + diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/reader/BigQueryQueryItemReader.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/reader/BigQueryQueryItemReader.java index 26ae7bd..8e83f1e 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/reader/BigQueryQueryItemReader.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/reader/BigQueryQueryItemReader.java @@ -31,7 +31,9 @@ /** * BigQuery {@link ItemReader} that accepts simple query as the input. *

- * Internally BigQuery Java library creates a {@link com.google.cloud.bigquery.JobConfiguration.Type#QUERY} job. + * Internally, the BigQuery Java library creates a + * {@link com.google.cloud.bigquery.JobConfiguration.Type#QUERY} job. + *

* Which means that result is coming asynchronously. *

* Also, worth mentioning that you should take into account concurrency limits. @@ -41,71 +43,76 @@ * @param your DTO type * @author Volodymyr Perebykivskyi * @since 0.2.0 - * @see Interactive queries - * @see Batch queries - * @see Concurrency limits + * @see Interactive + * queries + * @see Batch + * queries + * @see Concurrency + * limits */ public class BigQueryQueryItemReader implements ItemReader, InitializingBean { - private final Log logger = LogFactory.getLog(getClass()); - - private BigQuery bigQuery; - private Converter rowMapper; - private QueryJobConfiguration jobConfiguration; - private Iterator iterator; - - /** - * BigQuery service, responsible for API calls. - * - * @param bigQuery BigQuery service - */ - public void setBigQuery(BigQuery bigQuery) { - this.bigQuery = bigQuery; - } - - /** - * Row mapper which transforms single BigQuery row into a desired type. - * - * @param rowMapper your row mapper - */ - public void setRowMapper(Converter rowMapper) { - this.rowMapper = rowMapper; - } - - /** - * Specifies query to run, destination table, etc. - * - * @param jobConfiguration BigQuery job configuration - */ - public void setJobConfiguration(QueryJobConfiguration jobConfiguration) { - this.jobConfiguration = jobConfiguration; - } - - @Override - public T read() throws Exception { - if (iterator == null) { - doOpen(); - } - - if (logger.isDebugEnabled()) { - logger.debug("Reading next element"); - } - - return iterator.hasNext() ? rowMapper.convert(iterator.next()) : null; - } - - private void doOpen() throws Exception { - if (logger.isDebugEnabled()) { - logger.debug("Executing query"); - } - iterator = bigQuery.query(jobConfiguration).getValues().iterator(); - } - - @Override - public void afterPropertiesSet() { - Assert.notNull(this.bigQuery, "BigQuery service must be provided"); - Assert.notNull(this.rowMapper, "Row mapper must be provided"); - Assert.notNull(this.jobConfiguration, "Job configuration must be provided"); - } + private final Log logger = LogFactory.getLog(getClass()); + + private BigQuery bigQuery; + + private Converter rowMapper; + + private QueryJobConfiguration jobConfiguration; + + private Iterator iterator; + + /** + * BigQuery service, responsible for API calls. + * @param bigQuery BigQuery service + */ + public void setBigQuery(final BigQuery bigQuery) { + this.bigQuery = bigQuery; + } + + /** + * Row mapper which transforms single BigQuery row into a desired type. + * @param rowMapper your row mapper + */ + public void setRowMapper(final Converter rowMapper) { + this.rowMapper = rowMapper; + } + + /** + * Specifies query to run, destination table, etc. + * @param jobConfiguration BigQuery job configuration + */ + public void setJobConfiguration(final QueryJobConfiguration jobConfiguration) { + this.jobConfiguration = jobConfiguration; + } + + @Override + public T read() throws Exception { + if (iterator == null) { + doOpen(); + } + + if (logger.isDebugEnabled()) { + logger.debug("Reading next element"); + } + + return iterator.hasNext() ? 
rowMapper.convert(iterator.next()) : null; + } + + private void doOpen() throws Exception { + if (logger.isDebugEnabled()) { + logger.debug("Executing query"); + } + iterator = bigQuery.query(jobConfiguration).getValues().iterator(); + } + + @Override + public void afterPropertiesSet() { + Assert.notNull(this.bigQuery, "BigQuery service must be provided"); + Assert.notNull(this.rowMapper, "Row mapper must be provided"); + Assert.notNull(this.jobConfiguration, "Job configuration must be provided"); + } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/reader/builder/BigQueryQueryItemReaderBuilder.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/reader/builder/BigQueryQueryItemReaderBuilder.java index 76c7deb..50f4d52 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/reader/builder/BigQueryQueryItemReaderBuilder.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/reader/builder/BigQueryQueryItemReaderBuilder.java @@ -31,107 +31,109 @@ * @param your DTO type * @author Volodymyr Perebykivskyi * @since 0.2.0 - * @see Examples - * @see Examples + * @see Examples + * @see Examples */ public class BigQueryQueryItemReaderBuilder { - private BigQuery bigQuery; - private String query; - private Converter rowMapper; - private QueryJobConfiguration jobConfiguration; - private Class targetType; - - /** - * BigQuery service, responsible for API calls. - * - * @param bigQuery BigQuery service - * @return {@link BigQueryQueryItemReaderBuilder} - * @see BigQueryQueryItemReader#setBigQuery(BigQuery) - */ - public BigQueryQueryItemReaderBuilder bigQuery(final BigQuery bigQuery) { - this.bigQuery = bigQuery; - return this; - } - - /** - * Schema of the query: {@code SELECT FROM .}. - *

- * It is really recommended to use {@code LIMIT n} - * because BigQuery charges you for the amount of data that is being processed. - * - * @param query your query to run - * @return {@link BigQueryQueryItemReaderBuilder} - * @see BigQueryQueryItemReader#setJobConfiguration(QueryJobConfiguration) - */ - public BigQueryQueryItemReaderBuilder query(final String query) { - this.query = query; - return this; - } - - /** - * Row mapper which transforms single BigQuery row into a desired type. - * - * @param rowMapper your row mapper - * @return {@link BigQueryQueryItemReaderBuilder} - * @see BigQueryQueryItemReader#setRowMapper(Converter) - */ - public BigQueryQueryItemReaderBuilder rowMapper(final Converter rowMapper) { - this.rowMapper = rowMapper; - return this; - } - - /** - * Specifies query to run, destination table, etc. - * - * @param jobConfiguration BigQuery job configuration - * @return {@link BigQueryQueryItemReaderBuilder} - * @see BigQueryQueryItemReader#setJobConfiguration(QueryJobConfiguration) - */ - public BigQueryQueryItemReaderBuilder jobConfiguration(final QueryJobConfiguration jobConfiguration) { - this.jobConfiguration = jobConfiguration; - return this; - } - - /** - * Specifies a target type which will be used as a result. - * Only needed when {@link BigQueryQueryItemReaderBuilder#rowMapper} is not provided. - * Take into account that only {@link Class#isRecord()} supported. - * - * @param targetType a {@link Class} that represent desired type - * @return {@link BigQueryQueryItemReaderBuilder} - */ - public BigQueryQueryItemReaderBuilder targetType(final Class targetType) { - this.targetType = targetType; - return this; - } - - /** - * Please remember about {@link BigQueryQueryItemReader#afterPropertiesSet()}. - * - * @return {@link BigQueryQueryItemReader} - */ - public BigQueryQueryItemReader build() { - final BigQueryQueryItemReader reader = new BigQueryQueryItemReader<>(); - - reader.setBigQuery(this.bigQuery == null ? BigQueryOptions.getDefaultInstance().getService() : this.bigQuery); - - if (this.rowMapper == null) { - Assert.notNull(this.targetType, "No target type provided"); - Assert.isTrue(this.targetType.isRecord(), "Only Java record supported"); - reader.setRowMapper(new RecordMapper().generateMapper(this.targetType)); - } else { - reader.setRowMapper(this.rowMapper); - } - - if (this.jobConfiguration == null) { - Assert.isTrue(StringUtils.hasText(this.query), "No query provided"); - reader.setJobConfiguration(QueryJobConfiguration.newBuilder(this.query).build()); - } else { - reader.setJobConfiguration(this.jobConfiguration); - } - - return reader; - } + private BigQuery bigQuery; + + private String query; + + private Converter rowMapper; + + private QueryJobConfiguration jobConfiguration; + + private Class targetType; + + /** + * BigQuery service, responsible for API calls. + * @param bigQuery BigQuery service + * @return {@link BigQueryQueryItemReaderBuilder} + * @see BigQueryQueryItemReader#setBigQuery(BigQuery) + */ + public BigQueryQueryItemReaderBuilder bigQuery(final BigQuery bigQuery) { + this.bigQuery = bigQuery; + return this; + } + + /** + * Schema of the query: {@code SELECT column FROM dataset.table}. + *

+ * It is strongly recommended to use {@code LIMIT n} because BigQuery charges you for + * the amount of data that is being processed. + * @param query your query to run + * @return {@link BigQueryQueryItemReaderBuilder} + * @see BigQueryQueryItemReader#setJobConfiguration(QueryJobConfiguration) + */ + public BigQueryQueryItemReaderBuilder query(final String query) { + this.query = query; + return this; + } + + /** + * Row mapper which transforms a single BigQuery row into the desired type. + * @param rowMapper your row mapper + * @return {@link BigQueryQueryItemReaderBuilder} + * @see BigQueryQueryItemReader#setRowMapper(Converter) + */ + public BigQueryQueryItemReaderBuilder rowMapper(final Converter rowMapper) { + this.rowMapper = rowMapper; + return this; + } + + /** + * Specifies the query to run, destination table, etc. + * @param jobConfiguration BigQuery job configuration + * @return {@link BigQueryQueryItemReaderBuilder} + * @see BigQueryQueryItemReader#setJobConfiguration(QueryJobConfiguration) + */ + public BigQueryQueryItemReaderBuilder jobConfiguration(final QueryJobConfiguration jobConfiguration) { + this.jobConfiguration = jobConfiguration; + return this; + } + + /** + * Specifies a target type which will be used as a result. Only needed when + * {@link BigQueryQueryItemReaderBuilder#rowMapper} is not provided. Take into account + * that only Java records ({@link Class#isRecord()}) are supported. + * @param targetType a {@link Class} that represents the desired type + * @return {@link BigQueryQueryItemReaderBuilder} + */ + public BigQueryQueryItemReaderBuilder targetType(final Class targetType) { + this.targetType = targetType; + return this; + } + + /** + * Please remember about {@link BigQueryQueryItemReader#afterPropertiesSet()}. + * @return {@link BigQueryQueryItemReader} + */ + public BigQueryQueryItemReader build() { + final BigQueryQueryItemReader reader = new BigQueryQueryItemReader<>(); + + reader.setBigQuery(this.bigQuery == null ? BigQueryOptions.getDefaultInstance().getService() : this.bigQuery); + + if (this.rowMapper == null) { + Assert.notNull(this.targetType, "No target type provided"); + Assert.isTrue(this.targetType.isRecord(), "Only Java record supported"); + reader.setRowMapper(new RecordMapper().generateMapper(this.targetType)); + } + else { + reader.setRowMapper(this.rowMapper); + } + + if (this.jobConfiguration == null) { + Assert.isTrue(StringUtils.hasText(this.query), "No query provided"); + reader.setJobConfiguration(QueryJobConfiguration.newBuilder(this.query).build()); + } + else { + reader.setJobConfiguration(this.jobConfiguration); + } + + return reader; + } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/reader/builder/RecordMapper.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/reader/builder/RecordMapper.java index eaabfeb..1b3852a 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/reader/builder/RecordMapper.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/reader/builder/RecordMapper.java @@ -33,32 +33,33 @@ */ public final class RecordMapper { - private final SimpleTypeConverter simpleConverter = new SimpleTypeConverter(); + private final SimpleTypeConverter simpleConverter = new SimpleTypeConverter(); - /** - * Generates a conversion from BigQuery response to a Java record.
- * - * @param targetType a Java record {@link Class} - * @return {@link Converter} - * @see org.springframework.batch.item.file.mapping.RecordFieldSetMapper - */ - public Converter generateMapper(Class targetType) { - Constructor constructor = BeanUtils.getResolvableConstructor(targetType); - Assert.isTrue(constructor.getParameterCount() > 0, "Record without fields is redundant"); + /** + * Generates a conversion from BigQuery response to a Java record. + * @param targetType a Java record {@link Class} + * @return {@link Converter} + * @see org.springframework.batch.item.file.mapping.RecordFieldSetMapper + */ + public Converter generateMapper(Class targetType) { + Constructor constructor = BeanUtils.getResolvableConstructor(targetType); + Assert.isTrue(constructor.getParameterCount() > 0, "Record without fields is redundant"); - String[] parameterNames = BeanUtils.getParameterNames(constructor); - Class[] parameterTypes = constructor.getParameterTypes(); + String[] parameterNames = BeanUtils.getParameterNames(constructor); + Class[] parameterTypes = constructor.getParameterTypes(); - Object[] args = new Object[parameterNames.length]; + Object[] args = new Object[parameterNames.length]; - return source -> { - if (args[0] == null) { - for (int i = 0; i < args.length; i++) { - args[i] = simpleConverter.convertIfNecessary(source.get(parameterNames[i]).getValue(), parameterTypes[i]); - } - } + return source -> { + if (args[0] == null) { + for (int i = 0; i < args.length; i++) { + args[i] = simpleConverter.convertIfNecessary(source.get(parameterNames[i]).getValue(), + parameterTypes[i]); + } + } + + return BeanUtils.instantiateClass(constructor, args); + }; + } - return BeanUtils.instantiateClass(constructor, args); - }; - } } diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/BigQueryItemWriterException.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/BigQueryItemWriterException.java index baa5429..da6b2bb 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/BigQueryItemWriterException.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/BigQueryItemWriterException.java @@ -21,26 +21,30 @@ import org.springframework.batch.item.ItemWriterException; /** - * Unchecked {@link Exception} indicating that an error has occurred on during {@link ItemWriter#write(Chunk)}. + * Unchecked {@link Exception} indicating that an error has occurred during + * {@link ItemWriter#write(Chunk)}. + * * @author Volodymyr Perebykivskyi * @since 0.2.0 */ public class BigQueryItemWriterException extends ItemWriterException { - /** - * Create a new {@link BigQueryItemWriterException} based on a message and another {@link Exception}. - * @param message the message for this {@link Exception} - * @param cause the other {@link Exception} - */ - public BigQueryItemWriterException(String message, Throwable cause) { - super(message, cause); - } + /** + * Create a new {@link BigQueryItemWriterException} based on a message and another + * {@link Exception}. + * @param message the message for this {@link Exception} + * @param cause the other {@link Exception} + */ + public BigQueryItemWriterException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Create a new {@link BigQueryItemWriterException} based on a message.
+ * @param message the message for this {@link Exception} + */ + public BigQueryItemWriterException(String message) { + super(message); + } - /** - * Create a new {@link BigQueryItemWriterException} based on a message. - * @param message the message for this {@link Exception} - */ - public BigQueryItemWriterException(String message) { - super(message); - } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/BigQueryLoadJobBaseItemWriter.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/BigQueryLoadJobBaseItemWriter.java index 583ca3b..d8b6573 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/BigQueryLoadJobBaseItemWriter.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/BigQueryLoadJobBaseItemWriter.java @@ -43,254 +43,252 @@ */ public abstract class BigQueryLoadJobBaseItemWriter implements ItemWriter, InitializingBean { - /** Logger that can be reused */ - protected final Log logger = LogFactory.getLog(getClass()); - - private final AtomicLong bigQueryWriteCounter = new AtomicLong(); - - /** - * Describes what should be written (format) and its destination (table). - */ - protected WriteChannelConfiguration writeChannelConfig; - - /** - * You can specify here some specific dataset configuration, like location. - * This dataset will be created. - */ - private DatasetInfo datasetInfo; - - /** - * Your custom logic with {@link Job}. - * {@link Job} will be assigned after {@link TableDataWriteChannel#close()}. - */ - private Consumer jobConsumer; - - private BigQuery bigQuery; - - private boolean writeFailed; - - /** - * Fetches table from the provided configuration. - * - * @return {@link Table} that is described in {@link BigQueryLoadJobBaseItemWriter#writeChannelConfig} - */ - protected Table getTable() { - return this.bigQuery.getTable(this.writeChannelConfig.getDestinationTable()); - } - - /** - * Provides additional information about the {@link com.google.cloud.bigquery.Dataset}. - * - * @param datasetInfo BigQuery dataset info - */ - public void setDatasetInfo(final DatasetInfo datasetInfo) { - this.datasetInfo = datasetInfo; - } - - /** - * Callback when {@link Job} will be finished. - * - * @param consumer your consumer - */ - public void setJobConsumer(final Consumer consumer) { - this.jobConsumer = consumer; - } - - /** - * Describes what should be written (format) and its destination (table). - * - * @param writeChannelConfig BigQuery channel configuration - */ - public void setWriteChannelConfig(final WriteChannelConfiguration writeChannelConfig) { - this.writeChannelConfig = writeChannelConfig; - } - - /** - * BigQuery service, responsible for API calls. 
- * - * @param bigQuery BigQuery service - */ - public void setBigQuery(final BigQuery bigQuery) { - this.bigQuery = bigQuery; - } - - @Override - public void write(final Chunk chunk) throws Exception { - if (!chunk.isEmpty()) { - final List items = chunk.getItems(); - doInitializeProperties(items); - - if (logger.isDebugEnabled()) { - logger.debug(String.format("Mapping %d elements", items.size())); - } - - doWriteDataToBigQuery(mapDataToBigQueryFormat(items)); - } - } - - private ByteBuffer mapDataToBigQueryFormat(final List items) throws IOException { - final ByteBuffer byteBuffer; - try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { - - final List data = convertObjectsToByteArrays(items); - - for (byte[] byteArray : data) { - outputStream.write(byteArray); - } - - /* - * It is extremely important to create larger ByteBuffer. - * If you call TableDataWriteChannel too many times, it leads to BigQuery exceptions. - */ - byteBuffer = ByteBuffer.wrap(outputStream.toByteArray()); - } - return byteBuffer; - } - - private void doWriteDataToBigQuery(final ByteBuffer byteBuffer) { - if (logger.isDebugEnabled()) { - logger.debug("Writing data to BigQuery"); - } - - TableDataWriteChannel writeChannel = null; - - try (final TableDataWriteChannel writer = getWriteChannel()) { - /* TableDataWriteChannel is not thread safe */ - writer.write(byteBuffer); - writeChannel = writer; - } - catch (Exception e) { - writeFailed = true; - logger.error("BigQuery error", e); - throw new BigQueryItemWriterException("Error on write happened", e); - } - finally { - if (!writeFailed) { - String logMessage = "Write operation submitted: " + bigQueryWriteCounter.incrementAndGet(); - - if (writeChannel != null) { - logMessage += " -- Job ID: " + writeChannel.getJob().getJobId().getJob(); - if (this.jobConsumer != null) { - this.jobConsumer.accept(writeChannel.getJob()); - } - } - - if (logger.isDebugEnabled()) { - logger.debug(logMessage); - } - } - } - } - - /** - * @return {@link TableDataWriteChannel} that should be closed manually. - * @see Examples - */ - private TableDataWriteChannel getWriteChannel() { - return this.bigQuery.writer(this.writeChannelConfig); - } - - /** - * Performs common validation for CSV and JSON types. 
- */ - @Override - public void afterPropertiesSet() { - Assert.notNull(this.bigQuery, "BigQuery service must be provided"); - Assert.notNull(this.writeChannelConfig, "Write channel configuration must be provided"); - Assert.notNull(this.writeChannelConfig.getFormat(), "Data format must be provided"); - - Assert.isTrue(!isBigtable(), "Google BigTable is not supported"); - Assert.isTrue(!isGoogleSheets(), "Google Sheets is not supported"); - Assert.isTrue(!isDatastore(), "Google Datastore is not supported"); - Assert.isTrue(!isParquet(), "Parquet is not supported"); - Assert.isTrue(!isOrc(), "Orc is not supported"); - Assert.isTrue(!isAvro(), "Avro is not supported"); - Assert.isTrue(!isIceberg(), "Iceberg is not supported"); - - performFormatSpecificChecks(); - - final String dataset = this.writeChannelConfig.getDestinationTable().getDataset(); - if (this.datasetInfo == null) { - this.datasetInfo = DatasetInfo.newBuilder(dataset).build(); - } else { - Assert.isTrue(Objects.equals(this.datasetInfo.getDatasetId().getDataset(), dataset), "Dataset should be configured properly"); - } - - createDataset(); - } - - private void createDataset() { - final TableId tableId = this.writeChannelConfig.getDestinationTable(); - final String datasetToCheck = tableId.getDataset(); - - if (datasetToCheck != null && this.bigQuery.getDataset(datasetToCheck) == null && this.datasetInfo != null) { - this.bigQuery.create(this.datasetInfo); - } - } - - private boolean isAvro() { - return FormatOptions.avro().getType().equals(this.writeChannelConfig.getFormat()); - } - - private boolean isParquet() { - return FormatOptions.parquet().getType().equals(this.writeChannelConfig.getFormat()); - } - - private boolean isOrc() { - return FormatOptions.orc().getType().equals(this.writeChannelConfig.getFormat()); - } - - private boolean isBigtable() { - return FormatOptions.bigtable().getType().equals(this.writeChannelConfig.getFormat()); - } - - private boolean isGoogleSheets() { - return FormatOptions.googleSheets().getType().equals(this.writeChannelConfig.getFormat()); - } - - private boolean isDatastore() { - return FormatOptions.datastoreBackup().getType().equals(this.writeChannelConfig.getFormat()); - } - - private boolean isIceberg() { - return FormatOptions.iceberg().getType().equals(this.writeChannelConfig.getFormat()); - } - - /** - * Schema can be computed on the BigQuery side during upload, - * so it is good to know when schema is supplied by user manually. - * - * @param table BigQuery table - * @return {@code true} if BigQuery {@link Table} has schema already described - */ - protected boolean tableHasDefinedSchema(final Table table) { - return Optional - .ofNullable(table) - .map(Table::getDefinition) - .map(TableDefinition.class::cast) - .map(TableDefinition::getSchema) - .isPresent(); - } - - /** - * Method that setting up metadata about chunk that is being processed. In reality is called once. - * - * @param items current chunk - */ - protected abstract void doInitializeProperties(List items); - - /** - * Converts chunk into a byte array. - * Each data type should be converted with respect to its specification. - * - * @param items current chunk - * @return {@link List} converted list of byte arrays - */ - protected abstract List convertObjectsToByteArrays(List items); - - /** - * Performs specific checks that are unique to the format. 
- */ - protected abstract void performFormatSpecificChecks(); + /** Logger that can be reused */ + protected final Log logger = LogFactory.getLog(getClass()); + + private final AtomicLong bigQueryWriteCounter = new AtomicLong(); + + /** + * Describes what should be written (format) and its destination (table). + */ + protected WriteChannelConfiguration writeChannelConfig; + + /** + * You can specify here some specific dataset configuration, like location. This + * dataset will be created. + */ + private DatasetInfo datasetInfo; + + /** + * Your custom logic with {@link Job}. + *

+ * {@link Job} will be assigned after {@link TableDataWriteChannel#close()}. + */ + private Consumer jobConsumer; + + private BigQuery bigQuery; + + private boolean writeFailed; + + /** + * Fetches table from the provided configuration. + * @return {@link Table} that is described in + * {@link BigQueryLoadJobBaseItemWriter#writeChannelConfig} + */ + protected Table getTable() { + return this.bigQuery.getTable(this.writeChannelConfig.getDestinationTable()); + } + + /** + * Provides additional information about the + * {@link com.google.cloud.bigquery.Dataset}. + * @param datasetInfo BigQuery dataset info + */ + public void setDatasetInfo(final DatasetInfo datasetInfo) { + this.datasetInfo = datasetInfo; + } + + /** + * Callback when {@link Job} will be finished. + * @param consumer your consumer + */ + public void setJobConsumer(final Consumer consumer) { + this.jobConsumer = consumer; + } + + /** + * Describes what should be written (format) and its destination (table). + * @param writeChannelConfig BigQuery channel configuration + */ + public void setWriteChannelConfig(final WriteChannelConfiguration writeChannelConfig) { + this.writeChannelConfig = writeChannelConfig; + } + + /** + * BigQuery service, responsible for API calls. + * @param bigQuery BigQuery service + */ + public void setBigQuery(final BigQuery bigQuery) { + this.bigQuery = bigQuery; + } + + @Override + public void write(final Chunk chunk) throws Exception { + if (!chunk.isEmpty()) { + final List items = chunk.getItems(); + doInitializeProperties(items); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("Mapping %d elements", items.size())); + } + + doWriteDataToBigQuery(mapDataToBigQueryFormat(items)); + } + } + + private ByteBuffer mapDataToBigQueryFormat(final List items) throws IOException { + final ByteBuffer byteBuffer; + try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { + + final List data = convertObjectsToByteArrays(items); + + for (byte[] byteArray : data) { + outputStream.write(byteArray); + } + + // It is extremely important to create larger ByteBuffer. + // If you call TableDataWriteChannel too many times, it leads to BigQuery + // exceptions. + byteBuffer = ByteBuffer.wrap(outputStream.toByteArray()); + } + return byteBuffer; + } + + private void doWriteDataToBigQuery(final ByteBuffer byteBuffer) { + if (logger.isDebugEnabled()) { + logger.debug("Writing data to BigQuery"); + } + + TableDataWriteChannel writeChannel = null; + + try (final TableDataWriteChannel writer = getWriteChannel()) { + /* TableDataWriteChannel is not thread safe */ + writer.write(byteBuffer); + writeChannel = writer; + } + catch (Exception e) { + writeFailed = true; + logger.error("BigQuery error", e); + throw new BigQueryItemWriterException("Error on write happened", e); + } + finally { + if (!writeFailed) { + String logMessage = "Write operation submitted: " + bigQueryWriteCounter.incrementAndGet(); + + if (writeChannel != null) { + logMessage += " -- Job ID: " + writeChannel.getJob().getJobId().getJob(); + if (this.jobConsumer != null) { + this.jobConsumer.accept(writeChannel.getJob()); + } + } + + if (logger.isDebugEnabled()) { + logger.debug(logMessage); + } + } + } + } + + /** + * @return {@link TableDataWriteChannel} that should be closed manually. + * @see Examples + */ + private TableDataWriteChannel getWriteChannel() { + return this.bigQuery.writer(this.writeChannelConfig); + } + + /** + * Performs common validation for CSV and JSON types. 
+ */ + @Override + public void afterPropertiesSet() { + Assert.notNull(this.bigQuery, "BigQuery service must be provided"); + Assert.notNull(this.writeChannelConfig, "Write channel configuration must be provided"); + Assert.notNull(this.writeChannelConfig.getFormat(), "Data format must be provided"); + + Assert.isTrue(!isBigtable(), "Google BigTable is not supported"); + Assert.isTrue(!isGoogleSheets(), "Google Sheets is not supported"); + Assert.isTrue(!isDatastore(), "Google Datastore is not supported"); + Assert.isTrue(!isParquet(), "Parquet is not supported"); + Assert.isTrue(!isOrc(), "Orc is not supported"); + Assert.isTrue(!isAvro(), "Avro is not supported"); + Assert.isTrue(!isIceberg(), "Iceberg is not supported"); + + performFormatSpecificChecks(); + + final String dataset = this.writeChannelConfig.getDestinationTable().getDataset(); + if (this.datasetInfo == null) { + this.datasetInfo = DatasetInfo.newBuilder(dataset).build(); + } + else { + boolean datasetEquals = Objects.equals(this.datasetInfo.getDatasetId().getDataset(), dataset); + Assert.isTrue(datasetEquals, "Dataset should be configured properly"); + } + + createDataset(); + } + + private void createDataset() { + final TableId tableId = this.writeChannelConfig.getDestinationTable(); + final String datasetToCheck = tableId.getDataset(); + + if (datasetToCheck != null && this.bigQuery.getDataset(datasetToCheck) == null && this.datasetInfo != null) { + this.bigQuery.create(this.datasetInfo); + } + } + + private boolean isAvro() { + return FormatOptions.avro().getType().equals(this.writeChannelConfig.getFormat()); + } + + private boolean isParquet() { + return FormatOptions.parquet().getType().equals(this.writeChannelConfig.getFormat()); + } + + private boolean isOrc() { + return FormatOptions.orc().getType().equals(this.writeChannelConfig.getFormat()); + } + + private boolean isBigtable() { + return FormatOptions.bigtable().getType().equals(this.writeChannelConfig.getFormat()); + } + + private boolean isGoogleSheets() { + return FormatOptions.googleSheets().getType().equals(this.writeChannelConfig.getFormat()); + } + + private boolean isDatastore() { + return FormatOptions.datastoreBackup().getType().equals(this.writeChannelConfig.getFormat()); + } + + private boolean isIceberg() { + return FormatOptions.iceberg().getType().equals(this.writeChannelConfig.getFormat()); + } + + /** + * Schema can be computed on the BigQuery side during upload, so it is good to know + * when the schema is supplied by the user manually. + * @param table BigQuery table + * @return {@code true} if BigQuery {@link Table} has schema already described + */ + protected boolean tableHasDefinedSchema(final Table table) { + return Optional.ofNullable(table) + .map(Table::getDefinition) + .map(TableDefinition.class::cast) + .map(TableDefinition::getSchema) + .isPresent(); + } + + /** + * Method that sets up metadata about the chunk that is being processed. + * <p> + * In reality, it is called once. + * @param items current chunk + */ + protected abstract void doInitializeProperties(List items); + + /** + * Converts the chunk into byte arrays. Each data type should be converted with respect + * to its specification. + * @param items current chunk + * @return {@link List} converted list of byte arrays + */ + protected abstract List convertObjectsToByteArrays(List items); + + /** + * Performs specific checks that are unique to the format. + */ + protected abstract void performFormatSpecificChecks(); } \ No newline at end of file diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/csv/BigQueryLoadJobCsvItemWriter.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/csv/BigQueryLoadJobCsvItemWriter.java index aa5eb2d..ea1fe0b 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/csv/BigQueryLoadJobCsvItemWriter.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/csv/BigQueryLoadJobCsvItemWriter.java @@ -40,80 +40,78 @@ */ public class BigQueryLoadJobCsvItemWriter extends BigQueryLoadJobBaseItemWriter { - private Converter rowMapper; - private ObjectWriter objectWriter; - private Class itemClass; - - /** - * Actual type of incoming data can be obtained only in runtime - */ - @Override - protected synchronized void doInitializeProperties(List items) { - if (this.itemClass == null) { - T firstItem = items.stream().findFirst().orElseThrow(() -> { - logger.warn("Class type was not found"); - return new IllegalStateException("Class type was not found"); - }); - this.itemClass = firstItem.getClass(); - - if (this.rowMapper == null) { - this.objectWriter = new CsvMapper().writerWithTypedSchemaFor(this.itemClass); - } - - logger.debug("Writer setup is completed"); - } - } - - @Override - protected List convertObjectsToByteArrays(List items) { - return items - .stream() - .map(this::mapItemToCsv) - .filter(Predicate.not(ObjectUtils::isEmpty)) - .toList(); - } - - @Override - protected void performFormatSpecificChecks() { - Table table = getTable(); - - if (Boolean.TRUE.equals(super.writeChannelConfig.getAutodetect())) { - if (tableHasDefinedSchema(table) && super.logger.isWarnEnabled()) { - logger.warn("Mixing autodetect mode with already defined schema may lead to errors on BigQuery side"); - } - } else { - Assert.notNull(super.writeChannelConfig.getSchema(), "Schema must be provided"); - - if (tableHasDefinedSchema(table)) { - Assert.isTrue( - Objects.equals(table.getDefinition().getSchema(), super.writeChannelConfig.getSchema()), - "Schema must be the same" - ); - } - } - - String format = FormatOptions.csv().getType(); - Assert.isTrue(Objects.equals(format, super.writeChannelConfig.getFormat()), "Only %s format is allowed".formatted(format)); - - } - - /** - * Row mapper which transforms single BigQuery row into a desired type. - * - * @param rowMapper your row mapper - */ - public void setRowMapper(Converter rowMapper) { - this.rowMapper = rowMapper; - } - - private byte[] mapItemToCsv(T t) { - try { - return rowMapper == null ? 
objectWriter.writeValueAsBytes(t) : rowMapper.convert(t); - } - catch (JsonProcessingException e) { - logger.error("Error during processing of the line: ", e); - return new byte[]{}; - } - } + private Converter rowMapper; + + private ObjectWriter objectWriter; + + private Class itemClass; + + /** + * Actual type of incoming data can be obtained only in runtime + */ + @Override + protected synchronized void doInitializeProperties(List items) { + if (this.itemClass == null) { + T firstItem = items.stream().findFirst().orElseThrow(() -> { + logger.warn("Class type was not found"); + return new IllegalStateException("Class type was not found"); + }); + this.itemClass = firstItem.getClass(); + + if (this.rowMapper == null) { + this.objectWriter = new CsvMapper().writerWithTypedSchemaFor(this.itemClass); + } + + logger.debug("Writer setup is completed"); + } + } + + @Override + protected List convertObjectsToByteArrays(List items) { + return items.stream().map(this::mapItemToCsv).filter(Predicate.not(ObjectUtils::isEmpty)).toList(); + } + + @Override + protected void performFormatSpecificChecks() { + Table table = getTable(); + + if (Boolean.TRUE.equals(super.writeChannelConfig.getAutodetect())) { + if (tableHasDefinedSchema(table) && super.logger.isWarnEnabled()) { + logger.warn("Mixing autodetect mode with already defined schema may lead to errors on BigQuery side"); + } + } + else { + Assert.notNull(super.writeChannelConfig.getSchema(), "Schema must be provided"); + + if (tableHasDefinedSchema(table)) { + boolean schemaEquals = Objects.equals(table.getDefinition().getSchema(), + super.writeChannelConfig.getSchema()); + Assert.isTrue(schemaEquals, "Schema must be the same"); + } + } + + String format = FormatOptions.csv().getType(); + boolean formatEquals = Objects.equals(format, super.writeChannelConfig.getFormat()); + Assert.isTrue(formatEquals, "Only %s format is allowed".formatted(format)); + + } + + /** + * Row mapper which transforms single BigQuery row into a desired type. + * @param rowMapper your row mapper + */ + public void setRowMapper(Converter rowMapper) { + this.rowMapper = rowMapper; + } + + private byte[] mapItemToCsv(T t) { + try { + return rowMapper == null ? objectWriter.writeValueAsBytes(t) : rowMapper.convert(t); + } + catch (JsonProcessingException e) { + logger.error("Error during processing of the line: ", e); + return new byte[] {}; + } + } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/csv/builder/BigQueryCsvItemWriterBuilder.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/csv/builder/BigQueryCsvItemWriterBuilder.java index fa13ed8..6e6ea83 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/csv/builder/BigQueryCsvItemWriterBuilder.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/csv/builder/BigQueryCsvItemWriterBuilder.java @@ -32,93 +32,92 @@ * @param your DTO type * @author Volodymyr Perebykivskyi * @since 0.2.0 - * @see Examples + * @see Examples */ -public class BigQueryCsvItemWriterBuilder { - - private Converter rowMapper; - - private Consumer jobConsumer; - private DatasetInfo datasetInfo; - private WriteChannelConfiguration writeChannelConfig; - private BigQuery bigQuery; - - /** - * Row mapper which transforms single BigQuery row into desired type. 
- * - * @param rowMapper your row mapper - * @return {@link BigQueryCsvItemWriterBuilder} - * @see BigQueryLoadJobCsvItemWriter#setRowMapper(Converter) - */ - public BigQueryCsvItemWriterBuilder rowMapper(Converter rowMapper) { - this.rowMapper = rowMapper; - return this; - } - - /** - * Provides additional information about the {@link com.google.cloud.bigquery.Dataset}. - * - * @param datasetInfo BigQuery dataset info - * @return {@link BigQueryCsvItemWriterBuilder} - * @see BigQueryLoadJobCsvItemWriter#setDatasetInfo(DatasetInfo) - */ - public BigQueryCsvItemWriterBuilder datasetInfo(DatasetInfo datasetInfo) { - this.datasetInfo = datasetInfo; - return this; - } - - /** - * Callback when {@link Job} will be finished. - * - * @param consumer your consumer - * @return {@link BigQueryCsvItemWriterBuilder} - * @see BigQueryLoadJobCsvItemWriter#setJobConsumer(Consumer) - */ - public BigQueryCsvItemWriterBuilder jobConsumer(Consumer consumer) { - this.jobConsumer = consumer; - return this; - } - - /** - * Describes what should be written (format) and its destination (table). - * - * @param configuration BigQuery channel configuration - * @return {@link BigQueryCsvItemWriterBuilder} - * @see BigQueryLoadJobCsvItemWriter#setWriteChannelConfig(WriteChannelConfiguration) - */ - public BigQueryCsvItemWriterBuilder writeChannelConfig(WriteChannelConfiguration configuration) { - this.writeChannelConfig = configuration; - return this; - } - - /** - * BigQuery service, responsible for API calls. - * - * @param bigQuery BigQuery service - * @return {@link BigQueryCsvItemWriterBuilder} - * @see BigQueryLoadJobCsvItemWriter#setBigQuery(BigQuery) - */ - public BigQueryCsvItemWriterBuilder bigQuery(BigQuery bigQuery) { - this.bigQuery = bigQuery; - return this; - } - - /** - * Please remember about {@link BigQueryLoadJobCsvItemWriter#afterPropertiesSet()}. - * - * @return {@link BigQueryLoadJobCsvItemWriter} - */ - public BigQueryLoadJobCsvItemWriter build() { - BigQueryLoadJobCsvItemWriter writer = new BigQueryLoadJobCsvItemWriter<>(); - - writer.setBigQuery(this.bigQuery == null ? BigQueryOptions.getDefaultInstance().getService() : this.bigQuery); - - writer.setRowMapper(this.rowMapper); - writer.setWriteChannelConfig(this.writeChannelConfig); - writer.setJobConsumer(this.jobConsumer); - writer.setDatasetInfo(this.datasetInfo); - - return writer; - } +public class BigQueryCsvItemWriterBuilder { + + private Converter rowMapper; + + private Consumer jobConsumer; + + private DatasetInfo datasetInfo; + + private WriteChannelConfiguration writeChannelConfig; + + private BigQuery bigQuery; + + /** + * Row mapper which transforms single BigQuery row into desired type. + * @param rowMapper your row mapper + * @return {@link BigQueryCsvItemWriterBuilder} + * @see BigQueryLoadJobCsvItemWriter#setRowMapper(Converter) + */ + public BigQueryCsvItemWriterBuilder rowMapper(Converter rowMapper) { + this.rowMapper = rowMapper; + return this; + } + + /** + * Provides additional information about the + * {@link com.google.cloud.bigquery.Dataset}. + * @param datasetInfo BigQuery dataset info + * @return {@link BigQueryCsvItemWriterBuilder} + * @see BigQueryLoadJobCsvItemWriter#setDatasetInfo(DatasetInfo) + */ + public BigQueryCsvItemWriterBuilder datasetInfo(DatasetInfo datasetInfo) { + this.datasetInfo = datasetInfo; + return this; + } + + /** + * Callback when {@link Job} will be finished. 
+ * @param consumer your consumer + * @return {@link BigQueryCsvItemWriterBuilder} + * @see BigQueryLoadJobCsvItemWriter#setJobConsumer(Consumer) + */ + public BigQueryCsvItemWriterBuilder jobConsumer(Consumer consumer) { + this.jobConsumer = consumer; + return this; + } + + /** + * Describes what should be written (format) and its destination (table). + * @param configuration BigQuery channel configuration + * @return {@link BigQueryCsvItemWriterBuilder} + * @see BigQueryLoadJobCsvItemWriter#setWriteChannelConfig(WriteChannelConfiguration) + */ + public BigQueryCsvItemWriterBuilder writeChannelConfig(WriteChannelConfiguration configuration) { + this.writeChannelConfig = configuration; + return this; + } + + /** + * BigQuery service, responsible for API calls. + * @param bigQuery BigQuery service + * @return {@link BigQueryCsvItemWriterBuilder} + * @see BigQueryLoadJobCsvItemWriter#setBigQuery(BigQuery) + */ + public BigQueryCsvItemWriterBuilder bigQuery(BigQuery bigQuery) { + this.bigQuery = bigQuery; + return this; + } + + /** + * Please remember about {@link BigQueryLoadJobCsvItemWriter#afterPropertiesSet()}. + * @return {@link BigQueryLoadJobCsvItemWriter} + */ + public BigQueryLoadJobCsvItemWriter build() { + BigQueryLoadJobCsvItemWriter writer = new BigQueryLoadJobCsvItemWriter<>(); + + writer.setBigQuery(this.bigQuery == null ? BigQueryOptions.getDefaultInstance().getService() : this.bigQuery); + + writer.setRowMapper(this.rowMapper); + writer.setWriteChannelConfig(this.writeChannelConfig); + writer.setJobConsumer(this.jobConsumer); + writer.setDatasetInfo(this.datasetInfo); + + return writer; + } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/json/BigQueryLoadJobJsonItemWriter.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/json/BigQueryLoadJobJsonItemWriter.java index a557110..e36ea5d 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/json/BigQueryLoadJobJsonItemWriter.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/json/BigQueryLoadJobJsonItemWriter.java @@ -38,67 +38,66 @@ */ public class BigQueryLoadJobJsonItemWriter extends BigQueryLoadJobBaseItemWriter { - private static final String LF = "\n"; - - private JsonObjectMarshaller marshaller; - - @Override - protected void doInitializeProperties(List items) { - // Unused - } - - @Override - protected List convertObjectsToByteArrays(List items) { - return items - .stream() - .map(marshaller::marshal) - .filter(Predicate.not(ObjectUtils::isEmpty)) - .map(this::convertToNdJson) - .map(row -> row.getBytes(StandardCharsets.UTF_8)) - .toList(); - } - - @Override - protected void performFormatSpecificChecks() { - Assert.notNull(this.marshaller, "Marshaller must be provided"); - - Table table = getTable(); - - if (Boolean.TRUE.equals(writeChannelConfig.getAutodetect())) { - if (tableHasDefinedSchema(table) && logger.isWarnEnabled()) { - logger.warn("Mixing autodetect mode with already defined schema may lead to errors on BigQuery side"); - } - } else { - Assert.notNull(writeChannelConfig.getSchema(), "Schema must be provided"); - - if (tableHasDefinedSchema(table)) { - Assert.isTrue( - Objects.equals(table.getDefinition().getSchema(), writeChannelConfig.getSchema()), - "Schema must be the same" - ); - } - } - - String format = FormatOptions.json().getType(); - 
Assert.isTrue(Objects.equals(format, super.writeChannelConfig.getFormat()), "Only %s format is allowed".formatted(format)); - } - - /** - * Converter that transforms a single row into a {@link String}. - * - * @param marshaller your JSON mapper - */ - public void setMarshaller(JsonObjectMarshaller marshaller) { - this.marshaller = marshaller; - } - - /** - * BigQuery uses ndjson. - * It is expected that to pass here JSON line generated by - * {@link com.fasterxml.jackson.databind.ObjectMapper} or any other JSON parser. - */ - private String convertToNdJson(String json) { - return json.concat(LF); - } + private static final String LF = "\n"; + + private JsonObjectMarshaller marshaller; + + @Override + protected void doInitializeProperties(List items) { + // Unused + } + + @Override + protected List convertObjectsToByteArrays(List items) { + return items.stream() + .map(marshaller::marshal) + .filter(Predicate.not(ObjectUtils::isEmpty)) + .map(this::convertToNdJson) + .map(row -> row.getBytes(StandardCharsets.UTF_8)) + .toList(); + } + + @Override + protected void performFormatSpecificChecks() { + Assert.notNull(this.marshaller, "Marshaller must be provided"); + + Table table = getTable(); + + if (Boolean.TRUE.equals(writeChannelConfig.getAutodetect())) { + if (tableHasDefinedSchema(table) && logger.isWarnEnabled()) { + logger.warn("Mixing autodetect mode with already defined schema may lead to errors on BigQuery side"); + } + } + else { + Assert.notNull(writeChannelConfig.getSchema(), "Schema must be provided"); + + if (tableHasDefinedSchema(table)) { + boolean schemaEquals = Objects.equals(table.getDefinition().getSchema(), + writeChannelConfig.getSchema()); + Assert.isTrue(schemaEquals, "Schema must be the same"); + } + } + + String format = FormatOptions.json().getType(); + boolean formatEquals = Objects.equals(format, super.writeChannelConfig.getFormat()); + Assert.isTrue(formatEquals, "Only %s format is allowed".formatted(format)); + } + + /** + * Converter that transforms a single row into a {@link String}. + * @param marshaller your JSON mapper + */ + public void setMarshaller(JsonObjectMarshaller marshaller) { + this.marshaller = marshaller; + } + + /** + * BigQuery uses ndjson. It is + * expected that you pass here a JSON line generated by + * {@link com.fasterxml.jackson.databind.ObjectMapper} or any other JSON parser.
+ */ + private String convertToNdJson(String json) { + return json.concat(LF); + } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/json/builder/BigQueryLoadJobJsonItemWriterBuilder.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/json/builder/BigQueryLoadJobJsonItemWriterBuilder.java index 7f8c612..59255a4 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/json/builder/BigQueryLoadJobJsonItemWriterBuilder.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/json/builder/BigQueryLoadJobJsonItemWriterBuilder.java @@ -33,92 +33,92 @@ * @param your DTO type * @author Volodymyr Perebykivskyi * @since 0.2.0 - * @see Examples + * @see Examples */ -public class BigQueryLoadJobJsonItemWriterBuilder { - - private JsonObjectMarshaller marshaller; - private Consumer jobConsumer; - private DatasetInfo datasetInfo; - private WriteChannelConfiguration writeChannelConfig; - private BigQuery bigQuery; - - /** - * Converts your DTO into a {@link String}. - * - * @param marshaller your mapper - * @return {@link BigQueryLoadJobJsonItemWriterBuilder} - * @see BigQueryLoadJobJsonItemWriter#setMarshaller(JsonObjectMarshaller) - */ - public BigQueryLoadJobJsonItemWriterBuilder marshaller(JsonObjectMarshaller marshaller) { - this.marshaller = marshaller; - return this; - } - - /** - * Provides additional information about the {@link com.google.cloud.bigquery.Dataset}. - * - * @param datasetInfo BigQuery dataset info - * @return {@link BigQueryLoadJobJsonItemWriterBuilder} - * @see BigQueryLoadJobJsonItemWriter#setDatasetInfo(DatasetInfo) - */ - public BigQueryLoadJobJsonItemWriterBuilder datasetInfo(DatasetInfo datasetInfo) { - this.datasetInfo = datasetInfo; - return this; - } - - /** - * Callback when {@link Job} will be finished. - * - * @param consumer your consumer - * @return {@link BigQueryLoadJobJsonItemWriterBuilder} - * @see BigQueryLoadJobJsonItemWriter#setJobConsumer(Consumer) - */ - public BigQueryLoadJobJsonItemWriterBuilder jobConsumer(Consumer consumer) { - this.jobConsumer = consumer; - return this; - } - - /** - * Describes what should be written (format) and its destination (table). - * - * @param configuration BigQuery channel configuration - * @return {@link BigQueryLoadJobJsonItemWriterBuilder} - * @see BigQueryLoadJobJsonItemWriter#setWriteChannelConfig(WriteChannelConfiguration) - */ - public BigQueryLoadJobJsonItemWriterBuilder writeChannelConfig(WriteChannelConfiguration configuration) { - this.writeChannelConfig = configuration; - return this; - } - - /** - * BigQuery service, responsible for API calls. - * - * @param bigQuery BigQuery service - * @return {@link BigQueryLoadJobJsonItemWriter} - * @see BigQueryLoadJobJsonItemWriter#setBigQuery(BigQuery) - */ - public BigQueryLoadJobJsonItemWriterBuilder bigQuery(BigQuery bigQuery) { - this.bigQuery = bigQuery; - return this; - } - - /** - * Please remember about {@link BigQueryLoadJobJsonItemWriter#afterPropertiesSet()}. - * - * @return {@link BigQueryLoadJobJsonItemWriter} - */ - public BigQueryLoadJobJsonItemWriter build() { - BigQueryLoadJobJsonItemWriter writer = new BigQueryLoadJobJsonItemWriter<>(); - - writer.setMarshaller(this.marshaller == null ? new JacksonJsonObjectMarshaller<>() : this.marshaller); - writer.setBigQuery(this.bigQuery == null ? 
BigQueryOptions.getDefaultInstance().getService() : this.bigQuery); - - writer.setWriteChannelConfig(this.writeChannelConfig); - writer.setJobConsumer(this.jobConsumer); - writer.setDatasetInfo(this.datasetInfo); - - return writer; - } +public class BigQueryLoadJobJsonItemWriterBuilder { + + private JsonObjectMarshaller marshaller; + + private Consumer jobConsumer; + + private DatasetInfo datasetInfo; + + private WriteChannelConfiguration writeChannelConfig; + + private BigQuery bigQuery; + + /** + * Converts your DTO into a {@link String}. + * @param marshaller your mapper + * @return {@link BigQueryLoadJobJsonItemWriterBuilder} + * @see BigQueryLoadJobJsonItemWriter#setMarshaller(JsonObjectMarshaller) + */ + public BigQueryLoadJobJsonItemWriterBuilder marshaller(JsonObjectMarshaller marshaller) { + this.marshaller = marshaller; + return this; + } + + /** + * Provides additional information about the + * {@link com.google.cloud.bigquery.Dataset}. + * @param datasetInfo BigQuery dataset info + * @return {@link BigQueryLoadJobJsonItemWriterBuilder} + * @see BigQueryLoadJobJsonItemWriter#setDatasetInfo(DatasetInfo) + */ + public BigQueryLoadJobJsonItemWriterBuilder datasetInfo(DatasetInfo datasetInfo) { + this.datasetInfo = datasetInfo; + return this; + } + + /** + * Callback when {@link Job} will be finished. + * @param consumer your consumer + * @return {@link BigQueryLoadJobJsonItemWriterBuilder} + * @see BigQueryLoadJobJsonItemWriter#setJobConsumer(Consumer) + */ + public BigQueryLoadJobJsonItemWriterBuilder jobConsumer(Consumer consumer) { + this.jobConsumer = consumer; + return this; + } + + /** + * Describes what should be written (format) and its destination (table). + * @param configuration BigQuery channel configuration + * @return {@link BigQueryLoadJobJsonItemWriterBuilder} + * @see BigQueryLoadJobJsonItemWriter#setWriteChannelConfig(WriteChannelConfiguration) + */ + public BigQueryLoadJobJsonItemWriterBuilder writeChannelConfig(WriteChannelConfiguration configuration) { + this.writeChannelConfig = configuration; + return this; + } + + /** + * BigQuery service, responsible for API calls. + * @param bigQuery BigQuery service + * @return {@link BigQueryLoadJobJsonItemWriter} + * @see BigQueryLoadJobJsonItemWriter#setBigQuery(BigQuery) + */ + public BigQueryLoadJobJsonItemWriterBuilder bigQuery(BigQuery bigQuery) { + this.bigQuery = bigQuery; + return this; + } + + /** + * Please remember about {@link BigQueryLoadJobJsonItemWriter#afterPropertiesSet()}. + * @return {@link BigQueryLoadJobJsonItemWriter} + */ + public BigQueryLoadJobJsonItemWriter build() { + BigQueryLoadJobJsonItemWriter writer = new BigQueryLoadJobJsonItemWriter<>(); + + writer.setMarshaller(this.marshaller == null ? new JacksonJsonObjectMarshaller<>() : this.marshaller); + writer.setBigQuery(this.bigQuery == null ? 
BigQueryOptions.getDefaultInstance().getService() : this.bigQuery); + + writer.setWriteChannelConfig(this.writeChannelConfig); + writer.setJobConsumer(this.jobConsumer); + writer.setDatasetInfo(this.datasetInfo); + + return writer; + } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/package-info.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/package-info.java index 77e882a..1e110f3 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/package-info.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/loadjob/package-info.java @@ -15,16 +15,19 @@ */ /** - * {@link com.google.cloud.bigquery.JobConfiguration.Type#LOAD} {@link com.google.cloud.bigquery.Job} + * {@link com.google.cloud.bigquery.JobConfiguration.Type#LOAD} + * {@link com.google.cloud.bigquery.Job} * - *

<p>Supported formats:
+ * <p>
+ * Supported formats:
 * <ul>
- *     <li>JSON</li>
- *     <li>CSV</li>
+ * <li>JSON</li>
+ * <li>CSV</li>
 * </ul>
 *
- * <p>If you generate {@link com.google.cloud.bigquery.TableDataWriteChannel}
- * and you {@link com.google.cloud.bigquery.TableDataWriteChannel#close()} it,
- * there is no guarantee that single {@link com.google.cloud.bigquery.Job} will be created.
+ * <p>
+ * If you generate {@link com.google.cloud.bigquery.TableDataWriteChannel} and you + * {@link com.google.cloud.bigquery.TableDataWriteChannel#close()} it, there is no + * guarantee that a single {@link com.google.cloud.bigquery.Job} will be created. */ package org.springframework.batch.extensions.bigquery.writer.loadjob; \ No newline at end of file diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/package-info.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/package-info.java index 48c3746..3d30a24 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/package-info.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/package-info.java @@ -20,16 +20,20 @@ * These writers use a Java client from Google, so we cannot control this flow fully. *

* - * Take into account that BigQuery has rate limits, and it is very easy to exceed those in concurrent environment. + * Take into account that BigQuery has rate limits, and it is very easy to exceed those in a + * concurrent environment. *

- * Also, worth mentioning that you should ensure ordering of the fields in DTO that you are going to send to the BigQuery. - * In case of CSV/JSON and Jackson consider using {@link com.fasterxml.jackson.annotation.JsonPropertyOrder}. + * Also, worth mentioning that you should ensure ordering of the fields in DTO that you + * are going to send to the BigQuery. In case of CSV/JSON and Jackson consider using + * {@link com.fasterxml.jackson.annotation.JsonPropertyOrder}. * * @author Volodymyr Perebykivskyi * @since 0.2.0 * @see Google BigQuery - * @see BigQuery Java Client on GitHub - * @see BigQuery Quotas & Limits + * @see BigQuery Java Client on + * GitHub + * @see BigQuery Quotas & + * Limits */ @NonNullApi package org.springframework.batch.extensions.bigquery.writer; diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/BigQueryWriteApiCommitedJsonItemWriter.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/BigQueryWriteApiCommitedJsonItemWriter.java index 7c41069..fa93814 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/BigQueryWriteApiCommitedJsonItemWriter.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/BigQueryWriteApiCommitedJsonItemWriter.java @@ -47,139 +47,146 @@ * @param your DTO type * @author Volodymyr Perebykivskyi * @see JSON - * @see Commited type storage write API + * @see Commited + * type storage write API * @since 0.2.0 */ public class BigQueryWriteApiCommitedJsonItemWriter implements ItemWriter, InitializingBean { - /** - * Logger that can be reused - */ - private final Log logger = LogFactory.getLog(getClass()); - - private final AtomicLong bigQueryWriteCounter = new AtomicLong(); - - private BigQueryWriteClient bigQueryWriteClient; - private TableName tableName; - private JsonObjectMarshaller marshaller; - private ApiFutureCallback apiFutureCallback; - private Executor executor; - - private boolean writeFailed; - - @Override - public void write(final Chunk chunk) throws Exception { - if (!chunk.isEmpty()) { - final List items = chunk.getItems(); - String streamName = null; - - try { - WriteStream writeStreamToCreate = WriteStream.newBuilder() - .setType(WriteStream.Type.COMMITTED) - .build(); - - CreateWriteStreamRequest createStreamRequest = CreateWriteStreamRequest.newBuilder() - .setParent(tableName.toString()) - .setWriteStream(writeStreamToCreate) - .build(); - - WriteStream writeStream = bigQueryWriteClient.createWriteStream(createStreamRequest); - streamName = writeStream.getName(); - - if (logger.isDebugEnabled()) { - logger.debug("Created a stream=" + streamName); - } - - try (final JsonStreamWriter writer = JsonStreamWriter.newBuilder(writeStream.getName(), bigQueryWriteClient).build()) { - if (logger.isDebugEnabled()) { - logger.debug(String.format("Mapping %d elements", items.size())); - } - final JSONArray array = new JSONArray(); - items.stream().map(marshaller::marshal).map(JSONObject::new).forEach(array::put); - - if (logger.isDebugEnabled()) { - logger.debug("Writing data to BigQuery"); - } - final ApiFuture future = writer.append(array); - - if (apiFutureCallback != null) { - ApiFutures.addCallback(future, apiFutureCallback, executor); - } - } - } catch (Exception e) { - writeFailed = true; - logger.error("BigQuery error", e); - throw new BigQueryItemWriterException("Error on write happened", e); - 
} finally { - if (StringUtils.hasText(streamName)) { - long rowCount = bigQueryWriteClient.finalizeWriteStream(streamName).getRowCount(); - if (chunk.size() != rowCount) { - logger.warn("Finalized response row count=%d is not the same as chunk size=%d".formatted(rowCount, chunk.size())); - } - } - - if (!writeFailed && logger.isDebugEnabled()) { - logger.debug("Write operation submitted: " + bigQueryWriteCounter.incrementAndGet()); - } - } - } - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(this.bigQueryWriteClient, "BigQuery write client must be provided"); - Assert.notNull(this.tableName, "Table name must be provided"); - Assert.notNull(this.marshaller, "Marshaller must be provided"); - - if (this.apiFutureCallback != null) { - Assert.notNull(this.executor, "Executor must be provided"); - } - } - - /** - * GRPC client that wraps communication with BigQuery. - * - * @param bigQueryWriteClient a client - */ - public void setBigQueryWriteClient(final BigQueryWriteClient bigQueryWriteClient) { - this.bigQueryWriteClient = bigQueryWriteClient; - } - - /** - * A full path to the BigQuery table. - * - * @param tableName a name - */ - public void setTableName(final TableName tableName) { - this.tableName = tableName; - } - - /** - * Converter that transforms a single row into a {@link String}. - * - * @param marshaller your JSON mapper - */ - public void setMarshaller(final JsonObjectMarshaller marshaller) { - this.marshaller = marshaller; - } - - /** - * {@link ApiFutureCallback} that will be called in case of successful of failed response. - * - * @param apiFutureCallback a callback - * @see BigQueryWriteApiCommitedJsonItemWriter#setExecutor(Executor) - */ - public void setApiFutureCallback(final ApiFutureCallback apiFutureCallback) { - this.apiFutureCallback = apiFutureCallback; - } - - /** - * An {@link Executor} that will be calling a {@link ApiFutureCallback}. 
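The committed-type writer above treats the `ApiFutureCallback` and `Executor` as an optional pair. A minimal sketch of wiring them, with an offset-logging body that is purely illustrative; `MoreExecutors.directExecutor()` is what the corresponding builder falls back to when only a callback is given:

```java
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

import com.google.api.core.ApiFutureCallback;
import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;

class AppendCallbackExample {

	// Runs once the append future settles; the Executor chooses the thread.
	static ApiFutureCallback<AppendRowsResponse> loggingCallback() {
		return new ApiFutureCallback<AppendRowsResponse>() {
			@Override
			public void onSuccess(AppendRowsResponse response) {
				System.out.println("Appended, offset=" + response.getAppendResult().getOffset().getValue());
			}

			@Override
			public void onFailure(Throwable t) {
				System.err.println("Append failed: " + t.getMessage());
			}
		};
	}

	// A dedicated thread keeps callback work off the gRPC transport threads.
	static Executor callbackExecutor() {
		return Executors.newSingleThreadExecutor();
	}

}
```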
- * - * @param executor an executor - * @see BigQueryWriteApiCommitedJsonItemWriter#setApiFutureCallback(ApiFutureCallback) - */ - public void setExecutor(final Executor executor) { - this.executor = executor; - } + /** + * Logger that can be reused + */ + private final Log logger = LogFactory.getLog(getClass()); + + private final AtomicLong bigQueryWriteCounter = new AtomicLong(); + + private BigQueryWriteClient bigQueryWriteClient; + + private TableName tableName; + + private JsonObjectMarshaller marshaller; + + private ApiFutureCallback apiFutureCallback; + + private Executor executor; + + private boolean writeFailed; + + @Override + public void write(final Chunk chunk) throws Exception { + if (!chunk.isEmpty()) { + final List items = chunk.getItems(); + String streamName = null; + + try { + WriteStream writeStreamToCreate = WriteStream.newBuilder().setType(WriteStream.Type.COMMITTED).build(); + + CreateWriteStreamRequest createStreamRequest = CreateWriteStreamRequest.newBuilder() + .setParent(tableName.toString()) + .setWriteStream(writeStreamToCreate) + .build(); + + WriteStream writeStream = bigQueryWriteClient.createWriteStream(createStreamRequest); + streamName = writeStream.getName(); + + if (logger.isDebugEnabled()) { + logger.debug("Created a stream=" + streamName); + } + + final JsonStreamWriter jsonWriter = JsonStreamWriter + .newBuilder(writeStream.getName(), bigQueryWriteClient) + .build(); + + try (jsonWriter) { + if (logger.isDebugEnabled()) { + logger.debug(String.format("Mapping %d elements", items.size())); + } + final JSONArray array = new JSONArray(); + items.stream().map(marshaller::marshal).map(JSONObject::new).forEach(array::put); + + if (logger.isDebugEnabled()) { + logger.debug("Writing data to BigQuery"); + } + final ApiFuture future = jsonWriter.append(array); + + if (apiFutureCallback != null) { + ApiFutures.addCallback(future, apiFutureCallback, executor); + } + } + } + catch (Exception e) { + writeFailed = true; + logger.error("BigQuery error", e); + throw new BigQueryItemWriterException("Error on write happened", e); + } + finally { + if (StringUtils.hasText(streamName)) { + final long rowCount = bigQueryWriteClient.finalizeWriteStream(streamName).getRowCount(); + if (chunk.size() != rowCount) { + logger.warn("Finalized response row count=%d is not the same as chunk size=%d" + .formatted(rowCount, chunk.size())); + } + } + + if (!writeFailed && logger.isDebugEnabled()) { + logger.debug("Write operation submitted: " + bigQueryWriteCounter.incrementAndGet()); + } + } + } + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.notNull(this.bigQueryWriteClient, "BigQuery write client must be provided"); + Assert.notNull(this.tableName, "Table name must be provided"); + Assert.notNull(this.marshaller, "Marshaller must be provided"); + + if (this.apiFutureCallback != null) { + Assert.notNull(this.executor, "Executor must be provided"); + } + } + + /** + * GRPC client that wraps communication with BigQuery. + * @param bigQueryWriteClient a client + */ + public void setBigQueryWriteClient(final BigQueryWriteClient bigQueryWriteClient) { + this.bigQueryWriteClient = bigQueryWriteClient; + } + + /** + * A full path to the BigQuery table. + * @param tableName a name + */ + public void setTableName(final TableName tableName) { + this.tableName = tableName; + } + + /** + * Converter that transforms a single row into a {@link String}. 
+ * @param marshaller your JSON mapper + */ + public void setMarshaller(final JsonObjectMarshaller marshaller) { + this.marshaller = marshaller; + } + + /** + * {@link ApiFutureCallback} that will be called in case of successful of failed + * response. + * @param apiFutureCallback a callback + * @see BigQueryWriteApiCommitedJsonItemWriter#setExecutor(Executor) + */ + public void setApiFutureCallback(final ApiFutureCallback apiFutureCallback) { + this.apiFutureCallback = apiFutureCallback; + } + + /** + * An {@link Executor} that will be calling a {@link ApiFutureCallback}. + * @param executor an executor + * @see BigQueryWriteApiCommitedJsonItemWriter#setApiFutureCallback(ApiFutureCallback) + */ + public void setExecutor(final Executor executor) { + this.executor = executor; + } + } diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/BigQueryWriteApiPendingJsonItemWriter.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/BigQueryWriteApiPendingJsonItemWriter.java index b64fc20..01025f9 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/BigQueryWriteApiPendingJsonItemWriter.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/BigQueryWriteApiPendingJsonItemWriter.java @@ -49,154 +49,163 @@ * @param your DTO type * @author Volodymyr Perebykivskyi * @see JSON - * @see Pending type storage write API + * @see Pending + * type storage write API * @since 0.2.0 */ public class BigQueryWriteApiPendingJsonItemWriter implements ItemWriter, InitializingBean { - /** - * Logger that can be reused - */ - private final Log logger = LogFactory.getLog(getClass()); - - private final AtomicLong bigQueryWriteCounter = new AtomicLong(); - - private BigQueryWriteClient bigQueryWriteClient; - private TableName tableName; - private JsonObjectMarshaller marshaller; - private ApiFutureCallback apiFutureCallback; - private Executor executor; - - private boolean writeFailed; - - @Override - public void write(final Chunk chunk) throws Exception { - if (!chunk.isEmpty()) { - final List items = chunk.getItems(); - String streamName = null; - - try { - WriteStream writeStreamToCreate = WriteStream.newBuilder() - .setType(WriteStream.Type.PENDING) - .build(); - - CreateWriteStreamRequest createStreamRequest = CreateWriteStreamRequest.newBuilder() - .setParent(tableName.toString()) - .setWriteStream(writeStreamToCreate) - .build(); - - WriteStream writeStream = bigQueryWriteClient.createWriteStream(createStreamRequest); - streamName = writeStream.getName(); - - if (logger.isDebugEnabled()) { - logger.debug("Created a stream=" + streamName); - } - - try (final JsonStreamWriter writer = JsonStreamWriter.newBuilder(writeStream.getName(), bigQueryWriteClient).build()) { - if (logger.isDebugEnabled()) { - logger.debug(String.format("Mapping %d elements", items.size())); - } - final JSONArray array = new JSONArray(); - items.stream().map(marshaller::marshal).map(JSONObject::new).forEach(array::put); - - if (logger.isDebugEnabled()) { - logger.debug("Writing data to BigQuery"); - } - final ApiFuture future = writer.append(array); - - if (apiFutureCallback != null) { - ApiFutures.addCallback(future, apiFutureCallback, executor); - } - } - } catch (Exception e) { - writeFailed = true; - logger.error("BigQuery error", e); - throw new BigQueryItemWriterException("Error on write happened", 
e); - } finally { - if (StringUtils.hasText(streamName)) { - long rowCount = bigQueryWriteClient.finalizeWriteStream(streamName).getRowCount(); - if (chunk.size() != rowCount) { - logger.warn("Finalized response row count=%d is not the same as chunk size=%d".formatted(rowCount, chunk.size())); - } - - BatchCommitWriteStreamsRequest batchRequest = BatchCommitWriteStreamsRequest.newBuilder() - .setParent(tableName.toString()) - .addWriteStreams(streamName) - .build(); - BatchCommitWriteStreamsResponse batchResponse = bigQueryWriteClient.batchCommitWriteStreams(batchRequest); - - if (!batchResponse.hasCommitTime()) { - writeFailed = true; - logger.error("BigQuery error=" + batchResponse.getStreamErrorsList()); - } - } - - if (!writeFailed && logger.isDebugEnabled()) { - logger.debug("Write operation submitted: " + bigQueryWriteCounter.incrementAndGet()); - } - } - - if (writeFailed) { - throw new BigQueryItemWriterException("Error on write happened"); - } - } - } - - @Override - public void afterPropertiesSet() throws Exception { - Assert.notNull(this.bigQueryWriteClient, "BigQuery write client must be provided"); - Assert.notNull(this.tableName, "Table name must be provided"); - Assert.notNull(this.marshaller, "Marshaller must be provided"); - - if (this.apiFutureCallback != null) { - Assert.notNull(this.executor, "Executor must be provided"); - } - } - - /** - * GRPC client that wraps communication with BigQuery. - * - * @param bigQueryWriteClient a client - */ - public void setBigQueryWriteClient(final BigQueryWriteClient bigQueryWriteClient) { - this.bigQueryWriteClient = bigQueryWriteClient; - } - - /** - * A full path to the BigQuery table. - * - * @param tableName a name - */ - public void setTableName(final TableName tableName) { - this.tableName = tableName; - } - - /** - * Converter that transforms a single row into a {@link String}. - * - * @param marshaller your JSON mapper - */ - public void setMarshaller(final JsonObjectMarshaller marshaller) { - this.marshaller = marshaller; - } - - /** - * {@link ApiFutureCallback} that will be called in case of successful of failed response. - * - * @param apiFutureCallback a callback - * @see BigQueryWriteApiPendingJsonItemWriter#setExecutor(Executor) - */ - public void setApiFutureCallback(final ApiFutureCallback apiFutureCallback) { - this.apiFutureCallback = apiFutureCallback; - } - - /** - * An {@link Executor} that will be calling a {@link ApiFutureCallback}. 
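For orientation, the pending-type flow shown above differs from the committed one only after finalization: appended rows stay invisible until the stream is batch-committed. A condensed restatement of that step using the same client calls as the writer's finally-block; `client`, `table`, and `streamName` stand in for the writer's own state:

```java
import com.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest;
import com.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse;
import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
import com.google.cloud.bigquery.storage.v1.TableName;

class PendingStreamCommit {

	// Finalize stops further appends; the batch commit then makes the
	// buffered rows visible atomically.
	static void commit(BigQueryWriteClient client, TableName table, String streamName) {
		client.finalizeWriteStream(streamName);

		BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
			.setParent(table.toString())
			.addWriteStreams(streamName)
			.build();

		BatchCommitWriteStreamsResponse response = client.batchCommitWriteStreams(request);
		if (!response.hasCommitTime()) {
			throw new IllegalStateException("Commit failed: " + response.getStreamErrorsList());
		}
	}

}
```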
- * - * @param executor an executor - * @see BigQueryWriteApiPendingJsonItemWriter#setApiFutureCallback(ApiFutureCallback) - */ - public void setExecutor(final Executor executor) { - this.executor = executor; - } + /** + * Logger that can be reused + */ + private final Log logger = LogFactory.getLog(getClass()); + + private final AtomicLong bigQueryWriteCounter = new AtomicLong(); + + private BigQueryWriteClient bigQueryWriteClient; + + private TableName tableName; + + private JsonObjectMarshaller marshaller; + + private ApiFutureCallback apiFutureCallback; + + private Executor executor; + + private boolean writeFailed; + + @Override + public void write(final Chunk chunk) throws Exception { + if (!chunk.isEmpty()) { + final List items = chunk.getItems(); + String streamName = null; + + try { + WriteStream writeStreamToCreate = WriteStream.newBuilder().setType(WriteStream.Type.PENDING).build(); + + CreateWriteStreamRequest createStreamRequest = CreateWriteStreamRequest.newBuilder() + .setParent(tableName.toString()) + .setWriteStream(writeStreamToCreate) + .build(); + + WriteStream writeStream = bigQueryWriteClient.createWriteStream(createStreamRequest); + streamName = writeStream.getName(); + + if (logger.isDebugEnabled()) { + logger.debug("Created a stream=" + streamName); + } + + final JsonStreamWriter jsonWriter = JsonStreamWriter + .newBuilder(writeStream.getName(), bigQueryWriteClient) + .build(); + + try (jsonWriter) { + if (logger.isDebugEnabled()) { + logger.debug(String.format("Mapping %d elements", items.size())); + } + final JSONArray array = new JSONArray(); + items.stream().map(marshaller::marshal).map(JSONObject::new).forEach(array::put); + + if (logger.isDebugEnabled()) { + logger.debug("Writing data to BigQuery"); + } + final ApiFuture future = jsonWriter.append(array); + + if (apiFutureCallback != null) { + ApiFutures.addCallback(future, apiFutureCallback, executor); + } + } + } + catch (Exception e) { + writeFailed = true; + logger.error("BigQuery error", e); + throw new BigQueryItemWriterException("Error on write happened", e); + } + finally { + if (StringUtils.hasText(streamName)) { + final long rowCount = bigQueryWriteClient.finalizeWriteStream(streamName).getRowCount(); + if (chunk.size() != rowCount) { + logger.warn("Finalized response row count=%d is not the same as chunk size=%d" + .formatted(rowCount, chunk.size())); + } + + final BatchCommitWriteStreamsRequest batchRequest = BatchCommitWriteStreamsRequest.newBuilder() + .setParent(tableName.toString()) + .addWriteStreams(streamName) + .build(); + + final BatchCommitWriteStreamsResponse batchResponse = bigQueryWriteClient + .batchCommitWriteStreams(batchRequest); + + if (!batchResponse.hasCommitTime()) { + writeFailed = true; + logger.error("BigQuery error=" + batchResponse.getStreamErrorsList()); + } + } + + if (!writeFailed && logger.isDebugEnabled()) { + logger.debug("Write operation submitted: " + bigQueryWriteCounter.incrementAndGet()); + } + } + + if (writeFailed) { + throw new BigQueryItemWriterException("Error on write happened"); + } + } + } + + @Override + public void afterPropertiesSet() throws Exception { + Assert.notNull(this.bigQueryWriteClient, "BigQuery write client must be provided"); + Assert.notNull(this.tableName, "Table name must be provided"); + Assert.notNull(this.marshaller, "Marshaller must be provided"); + + if (this.apiFutureCallback != null) { + Assert.notNull(this.executor, "Executor must be provided"); + } + } + + /** + * GRPC client that wraps communication with BigQuery. 
+ * @param bigQueryWriteClient a client + */ + public void setBigQueryWriteClient(final BigQueryWriteClient bigQueryWriteClient) { + this.bigQueryWriteClient = bigQueryWriteClient; + } + + /** + * A full path to the BigQuery table. + * @param tableName a name + */ + public void setTableName(final TableName tableName) { + this.tableName = tableName; + } + + /** + * Converter that transforms a single row into a {@link String}. + * @param marshaller your JSON mapper + */ + public void setMarshaller(final JsonObjectMarshaller marshaller) { + this.marshaller = marshaller; + } + + /** + * {@link ApiFutureCallback} that will be called in case of successful of failed + * response. + * @param apiFutureCallback a callback + * @see BigQueryWriteApiPendingJsonItemWriter#setExecutor(Executor) + */ + public void setApiFutureCallback(final ApiFutureCallback apiFutureCallback) { + this.apiFutureCallback = apiFutureCallback; + } + + /** + * An {@link Executor} that will be calling a {@link ApiFutureCallback}. + * @param executor an executor + * @see BigQueryWriteApiPendingJsonItemWriter#setApiFutureCallback(ApiFutureCallback) + */ + public void setExecutor(final Executor executor) { + this.executor = executor; + } + } diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/builder/BigQueryWriteApiCommitedJsonItemWriterBuilder.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/builder/BigQueryWriteApiCommitedJsonItemWriterBuilder.java index fa8bd88..2639800 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/builder/BigQueryWriteApiCommitedJsonItemWriterBuilder.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/builder/BigQueryWriteApiCommitedJsonItemWriterBuilder.java @@ -34,97 +34,102 @@ * @param your DTO type * @author Volodymyr Perebykivskyi * @since 0.2.0 - * @see Examples + * @see Examples */ public class BigQueryWriteApiCommitedJsonItemWriterBuilder { - private BigQueryWriteClient bigQueryWriteClient; - private TableName tableName; - private JsonObjectMarshaller marshaller; - private ApiFutureCallback apiFutureCallback; - private Executor executor; - - /** - * GRPC client that will be responsible for communication with BigQuery. - * - * @param bigQueryWriteClient a client - * @return {@link BigQueryWriteApiCommitedJsonItemWriterBuilder} - * @see BigQueryWriteApiCommitedJsonItemWriter#setBigQueryWriteClient(BigQueryWriteClient) - */ - public BigQueryWriteApiCommitedJsonItemWriterBuilder bigQueryWriteClient(final BigQueryWriteClient bigQueryWriteClient) { - this.bigQueryWriteClient = bigQueryWriteClient; - return this; - } - - /** - * A table name along with a full path. - * - * @param tableName a name - * @return {@link BigQueryWriteApiCommitedJsonItemWriterBuilder} - * @see BigQueryWriteApiCommitedJsonItemWriter#setTableName(TableName) - */ - public BigQueryWriteApiCommitedJsonItemWriterBuilder tableName(final TableName tableName) { - this.tableName = tableName; - return this; - } - - /** - * Converts your DTO into a {@link String}. 
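The marshaller is pluggable on both write-api writers. A small sketch, assuming Jackson, that pairs the default `JacksonJsonObjectMarshaller` with an explicit `@JsonPropertyOrder`, as the writer package-info recommends; the `PersonDto` record is illustrative:

```java
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.batch.item.json.JacksonJsonObjectMarshaller;

class MarshallerExample {

	// Field order is pinned so the emitted JSON matches the table schema.
	@JsonPropertyOrder({ "name", "age" })
	record PersonDto(String name, Integer age) {
	}

	// The builders fall back to this marshaller by default; a custom
	// ObjectMapper can be supplied when serialization needs tuning.
	static JacksonJsonObjectMarshaller<PersonDto> marshaller() {
		JacksonJsonObjectMarshaller<PersonDto> marshaller = new JacksonJsonObjectMarshaller<>();
		marshaller.setObjectMapper(new ObjectMapper());
		return marshaller;
	}

}
```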
- * - * @param marshaller your mapper - * @return {@link BigQueryWriteApiCommitedJsonItemWriterBuilder} - * @see BigQueryWriteApiCommitedJsonItemWriter#setMarshaller(JsonObjectMarshaller) - */ - public BigQueryWriteApiCommitedJsonItemWriterBuilder marshaller(final JsonObjectMarshaller marshaller) { - this.marshaller = marshaller; - return this; - } - - /** - * A {@link ApiFutureCallback} that will be called on successful or failed event. - * - * @param apiFutureCallback a callback - * @return {@link BigQueryWriteApiCommitedJsonItemWriterBuilder} - * @see BigQueryWriteApiCommitedJsonItemWriter#setApiFutureCallback(ApiFutureCallback) - */ - public BigQueryWriteApiCommitedJsonItemWriterBuilder apiFutureCallback(final ApiFutureCallback apiFutureCallback) { - this.apiFutureCallback = apiFutureCallback; - return this; - } - - /** - * {@link Executor} that will be used for {@link ApiFutureCallback}. - * - * @param executor an executor - * @return {@link BigQueryWriteApiCommitedJsonItemWriterBuilder} - * @see BigQueryWriteApiCommitedJsonItemWriter#setExecutor(Executor) - * @see BigQueryWriteApiCommitedJsonItemWriter#setApiFutureCallback(ApiFutureCallback) - */ - public BigQueryWriteApiCommitedJsonItemWriterBuilder executor(final Executor executor) { - this.executor = executor; - return this; - } - - /** - * Please remember about {@link BigQueryWriteApiCommitedJsonItemWriter#afterPropertiesSet()}. - * - * @return {@link BigQueryWriteApiCommitedJsonItemWriter} - * @throws IOException in case when {@link BigQueryWriteClient} failed to be created automatically - */ - public BigQueryWriteApiCommitedJsonItemWriter build() throws IOException { - BigQueryWriteApiCommitedJsonItemWriter writer = new BigQueryWriteApiCommitedJsonItemWriter<>(); - - writer.setMarshaller(this.marshaller == null ? new JacksonJsonObjectMarshaller<>() : this.marshaller); - writer.setBigQueryWriteClient(this.bigQueryWriteClient == null ? BigQueryWriteClient.create() : this.bigQueryWriteClient); - - if (apiFutureCallback != null) { - writer.setApiFutureCallback(apiFutureCallback); - writer.setExecutor(this.executor == null ? MoreExecutors.directExecutor() : this.executor); - } - - writer.setTableName(tableName); - - return writer; - } + private BigQueryWriteClient bigQueryWriteClient; + + private TableName tableName; + + private JsonObjectMarshaller marshaller; + + private ApiFutureCallback apiFutureCallback; + + private Executor executor; + + /** + * GRPC client that will be responsible for communication with BigQuery. + * @param bigQueryWriteClient a client + * @return {@link BigQueryWriteApiCommitedJsonItemWriterBuilder} + * @see BigQueryWriteApiCommitedJsonItemWriter#setBigQueryWriteClient(BigQueryWriteClient) + */ + public BigQueryWriteApiCommitedJsonItemWriterBuilder bigQueryWriteClient( + final BigQueryWriteClient bigQueryWriteClient) { + this.bigQueryWriteClient = bigQueryWriteClient; + return this; + } + + /** + * A table name along with a full path. + * @param tableName a name + * @return {@link BigQueryWriteApiCommitedJsonItemWriterBuilder} + * @see BigQueryWriteApiCommitedJsonItemWriter#setTableName(TableName) + */ + public BigQueryWriteApiCommitedJsonItemWriterBuilder tableName(final TableName tableName) { + this.tableName = tableName; + return this; + } + + /** + * Converts your DTO into a {@link String}. 
+ * @param marshaller your mapper + * @return {@link BigQueryWriteApiCommitedJsonItemWriterBuilder} + * @see BigQueryWriteApiCommitedJsonItemWriter#setMarshaller(JsonObjectMarshaller) + */ + public BigQueryWriteApiCommitedJsonItemWriterBuilder marshaller(final JsonObjectMarshaller marshaller) { + this.marshaller = marshaller; + return this; + } + + /** + * A {@link ApiFutureCallback} that will be called on successful or failed event. + * @param apiFutureCallback a callback + * @return {@link BigQueryWriteApiCommitedJsonItemWriterBuilder} + * @see BigQueryWriteApiCommitedJsonItemWriter#setApiFutureCallback(ApiFutureCallback) + */ + public BigQueryWriteApiCommitedJsonItemWriterBuilder apiFutureCallback( + final ApiFutureCallback apiFutureCallback) { + this.apiFutureCallback = apiFutureCallback; + return this; + } + + /** + * {@link Executor} that will be used for {@link ApiFutureCallback}. + * @param executor an executor + * @return {@link BigQueryWriteApiCommitedJsonItemWriterBuilder} + * @see BigQueryWriteApiCommitedJsonItemWriter#setExecutor(Executor) + * @see BigQueryWriteApiCommitedJsonItemWriter#setApiFutureCallback(ApiFutureCallback) + */ + public BigQueryWriteApiCommitedJsonItemWriterBuilder executor(final Executor executor) { + this.executor = executor; + return this; + } + + /** + * Please remember about + * {@link BigQueryWriteApiCommitedJsonItemWriter#afterPropertiesSet()}. + * @return {@link BigQueryWriteApiCommitedJsonItemWriter} + * @throws IOException in case when {@link BigQueryWriteClient} failed to be created + * automatically + */ + public BigQueryWriteApiCommitedJsonItemWriter build() throws IOException { + BigQueryWriteApiCommitedJsonItemWriter writer = new BigQueryWriteApiCommitedJsonItemWriter<>(); + + writer.setMarshaller(this.marshaller == null ? new JacksonJsonObjectMarshaller<>() : this.marshaller); + + writer.setBigQueryWriteClient( + this.bigQueryWriteClient == null ? BigQueryWriteClient.create() : this.bigQueryWriteClient); + + if (apiFutureCallback != null) { + writer.setApiFutureCallback(apiFutureCallback); + writer.setExecutor(this.executor == null ? MoreExecutors.directExecutor() : this.executor); + } + + writer.setTableName(tableName); + + return writer; + } } diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/builder/BigQueryWriteApiPendingJsonItemWriterBuilder.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/builder/BigQueryWriteApiPendingJsonItemWriterBuilder.java index 51986b4..000368a 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/builder/BigQueryWriteApiPendingJsonItemWriterBuilder.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/json/builder/BigQueryWriteApiPendingJsonItemWriterBuilder.java @@ -34,97 +34,102 @@ * @param your DTO type * @author Volodymyr Perebykivskyi * @since 0.2.0 - * @see Examples + * @see Examples */ public class BigQueryWriteApiPendingJsonItemWriterBuilder { - private BigQueryWriteClient bigQueryWriteClient; - private TableName tableName; - private JsonObjectMarshaller marshaller; - private ApiFutureCallback apiFutureCallback; - private Executor executor; - - /** - * GRPC client that will be responsible for communication with BigQuery. 
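A usage sketch for the committed-type builder just shown; the project, dataset, and table identifiers are placeholders, and omitting `bigQueryWriteClient(...)` means `build()` calls `BigQueryWriteClient.create()`, which requires application-default credentials:

```java
import com.google.cloud.bigquery.storage.v1.TableName;
import org.springframework.batch.extensions.bigquery.writer.writeapi.json.BigQueryWriteApiCommitedJsonItemWriter;
import org.springframework.batch.extensions.bigquery.writer.writeapi.json.builder.BigQueryWriteApiCommitedJsonItemWriterBuilder;

class CommitedWriterWiring {

	record PersonDto(String name, Integer age) {
	}

	// Identifiers are placeholders; build() throws IOException when it has
	// to create the default BigQueryWriteClient itself.
	static BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer() throws Exception {
		BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto>()
			.tableName(TableName.of("my-project", "spring_batch_extensions", "persons"))
			.build();

		writer.afterPropertiesSet(); // validates client, table name and marshaller
		return writer;
	}

}
```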
- * - * @param bigQueryWriteClient a client - * @return {@link BigQueryWriteApiPendingJsonItemWriterBuilder} - * @see BigQueryWriteApiPendingJsonItemWriter#setBigQueryWriteClient(BigQueryWriteClient) - */ - public BigQueryWriteApiPendingJsonItemWriterBuilder bigQueryWriteClient(final BigQueryWriteClient bigQueryWriteClient) { - this.bigQueryWriteClient = bigQueryWriteClient; - return this; - } - - /** - * A table name along with a full path. - * - * @param tableName a name - * @return {@link BigQueryWriteApiPendingJsonItemWriterBuilder} - * @see BigQueryWriteApiPendingJsonItemWriter#setTableName(TableName) - */ - public BigQueryWriteApiPendingJsonItemWriterBuilder tableName(final TableName tableName) { - this.tableName = tableName; - return this; - } - - /** - * Converts your DTO into a {@link String}. - * - * @param marshaller your mapper - * @return {@link BigQueryWriteApiPendingJsonItemWriterBuilder} - * @see BigQueryWriteApiPendingJsonItemWriter#setMarshaller(JsonObjectMarshaller) - */ - public BigQueryWriteApiPendingJsonItemWriterBuilder marshaller(final JsonObjectMarshaller marshaller) { - this.marshaller = marshaller; - return this; - } - - /** - * A {@link ApiFutureCallback} that will be called on successful or failed event. - * - * @param apiFutureCallback a callback - * @return {@link BigQueryWriteApiPendingJsonItemWriterBuilder} - * @see BigQueryWriteApiPendingJsonItemWriter#setApiFutureCallback(ApiFutureCallback) - */ - public BigQueryWriteApiPendingJsonItemWriterBuilder apiFutureCallback(final ApiFutureCallback apiFutureCallback) { - this.apiFutureCallback = apiFutureCallback; - return this; - } - - /** - * {@link Executor} that will be used for {@link ApiFutureCallback}. - * - * @param executor an executor - * @return {@link BigQueryWriteApiPendingJsonItemWriterBuilder} - * @see BigQueryWriteApiPendingJsonItemWriter#setExecutor(Executor) - * @see BigQueryWriteApiPendingJsonItemWriter#setApiFutureCallback(ApiFutureCallback) - */ - public BigQueryWriteApiPendingJsonItemWriterBuilder executor(final Executor executor) { - this.executor = executor; - return this; - } - - /** - * Please remember about {@link BigQueryWriteApiPendingJsonItemWriter#afterPropertiesSet()}. - * - * @return {@link BigQueryWriteApiPendingJsonItemWriter} - * @throws IOException in case when {@link BigQueryWriteClient} failed to be created automatically - */ - public BigQueryWriteApiPendingJsonItemWriter build() throws IOException { - BigQueryWriteApiPendingJsonItemWriter writer = new BigQueryWriteApiPendingJsonItemWriter<>(); - - writer.setMarshaller(this.marshaller == null ? new JacksonJsonObjectMarshaller<>() : this.marshaller); - writer.setBigQueryWriteClient(this.bigQueryWriteClient == null ? BigQueryWriteClient.create() : this.bigQueryWriteClient); - - if (apiFutureCallback != null) { - writer.setApiFutureCallback(apiFutureCallback); - writer.setExecutor(this.executor == null ? MoreExecutors.directExecutor() : this.executor); - } - - writer.setTableName(tableName); - - return writer; - } + private BigQueryWriteClient bigQueryWriteClient; + + private TableName tableName; + + private JsonObjectMarshaller marshaller; + + private ApiFutureCallback apiFutureCallback; + + private Executor executor; + + /** + * GRPC client that will be responsible for communication with BigQuery. 
+ * @param bigQueryWriteClient a client + * @return {@link BigQueryWriteApiPendingJsonItemWriterBuilder} + * @see BigQueryWriteApiPendingJsonItemWriter#setBigQueryWriteClient(BigQueryWriteClient) + */ + public BigQueryWriteApiPendingJsonItemWriterBuilder bigQueryWriteClient( + final BigQueryWriteClient bigQueryWriteClient) { + this.bigQueryWriteClient = bigQueryWriteClient; + return this; + } + + /** + * A table name along with a full path. + * @param tableName a name + * @return {@link BigQueryWriteApiPendingJsonItemWriterBuilder} + * @see BigQueryWriteApiPendingJsonItemWriter#setTableName(TableName) + */ + public BigQueryWriteApiPendingJsonItemWriterBuilder tableName(final TableName tableName) { + this.tableName = tableName; + return this; + } + + /** + * Converts your DTO into a {@link String}. + * @param marshaller your mapper + * @return {@link BigQueryWriteApiPendingJsonItemWriterBuilder} + * @see BigQueryWriteApiPendingJsonItemWriter#setMarshaller(JsonObjectMarshaller) + */ + public BigQueryWriteApiPendingJsonItemWriterBuilder marshaller(final JsonObjectMarshaller marshaller) { + this.marshaller = marshaller; + return this; + } + + /** + * A {@link ApiFutureCallback} that will be called on successful or failed event. + * @param apiFutureCallback a callback + * @return {@link BigQueryWriteApiPendingJsonItemWriterBuilder} + * @see BigQueryWriteApiPendingJsonItemWriter#setApiFutureCallback(ApiFutureCallback) + */ + public BigQueryWriteApiPendingJsonItemWriterBuilder apiFutureCallback( + final ApiFutureCallback apiFutureCallback) { + this.apiFutureCallback = apiFutureCallback; + return this; + } + + /** + * {@link Executor} that will be used for {@link ApiFutureCallback}. + * @param executor an executor + * @return {@link BigQueryWriteApiPendingJsonItemWriterBuilder} + * @see BigQueryWriteApiPendingJsonItemWriter#setExecutor(Executor) + * @see BigQueryWriteApiPendingJsonItemWriter#setApiFutureCallback(ApiFutureCallback) + */ + public BigQueryWriteApiPendingJsonItemWriterBuilder executor(final Executor executor) { + this.executor = executor; + return this; + } + + /** + * Please remember about + * {@link BigQueryWriteApiPendingJsonItemWriter#afterPropertiesSet()}. + * @return {@link BigQueryWriteApiPendingJsonItemWriter} + * @throws IOException in case when {@link BigQueryWriteClient} failed to be created + * automatically + */ + public BigQueryWriteApiPendingJsonItemWriter build() throws IOException { + BigQueryWriteApiPendingJsonItemWriter writer = new BigQueryWriteApiPendingJsonItemWriter<>(); + + writer.setMarshaller(this.marshaller == null ? new JacksonJsonObjectMarshaller<>() : this.marshaller); + + writer.setBigQueryWriteClient( + this.bigQueryWriteClient == null ? BigQueryWriteClient.create() : this.bigQueryWriteClient); + + if (apiFutureCallback != null) { + writer.setApiFutureCallback(apiFutureCallback); + writer.setExecutor(this.executor == null ? 
MoreExecutors.directExecutor() : this.executor); + } + + writer.setTableName(tableName); + + return writer; + } } diff --git a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/package-info.java b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/package-info.java index d81b6e9..a539c9c 100644 --- a/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/package-info.java +++ b/spring-batch-bigquery/src/main/java/org/springframework/batch/extensions/bigquery/writer/writeapi/package-info.java @@ -19,13 +19,13 @@ * * Supported types: *
- * <ul>
- * <li>{@link com.google.cloud.bigquery.storage.v1.WriteStream.Type#COMMITTED}</li>
- * <li>{@link com.google.cloud.bigquery.storage.v1.WriteStream.Type#PENDING}</li>
- * </ul>
+ * <ul>
+ * <li>{@link com.google.cloud.bigquery.storage.v1.WriteStream.Type#COMMITTED}</li>
+ * <li>{@link com.google.cloud.bigquery.storage.v1.WriteStream.Type#PENDING}</li>
+ * </ul>
  *
  * Supported formats:
- * <ul>
- * <li>JSON</li>
- * </ul>
+ * <ul>
+ * <li>JSON</li>
+ * </ul>
*/ package org.springframework.batch.extensions.bigquery.writer.writeapi; \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/NameUtils.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/NameUtils.java index 73fb713..5c461d4 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/NameUtils.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/NameUtils.java @@ -19,11 +19,14 @@ import java.util.concurrent.ThreadLocalRandom; public final class NameUtils { - private static final String DASH = "-"; - private NameUtils() {} - public static String generateTableName(String testType) { - return testType + DASH + ThreadLocalRandom.current().nextInt(100); - } + private static final String DASH = "-"; + + private NameUtils() { + } + + public static String generateTableName(String testType) { + return testType + DASH + ThreadLocalRandom.current().nextInt(100); + } } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/PersonDto.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/PersonDto.java index fec598a..15648e9 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/PersonDto.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/PersonDto.java @@ -23,27 +23,35 @@ import com.google.cloud.bigquery.storage.v1.TableFieldSchema; import com.google.cloud.bigquery.storage.v1.TableSchema; -@JsonPropertyOrder(value = {TestConstants.NAME, TestConstants.AGE}) +@JsonPropertyOrder(value = { TestConstants.NAME, TestConstants.AGE }) public record PersonDto(String name, Integer age) { - public static Schema getBigQuerySchema() { - Field nameField = Field.newBuilder(TestConstants.NAME, StandardSQLTypeName.STRING).setMode(Field.Mode.REQUIRED).build(); - Field ageField = Field.newBuilder(TestConstants.AGE, StandardSQLTypeName.INT64).setMode(Field.Mode.REQUIRED).build(); - return Schema.of(nameField, ageField); - } - - public static TableSchema getWriteApiSchema() { - TableFieldSchema name = TableFieldSchema.newBuilder() - .setType(TableFieldSchema.Type.STRING) - .setName(TestConstants.NAME) - .setMode(TableFieldSchema.Mode.REQUIRED) - .build(); - TableFieldSchema age = TableFieldSchema.newBuilder() - .setType(TableFieldSchema.Type.INT64) - .setName(TestConstants.AGE) - .setMode(TableFieldSchema.Mode.REQUIRED) - .build(); - return TableSchema.newBuilder().addFields(name).addFields(age).build(); - } + public static Schema getBigQuerySchema() { + Field nameField = Field.newBuilder(TestConstants.NAME, StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build(); + + Field ageField = Field.newBuilder(TestConstants.AGE, StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build(); + + return Schema.of(nameField, ageField); + } + + public static TableSchema getWriteApiSchema() { + TableFieldSchema name = TableFieldSchema.newBuilder() + .setType(TableFieldSchema.Type.STRING) + .setName(TestConstants.NAME) + .setMode(TableFieldSchema.Mode.REQUIRED) + .build(); + + TableFieldSchema age = TableFieldSchema.newBuilder() + .setType(TableFieldSchema.Type.INT64) + .setName(TestConstants.AGE) + .setMode(TableFieldSchema.Mode.REQUIRED) + .build(); + + return TableSchema.newBuilder().addFields(name).addFields(age).build(); 
+ } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/ResultVerifier.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/ResultVerifier.java index d5e62c4..3cd35ef 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/ResultVerifier.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/ResultVerifier.java @@ -24,31 +24,31 @@ import java.util.List; public final class ResultVerifier { - private ResultVerifier() { - } - - public static void verifyTableResult(Chunk expected, TableResult actual) { - List actualList = actual.streamValues().toList(); - - Assertions.assertEquals(expected.size(), actual.getTotalRows()); - Assertions.assertEquals(expected.size(), actualList.size()); - - actualList.forEach(field -> { - boolean containsName = expected - .getItems() - .stream() - .map(PersonDto::name) - .anyMatch(name -> field.get(0).getStringValue().equals(name)); - - boolean containsAge = expected - .getItems() - .stream() - .map(PersonDto::age) - .map(Long::valueOf) - .anyMatch(age -> age.compareTo(field.get(1).getLongValue()) == 0); - - Assertions.assertTrue(containsName); - Assertions.assertTrue(containsAge); - }); - } + + private ResultVerifier() { + } + + public static void verifyTableResult(Chunk expected, TableResult actual) { + List actualList = actual.streamValues().toList(); + + Assertions.assertEquals(expected.size(), actual.getTotalRows()); + Assertions.assertEquals(expected.size(), actualList.size()); + + actualList.forEach(field -> { + boolean containsName = expected.getItems() + .stream() + .map(PersonDto::name) + .anyMatch(name -> field.get(0).getStringValue().equals(name)); + + boolean containsAge = expected.getItems() + .stream() + .map(PersonDto::age) + .map(Long::valueOf) + .anyMatch(age -> age.compareTo(field.get(1).getLongValue()) == 0); + + Assertions.assertTrue(containsName); + Assertions.assertTrue(containsAge); + }); + } + } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/TestConstants.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/TestConstants.java index c8809fa..8b4a139 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/TestConstants.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/common/TestConstants.java @@ -26,26 +26,30 @@ public final class TestConstants { - private TestConstants() {} + private TestConstants() { + } - public static final String DATASET = "spring_batch_extensions"; - public static final String PROJECT = "batch-test"; + public static final String DATASET = "spring_batch_extensions"; - public static final String NAME = "name"; - public static final String AGE = "age"; - public static final String CSV = "csv"; - public static final String JSON = "json"; + public static final String PROJECT = "batch-test"; - public static final Converter PERSON_MAPPER = res -> new PersonDto( - res.get(NAME).getStringValue(), res.get(AGE).getNumericValue().intValue() - ); + public static final String NAME = "name"; - /** Order must be defined so later executed queries results could be predictable */ - private static final List PERSONS = Stream - .of(new PersonDto("Volodymyr", 27), new PersonDto("Oleksandra", 26)) - .sorted(Comparator.comparing(PersonDto::name)) - 
.toList(); + public static final String AGE = "age"; - public static final Chunk CHUNK = new Chunk<>(PERSONS); + public static final String CSV = "csv"; + + public static final String JSON = "json"; + + public static final Converter PERSON_MAPPER = res -> new PersonDto( + res.get(NAME).getStringValue(), res.get(AGE).getNumericValue().intValue()); + + /** Order must be defined so later executed queries results could be predictable */ + private static final List PERSONS = Stream + .of(new PersonDto("Volodymyr", 27), new PersonDto("Oleksandra", 26)) + .sorted(Comparator.comparing(PersonDto::name)) + .toList(); + + public static final Chunk CHUNK = new Chunk<>(PERSONS); } \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/base/EmulatorBaseTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/base/EmulatorBaseTest.java index de7b76e..e8068e4 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/base/EmulatorBaseTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/base/EmulatorBaseTest.java @@ -23,14 +23,15 @@ public abstract class EmulatorBaseTest { - protected static BigQueryOptions.Builder prepareBigQueryBuilder() { - return BigQueryOptions - .newBuilder() - .setProjectId(TestConstants.PROJECT) - .setCredentials(NoCredentials.getInstance()); - } + protected static BigQueryOptions.Builder prepareBigQueryBuilder() { + return BigQueryOptions.newBuilder() + .setProjectId(TestConstants.PROJECT) + .setCredentials(NoCredentials.getInstance()); + } + + protected static String getBigQueryUrl(GenericContainer container) { + return "http://" + container.getHost() + ":" + + container.getMappedPort(EmulatorBigQueryBaseDockerConfiguration.REST_PORT); + } - protected static String getBigQueryUrl(GenericContainer container) { - return "http://%s:%d".formatted(container.getHost(), container.getMappedPort(EmulatorBigQueryBaseDockerConfiguration.REST_PORT)); - } } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/base/EmulatorBigQueryBaseDockerConfiguration.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/base/EmulatorBigQueryBaseDockerConfiguration.java index b09e5f0..a859aef 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/base/EmulatorBigQueryBaseDockerConfiguration.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/base/EmulatorBigQueryBaseDockerConfiguration.java @@ -20,11 +20,14 @@ public final class EmulatorBigQueryBaseDockerConfiguration { - public static final int REST_PORT = 9050; - public static final int GRPC_PORT = 9060; + public static final int REST_PORT = 9050; - public static final GenericContainer CONTAINER = new GenericContainer<>("ghcr.io/goccy/bigquery-emulator:0.6.6") - .withExposedPorts(REST_PORT, GRPC_PORT); + public static final int GRPC_PORT = 9060; + + public static final GenericContainer CONTAINER = new GenericContainer<>("ghcr.io/goccy/bigquery-emulator:0.6.6") + .withExposedPorts(REST_PORT, GRPC_PORT); + + private EmulatorBigQueryBaseDockerConfiguration() { + } - private EmulatorBigQueryBaseDockerConfiguration() {} } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/package-info.java 
b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/package-info.java index 740c2f8..c105d3a 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/package-info.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/package-info.java @@ -15,8 +15,8 @@ */ /** - * These tests are run against BigQuery emulator. It is the next test level after unit tests. - * An attempt to cover integration with fake BigQuery. + * These tests are run against BigQuery emulator. It is the next test level after unit + * tests. An attempt to cover integration with fake BigQuery. * * @see GitHub */ diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/reader/EmulatorBigQueryItemReaderTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/reader/EmulatorBigQueryItemReaderTest.java index cefe4bd..8ef60a3 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/reader/EmulatorBigQueryItemReaderTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/reader/EmulatorBigQueryItemReaderTest.java @@ -27,52 +27,51 @@ class EmulatorBigQueryItemReaderTest extends EmulatorBaseItemReaderTest { - private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.CSV); + private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.CSV); - @Test - void testBatchReader() throws Exception { - QueryJobConfiguration jobConfiguration = QueryJobConfiguration - .newBuilder("SELECT p.name, p.age FROM spring_batch_extensions.csv p ORDER BY p.name LIMIT 2") - .setDestinationTable(TABLE_ID) - .setPriority(QueryJobConfiguration.Priority.BATCH) - .build(); + @Test + void testBatchReader() throws Exception { + QueryJobConfiguration jobConfiguration = QueryJobConfiguration + .newBuilder("SELECT p.name, p.age FROM spring_batch_extensions.csv p ORDER BY p.name LIMIT 2") + .setDestinationTable(TABLE_ID) + .setPriority(QueryJobConfiguration.Priority.BATCH) + .build(); - BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder() - .bigQuery(bigQuery) - .rowMapper(TestConstants.PERSON_MAPPER) - .jobConfiguration(jobConfiguration) - .build(); + BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder().bigQuery(bigQuery) + .rowMapper(TestConstants.PERSON_MAPPER) + .jobConfiguration(jobConfiguration) + .build(); - reader.afterPropertiesSet(); + reader.afterPropertiesSet(); - verifyResult(reader); - } + verifyResult(reader); + } - @Test - void testInteractiveReader() throws Exception { - QueryJobConfiguration jobConfiguration = QueryJobConfiguration - .newBuilder("SELECT p.name, p.age FROM spring_batch_extensions.csv p ORDER BY p.name LIMIT 2") - .setDestinationTable(TABLE_ID) - .build(); + @Test + void testInteractiveReader() throws Exception { + QueryJobConfiguration jobConfiguration = QueryJobConfiguration + .newBuilder("SELECT p.name, p.age FROM spring_batch_extensions.csv p ORDER BY p.name LIMIT 2") + .setDestinationTable(TABLE_ID) + .build(); - BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder() - .bigQuery(bigQuery) - .rowMapper(TestConstants.PERSON_MAPPER) - .jobConfiguration(jobConfiguration) - .build(); + BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder().bigQuery(bigQuery) + 
.rowMapper(TestConstants.PERSON_MAPPER) + .jobConfiguration(jobConfiguration) + .build(); - reader.afterPropertiesSet(); + reader.afterPropertiesSet(); - verifyResult(reader); - } + verifyResult(reader); + } - private void verifyResult(BigQueryQueryItemReader reader) throws Exception { - PersonDto actual1 = reader.read(); - Assertions.assertEquals("Volodymyr", actual1.name()); - Assertions.assertEquals(27, actual1.age()); + private void verifyResult(BigQueryQueryItemReader reader) throws Exception { + PersonDto actual1 = reader.read(); + Assertions.assertEquals("Volodymyr", actual1.name()); + Assertions.assertEquals(27, actual1.age()); + + PersonDto actual2 = reader.read(); + Assertions.assertEquals("Oleksandra", actual2.name()); + Assertions.assertEquals(26, actual2.age()); + } - PersonDto actual2 = reader.read(); - Assertions.assertEquals("Oleksandra", actual2.name()); - Assertions.assertEquals(26, actual2.age()); - } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/reader/base/EmulatorBaseItemReaderTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/reader/base/EmulatorBaseItemReaderTest.java index 3da22ba..1c75836 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/reader/base/EmulatorBaseItemReaderTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/reader/base/EmulatorBaseItemReaderTest.java @@ -29,15 +29,16 @@ @Testcontainers public abstract class EmulatorBaseItemReaderTest extends EmulatorBaseTest { - @Container - private static final GenericContainer CONTAINER = EmulatorBigQueryBaseDockerConfiguration.CONTAINER - .withCommand("--project=" + TestConstants.PROJECT, "--log-level=debug", "--data-from-yaml=/reader-test.yaml") - .withCopyFileToContainer(MountableFile.forClasspathResource("reader-test.yaml"), "/reader-test.yaml"); + @Container + private static final GenericContainer CONTAINER = EmulatorBigQueryBaseDockerConfiguration.CONTAINER + .withCommand("--project=" + TestConstants.PROJECT, "--log-level=debug", "--data-from-yaml=/reader-test.yaml") + .withCopyFileToContainer(MountableFile.forClasspathResource("reader-test.yaml"), "/reader-test.yaml"); - protected static BigQuery bigQuery; + protected static BigQuery bigQuery; + + @BeforeAll + static void init() { + bigQuery = prepareBigQueryBuilder().setHost(getBigQueryUrl(CONTAINER)).build().getService(); + } - @BeforeAll - static void init() { - bigQuery = prepareBigQueryBuilder().setHost(getBigQueryUrl(CONTAINER)).build().getService(); - } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/base/EmulatorBaseItemWriterTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/base/EmulatorBaseItemWriterTest.java index fbc9ea0..a99d3f9 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/base/EmulatorBaseItemWriterTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/base/EmulatorBaseItemWriterTest.java @@ -43,53 +43,60 @@ @Testcontainers public abstract class EmulatorBaseItemWriterTest extends EmulatorBaseTest { - @Container - protected static final GenericContainer BIG_QUERY_CONTAINER = EmulatorBigQueryBaseDockerConfiguration.CONTAINER - 
.withCommand("--project=" + TestConstants.PROJECT, "--log-level=debug", "--data-from-yaml=/writer-test.yaml", "--database=/test-db") - .withCopyFileToContainer(MountableFile.forClasspathResource("writer-test.yaml"), "/writer-test.yaml"); - - protected static BigQuery bigQuery; - protected static BigQueryWriteClient bigQueryWriteClient; - - private static WireMockServer wireMockServer; - - static { - try { - LogManager.getLogManager().readConfiguration(new ClassPathResource("java-util-logging.properties").getInputStream()); - } catch (IOException e) { - throw new IllegalStateException(); - } - } - - @BeforeAll - static void setupAll() throws IOException { - SpyResponseExtension extension = new SpyResponseExtension(); - - wireMockServer = new WireMockServer(WireMockConfiguration.wireMockConfig().dynamicPort().extensions(extension)); - wireMockServer.start(); - extension.setWireMockPort(wireMockServer.port()); - - wireMockServer.stubFor( - WireMock.any(WireMock.urlMatching(".*")).willReturn(WireMock.aResponse().proxiedFrom(getBigQueryUrl(BIG_QUERY_CONTAINER))) - ); - - bigQuery = prepareBigQueryBuilder().setHost(wireMockServer.baseUrl()).build().getService(); - - InstantiatingGrpcChannelProvider grpc = BigQueryWriteSettings - .defaultGrpcTransportProviderBuilder() - .setChannelConfigurator(ManagedChannelBuilder::usePlaintext) - .build(); - BigQueryWriteSettings settings = BigQueryWriteSettings.newBuilder() - .setCredentialsProvider(NoCredentialsProvider.create()) - .setEndpoint("127.0.0.1:" + BIG_QUERY_CONTAINER.getMappedPort(EmulatorBigQueryBaseDockerConfiguration.GRPC_PORT)) - .setTransportChannelProvider(grpc) - .build(); - bigQueryWriteClient = BigQueryWriteClient.create(settings); - } - - @AfterAll - static void shutdownAll() { - Optional.ofNullable(wireMockServer).ifPresent(WireMockServer::stop); - } + @Container + protected static final GenericContainer BIG_QUERY_CONTAINER = EmulatorBigQueryBaseDockerConfiguration.CONTAINER + .withCommand("--project=" + TestConstants.PROJECT, "--log-level=debug", "--data-from-yaml=/writer-test.yaml", + "--database=/test-db") + .withCopyFileToContainer(MountableFile.forClasspathResource("writer-test.yaml"), "/writer-test.yaml"); + + protected static BigQuery bigQuery; + + protected static BigQueryWriteClient bigQueryWriteClient; + + private static WireMockServer wireMockServer; + + static { + try { + LogManager.getLogManager() + .readConfiguration(new ClassPathResource("java-util-logging.properties").getInputStream()); + } + catch (IOException e) { + throw new IllegalStateException(); + } + } + + @BeforeAll + static void setupAll() throws IOException { + SpyResponseExtension extension = new SpyResponseExtension(); + + wireMockServer = new WireMockServer(WireMockConfiguration.wireMockConfig().dynamicPort().extensions(extension)); + wireMockServer.start(); + extension.setWireMockPort(wireMockServer.port()); + + wireMockServer.stubFor(WireMock.any(WireMock.urlMatching(".*")) + .willReturn(WireMock.aResponse().proxiedFrom(getBigQueryUrl(BIG_QUERY_CONTAINER)))); + + bigQuery = prepareBigQueryBuilder().setHost(wireMockServer.baseUrl()).build().getService(); + + InstantiatingGrpcChannelProvider grpc = BigQueryWriteSettings.defaultGrpcTransportProviderBuilder() + .setChannelConfigurator(ManagedChannelBuilder::usePlaintext) + .build(); + + String endpoint = "127.0.0.1:" + + BIG_QUERY_CONTAINER.getMappedPort(EmulatorBigQueryBaseDockerConfiguration.GRPC_PORT); + + BigQueryWriteSettings settings = BigQueryWriteSettings.newBuilder() + 
.setCredentialsProvider(NoCredentialsProvider.create()) + .setEndpoint(endpoint) + .setTransportChannelProvider(grpc) + .build(); + + bigQueryWriteClient = BigQueryWriteClient.create(settings); + } + + @AfterAll + static void shutdownAll() { + Optional.ofNullable(wireMockServer).ifPresent(WireMockServer::stop); + } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/base/SpyResponseExtension.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/base/SpyResponseExtension.java index 554c8c7..0d8176a 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/base/SpyResponseExtension.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/base/SpyResponseExtension.java @@ -28,49 +28,50 @@ public final class SpyResponseExtension implements ResponseTransformerV2 { - private static final String BQ_DOCKER_URL_PREFIX = "http://0.0.0.0:"; - private static final String BQ_DOCKER_URL = BQ_DOCKER_URL_PREFIX + EmulatorBigQueryBaseDockerConfiguration.REST_PORT; - - private int wireMockPort; - - @Override - public Response transform(Response response, ServeEvent serveEvent) { - var originalHeaders = response.getHeaders(); - HttpHeader originalLocationHeader = originalHeaders.getHeader(HttpHeaders.LOCATION); - - List locationHeaderValues = originalLocationHeader.getValues(); - boolean containsLocationHeader = locationHeaderValues.stream().anyMatch(s -> s.startsWith(BQ_DOCKER_URL)); - - if (containsLocationHeader) { - if (locationHeaderValues.size() > 1) { - throw new IllegalStateException(); - } - - List headersWithoutLocation = originalHeaders - .all() - .stream() - .filter(Predicate.not(hh -> hh.keyEquals(HttpHeaders.LOCATION))) - .toList(); - - HttpHeader updatedHeader = HttpHeader.httpHeader( - HttpHeaders.LOCATION, locationHeaderValues.get(0).replace(BQ_DOCKER_URL, BQ_DOCKER_URL_PREFIX + wireMockPort) - ); - - return Response.Builder - .like(response) - .but() - .headers(new com.github.tomakehurst.wiremock.http.HttpHeaders(headersWithoutLocation).plus(updatedHeader)) - .build(); - } - return response; - } - - @Override - public String getName() { - return "spy-response-extension"; - } - - public void setWireMockPort(int wireMockPort) { - this.wireMockPort = wireMockPort; - } + private static final String BQ_DOCKER_URL_PREFIX = "http://0.0.0.0:"; + + private static final String BQ_DOCKER_URL = BQ_DOCKER_URL_PREFIX + + EmulatorBigQueryBaseDockerConfiguration.REST_PORT; + + private int wireMockPort; + + @Override + public Response transform(Response response, ServeEvent serveEvent) { + var originalHeaders = response.getHeaders(); + HttpHeader originalLocationHeader = originalHeaders.getHeader(HttpHeaders.LOCATION); + + List locationHeaderValues = originalLocationHeader.getValues(); + boolean containsLocationHeader = locationHeaderValues.stream().anyMatch(s -> s.startsWith(BQ_DOCKER_URL)); + + if (containsLocationHeader) { + if (locationHeaderValues.size() > 1) { + throw new IllegalStateException(); + } + + List headersWithoutLocation = originalHeaders.all() + .stream() + .filter(Predicate.not(hh -> hh.keyEquals(HttpHeaders.LOCATION))) + .toList(); + + HttpHeader updatedHeader = HttpHeader.httpHeader(HttpHeaders.LOCATION, + locationHeaderValues.get(0).replace(BQ_DOCKER_URL, BQ_DOCKER_URL_PREFIX + wireMockPort)); + + com.github.tomakehurst.wiremock.http.HttpHeaders 
headers = new com.github.tomakehurst.wiremock.http.HttpHeaders( + headersWithoutLocation) + .plus(updatedHeader); + + return Response.Builder.like(response).but().headers(headers).build(); + } + return response; + } + + @Override + public String getName() { + return "spy-response-extension"; + } + + public void setWireMockPort(int wireMockPort) { + this.wireMockPort = wireMockPort; + } + } \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/loadjob/csv/EmulatorBigQueryLoadJobCsvItemWriterTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/loadjob/csv/EmulatorBigQueryLoadJobCsvItemWriterTest.java index 127d741..5f9eb8e 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/loadjob/csv/EmulatorBigQueryLoadJobCsvItemWriterTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/loadjob/csv/EmulatorBigQueryLoadJobCsvItemWriterTest.java @@ -33,29 +33,30 @@ class EmulatorBigQueryLoadJobCsvItemWriterTest extends EmulatorBaseItemWriterTest { - // TODO find out why data is not persisted into the sqlite database - // at the same time it works fine with json/insertAll job/yaml file - // cover 2 scenarios (predefined schema + generate on the fly) - @Test - @Disabled("Not working at the moment") - void testWrite() throws Exception { - TableId tableId = TableId.of(TestConstants.DATASET, NameUtils.generateTableName(TestConstants.CSV)); - Chunk expectedChunk = Chunk.of(new PersonDto("Ivan", 30)); - - WriteChannelConfiguration channelConfig = WriteChannelConfiguration - .newBuilder(tableId) - .setFormatOptions(FormatOptions.csv()) - .setSchema(PersonDto.getBigQuerySchema()) - .build(); - - BigQueryLoadJobCsvItemWriter writer = new BigQueryCsvItemWriterBuilder() - .bigQuery(bigQuery) - .writeChannelConfig(channelConfig) - .build(); - writer.afterPropertiesSet(); - - writer.write(expectedChunk); - - ResultVerifier.verifyTableResult(expectedChunk, bigQuery.listTableData(tableId, BigQuery.TableDataListOption.pageSize(5L))); - } + // TODO find out why data is not persisted into the sqlite database + // at the same time it works fine with json/insertAll job/yaml file + // cover 2 scenarios (predefined schema + generate on the fly) + @Test + @Disabled("Not working at the moment") + void testWrite() throws Exception { + TableId tableId = TableId.of(TestConstants.DATASET, NameUtils.generateTableName(TestConstants.CSV)); + Chunk expectedChunk = Chunk.of(new PersonDto("Ivan", 30)); + + WriteChannelConfiguration channelConfig = WriteChannelConfiguration.newBuilder(tableId) + .setFormatOptions(FormatOptions.csv()) + .setSchema(PersonDto.getBigQuerySchema()) + .build(); + + BigQueryLoadJobCsvItemWriter writer = new BigQueryCsvItemWriterBuilder() + .bigQuery(bigQuery) + .writeChannelConfig(channelConfig) + .build(); + writer.afterPropertiesSet(); + + writer.write(expectedChunk); + + ResultVerifier.verifyTableResult(expectedChunk, + bigQuery.listTableData(tableId, BigQuery.TableDataListOption.pageSize(5L))); + } + } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/loadjob/json/EmulatorBigQueryLoadJobJsonItemWriterTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/loadjob/json/EmulatorBigQueryLoadJobJsonItemWriterTest.java index 038d94b..d02ba63 
100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/loadjob/json/EmulatorBigQueryLoadJobJsonItemWriterTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/loadjob/json/EmulatorBigQueryLoadJobJsonItemWriterTest.java @@ -37,39 +37,38 @@ class EmulatorBigQueryLoadJobJsonItemWriterTest extends EmulatorBaseItemWriterTest { - @ParameterizedTest - @MethodSource("tables") - void testWrite(String table, boolean autodetect) throws Exception { - TableId tableId = TableId.of(TestConstants.DATASET, table); - Chunk expectedChunk = Chunk.of(new PersonDto("Ivan", 30)); + @ParameterizedTest + @MethodSource("tables") + void testWrite(String table, boolean autodetect) throws Exception { + TableId tableId = TableId.of(TestConstants.DATASET, table); + Chunk expectedChunk = Chunk.of(new PersonDto("Ivan", 30)); - WriteChannelConfiguration channelConfig = WriteChannelConfiguration - .newBuilder(tableId) - .setFormatOptions(FormatOptions.json()) - .setSchema(autodetect ? null : PersonDto.getBigQuerySchema()) - .setAutodetect(autodetect) - .build(); + WriteChannelConfiguration channelConfig = WriteChannelConfiguration.newBuilder(tableId) + .setFormatOptions(FormatOptions.json()) + .setSchema(autodetect ? null : PersonDto.getBigQuerySchema()) + .setAutodetect(autodetect) + .build(); - BigQueryLoadJobJsonItemWriter writer = new BigQueryLoadJobJsonItemWriterBuilder() - .bigQuery(bigQuery) - .writeChannelConfig(channelConfig) - .marshaller(new JacksonJsonObjectMarshaller<>()) - .build(); - writer.afterPropertiesSet(); + BigQueryLoadJobJsonItemWriter writer = new BigQueryLoadJobJsonItemWriterBuilder() + .bigQuery(bigQuery) + .writeChannelConfig(channelConfig) + .marshaller(new JacksonJsonObjectMarshaller<>()) + .build(); + writer.afterPropertiesSet(); - writer.write(expectedChunk); + writer.write(expectedChunk); - ResultVerifier.verifyTableResult(expectedChunk, bigQuery.listTableData(tableId, BigQuery.TableDataListOption.pageSize(5L))); - } + ResultVerifier.verifyTableResult(expectedChunk, + bigQuery.listTableData(tableId, BigQuery.TableDataListOption.pageSize(5L))); + } - private static Stream tables() { - return Stream.of( - Arguments.of(NameUtils.generateTableName(TestConstants.JSON), false), + private static Stream tables() { + return Stream.of(Arguments.of(NameUtils.generateTableName(TestConstants.JSON), false), - // TODO auto detect is broken on big query container side? - // Arguments.of(TableUtils.generateTableName(TestConstants.JSON), true), + // TODO auto detect is broken on big query container side? 
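+ // With setAutodetect(true) the writer passes no schema (setSchema(null)) and asks BigQuery + // to infer one from the JSON payload itself; the emulator apparently does not implement that + // inference, which is presumably why the autodetect case below stays commented out: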
+ // Arguments.of(TableUtils.generateTableName(TestConstants.JSON), true), + + Arguments.of(TestConstants.JSON, false)); + } - Arguments.of(TestConstants.JSON, false) - ); - } } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/writeapi/json/EmulatorBigQueryWriteApiCommitedJsonItemWriterTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/writeapi/json/EmulatorBigQueryWriteApiCommitedJsonItemWriterTest.java index ad069d7..aa73537 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/writeapi/json/EmulatorBigQueryWriteApiCommitedJsonItemWriterTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/writeapi/json/EmulatorBigQueryWriteApiCommitedJsonItemWriterTest.java @@ -23,34 +23,36 @@ class EmulatorBigQueryWriteApiCommitedJsonItemWriterTest extends EmulatorBaseItemWriterTest { - @Test - void testWrite() throws Exception { - AtomicBoolean consumerCalled = new AtomicBoolean(); - TableId tableId = TableId.of(TestConstants.PROJECT, TestConstants.DATASET, NameUtils.generateTableName(TestConstants.JSON)); - TableDefinition tableDefinition = StandardTableDefinition.of(PersonDto.getBigQuerySchema()); - bigQuery.create(TableInfo.of(tableId, tableDefinition)); - - Chunk expected = TestConstants.CHUNK; - - BigQueryWriteApiCommitedJsonItemWriter writer = new BigQueryWriteApiCommitedJsonItemWriter<>(); - writer.setBigQueryWriteClient(bigQueryWriteClient); - writer.setTableName(TableName.of(tableId.getProject(), tableId.getDataset(), tableId.getTable())); - writer.setMarshaller(new JacksonJsonObjectMarshaller<>()); - writer.setApiFutureCallback(new ApiFutureCallback<>() { - @Override - public void onFailure(Throwable t) {} - - @Override - public void onSuccess(AppendRowsResponse result) { - consumerCalled.set(true); - } - }); - writer.setExecutor(Executors.newSingleThreadExecutor()); - - writer.write(expected); - - ResultVerifier.verifyTableResult(expected, bigQuery.listTableData(tableId)); - Assertions.assertTrue(consumerCalled.get()); - } + @Test + void testWrite() throws Exception { + AtomicBoolean consumerCalled = new AtomicBoolean(); + TableId tableId = TableId.of(TestConstants.PROJECT, TestConstants.DATASET, + NameUtils.generateTableName(TestConstants.JSON)); + TableDefinition tableDefinition = StandardTableDefinition.of(PersonDto.getBigQuerySchema()); + bigQuery.create(TableInfo.of(tableId, tableDefinition)); + + Chunk expected = TestConstants.CHUNK; + + BigQueryWriteApiCommitedJsonItemWriter writer = new BigQueryWriteApiCommitedJsonItemWriter<>(); + writer.setBigQueryWriteClient(bigQueryWriteClient); + writer.setTableName(TableName.of(tableId.getProject(), tableId.getDataset(), tableId.getTable())); + writer.setMarshaller(new JacksonJsonObjectMarshaller<>()); + writer.setApiFutureCallback(new ApiFutureCallback<>() { + @Override + public void onFailure(Throwable t) { + } + + @Override + public void onSuccess(AppendRowsResponse result) { + consumerCalled.set(true); + } + }); + writer.setExecutor(Executors.newSingleThreadExecutor()); + + writer.write(expected); + + ResultVerifier.verifyTableResult(expected, bigQuery.listTableData(tableId)); + Assertions.assertTrue(consumerCalled.get()); + } } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/writeapi/json/EmulatorBigQueryWriteApiPendingJsonItemWriterTest.java 
b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/writeapi/json/EmulatorBigQueryWriteApiPendingJsonItemWriterTest.java index 75aef26..6effddb 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/writeapi/json/EmulatorBigQueryWriteApiPendingJsonItemWriterTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/emulator/writer/writeapi/json/EmulatorBigQueryWriteApiPendingJsonItemWriterTest.java @@ -23,34 +23,36 @@ class EmulatorBigQueryWriteApiPendingJsonItemWriterTest extends EmulatorBaseItemWriterTest { - @Test - void testWrite() throws Exception { - AtomicBoolean consumerCalled = new AtomicBoolean(); - TableId tableId = TableId.of(TestConstants.PROJECT, TestConstants.DATASET, NameUtils.generateTableName(TestConstants.JSON)); - TableDefinition tableDefinition = StandardTableDefinition.of(PersonDto.getBigQuerySchema()); - bigQuery.create(TableInfo.of(tableId, tableDefinition)); - - Chunk expected = TestConstants.CHUNK; - - BigQueryWriteApiPendingJsonItemWriter writer = new BigQueryWriteApiPendingJsonItemWriter<>(); - writer.setBigQueryWriteClient(bigQueryWriteClient); - writer.setTableName(TableName.of(tableId.getProject(), tableId.getDataset(), tableId.getTable())); - writer.setMarshaller(new JacksonJsonObjectMarshaller<>()); - writer.setApiFutureCallback(new ApiFutureCallback<>() { - @Override - public void onFailure(Throwable t) {} - - @Override - public void onSuccess(AppendRowsResponse result) { - consumerCalled.set(true); - } - }); - writer.setExecutor(Executors.newSingleThreadExecutor()); - - writer.write(expected); - - ResultVerifier.verifyTableResult(expected, bigQuery.listTableData(tableId)); - Assertions.assertTrue(consumerCalled.get()); - } + @Test + void testWrite() throws Exception { + AtomicBoolean consumerCalled = new AtomicBoolean(); + TableId tableId = TableId.of(TestConstants.PROJECT, TestConstants.DATASET, + NameUtils.generateTableName(TestConstants.JSON)); + TableDefinition tableDefinition = StandardTableDefinition.of(PersonDto.getBigQuerySchema()); + bigQuery.create(TableInfo.of(tableId, tableDefinition)); + + Chunk expected = TestConstants.CHUNK; + + BigQueryWriteApiPendingJsonItemWriter writer = new BigQueryWriteApiPendingJsonItemWriter<>(); + writer.setBigQueryWriteClient(bigQueryWriteClient); + writer.setTableName(TableName.of(tableId.getProject(), tableId.getDataset(), tableId.getTable())); + writer.setMarshaller(new JacksonJsonObjectMarshaller<>()); + writer.setApiFutureCallback(new ApiFutureCallback<>() { + @Override + public void onFailure(Throwable t) { + } + + @Override + public void onSuccess(AppendRowsResponse result) { + consumerCalled.set(true); + } + }); + writer.setExecutor(Executors.newSingleThreadExecutor()); + + writer.write(expected); + + ResultVerifier.verifyTableResult(expected, bigQuery.listTableData(tableId)); + Assertions.assertTrue(consumerCalled.get()); + } } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/base/GcloudBaseBigQueryIntegrationTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/base/GcloudBaseBigQueryIntegrationTest.java index 4fc0ca4..040498c 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/base/GcloudBaseBigQueryIntegrationTest.java +++ 
b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/base/GcloudBaseBigQueryIntegrationTest.java @@ -20,5 +20,7 @@ import com.google.cloud.bigquery.BigQueryOptions; public abstract class GcloudBaseBigQueryIntegrationTest { - protected static final BigQuery BIG_QUERY = BigQueryOptions.getDefaultInstance().getService(); + + protected static final BigQuery BIG_QUERY = BigQueryOptions.getDefaultInstance().getService(); + } \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/package-info.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/package-info.java index 757ab80..26158ff 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/package-info.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/package-info.java @@ -16,15 +16,17 @@ /** * To launch these tests, you should provide a way to authenticate to Google BigQuery. - * A simple way is to create a service account, store credentials as JSON file and provide environment variable. - * Example: GOOGLE_APPLICATION_CREDENTIALS=/home/user/Downloads/bq-key.json *

- * Test names should follow this pattern: test1, test2, testN. - * So later in BigQuery you will see generated table name: csv_test1, csv_test2, csv_testN. - * This way, it will be easier to trace errors in BigQuery. + * A simple way is to create a service account, store the credentials as a JSON file, and + * provide an environment variable. Example: + * GOOGLE_APPLICATION_CREDENTIALS=/home/user/Downloads/bq-key.json *

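+ * With that variable set, the default credentials are picked up automatically, for example: + * BigQuery bigQuery = BigQueryOptions.getDefaultInstance().getService();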
- * It is the next test level after emulator tests. - * Real world integration testing. - * @see Authentication + * Test names should follow this pattern: test1, test2, testN. So later in BigQuery you + * will see the generated table names: csv_test1, csv_test2, csv_testN. This way, it will + * be easier to trace errors in BigQuery. + *

+ * It is the next test level after emulator tests. Real world integration testing. + * @see Authentication */ package org.springframework.batch.extensions.bigquery.gcloud; \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/reader/GcloudBigQueryItemReaderTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/reader/GcloudBigQueryItemReaderTest.java index 63ce46f..9cfec45 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/reader/GcloudBigQueryItemReaderTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/reader/GcloudBigQueryItemReaderTest.java @@ -41,98 +41,100 @@ class GcloudBigQueryItemReaderTest extends GcloudBaseBigQueryIntegrationTest { - private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.CSV); - - @BeforeAll - static void init() throws Exception { - if (BIG_QUERY.getDataset(TestConstants.DATASET) == null) { - BIG_QUERY.create(DatasetInfo.of(TestConstants.DATASET)); - } - - if (BIG_QUERY.getTable(TestConstants.DATASET, TestConstants.CSV) == null) { - TableDefinition tableDefinition = StandardTableDefinition.of(PersonDto.getBigQuerySchema()); - BIG_QUERY.create(TableInfo.of(TABLE_ID, tableDefinition)); - } - - loadCsvSample(); - } - - @AfterAll - static void cleanupTest() { - BIG_QUERY.delete(TABLE_ID); - } - - @Test - void testBatchQuery() throws Exception { - QueryJobConfiguration jobConfiguration = QueryJobConfiguration - .newBuilder("SELECT p.name, p.age FROM spring_batch_extensions.%s p ORDER BY p.name LIMIT 2".formatted(TestConstants.CSV)) - .setDestinationTable(TABLE_ID) - .setPriority(QueryJobConfiguration.Priority.BATCH) - .build(); - - BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder() - .bigQuery(BIG_QUERY) - .rowMapper(TestConstants.PERSON_MAPPER) - .jobConfiguration(jobConfiguration) - .build(); - - reader.afterPropertiesSet(); - - verifyResult(reader); - } - - @Test - void testInteractiveQuery() throws Exception { - BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder() - .bigQuery(BIG_QUERY) - .rowMapper(TestConstants.PERSON_MAPPER) - .query("SELECT p.name, p.age FROM spring_batch_extensions.%s p ORDER BY p.name LIMIT 2".formatted(TestConstants.CSV)) - .build(); - - reader.afterPropertiesSet(); - - verifyResult(reader); - } - - private void verifyResult(BigQueryQueryItemReader reader) throws Exception { - PersonDto actualFirstPerson = reader.read(); - PersonDto expectedFirstPerson = TestConstants.CHUNK.getItems().get(0); - - PersonDto actualSecondPerson = reader.read(); - PersonDto expectedSecondPerson = TestConstants.CHUNK.getItems().get(1); - - PersonDto actualThirdPerson = reader.read(); - - Assertions.assertNotNull(actualFirstPerson); - Assertions.assertEquals(expectedFirstPerson.name(), actualFirstPerson.name()); - Assertions.assertEquals(0, expectedFirstPerson.age().compareTo(actualFirstPerson.age())); - - Assertions.assertNotNull(actualSecondPerson); - Assertions.assertEquals(expectedSecondPerson.name(), actualSecondPerson.name()); - Assertions.assertEquals(0, expectedSecondPerson.age().compareTo(actualSecondPerson.age())); - - Assertions.assertNull(actualThirdPerson); - } - - private static void loadCsvSample() throws Exception { - AtomicReference job = new AtomicReference<>(); - - WriteChannelConfiguration channelConfiguration = WriteChannelConfiguration - 
.newBuilder(TABLE_ID) - .setSchema(PersonDto.getBigQuerySchema()) - .setAutodetect(false) - .setFormatOptions(FormatOptions.csv()) - .build(); - - BigQueryLoadJobCsvItemWriter writer = new BigQueryCsvItemWriterBuilder() - .bigQuery(BIG_QUERY) - .writeChannelConfig(channelConfiguration) - .jobConsumer(job::set) - .build(); - - writer.afterPropertiesSet(); - writer.write(TestConstants.CHUNK); - job.get().waitFor(); - } + private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.CSV); + + @BeforeAll + static void init() throws Exception { + if (BIG_QUERY.getDataset(TestConstants.DATASET) == null) { + BIG_QUERY.create(DatasetInfo.of(TestConstants.DATASET)); + } + + if (BIG_QUERY.getTable(TestConstants.DATASET, TestConstants.CSV) == null) { + TableDefinition tableDefinition = StandardTableDefinition.of(PersonDto.getBigQuerySchema()); + BIG_QUERY.create(TableInfo.of(TABLE_ID, tableDefinition)); + } + + loadCsvSample(); + } + + @AfterAll + static void cleanupTest() { + BIG_QUERY.delete(TABLE_ID); + } + + @Test + void testBatchQuery() throws Exception { + String query = "SELECT p.name, p.age FROM spring_batch_extensions.%s p ORDER BY p.name LIMIT 2" + .formatted(TestConstants.CSV); + + QueryJobConfiguration jobConfiguration = QueryJobConfiguration.newBuilder(query) + .setDestinationTable(TABLE_ID) + .setPriority(QueryJobConfiguration.Priority.BATCH) + .build(); + + BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder().bigQuery(BIG_QUERY) + .rowMapper(TestConstants.PERSON_MAPPER) + .jobConfiguration(jobConfiguration) + .build(); + + reader.afterPropertiesSet(); + + verifyResult(reader); + } + + @Test + void testInteractiveQuery() throws Exception { + String query = "SELECT p.name, p.age FROM spring_batch_extensions.%s p ORDER BY p.name LIMIT 2" + .formatted(TestConstants.CSV); + + BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder().bigQuery(BIG_QUERY) + .rowMapper(TestConstants.PERSON_MAPPER) + .query(query) + .build(); + + reader.afterPropertiesSet(); + + verifyResult(reader); + } + + private void verifyResult(BigQueryQueryItemReader reader) throws Exception { + PersonDto actualFirstPerson = reader.read(); + PersonDto expectedFirstPerson = TestConstants.CHUNK.getItems().get(0); + + PersonDto actualSecondPerson = reader.read(); + PersonDto expectedSecondPerson = TestConstants.CHUNK.getItems().get(1); + + PersonDto actualThirdPerson = reader.read(); + + Assertions.assertNotNull(actualFirstPerson); + Assertions.assertEquals(expectedFirstPerson.name(), actualFirstPerson.name()); + Assertions.assertEquals(0, expectedFirstPerson.age().compareTo(actualFirstPerson.age())); + + Assertions.assertNotNull(actualSecondPerson); + Assertions.assertEquals(expectedSecondPerson.name(), actualSecondPerson.name()); + Assertions.assertEquals(0, expectedSecondPerson.age().compareTo(actualSecondPerson.age())); + + Assertions.assertNull(actualThirdPerson); + } + + private static void loadCsvSample() throws Exception { + AtomicReference job = new AtomicReference<>(); + + WriteChannelConfiguration channelConfiguration = WriteChannelConfiguration.newBuilder(TABLE_ID) + .setSchema(PersonDto.getBigQuerySchema()) + .setAutodetect(false) + .setFormatOptions(FormatOptions.csv()) + .build(); + + BigQueryLoadJobCsvItemWriter writer = new BigQueryCsvItemWriterBuilder() + .bigQuery(BIG_QUERY) + .writeChannelConfig(channelConfiguration) + .jobConsumer(job::set) + .build(); + + writer.afterPropertiesSet(); + writer.write(TestConstants.CHUNK); + job.get().waitFor(); + } } 
\ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/GcloudBaseBigQueryItemWriterTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/GcloudBaseBigQueryItemWriterTest.java index 52f471f..0dbbc41 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/GcloudBaseBigQueryItemWriterTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/GcloudBaseBigQueryItemWriterTest.java @@ -28,15 +28,15 @@ public abstract class GcloudBaseBigQueryItemWriterTest extends GcloudBaseBigQueryIntegrationTest { - protected void verifyResults(String tableName) { - Dataset dataset = BIG_QUERY.getDataset(TestConstants.DATASET); - Table table = BIG_QUERY.getTable(TableId.of(TestConstants.DATASET, tableName)); - TableId tableId = table.getTableId(); - TableResult tableResult = BIG_QUERY.listTableData(tableId, BigQuery.TableDataListOption.pageSize(2L)); + protected void verifyResults(String tableName) { + Dataset dataset = BIG_QUERY.getDataset(TestConstants.DATASET); + Table table = BIG_QUERY.getTable(TableId.of(TestConstants.DATASET, tableName)); + TableId tableId = table.getTableId(); + TableResult tableResult = BIG_QUERY.listTableData(tableId, BigQuery.TableDataListOption.pageSize(2L)); - Assertions.assertNotNull(dataset.getDatasetId()); - Assertions.assertNotNull(tableId); - ResultVerifier.verifyTableResult(TestConstants.CHUNK, tableResult); - } + Assertions.assertNotNull(dataset.getDatasetId()); + Assertions.assertNotNull(tableId); + ResultVerifier.verifyTableResult(TestConstants.CHUNK, tableResult); + } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/loadjob/csv/GcloudBigQueryLoadJobCsvItemWriterTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/loadjob/csv/GcloudBigQueryLoadJobCsvItemWriterTest.java index 1c0e600..209a6e8 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/loadjob/csv/GcloudBigQueryLoadJobCsvItemWriterTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/loadjob/csv/GcloudBigQueryLoadJobCsvItemWriterTest.java @@ -41,56 +41,54 @@ class GcloudBigQueryLoadJobCsvItemWriterTest extends GcloudBaseBigQueryItemWriterTest { - private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.CSV); + private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.CSV); - @BeforeAll - static void prepareTest() { - if (BIG_QUERY.getDataset(TestConstants.DATASET) == null) { - BIG_QUERY.create(DatasetInfo.of(TestConstants.DATASET)); - } + @BeforeAll + static void prepareTest() { + if (BIG_QUERY.getDataset(TestConstants.DATASET) == null) { + BIG_QUERY.create(DatasetInfo.of(TestConstants.DATASET)); + } - if (BIG_QUERY.getTable(TestConstants.DATASET, TestConstants.CSV) == null) { - TableDefinition tableDefinition = StandardTableDefinition.of(PersonDto.getBigQuerySchema()); - BIG_QUERY.create(TableInfo.of(TABLE_ID, tableDefinition)); - } - } + if (BIG_QUERY.getTable(TestConstants.DATASET, TestConstants.CSV) == null) { + TableDefinition tableDefinition = StandardTableDefinition.of(PersonDto.getBigQuerySchema()); + BIG_QUERY.create(TableInfo.of(TABLE_ID, 
tableDefinition)); + } + } - @AfterAll - static void cleanup() { - BIG_QUERY.delete(TABLE_ID); - } + @AfterAll + static void cleanup() { + BIG_QUERY.delete(TABLE_ID); + } - @ParameterizedTest - @MethodSource("tables") - void testWriteCsv(String tableName, boolean autodetect) throws Exception { - AtomicReference job = new AtomicReference<>(); + @ParameterizedTest + @MethodSource("tables") + void testWriteCsv(String tableName, boolean autodetect) throws Exception { + AtomicReference job = new AtomicReference<>(); - WriteChannelConfiguration channelConfiguration = WriteChannelConfiguration - .newBuilder(TableId.of(TestConstants.DATASET, tableName)) - .setSchema(autodetect ? null : PersonDto.getBigQuerySchema()) - .setAutodetect(autodetect) - .setFormatOptions(FormatOptions.csv()) - .build(); + WriteChannelConfiguration channelConfiguration = WriteChannelConfiguration + .newBuilder(TableId.of(TestConstants.DATASET, tableName)) + .setSchema(autodetect ? null : PersonDto.getBigQuerySchema()) + .setAutodetect(autodetect) + .setFormatOptions(FormatOptions.csv()) + .build(); - BigQueryLoadJobCsvItemWriter writer = new BigQueryCsvItemWriterBuilder() - .bigQuery(BIG_QUERY) - .writeChannelConfig(channelConfiguration) - .jobConsumer(job::set) - .build(); + BigQueryLoadJobCsvItemWriter writer = new BigQueryCsvItemWriterBuilder() + .bigQuery(BIG_QUERY) + .writeChannelConfig(channelConfiguration) + .jobConsumer(job::set) + .build(); - writer.afterPropertiesSet(); - writer.write(TestConstants.CHUNK); - job.get().waitFor(); + writer.afterPropertiesSet(); + writer.write(TestConstants.CHUNK); + job.get().waitFor(); - verifyResults(tableName); - } + verifyResults(tableName); + } - private static Stream tables() { - return Stream.of( - Arguments.of(NameUtils.generateTableName(TestConstants.CSV), false), - Arguments.of(NameUtils.generateTableName(TestConstants.CSV), true), - Arguments.of(TestConstants.CSV, false) - ); - } + private static Stream tables() { + return Stream.of(Arguments.of(NameUtils.generateTableName(TestConstants.CSV), false), + Arguments.of(NameUtils.generateTableName(TestConstants.CSV), true), + Arguments.of(TestConstants.CSV, false)); + } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/loadjob/json/GcloudBigQueryLoadJobJsonItemWriterTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/loadjob/json/GcloudBigQueryLoadJobJsonItemWriterTest.java index 38b1019..a7d1d7c 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/loadjob/json/GcloudBigQueryLoadJobJsonItemWriterTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/loadjob/json/GcloudBigQueryLoadJobJsonItemWriterTest.java @@ -41,56 +41,54 @@ class GcloudBigQueryLoadJobJsonItemWriterTest extends GcloudBaseBigQueryItemWriterTest { - private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.JSON); + private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.JSON); - @BeforeAll - static void prepareTest() { - if (BIG_QUERY.getDataset(TestConstants.DATASET) == null) { - BIG_QUERY.create(DatasetInfo.of(TestConstants.DATASET)); - } + @BeforeAll + static void prepareTest() { + if (BIG_QUERY.getDataset(TestConstants.DATASET) == null) { + BIG_QUERY.create(DatasetInfo.of(TestConstants.DATASET)); + } - if 
(BIG_QUERY.getTable(TestConstants.DATASET, TestConstants.JSON) == null) { - TableDefinition tableDefinition = StandardTableDefinition.of(PersonDto.getBigQuerySchema()); - BIG_QUERY.create(TableInfo.of(TABLE_ID, tableDefinition)); - } - } + if (BIG_QUERY.getTable(TestConstants.DATASET, TestConstants.JSON) == null) { + TableDefinition tableDefinition = StandardTableDefinition.of(PersonDto.getBigQuerySchema()); + BIG_QUERY.create(TableInfo.of(TABLE_ID, tableDefinition)); + } + } - @AfterAll - static void cleanup() { - BIG_QUERY.delete(TABLE_ID); - } + @AfterAll + static void cleanup() { + BIG_QUERY.delete(TABLE_ID); + } - @ParameterizedTest - @MethodSource("tables") - void testWrite(String tableName, boolean autodetect) throws Exception { - AtomicReference job = new AtomicReference<>(); + @ParameterizedTest + @MethodSource("tables") + void testWrite(String tableName, boolean autodetect) throws Exception { + AtomicReference job = new AtomicReference<>(); - WriteChannelConfiguration channelConfiguration = WriteChannelConfiguration - .newBuilder(TableId.of(TestConstants.DATASET, tableName)) - .setSchema(autodetect ? null : PersonDto.getBigQuerySchema()) - .setAutodetect(autodetect) - .setFormatOptions(FormatOptions.json()) - .build(); + WriteChannelConfiguration channelConfiguration = WriteChannelConfiguration + .newBuilder(TableId.of(TestConstants.DATASET, tableName)) + .setSchema(autodetect ? null : PersonDto.getBigQuerySchema()) + .setAutodetect(autodetect) + .setFormatOptions(FormatOptions.json()) + .build(); - BigQueryLoadJobJsonItemWriter writer = new BigQueryLoadJobJsonItemWriterBuilder() - .bigQuery(BIG_QUERY) - .writeChannelConfig(channelConfiguration) - .jobConsumer(job::set) - .build(); + BigQueryLoadJobJsonItemWriter writer = new BigQueryLoadJobJsonItemWriterBuilder() + .bigQuery(BIG_QUERY) + .writeChannelConfig(channelConfiguration) + .jobConsumer(job::set) + .build(); - writer.afterPropertiesSet(); - writer.write(TestConstants.CHUNK); - job.get().waitFor(); + writer.afterPropertiesSet(); + writer.write(TestConstants.CHUNK); + job.get().waitFor(); - verifyResults(tableName); - } + verifyResults(tableName); + } - private static Stream tables() { - return Stream.of( - Arguments.of(NameUtils.generateTableName(TestConstants.JSON), false), - Arguments.of(NameUtils.generateTableName(TestConstants.JSON), true), - Arguments.of(TestConstants.JSON, false) - ); - } + private static Stream tables() { + return Stream.of(Arguments.of(NameUtils.generateTableName(TestConstants.JSON), false), + Arguments.of(NameUtils.generateTableName(TestConstants.JSON), true), + Arguments.of(TestConstants.JSON, false)); + } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/writeapi/json/GcloudBigQueryWriteApiCommitedJsonItemWriterTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/writeapi/json/GcloudBigQueryWriteApiCommitedJsonItemWriterTest.java index db35366..5f535f2 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/writeapi/json/GcloudBigQueryWriteApiCommitedJsonItemWriterTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/gcloud/writer/writeapi/json/GcloudBigQueryWriteApiCommitedJsonItemWriterTest.java @@ -5,8 +5,9 @@ public class GcloudBigQueryWriteApiCommitedJsonItemWriterTest extends GcloudBaseBigQueryItemWriterTest { - @Test - void testWrite() { - // TODO - } 
+ @Test + void testWrite() { + // TODO + } + } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/base/AbstractBigQueryTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/base/AbstractBigQueryTest.java index f28a26f..0862dfa 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/base/AbstractBigQueryTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/base/AbstractBigQueryTest.java @@ -21,18 +21,14 @@ public abstract class AbstractBigQueryTest { - protected BigQuery prepareMockedBigQuery() { - BigQuery mockedBigQuery = Mockito.mock(BigQuery.class); + protected BigQuery prepareMockedBigQuery() { + BigQuery mockedBigQuery = Mockito.mock(BigQuery.class); - Mockito - .when(mockedBigQuery.getTable(Mockito.any())) - .thenReturn(null); + Mockito.when(mockedBigQuery.getTable(Mockito.any())).thenReturn(null); - Mockito - .when(mockedBigQuery.getDataset(Mockito.anyString())) - .thenReturn(null); + Mockito.when(mockedBigQuery.getDataset(Mockito.anyString())).thenReturn(null); - return mockedBigQuery; - } + return mockedBigQuery; + } } \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/reader/BigQueryItemReaderTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/reader/BigQueryItemReaderTest.java index ebd40b4..f573ea2 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/reader/BigQueryItemReaderTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/reader/BigQueryItemReaderTest.java @@ -37,101 +37,105 @@ class BigQueryItemReaderTest extends AbstractBigQueryTest { - @Test - void testSetBigQuery() throws IllegalAccessException, NoSuchFieldException { - BigQueryQueryItemReader reader = new BigQueryQueryItemReader<>(); - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryQueryItemReader.class, MethodHandles.lookup()); - BigQuery bigQuery = prepareMockedBigQuery(); + @Test + void testSetBigQuery() throws IllegalAccessException, NoSuchFieldException { + BigQueryQueryItemReader reader = new BigQueryQueryItemReader<>(); + MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryQueryItemReader.class, + MethodHandles.lookup()); + BigQuery bigQuery = prepareMockedBigQuery(); - reader.setBigQuery(bigQuery); + reader.setBigQuery(bigQuery); - BigQuery actualBigQuery = (BigQuery) handle - .findVarHandle(BigQueryQueryItemReader.class, "bigQuery", BigQuery.class) - .get(reader); + BigQuery actualBigQuery = (BigQuery) handle + .findVarHandle(BigQueryQueryItemReader.class, "bigQuery", BigQuery.class) + .get(reader); - Assertions.assertEquals(bigQuery, actualBigQuery); - } + Assertions.assertEquals(bigQuery, actualBigQuery); + } - @Test - void testSetRowMapper() throws IllegalAccessException, NoSuchFieldException { - BigQueryQueryItemReader reader = new BigQueryQueryItemReader<>(); - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryQueryItemReader.class, MethodHandles.lookup()); - Converter rowMapper = source -> null; + @Test + void testSetRowMapper() throws IllegalAccessException, NoSuchFieldException { + BigQueryQueryItemReader reader = new BigQueryQueryItemReader<>(); + MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryQueryItemReader.class, + 
MethodHandles.lookup()); + Converter rowMapper = source -> null; - reader.setRowMapper(rowMapper); + reader.setRowMapper(rowMapper); - Converter actualRowMapper = (Converter) handle - .findVarHandle(BigQueryQueryItemReader.class, "rowMapper", Converter.class) - .get(reader); + Converter actualRowMapper = (Converter) handle + .findVarHandle(BigQueryQueryItemReader.class, "rowMapper", Converter.class) + .get(reader); - Assertions.assertEquals(rowMapper, actualRowMapper); - } + Assertions.assertEquals(rowMapper, actualRowMapper); + } - @Test - void testSetJobConfiguration() throws IllegalAccessException, NoSuchFieldException { - BigQueryQueryItemReader reader = new BigQueryQueryItemReader<>(); - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryQueryItemReader.class, MethodHandles.lookup()); - QueryJobConfiguration jobConfiguration = QueryJobConfiguration.newBuilder("select").build(); + @Test + void testSetJobConfiguration() throws IllegalAccessException, NoSuchFieldException { + BigQueryQueryItemReader reader = new BigQueryQueryItemReader<>(); + MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryQueryItemReader.class, + MethodHandles.lookup()); + QueryJobConfiguration jobConfiguration = QueryJobConfiguration.newBuilder("select").build(); - reader.setJobConfiguration(jobConfiguration); + reader.setJobConfiguration(jobConfiguration); - QueryJobConfiguration actualJobConfiguration = (QueryJobConfiguration) handle - .findVarHandle(BigQueryQueryItemReader.class, "jobConfiguration", QueryJobConfiguration.class) - .get(reader); + QueryJobConfiguration actualJobConfiguration = (QueryJobConfiguration) handle + .findVarHandle(BigQueryQueryItemReader.class, "jobConfiguration", QueryJobConfiguration.class) + .get(reader); - Assertions.assertEquals(jobConfiguration, actualJobConfiguration); - } + Assertions.assertEquals(jobConfiguration, actualJobConfiguration); + } - @Test - void testRead() throws Exception { - BigQuery bigQuery = prepareMockedBigQuery(); - List items = TestConstants.CHUNK.getItems(); + @Test + void testRead() throws Exception { + BigQuery bigQuery = prepareMockedBigQuery(); + List items = TestConstants.CHUNK.getItems(); - Field name = Field.of(TestConstants.NAME, StandardSQLTypeName.STRING); - Field age = Field.of(TestConstants.AGE, StandardSQLTypeName.INT64); + Field name = Field.of(TestConstants.NAME, StandardSQLTypeName.STRING); + Field age = Field.of(TestConstants.AGE, StandardSQLTypeName.INT64); - PersonDto person1 = items.get(0); - FieldValue value10 = FieldValue.of(FieldValue.Attribute.PRIMITIVE, person1.name()); - FieldValue value11 = FieldValue.of(FieldValue.Attribute.PRIMITIVE, person1.age().toString()); + PersonDto person1 = items.get(0); + FieldValue value10 = FieldValue.of(FieldValue.Attribute.PRIMITIVE, person1.name()); + FieldValue value11 = FieldValue.of(FieldValue.Attribute.PRIMITIVE, person1.age().toString()); - FieldValueList row1 = FieldValueList.of(List.of(value10, value11), name, age); + FieldValueList row1 = FieldValueList.of(List.of(value10, value11), name, age); - TableResult tableResult = Mockito.mock(TableResult.class); - Mockito.when(tableResult.getValues()).thenReturn(List.of(row1)); + TableResult tableResult = Mockito.mock(TableResult.class); + Mockito.when(tableResult.getValues()).thenReturn(List.of(row1)); - Mockito.when(bigQuery.query(Mockito.any(QueryJobConfiguration.class))).thenReturn(tableResult); + Mockito.when(bigQuery.query(Mockito.any(QueryJobConfiguration.class))).thenReturn(tableResult); - 
BigQueryQueryItemReader reader = new BigQueryQueryItemReader<>(); - reader.setRowMapper(TestConstants.PERSON_MAPPER); - reader.setBigQuery(bigQuery); - reader.setJobConfiguration(QueryJobConfiguration.of("select")); + BigQueryQueryItemReader reader = new BigQueryQueryItemReader<>(); + reader.setRowMapper(TestConstants.PERSON_MAPPER); + reader.setBigQuery(bigQuery); + reader.setJobConfiguration(QueryJobConfiguration.of("select")); - // First call - PersonDto actual = reader.read(); - Assertions.assertEquals(person1.name(), actual.name()); - Assertions.assertEquals(person1.age(), actual.age()); + // First call + PersonDto actual = reader.read(); + Assertions.assertEquals(person1.name(), actual.name()); + Assertions.assertEquals(person1.age(), actual.age()); - // Second call - Assertions.assertNull(reader.read()); - } + // Second call + Assertions.assertNull(reader.read()); + } - @Test - void testAfterPropertiesSet() { - BigQueryQueryItemReader reader = new BigQueryQueryItemReader<>(); + @Test + void testAfterPropertiesSet() { + BigQueryQueryItemReader reader = new BigQueryQueryItemReader<>(); - // bigQuery - Assertions.assertThrows(IllegalArgumentException.class, reader::afterPropertiesSet); + // bigQuery + Assertions.assertThrows(IllegalArgumentException.class, reader::afterPropertiesSet); - // rowMapper - reader.setBigQuery(prepareMockedBigQuery()); - Assertions.assertThrows(IllegalArgumentException.class, reader::afterPropertiesSet); + // rowMapper + reader.setBigQuery(prepareMockedBigQuery()); + Assertions.assertThrows(IllegalArgumentException.class, reader::afterPropertiesSet); - // jobConfiguration - reader.setRowMapper(TestConstants.PERSON_MAPPER); - Assertions.assertThrows(IllegalArgumentException.class, reader::afterPropertiesSet); + // jobConfiguration + reader.setRowMapper(TestConstants.PERSON_MAPPER); + Assertions.assertThrows(IllegalArgumentException.class, reader::afterPropertiesSet); + + // No exception + reader.setJobConfiguration(QueryJobConfiguration.of("select")); + Assertions.assertDoesNotThrow(reader::afterPropertiesSet); + } - // No exception - reader.setJobConfiguration(QueryJobConfiguration.of("select")); - Assertions.assertDoesNotThrow(reader::afterPropertiesSet); - } } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/reader/builder/BigQueryItemReaderBuilderTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/reader/builder/BigQueryItemReaderBuilderTest.java index 4c8f7ae..9381aab 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/reader/builder/BigQueryItemReaderBuilderTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/reader/builder/BigQueryItemReaderBuilderTest.java @@ -38,129 +38,129 @@ class BigQueryItemReaderBuilderTest extends AbstractBigQueryTest { - @Test - void testBuild_WithoutJobConfiguration() throws IllegalAccessException, NoSuchFieldException { - BigQuery mockedBigQuery = prepareMockedBigQuery(); - String query = "SELECT p.name, p.age FROM spring_batch_extensions.persons p LIMIT 1"; - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryQueryItemReader.class, MethodHandles.lookup()); - - BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder() - .bigQuery(mockedBigQuery) - .query(query) - .rowMapper(TestConstants.PERSON_MAPPER) - .build(); - - Assertions.assertNotNull(reader); - - BigQuery actualBigQuery = (BigQuery) handle - 
.findVarHandle(BigQueryQueryItemReader.class, "bigQuery", BigQuery.class) - .get(reader); - - Converter actualRowMapper = (Converter) handle - .findVarHandle(BigQueryQueryItemReader.class, "rowMapper", Converter.class) - .get(reader); - - QueryJobConfiguration actualJobConfiguration = (QueryJobConfiguration) handle - .findVarHandle(BigQueryQueryItemReader.class, "jobConfiguration", QueryJobConfiguration.class) - .get(reader); - - Assertions.assertEquals(mockedBigQuery, actualBigQuery); - Assertions.assertEquals(TestConstants.PERSON_MAPPER, actualRowMapper); - Assertions.assertEquals(QueryJobConfiguration.newBuilder(query).build(), actualJobConfiguration); - } - - @Test - void testBuild_WithoutRowMapper() throws IllegalAccessException, NoSuchFieldException { - BigQuery mockedBigQuery = prepareMockedBigQuery(); - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryQueryItemReader.class, MethodHandles.lookup()); - - QueryJobConfiguration expectedJobConfiguration = QueryJobConfiguration - .newBuilder("SELECT p.name, p.age FROM spring_batch_extensions.persons p LIMIT 1") - .build(); - - BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder() - .bigQuery(mockedBigQuery) - .jobConfiguration(expectedJobConfiguration) - .targetType(PersonDto.class) - .build(); - - Assertions.assertNotNull(reader); - - BigQuery actualBigQuery = (BigQuery) handle - .findVarHandle(BigQueryQueryItemReader.class, "bigQuery", BigQuery.class) - .get(reader); - - Converter actualRowMapper = (Converter) handle - .findVarHandle(BigQueryQueryItemReader.class, "rowMapper", Converter.class) - .get(reader); - - QueryJobConfiguration actualJobConfiguration = (QueryJobConfiguration) handle - .findVarHandle(BigQueryQueryItemReader.class, "jobConfiguration", QueryJobConfiguration.class) - .get(reader); - - Assertions.assertEquals(mockedBigQuery, actualBigQuery); - Assertions.assertNotNull(actualRowMapper); - Assertions.assertEquals(expectedJobConfiguration, actualJobConfiguration); - } - - @Test - void testBuild() throws IllegalAccessException, NoSuchFieldException { - BigQuery mockedBigQuery = prepareMockedBigQuery(); - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryQueryItemReader.class, MethodHandles.lookup()); - - QueryJobConfiguration jobConfiguration = QueryJobConfiguration - .newBuilder("SELECT p.name, p.age FROM spring_batch_extensions.csv p LIMIT 2") - .setDestinationTable(TableId.of(TestConstants.DATASET, TestConstants.CSV)) - .build(); - - BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder() - .bigQuery(mockedBigQuery) - .jobConfiguration(jobConfiguration) - .rowMapper(TestConstants.PERSON_MAPPER) - .build(); - - Assertions.assertNotNull(reader); - - BigQuery actualBigQuery = (BigQuery) handle - .findVarHandle(BigQueryQueryItemReader.class, "bigQuery", BigQuery.class) - .get(reader); - - Converter actualRowMapper = (Converter) handle - .findVarHandle(BigQueryQueryItemReader.class, "rowMapper", Converter.class) - .get(reader); - - QueryJobConfiguration actualJobConfiguration = (QueryJobConfiguration) handle - .findVarHandle(BigQueryQueryItemReader.class, "jobConfiguration", QueryJobConfiguration.class) - .get(reader); - - Assertions.assertEquals(mockedBigQuery, actualBigQuery); - Assertions.assertEquals(TestConstants.PERSON_MAPPER, actualRowMapper); - Assertions.assertEquals(jobConfiguration, actualJobConfiguration); - } - - @ParameterizedTest - @MethodSource("brokenBuilders") - void testBuild_Exception(String expectedMessage, 
BigQueryQueryItemReaderBuilder builder) { - IllegalArgumentException ex = Assertions.assertThrows(IllegalArgumentException.class, builder::build); - Assertions.assertEquals(expectedMessage, ex.getMessage()); - } - - private static Stream brokenBuilders() { - final class HumanDto {} - BigQuery bigQuery = Mockito.mock(BigQuery.class); - return Stream.of( - Arguments.of( - "No target type provided", - new BigQueryQueryItemReaderBuilder().bigQuery(bigQuery) - ), - Arguments.of( - "Only Java record supported", - new BigQueryQueryItemReaderBuilder().bigQuery(bigQuery).targetType(HumanDto.class) - ), - Arguments.of( - "No query provided", - new BigQueryQueryItemReaderBuilder().bigQuery(bigQuery).rowMapper(source -> null) - ) - ); - } + @Test + void testBuild_WithoutJobConfiguration() throws IllegalAccessException, NoSuchFieldException { + BigQuery mockedBigQuery = prepareMockedBigQuery(); + String query = "SELECT p.name, p.age FROM spring_batch_extensions.persons p LIMIT 1"; + MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryQueryItemReader.class, + MethodHandles.lookup()); + + BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder() + .bigQuery(mockedBigQuery) + .query(query) + .rowMapper(TestConstants.PERSON_MAPPER) + .build(); + + Assertions.assertNotNull(reader); + + BigQuery actualBigQuery = (BigQuery) handle + .findVarHandle(BigQueryQueryItemReader.class, "bigQuery", BigQuery.class) + .get(reader); + + Converter actualRowMapper = (Converter) handle + .findVarHandle(BigQueryQueryItemReader.class, "rowMapper", Converter.class) + .get(reader); + + QueryJobConfiguration actualJobConfiguration = (QueryJobConfiguration) handle + .findVarHandle(BigQueryQueryItemReader.class, "jobConfiguration", QueryJobConfiguration.class) + .get(reader); + + Assertions.assertEquals(mockedBigQuery, actualBigQuery); + Assertions.assertEquals(TestConstants.PERSON_MAPPER, actualRowMapper); + Assertions.assertEquals(QueryJobConfiguration.newBuilder(query).build(), actualJobConfiguration); + } + + @Test + void testBuild_WithoutRowMapper() throws IllegalAccessException, NoSuchFieldException { + BigQuery mockedBigQuery = prepareMockedBigQuery(); + MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryQueryItemReader.class, + MethodHandles.lookup()); + + QueryJobConfiguration expectedJobConfiguration = QueryJobConfiguration + .newBuilder("SELECT p.name, p.age FROM spring_batch_extensions.persons p LIMIT 1") + .build(); + + BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder() + .bigQuery(mockedBigQuery) + .jobConfiguration(expectedJobConfiguration) + .targetType(PersonDto.class) + .build(); + + Assertions.assertNotNull(reader); + + BigQuery actualBigQuery = (BigQuery) handle + .findVarHandle(BigQueryQueryItemReader.class, "bigQuery", BigQuery.class) + .get(reader); + + Converter actualRowMapper = (Converter) handle + .findVarHandle(BigQueryQueryItemReader.class, "rowMapper", Converter.class) + .get(reader); + + QueryJobConfiguration actualJobConfiguration = (QueryJobConfiguration) handle + .findVarHandle(BigQueryQueryItemReader.class, "jobConfiguration", QueryJobConfiguration.class) + .get(reader); + + Assertions.assertEquals(mockedBigQuery, actualBigQuery); + Assertions.assertNotNull(actualRowMapper); + Assertions.assertEquals(expectedJobConfiguration, actualJobConfiguration); + } + + @Test + void testBuild() throws IllegalAccessException, NoSuchFieldException { + BigQuery mockedBigQuery = prepareMockedBigQuery(); + MethodHandles.Lookup handle = 
MethodHandles.privateLookupIn(BigQueryQueryItemReader.class, + MethodHandles.lookup()); + + QueryJobConfiguration jobConfiguration = QueryJobConfiguration + .newBuilder("SELECT p.name, p.age FROM spring_batch_extensions.csv p LIMIT 2") + .setDestinationTable(TableId.of(TestConstants.DATASET, TestConstants.CSV)) + .build(); + + BigQueryQueryItemReader reader = new BigQueryQueryItemReaderBuilder() + .bigQuery(mockedBigQuery) + .jobConfiguration(jobConfiguration) + .rowMapper(TestConstants.PERSON_MAPPER) + .build(); + + Assertions.assertNotNull(reader); + + BigQuery actualBigQuery = (BigQuery) handle + .findVarHandle(BigQueryQueryItemReader.class, "bigQuery", BigQuery.class) + .get(reader); + + Converter actualRowMapper = (Converter) handle + .findVarHandle(BigQueryQueryItemReader.class, "rowMapper", Converter.class) + .get(reader); + + QueryJobConfiguration actualJobConfiguration = (QueryJobConfiguration) handle + .findVarHandle(BigQueryQueryItemReader.class, "jobConfiguration", QueryJobConfiguration.class) + .get(reader); + + Assertions.assertEquals(mockedBigQuery, actualBigQuery); + Assertions.assertEquals(TestConstants.PERSON_MAPPER, actualRowMapper); + Assertions.assertEquals(jobConfiguration, actualJobConfiguration); + } + + @ParameterizedTest + @MethodSource("brokenBuilders") + void testBuild_Exception(String expectedMessage, BigQueryQueryItemReaderBuilder builder) { + IllegalArgumentException ex = Assertions.assertThrows(IllegalArgumentException.class, builder::build); + Assertions.assertEquals(expectedMessage, ex.getMessage()); + } + + private static Stream brokenBuilders() { + final class HumanDto { + + } + BigQuery bigQuery = Mockito.mock(BigQuery.class); + + return Stream.of( + Arguments.of("No target type provided", + new BigQueryQueryItemReaderBuilder().bigQuery(bigQuery)), + Arguments.of("Only Java record supported", + new BigQueryQueryItemReaderBuilder().bigQuery(bigQuery).targetType(HumanDto.class)), + Arguments.of("No query provided", + new BigQueryQueryItemReaderBuilder().bigQuery(bigQuery).rowMapper(source -> null))); + } + } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/reader/builder/RecordMapperTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/reader/builder/RecordMapperTest.java index 7ecb018..199845e 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/reader/builder/RecordMapperTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/reader/builder/RecordMapperTest.java @@ -31,34 +31,36 @@ class RecordMapperTest { - @Test - void testGenerateMapper() { - RecordMapper mapper = new RecordMapper<>(); - List expected = TestConstants.CHUNK.getItems(); + @Test + void testGenerateMapper() { + RecordMapper mapper = new RecordMapper<>(); + List expected = TestConstants.CHUNK.getItems(); - Field name = Field.of(TestConstants.NAME, StandardSQLTypeName.STRING); - Field age = Field.of(TestConstants.AGE, StandardSQLTypeName.INT64); + Field name = Field.of(TestConstants.NAME, StandardSQLTypeName.STRING); + Field age = Field.of(TestConstants.AGE, StandardSQLTypeName.INT64); - PersonDto person1 = expected.get(0); - FieldValue value1 = FieldValue.of(FieldValue.Attribute.PRIMITIVE, person1.name()); - FieldValue value2 = FieldValue.of(FieldValue.Attribute.PRIMITIVE, person1.age()); + PersonDto person1 = expected.get(0); + FieldValue value1 = FieldValue.of(FieldValue.Attribute.PRIMITIVE, 
person1.name()); + FieldValue value2 = FieldValue.of(FieldValue.Attribute.PRIMITIVE, person1.age()); - FieldValueList row = FieldValueList.of(List.of(value1, value2), name, age); + FieldValueList row = FieldValueList.of(List.of(value1, value2), name, age); - Converter<FieldValueList, PersonDto> converter = mapper.generateMapper(PersonDto.class); - Assertions.assertNotNull(converter); + Converter<FieldValueList, PersonDto> converter = mapper.generateMapper(PersonDto.class); + Assertions.assertNotNull(converter); - PersonDto actual = converter.convert(row); + PersonDto actual = converter.convert(row); - Assertions.assertEquals(expected.get(0).name(), actual.name()); - Assertions.assertEquals(expected.get(0).age(), actual.age()); - } + Assertions.assertEquals(expected.get(0).name(), actual.name()); + Assertions.assertEquals(expected.get(0).age(), actual.age()); + } - @Test - void testGenerateMapper_EmptyRecord() { - record TestRecord(){} - IllegalArgumentException ex = Assertions.assertThrows(IllegalArgumentException.class, () -> new RecordMapper<TestRecord>().generateMapper(TestRecord.class)); - Assertions.assertEquals("Record without fields is redundant", ex.getMessage()); - } + @Test + void testGenerateMapper_EmptyRecord() { + record TestRecord() { + } + IllegalArgumentException ex = Assertions.assertThrows(IllegalArgumentException.class, + () -> new RecordMapper<TestRecord>().generateMapper(TestRecord.class)); + Assertions.assertEquals("Record without fields is redundant", ex.getMessage()); + } } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/BigQueryLoadJobBaseItemWriterTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/BigQueryLoadJobBaseItemWriterTest.java index 361c245..de70e3f 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/BigQueryLoadJobBaseItemWriterTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/BigQueryLoadJobBaseItemWriterTest.java @@ -36,277 +36,296 @@ class BigQueryLoadJobBaseItemWriterTest extends AbstractBigQueryTest { - private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.CSV); + private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.CSV); - @Test - void testGetTable() { - Table expected = Mockito.mock(Table.class); - BigQuery bigQuery = prepareMockedBigQuery(); - Mockito.when(bigQuery.getTable(TABLE_ID)).thenReturn(expected); + @Test + void testGetTable() { + Table expected = Mockito.mock(Table.class); + BigQuery bigQuery = prepareMockedBigQuery(); + Mockito.when(bigQuery.getTable(TABLE_ID)).thenReturn(expected); - TestWriter writer = new TestWriter(); - writer.setBigQuery(bigQuery); - writer.setWriteChannelConfig(WriteChannelConfiguration.of(TABLE_ID)); + TestWriter writer = new TestWriter(); + writer.setBigQuery(bigQuery); + writer.setWriteChannelConfig(WriteChannelConfiguration.of(TABLE_ID)); - Assertions.assertEquals(expected, writer.testGetTable()); - } + Assertions.assertEquals(expected, writer.testGetTable()); + } - @Test - void testSetDatasetInfo() throws IllegalAccessException, NoSuchFieldException { - TestWriter writer = new TestWriter(); - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, MethodHandles.lookup()); - DatasetInfo expected = DatasetInfo.of(TABLE_ID.getDataset()); + @Test + void testSetDatasetInfo() throws IllegalAccessException, NoSuchFieldException 
{ + TestWriter writer = new TestWriter(); + MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, + MethodHandles.lookup()); + DatasetInfo expected = DatasetInfo.of(TABLE_ID.getDataset()); - writer.setDatasetInfo(expected); + writer.setDatasetInfo(expected); - DatasetInfo actual = (DatasetInfo) handle - .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "datasetInfo", DatasetInfo.class) - .get(writer); + DatasetInfo actual = (DatasetInfo) handle + .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "datasetInfo", DatasetInfo.class) + .get(writer); - Assertions.assertEquals(expected, actual); - } + Assertions.assertEquals(expected, actual); + } - @Test - void testSetJobConsumer() throws IllegalAccessException, NoSuchFieldException { - TestWriter writer = new TestWriter(); - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, MethodHandles.lookup()); - Consumer<Job> expected = job -> {}; + @Test + void testSetJobConsumer() throws IllegalAccessException, NoSuchFieldException { + TestWriter writer = new TestWriter(); + MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, + MethodHandles.lookup()); + Consumer<Job> expected = job -> { + }; - writer.setJobConsumer(expected); + writer.setJobConsumer(expected); - Consumer<Job> actual = (Consumer<Job>) handle - .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "jobConsumer", Consumer.class) - .get(writer); + Consumer<Job> actual = (Consumer<Job>) handle + .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "jobConsumer", Consumer.class) + .get(writer); - Assertions.assertEquals(expected, actual); - } + Assertions.assertEquals(expected, actual); + } - @Test - void testSetWriteChannelConfig() throws IllegalAccessException, NoSuchFieldException { - TestWriter writer = new TestWriter(); - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, MethodHandles.lookup()); - WriteChannelConfiguration expected = WriteChannelConfiguration.newBuilder(TABLE_ID).build(); + @Test + void testSetWriteChannelConfig() throws IllegalAccessException, NoSuchFieldException { + TestWriter writer = new TestWriter(); + MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, + MethodHandles.lookup()); + WriteChannelConfiguration expected = WriteChannelConfiguration.newBuilder(TABLE_ID).build(); - writer.setWriteChannelConfig(expected); + writer.setWriteChannelConfig(expected); - WriteChannelConfiguration actual = (WriteChannelConfiguration) handle - .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "writeChannelConfig", WriteChannelConfiguration.class) - .get(writer); + WriteChannelConfiguration actual = (WriteChannelConfiguration) handle + .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "writeChannelConfig", WriteChannelConfiguration.class) + .get(writer); + + Assertions.assertEquals(expected, actual); + } + + @Test + void testSetBigQuery() throws IllegalAccessException, NoSuchFieldException { + TestWriter writer = new TestWriter(); + MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, + MethodHandles.lookup()); + BigQuery expected = prepareMockedBigQuery(); + + writer.setBigQuery(expected); + + BigQuery actual = (BigQuery) handle + .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "bigQuery", BigQuery.class) + .get(writer); + + Assertions.assertEquals(expected, actual); + } + + @Test + void testWrite() throws Exception { + 
MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, + MethodHandles.lookup()); + AtomicBoolean consumerCalled = new AtomicBoolean(); + + Job job = Mockito.mock(Job.class); + Mockito.when(job.getJobId()).thenReturn(JobId.newBuilder().build()); + + TableDataWriteChannel channel = Mockito.mock(TableDataWriteChannel.class); + Mockito.when(channel.getJob()).thenReturn(job); + + BigQuery bigQuery = prepareMockedBigQuery(); + Mockito.when(bigQuery.writer(Mockito.any(WriteChannelConfiguration.class))).thenReturn(channel); + + TestWriter writer = new TestWriter(); + writer.setBigQuery(bigQuery); + writer.setJobConsumer(j -> consumerCalled.set(true)); + writer.setWriteChannelConfig(WriteChannelConfiguration.of(TABLE_ID)); + + writer.write(TestConstants.CHUNK); + + AtomicLong actual = (AtomicLong) handle + .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "bigQueryWriteCounter", AtomicLong.class) + .get(writer); + + Assertions.assertEquals(1L, actual.get()); + Assertions.assertTrue(consumerCalled.get()); + + Mockito.verify(channel).write(Mockito.any(ByteBuffer.class)); + Mockito.verify(channel).close(); + Mockito.verify(channel, Mockito.times(2)).getJob(); + Mockito.verifyNoMoreInteractions(channel); + } + + @Test + void testWrite_Exception() throws Exception { + MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, + MethodHandles.lookup()); + AtomicBoolean consumerCalled = new AtomicBoolean(); + + Job job = Mockito.mock(Job.class); + Mockito.when(job.getJobId()).thenReturn(JobId.newBuilder().build()); + + TableDataWriteChannel channel = Mockito.mock(TableDataWriteChannel.class); + Mockito.when(channel.getJob()).thenReturn(job); + Mockito.when(channel.write(Mockito.any(ByteBuffer.class))).thenThrow(BigQueryException.class); + + BigQuery bigQuery = prepareMockedBigQuery(); + Mockito.when(bigQuery.writer(Mockito.any(WriteChannelConfiguration.class))).thenReturn(channel); + + TestWriter writer = new TestWriter(); + writer.setBigQuery(bigQuery); + writer.setJobConsumer(j -> consumerCalled.set(true)); + writer.setWriteChannelConfig(WriteChannelConfiguration.of(TABLE_ID)); + + BigQueryItemWriterException actual = Assertions.assertThrows(BigQueryItemWriterException.class, + () -> writer.write(TestConstants.CHUNK)); + Assertions.assertEquals("Error on write happened", actual.getMessage()); + + AtomicLong actualCounter = (AtomicLong) handle + .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "bigQueryWriteCounter", AtomicLong.class) + .get(writer); + + boolean writeFailed = (Boolean) handle + .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "writeFailed", boolean.class) + .get(writer); + + Assertions.assertEquals(0L, actualCounter.get()); + Assertions.assertTrue(writeFailed); + Assertions.assertFalse(consumerCalled.get()); + + Mockito.verify(channel).write(Mockito.any(ByteBuffer.class)); + Mockito.verify(channel).close(); + Mockito.verifyNoMoreInteractions(channel); + } + + @Test + void testBaseAfterPropertiesSet_Exception() { + TestWriter writer = new TestWriter(); + WriteChannelConfiguration.Builder channelBuilder = WriteChannelConfiguration.newBuilder(TABLE_ID); + + // bigQuery + IllegalArgumentException actual = Assertions.assertThrows(IllegalArgumentException.class, + writer::afterPropertiesSet); + Assertions.assertEquals("BigQuery service must be provided", actual.getMessage()); + + // writeChannelConfig + writer.setBigQuery(prepareMockedBigQuery()); + actual = 
Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); + Assertions.assertEquals("Write channel configuration must be provided", actual.getMessage()); + + // format + writer.setWriteChannelConfig(channelBuilder.build()); + actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); + Assertions.assertEquals("Data format must be provided", actual.getMessage()); + + // bigtable + writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.bigtable()).build()); + actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); + Assertions.assertEquals("Google BigTable is not supported", actual.getMessage()); + + // googleSheets + writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.googleSheets()).build()); + actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); + Assertions.assertEquals("Google Sheets is not supported", actual.getMessage()); + + // datastore + writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.datastoreBackup()).build()); + actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); + Assertions.assertEquals("Google Datastore is not supported", actual.getMessage()); + + // parquet + writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.parquet()).build()); + actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); + Assertions.assertEquals("Parquet is not supported", actual.getMessage()); + + // orc + writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.orc()).build()); + actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); + Assertions.assertEquals("Orc is not supported", actual.getMessage()); + + // avro + writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.avro()).build()); + actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); + Assertions.assertEquals("Avro is not supported", actual.getMessage()); + + // iceberg + writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.iceberg()).build()); + actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); + Assertions.assertEquals("Iceberg is not supported", actual.getMessage()); + + // dataset + writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.csv()).build()); + writer.setDatasetInfo(DatasetInfo.of("dataset-1")); + actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); + Assertions.assertEquals("Dataset should be configured properly", actual.getMessage()); + } + + @Test + void testBaseAfterPropertiesSet_Dataset() throws IllegalAccessException, NoSuchFieldException { + MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, + MethodHandles.lookup()); + + DatasetInfo datasetInfo = DatasetInfo.of(TABLE_ID.getDataset()); + BigQuery bigQuery = prepareMockedBigQuery(); + + TestWriter writer = new TestWriter(); + writer.setBigQuery(bigQuery); + writer.setWriteChannelConfig( + WriteChannelConfiguration.newBuilder(TABLE_ID).setFormatOptions(FormatOptions.json()).build()); + + writer.afterPropertiesSet(); + + DatasetInfo actual = (DatasetInfo) handle + .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "datasetInfo", DatasetInfo.class) + .get(writer); + 
Assertions.assertEquals(datasetInfo, actual); + + Mockito.verify(bigQuery).create(datasetInfo); + Mockito.verify(bigQuery).getDataset(TABLE_ID.getDataset()); + Mockito.verifyNoMoreInteractions(bigQuery); + } + + @Test + void testTableHasDefinedSchema() { + TestWriter writer = new TestWriter(); + Table table = Mockito.mock(Table.class); + + // Null + Assertions.assertFalse(writer.testTableHasDefinedSchema(null)); + + // Without definition + Assertions.assertFalse(writer.testTableHasDefinedSchema(table)); + + // Without schema + StandardTableDefinition.Builder definitionBuilder = StandardTableDefinition.newBuilder(); + Mockito.when(table.getDefinition()).thenReturn(definitionBuilder.build()); + Assertions.assertFalse(writer.testTableHasDefinedSchema(table)); + + // With schema + Mockito.when(table.getDefinition()) + .thenReturn(definitionBuilder.setSchema(Schema.of(Field.of(TestConstants.AGE, StandardSQLTypeName.STRING))) + .build()); + Assertions.assertTrue(writer.testTableHasDefinedSchema(table)); + } + + private static final class TestWriter extends BigQueryLoadJobBaseItemWriter<PersonDto> { + + @Override + protected void doInitializeProperties(List<? extends PersonDto> items) { + } + + @Override + protected List<byte[]> convertObjectsToByteArrays(List<? extends PersonDto> items) { + return items.stream().map(Objects::toString).map(String::getBytes).toList(); + } + + @Override + protected void performFormatSpecificChecks() { + } + + public Table testGetTable() { + return getTable(); + } + + public boolean testTableHasDefinedSchema(Table table) { + return tableHasDefinedSchema(table); + } + + } - Assertions.assertEquals(expected, actual); - } - - @Test - void testSetBigQuery() throws IllegalAccessException, NoSuchFieldException { - TestWriter writer = new TestWriter(); - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, MethodHandles.lookup()); - BigQuery expected = prepareMockedBigQuery(); - - writer.setBigQuery(expected); - - BigQuery actual = (BigQuery) handle - .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "bigQuery", BigQuery.class) - .get(writer); - - Assertions.assertEquals(expected, actual); - } - - @Test - void testWrite() throws Exception { - MethodHandles.Lookup handle = 
MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, MethodHandles.lookup()); - AtomicBoolean consumerCalled = new AtomicBoolean(); - - Job job = Mockito.mock(Job.class); - Mockito.when(job.getJobId()).thenReturn(JobId.newBuilder().build()); - - TableDataWriteChannel channel = Mockito.mock(TableDataWriteChannel.class); - Mockito.when(channel.getJob()).thenReturn(job); - Mockito.when(channel.write(Mockito.any(ByteBuffer.class))).thenThrow(BigQueryException.class); - - BigQuery bigQuery = prepareMockedBigQuery(); - Mockito.when(bigQuery.writer(Mockito.any(WriteChannelConfiguration.class))).thenReturn(channel); - - TestWriter writer = new TestWriter(); - writer.setBigQuery(bigQuery); - writer.setJobConsumer(j -> consumerCalled.set(true)); - writer.setWriteChannelConfig(WriteChannelConfiguration.of(TABLE_ID)); - - BigQueryItemWriterException actual = Assertions.assertThrows(BigQueryItemWriterException.class, () -> writer.write(TestConstants.CHUNK)); - Assertions.assertEquals("Error on write happened", actual.getMessage()); - - AtomicLong actualCounter = (AtomicLong) handle - .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "bigQueryWriteCounter", AtomicLong.class) - .get(writer); - - boolean writeFailed = (Boolean) handle - .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "writeFailed", boolean.class) - .get(writer); - - Assertions.assertEquals(0L, actualCounter.get()); - Assertions.assertTrue(writeFailed); - Assertions.assertFalse(consumerCalled.get()); - - Mockito.verify(channel).write(Mockito.any(ByteBuffer.class)); - Mockito.verify(channel).close(); - Mockito.verifyNoMoreInteractions(channel); - } - - @Test - void testBaseAfterPropertiesSet_Exception() { - TestWriter writer = new TestWriter(); - WriteChannelConfiguration.Builder channelBuilder = WriteChannelConfiguration.newBuilder(TABLE_ID); - - // bigQuery - IllegalArgumentException actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); - Assertions.assertEquals("BigQuery service must be provided", actual.getMessage()); - - // writeChannelConfig - writer.setBigQuery(prepareMockedBigQuery()); - actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); - Assertions.assertEquals("Write channel configuration must be provided", actual.getMessage()); - - // format - writer.setWriteChannelConfig(channelBuilder.build()); - actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); - Assertions.assertEquals("Data format must be provided", actual.getMessage()); - - // bigtable - writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.bigtable()).build()); - actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); - Assertions.assertEquals("Google BigTable is not supported", actual.getMessage()); - - // googleSheets - writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.googleSheets()).build()); - actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); - Assertions.assertEquals("Google Sheets is not supported", actual.getMessage()); - - // datastore - writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.datastoreBackup()).build()); - actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); - Assertions.assertEquals("Google Datastore is not supported", actual.getMessage()); - - // parquet - 
writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.parquet()).build()); - actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); - Assertions.assertEquals("Parquet is not supported", actual.getMessage()); - - // orc - writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.orc()).build()); - actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); - Assertions.assertEquals("Orc is not supported", actual.getMessage()); - - // avro - writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.avro()).build()); - actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); - Assertions.assertEquals("Avro is not supported", actual.getMessage()); - - // iceberg - writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.iceberg()).build()); - actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); - Assertions.assertEquals("Iceberg is not supported", actual.getMessage()); - - // dataset - writer.setWriteChannelConfig(channelBuilder.setFormatOptions(FormatOptions.csv()).build()); - writer.setDatasetInfo(DatasetInfo.of("dataset-1")); - actual = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet); - Assertions.assertEquals("Dataset should be configured properly", actual.getMessage()); - } - - @Test - void testBaseAfterPropertiesSet_Dataset() throws IllegalAccessException, NoSuchFieldException { - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, MethodHandles.lookup()); - - DatasetInfo datasetInfo = DatasetInfo.of(TABLE_ID.getDataset()); - BigQuery bigQuery = prepareMockedBigQuery(); - - TestWriter writer = new TestWriter(); - writer.setBigQuery(bigQuery); - writer.setWriteChannelConfig(WriteChannelConfiguration.newBuilder(TABLE_ID).setFormatOptions(FormatOptions.json()).build()); - - writer.afterPropertiesSet(); - - DatasetInfo actual = (DatasetInfo) handle.findVarHandle(BigQueryLoadJobBaseItemWriter.class, "datasetInfo", DatasetInfo.class).get(writer); - Assertions.assertEquals(datasetInfo, actual); - - Mockito.verify(bigQuery).create(datasetInfo); - Mockito.verify(bigQuery).getDataset(TABLE_ID.getDataset()); - Mockito.verifyNoMoreInteractions(bigQuery); - } - - @Test - void testTableHasDefinedSchema() { - TestWriter writer = new TestWriter(); - Table table = Mockito.mock(Table.class); - - // Null - Assertions.assertFalse(writer.testTableHasDefinedSchema(null)); - - // Without definition - Assertions.assertFalse(writer.testTableHasDefinedSchema(table)); - - // Without schema - StandardTableDefinition.Builder definitionBuilder = StandardTableDefinition.newBuilder(); - Mockito.when(table.getDefinition()).thenReturn(definitionBuilder.build()); - Assertions.assertFalse(writer.testTableHasDefinedSchema(table)); - - // With schema - Mockito.when(table.getDefinition()).thenReturn(definitionBuilder.setSchema(Schema.of(Field.of(TestConstants.AGE, StandardSQLTypeName.STRING))).build()); - Assertions.assertTrue(writer.testTableHasDefinedSchema(table)); - } - - private static final class TestWriter extends BigQueryLoadJobBaseItemWriter<PersonDto> { - - @Override - protected void doInitializeProperties(List<? extends PersonDto> items) {} - - @Override - protected List<byte[]> convertObjectsToByteArrays(List<? extends PersonDto> items) { - return items.stream().map(Objects::toString).map(String::getBytes).toList(); - } - - @Override - protected void 
performFormatSpecificChecks() {} - - public Table testGetTable() { - return getTable(); - } - - public boolean testTableHasDefinedSchema(Table table) { - return tableHasDefinedSchema(table); - } - } } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/csv/BigQueryLoadJobCsvItemWriterTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/csv/BigQueryLoadJobCsvItemWriterTest.java index 50de5d6..2ac4eb5 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/csv/BigQueryLoadJobCsvItemWriterTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/csv/BigQueryLoadJobCsvItemWriterTest.java @@ -46,163 +46,160 @@ class BigQueryLoadJobCsvItemWriterTest extends AbstractBigQueryTest { - private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.CSV); - private static final Schema SCHEMA = Schema.of(Field.of(TestConstants.AGE, StandardSQLTypeName.STRING)); - - @Test - void testDoInitializeProperties() throws IllegalAccessException, NoSuchFieldException { - TestWriter writer = new TestWriter(); - List<PersonDto> items = TestConstants.CHUNK.getItems(); - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobCsvItemWriter.class, MethodHandles.lookup()); - - // Exception - Assertions.assertThrows(IllegalStateException.class, () -> writer.testInitializeProperties(List.of())); - - // No exception - writer.testInitializeProperties(items); - Assertions.assertEquals( - PersonDto.class.getSimpleName(), - ((Class<?>) handle.findVarHandle(BigQueryLoadJobCsvItemWriter.class, "itemClass", Class.class).get(writer)).getSimpleName() - ); - ObjectWriter objectWriter = (ObjectWriter) handle - .findVarHandle(BigQueryLoadJobCsvItemWriter.class, "objectWriter", ObjectWriter.class) - .get(writer); - Assertions.assertInstanceOf(CsvFactory.class, objectWriter.getFactory()); - } - - @Test - void testSetRowMapper() throws IllegalAccessException, NoSuchFieldException { - BigQueryLoadJobCsvItemWriter<PersonDto> reader = new BigQueryLoadJobCsvItemWriter<>(); - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobCsvItemWriter.class, MethodHandles.lookup()); - Converter<PersonDto, byte[]> expected = source -> null; - - reader.setRowMapper(expected); - - Converter<PersonDto, byte[]> actual = (Converter<PersonDto, byte[]>) handle - .findVarHandle(BigQueryLoadJobCsvItemWriter.class, "rowMapper", Converter.class) - .get(reader); - - Assertions.assertEquals(expected, actual); - } - - @Test - void testConvertObjectsToByteArrays() { - TestWriter writer = new TestWriter(); - List<PersonDto> items = TestConstants.CHUNK.getItems(); - - // Empty - Assertions.assertTrue(writer.testConvert(List.of()).isEmpty()); - - // Not empty (row mapper) - writer.setRowMapper(source -> source.toString().getBytes()); - List<byte[]> actual = writer.testConvert(items); - List<byte[]> expected = items.stream().map(PersonDto::toString).map(String::getBytes).toList(); - Assertions.assertEquals(expected.size(), actual.size()); - - for (int i = 0; i < actual.size(); i++) { - Assertions.assertArrayEquals(expected.get(i), actual.get(i)); - } - - // Not empty (object writer) - ObjectWriter csvWriter = new CsvMapper().writerWithTypedSchemaFor(PersonDto.class); - writer.setRowMapper(null); - writer.testInitializeProperties(items); - actual = writer.testConvert(items); - - expected = items - .stream() - .map(pd -> { - try { - return csvWriter.writeValueAsBytes(pd); - } 
catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - }) - .toList(); - - Assertions.assertEquals(expected.size(), actual.size()); - - for (int i = 0; i < actual.size(); i++) { - Assertions.assertArrayEquals(expected.get(i), actual.get(i)); - } - } - - @Test - void testPerformFormatSpecificChecks() { - TestWriter writer = new TestWriter(); - - Table table = Mockito.mock(Table.class); - StandardTableDefinition tableDefinition = StandardTableDefinition - .newBuilder() - .setSchema(SCHEMA) - .build(); - Mockito.when(table.getDefinition()).thenReturn(tableDefinition); - - BigQuery bigQuery = prepareMockedBigQuery(); - Mockito.when(bigQuery.getTable(Mockito.any(TableId.class))).thenReturn(table); - - // schema - writer.setBigQuery(bigQuery); - writer.setWriteChannelConfig(WriteChannelConfiguration.of(TABLE_ID, FormatOptions.json())); - IllegalArgumentException actual = Assertions.assertThrows(IllegalArgumentException.class, writer::testPerformFormatSpecificChecks); - Assertions.assertEquals("Schema must be provided", actual.getMessage()); - - // schema equality - WriteChannelConfiguration channelConfig = WriteChannelConfiguration - .newBuilder(TABLE_ID) - .setSchema(Schema.of(Field.of(TestConstants.NAME, StandardSQLTypeName.STRING))) - .setFormatOptions(FormatOptions.csv()) - .build(); - writer.setWriteChannelConfig(channelConfig); - actual = Assertions.assertThrows(IllegalArgumentException.class, writer::testPerformFormatSpecificChecks); - Assertions.assertEquals("Schema must be the same", actual.getMessage()); - } - - @ParameterizedTest - @MethodSource("invalidFormats") - void testPerformFormatSpecificChecks_Format(FormatOptions formatOptions) { - Table table = Mockito.mock(Table.class); - StandardTableDefinition tableDefinition = StandardTableDefinition - .newBuilder() - .setSchema(SCHEMA) - .build(); - Mockito.when(table.getDefinition()).thenReturn(tableDefinition); - - BigQuery bigQuery = prepareMockedBigQuery(); - Mockito.when(bigQuery.getTable(Mockito.any(TableId.class))).thenReturn(table); - - TestWriter writer = new TestWriter(); - writer.setBigQuery(bigQuery); - - writer.setWriteChannelConfig(WriteChannelConfiguration.newBuilder(TABLE_ID).setAutodetect(true).setFormatOptions(formatOptions).build()); - IllegalArgumentException actual = Assertions.assertThrows(IllegalArgumentException.class, writer::testPerformFormatSpecificChecks); - Assertions.assertEquals("Only %s format is allowed".formatted(FormatOptions.csv().getType()), actual.getMessage()); - } - - static Stream<FormatOptions> invalidFormats() { - return Stream.of( - FormatOptions.parquet(), - FormatOptions.avro(), - FormatOptions.bigtable(), - FormatOptions.datastoreBackup(), - FormatOptions.googleSheets(), - FormatOptions.iceberg(), - FormatOptions.orc(), - FormatOptions.json() - ); - } - - private static final class TestWriter extends BigQueryLoadJobCsvItemWriter<PersonDto> { - public void testInitializeProperties(List<PersonDto> items) { - doInitializeProperties(items); - } - - public List<byte[]> testConvert(List<PersonDto> items) { - return convertObjectsToByteArrays(items); - } - - public void testPerformFormatSpecificChecks() { - performFormatSpecificChecks(); - } - } + private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.CSV); + + private static final Schema SCHEMA = Schema.of(Field.of(TestConstants.AGE, StandardSQLTypeName.STRING)); + + @Test + void testDoInitializeProperties() throws IllegalAccessException, NoSuchFieldException { + TestWriter writer = new TestWriter(); + List<PersonDto> items = TestConstants.CHUNK.getItems(); + 
MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobCsvItemWriter.class, + MethodHandles.lookup()); + + // Exception + Assertions.assertThrows(IllegalStateException.class, () -> writer.testInitializeProperties(List.of())); + + // No exception + writer.testInitializeProperties(items); + Class<?> personClass = ((Class<?>) handle + .findVarHandle(BigQueryLoadJobCsvItemWriter.class, "itemClass", Class.class) + .get(writer)); + Assertions.assertEquals(PersonDto.class.getSimpleName(), personClass.getSimpleName()); + + ObjectWriter objectWriter = (ObjectWriter) handle + .findVarHandle(BigQueryLoadJobCsvItemWriter.class, "objectWriter", ObjectWriter.class) + .get(writer); + Assertions.assertInstanceOf(CsvFactory.class, objectWriter.getFactory()); + } + + @Test + void testSetRowMapper() throws IllegalAccessException, NoSuchFieldException { + BigQueryLoadJobCsvItemWriter<PersonDto> reader = new BigQueryLoadJobCsvItemWriter<>(); + MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobCsvItemWriter.class, + MethodHandles.lookup()); + Converter<PersonDto, byte[]> expected = source -> null; + + reader.setRowMapper(expected); + + Converter<PersonDto, byte[]> actual = (Converter<PersonDto, byte[]>) handle + .findVarHandle(BigQueryLoadJobCsvItemWriter.class, "rowMapper", Converter.class) + .get(reader); + + Assertions.assertEquals(expected, actual); + } + + @Test + void testConvertObjectsToByteArrays() { + TestWriter writer = new TestWriter(); + List<PersonDto> items = TestConstants.CHUNK.getItems(); + + // Empty + Assertions.assertTrue(writer.testConvert(List.of()).isEmpty()); + + // Not empty (row mapper) + writer.setRowMapper(source -> source.toString().getBytes()); + List<byte[]> actual = writer.testConvert(items); + List<byte[]> expected = items.stream().map(PersonDto::toString).map(String::getBytes).toList(); + Assertions.assertEquals(expected.size(), actual.size()); + + for (int i = 0; i < actual.size(); i++) { + Assertions.assertArrayEquals(expected.get(i), actual.get(i)); + } + + // Not empty (object writer) + ObjectWriter csvWriter = new CsvMapper().writerWithTypedSchemaFor(PersonDto.class); + writer.setRowMapper(null); + writer.testInitializeProperties(items); + actual = writer.testConvert(items); + + expected = items.stream().map(pd -> { + try { + return csvWriter.writeValueAsBytes(pd); + } + catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + }).toList(); + + Assertions.assertEquals(expected.size(), actual.size()); + + for (int i = 0; i < actual.size(); i++) { + Assertions.assertArrayEquals(expected.get(i), actual.get(i)); + } + } + + @Test + void testPerformFormatSpecificChecks() { + TestWriter writer = new TestWriter(); + + Table table = Mockito.mock(Table.class); + StandardTableDefinition tableDefinition = StandardTableDefinition.newBuilder().setSchema(SCHEMA).build(); + Mockito.when(table.getDefinition()).thenReturn(tableDefinition); + + BigQuery bigQuery = prepareMockedBigQuery(); + Mockito.when(bigQuery.getTable(Mockito.any(TableId.class))).thenReturn(table); + + // schema + writer.setBigQuery(bigQuery); + writer.setWriteChannelConfig(WriteChannelConfiguration.of(TABLE_ID, FormatOptions.json())); + IllegalArgumentException actual = Assertions.assertThrows(IllegalArgumentException.class, + writer::testPerformFormatSpecificChecks); + Assertions.assertEquals("Schema must be provided", actual.getMessage()); + + // schema equality + WriteChannelConfiguration channelConfig = WriteChannelConfiguration.newBuilder(TABLE_ID) + .setSchema(Schema.of(Field.of(TestConstants.NAME, StandardSQLTypeName.STRING))) + 
.setFormatOptions(FormatOptions.csv()) + .build(); + writer.setWriteChannelConfig(channelConfig); + actual = Assertions.assertThrows(IllegalArgumentException.class, writer::testPerformFormatSpecificChecks); + Assertions.assertEquals("Schema must be the same", actual.getMessage()); + } + + @ParameterizedTest + @MethodSource("invalidFormats") + void testPerformFormatSpecificChecks_Format(FormatOptions formatOptions) { + Table table = Mockito.mock(Table.class); + StandardTableDefinition tableDefinition = StandardTableDefinition.newBuilder().setSchema(SCHEMA).build(); + Mockito.when(table.getDefinition()).thenReturn(tableDefinition); + + BigQuery bigQuery = prepareMockedBigQuery(); + Mockito.when(bigQuery.getTable(Mockito.any(TableId.class))).thenReturn(table); + + TestWriter writer = new TestWriter(); + writer.setBigQuery(bigQuery); + + writer.setWriteChannelConfig(WriteChannelConfiguration.newBuilder(TABLE_ID) + .setAutodetect(true) + .setFormatOptions(formatOptions) + .build()); + IllegalArgumentException actual = Assertions.assertThrows(IllegalArgumentException.class, + writer::testPerformFormatSpecificChecks); + Assertions.assertEquals("Only %s format is allowed".formatted(FormatOptions.csv().getType()), + actual.getMessage()); + } + + static Stream<FormatOptions> invalidFormats() { + return Stream.of(FormatOptions.parquet(), FormatOptions.avro(), FormatOptions.bigtable(), + FormatOptions.datastoreBackup(), FormatOptions.googleSheets(), FormatOptions.iceberg(), + FormatOptions.orc(), FormatOptions.json()); + } + + private static final class TestWriter extends BigQueryLoadJobCsvItemWriter<PersonDto> { + + public void testInitializeProperties(List<PersonDto> items) { + doInitializeProperties(items); + } + + public List<byte[]> testConvert(List<PersonDto> items) { + return convertObjectsToByteArrays(items); + } + + public void testPerformFormatSpecificChecks() { + performFormatSpecificChecks(); + } + + } + } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/csv/builder/BigQueryLoadJobCsvItemWriterBuilderTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/csv/builder/BigQueryLoadJobCsvItemWriterBuilderTest.java new file mode 100644 index 0000000..adc8881 --- /dev/null +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/csv/builder/BigQueryLoadJobCsvItemWriterBuilderTest.java @@ -0,0 +1,95 @@ +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.extensions.bigquery.unit.writer.loadjob.csv.builder; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.DatasetInfo; +import com.google.cloud.bigquery.FormatOptions; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.WriteChannelConfiguration; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.springframework.batch.extensions.bigquery.common.PersonDto; +import org.springframework.batch.extensions.bigquery.common.TestConstants; +import org.springframework.batch.extensions.bigquery.unit.base.AbstractBigQueryTest; +import org.springframework.batch.extensions.bigquery.writer.loadjob.BigQueryLoadJobBaseItemWriter; +import org.springframework.batch.extensions.bigquery.writer.loadjob.csv.BigQueryLoadJobCsvItemWriter; +import org.springframework.batch.extensions.bigquery.writer.loadjob.csv.builder.BigQueryCsvItemWriterBuilder; +import org.springframework.core.convert.converter.Converter; + +import java.lang.invoke.MethodHandles; +import java.util.function.Consumer; + +class BigQueryLoadJobCsvItemWriterBuilderTest extends AbstractBigQueryTest { + + @Test + void testBuild() throws IllegalAccessException, NoSuchFieldException { + MethodHandles.Lookup csvWriterHandle = MethodHandles.privateLookupIn(BigQueryLoadJobCsvItemWriter.class, + MethodHandles.lookup()); + MethodHandles.Lookup baseWriterHandle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, + MethodHandles.lookup()); + + Converter<PersonDto, byte[]> rowMapper = source -> new byte[0]; + DatasetInfo datasetInfo = DatasetInfo.newBuilder(TestConstants.DATASET).setLocation("europe-west-2").build(); + Consumer<Job> jobConsumer = job -> { + }; + BigQuery mockedBigQuery = prepareMockedBigQuery(); + + WriteChannelConfiguration writeConfiguration = WriteChannelConfiguration + .newBuilder(TableId.of(datasetInfo.getDatasetId().getDataset(), TestConstants.CSV)) + .setFormatOptions(FormatOptions.csv()) + .build(); + + BigQueryLoadJobCsvItemWriter<PersonDto> writer = new BigQueryCsvItemWriterBuilder<PersonDto>() + .rowMapper(rowMapper) + .writeChannelConfig(writeConfiguration) + .jobConsumer(jobConsumer) + .bigQuery(mockedBigQuery) + .datasetInfo(datasetInfo) + .build(); + + Assertions.assertNotNull(writer); + + Converter<PersonDto, byte[]> actualRowMapper = (Converter<PersonDto, byte[]>) csvWriterHandle + .findVarHandle(BigQueryLoadJobCsvItemWriter.class, "rowMapper", Converter.class) + .get(writer); + + WriteChannelConfiguration actualWriteChannelConfig = (WriteChannelConfiguration) csvWriterHandle + .findVarHandle(BigQueryLoadJobCsvItemWriter.class, "writeChannelConfig", WriteChannelConfiguration.class) + .get(writer); + + Consumer<Job> actualJobConsumer = (Consumer<Job>) baseWriterHandle + .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "jobConsumer", Consumer.class) + .get(writer); + + BigQuery actualBigQuery = (BigQuery) baseWriterHandle + .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "bigQuery", BigQuery.class) + .get(writer); + + DatasetInfo actualDatasetInfo = (DatasetInfo) baseWriterHandle + .findVarHandle(BigQueryLoadJobCsvItemWriter.class, "datasetInfo", DatasetInfo.class) + .get(writer); + + Assertions.assertEquals(rowMapper, actualRowMapper); + Assertions.assertEquals(writeConfiguration, actualWriteChannelConfig); + Assertions.assertEquals(jobConsumer, actualJobConsumer); + Assertions.assertEquals(mockedBigQuery, actualBigQuery); + Assertions.assertEquals(datasetInfo, actualDatasetInfo); + } + +} \ No newline at end of file diff --git 
a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/csv/builder/BigQueryLoadJobCsvItemWriterBuilderTests.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/csv/builder/BigQueryLoadJobCsvItemWriterBuilderTests.java deleted file mode 100644 index f47cedb..0000000 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/csv/builder/BigQueryLoadJobCsvItemWriterBuilderTests.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright 2002-2025 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.springframework.batch.extensions.bigquery.unit.writer.loadjob.csv.builder; - -import com.google.cloud.bigquery.BigQuery; -import com.google.cloud.bigquery.DatasetInfo; -import com.google.cloud.bigquery.FormatOptions; -import com.google.cloud.bigquery.Job; -import com.google.cloud.bigquery.TableId; -import com.google.cloud.bigquery.WriteChannelConfiguration; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.springframework.batch.extensions.bigquery.common.PersonDto; -import org.springframework.batch.extensions.bigquery.common.TestConstants; -import org.springframework.batch.extensions.bigquery.unit.base.AbstractBigQueryTest; -import org.springframework.batch.extensions.bigquery.writer.loadjob.BigQueryLoadJobBaseItemWriter; -import org.springframework.batch.extensions.bigquery.writer.loadjob.csv.BigQueryLoadJobCsvItemWriter; -import org.springframework.batch.extensions.bigquery.writer.loadjob.csv.builder.BigQueryCsvItemWriterBuilder; -import org.springframework.core.convert.converter.Converter; - -import java.lang.invoke.MethodHandles; -import java.util.function.Consumer; - -class BigQueryLoadJobCsvItemWriterBuilderTests extends AbstractBigQueryTest { - - @Test - void testBuild() throws IllegalAccessException, NoSuchFieldException { - MethodHandles.Lookup csvWriterHandle = MethodHandles.privateLookupIn(BigQueryLoadJobCsvItemWriter.class, MethodHandles.lookup()); - MethodHandles.Lookup baseWriterHandle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, MethodHandles.lookup()); - - Converter<PersonDto, byte[]> rowMapper = source -> new byte[0]; - DatasetInfo datasetInfo = DatasetInfo.newBuilder(TestConstants.DATASET).setLocation("europe-west-2").build(); - Consumer<Job> jobConsumer = job -> {}; - BigQuery mockedBigQuery = prepareMockedBigQuery(); - - WriteChannelConfiguration writeConfiguration = WriteChannelConfiguration - .newBuilder(TableId.of(datasetInfo.getDatasetId().getDataset(), TestConstants.CSV)) - .setFormatOptions(FormatOptions.csv()) - .build(); - - BigQueryLoadJobCsvItemWriter<PersonDto> writer = new BigQueryCsvItemWriterBuilder<PersonDto>() - .rowMapper(rowMapper) - .writeChannelConfig(writeConfiguration) - .jobConsumer(jobConsumer) - .bigQuery(mockedBigQuery) - .datasetInfo(datasetInfo) - .build(); - - Assertions.assertNotNull(writer); - - Converter<PersonDto, byte[]> actualRowMapper = 
(Converter<PersonDto, byte[]>) csvWriterHandle - .findVarHandle(BigQueryLoadJobCsvItemWriter.class, "rowMapper", Converter.class) - .get(writer); - - WriteChannelConfiguration actualWriteChannelConfig = (WriteChannelConfiguration) csvWriterHandle - .findVarHandle(BigQueryLoadJobCsvItemWriter.class, "writeChannelConfig", WriteChannelConfiguration.class) - .get(writer); - - Consumer<Job> actualJobConsumer = (Consumer<Job>) baseWriterHandle - .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "jobConsumer", Consumer.class) - .get(writer); - - BigQuery actualBigQuery = (BigQuery) baseWriterHandle - .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "bigQuery", BigQuery.class) - .get(writer); - - DatasetInfo actualDatasetInfo = (DatasetInfo) baseWriterHandle - .findVarHandle(BigQueryLoadJobCsvItemWriter.class, "datasetInfo", DatasetInfo.class) - .get(writer); - - Assertions.assertEquals(rowMapper, actualRowMapper); - Assertions.assertEquals(writeConfiguration, actualWriteChannelConfig); - Assertions.assertEquals(jobConsumer, actualJobConsumer); - Assertions.assertEquals(mockedBigQuery, actualBigQuery); - Assertions.assertEquals(datasetInfo, actualDatasetInfo); - } - -} \ No newline at end of file diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/json/BigQueryLoadJobJsonItemWriterTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/json/BigQueryLoadJobJsonItemWriterTest.java index 20a99e5..eaf53b2 100644 --- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/json/BigQueryLoadJobJsonItemWriterTest.java +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/json/BigQueryLoadJobJsonItemWriterTest.java @@ -44,129 +44,127 @@ class BigQueryLoadJobJsonItemWriterTest extends AbstractBigQueryTest { - private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.JSON); - - @Test - void testSetMarshaller() throws IllegalAccessException, NoSuchFieldException { - BigQueryLoadJobJsonItemWriter<PersonDto> reader = new BigQueryLoadJobJsonItemWriter<>(); - MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobJsonItemWriter.class, MethodHandles.lookup()); - JsonObjectMarshaller<PersonDto> expected = new JacksonJsonObjectMarshaller<>(); - - reader.setMarshaller(expected); - - JsonObjectMarshaller<PersonDto> actual = (JsonObjectMarshaller<PersonDto>) handle - .findVarHandle(BigQueryLoadJobJsonItemWriter.class, "marshaller", JsonObjectMarshaller.class) - .get(reader); - - Assertions.assertEquals(expected, actual); - } - - @Test - void testConvertObjectsToByteArrays() { - TestWriter writer = new TestWriter(); - writer.setMarshaller(new JacksonJsonObjectMarshaller<>()); - - // Empty - Assertions.assertTrue(writer.testConvert(List.of()).isEmpty()); - - // Not empty - writer.setMarshaller(Record::toString); - List<byte[]> actual = writer.testConvert(TestConstants.CHUNK.getItems()); - - List<byte[]> expected = TestConstants.CHUNK - .getItems() - .stream() - .map(PersonDto::toString) - .map(s -> s.concat("\n")) - .map(String::getBytes) - .toList(); - - Assertions.assertEquals(expected.size(), actual.size()); - - for (int i = 0; i < actual.size(); i++) { - Assertions.assertArrayEquals(expected.get(i), actual.get(i)); - } - } - - @Test - void testPerformFormatSpecificChecks() { - TestWriter writer = new TestWriter(); - - Table table = Mockito.mock(Table.class); - StandardTableDefinition tableDefinition = StandardTableDefinition - 
.newBuilder() - .setSchema(Schema.of(Field.of(TestConstants.AGE, StandardSQLTypeName.STRING))) - .build(); - Mockito.when(table.getDefinition()).thenReturn(tableDefinition); - - BigQuery bigQuery = prepareMockedBigQuery(); - Mockito.when(bigQuery.getTable(Mockito.any(TableId.class))).thenReturn(table); - - // marshaller - IllegalArgumentException actual = Assertions.assertThrows(IllegalArgumentException.class, writer::testPerformFormatSpecificChecks); - Assertions.assertEquals("Marshaller must be provided", actual.getMessage()); - - // schema - writer.setMarshaller(new JacksonJsonObjectMarshaller<>()); - writer.setBigQuery(bigQuery); - writer.setWriteChannelConfig(WriteChannelConfiguration.of(TABLE_ID, FormatOptions.csv())); - actual = Assertions.assertThrows(IllegalArgumentException.class, writer::testPerformFormatSpecificChecks); - Assertions.assertEquals("Schema must be provided", actual.getMessage()); - - // schema equality - WriteChannelConfiguration channelConfig = WriteChannelConfiguration - .newBuilder(TABLE_ID) - .setSchema(Schema.of(Field.of(TestConstants.NAME, StandardSQLTypeName.STRING))) - .setFormatOptions(FormatOptions.json()) - .build(); - writer.setWriteChannelConfig(channelConfig); - actual = Assertions.assertThrows(IllegalArgumentException.class, writer::testPerformFormatSpecificChecks); - Assertions.assertEquals("Schema must be the same", actual.getMessage()); - } - - @ParameterizedTest - @MethodSource("invalidFormats") - void testPerformFormatSpecificChecks_Format(FormatOptions formatOptions) { - Table table = Mockito.mock(Table.class); - StandardTableDefinition tableDefinition = StandardTableDefinition - .newBuilder() - .setSchema(Schema.of(Field.of(TestConstants.AGE, StandardSQLTypeName.STRING))) - .build(); - Mockito.when(table.getDefinition()).thenReturn(tableDefinition); - - BigQuery bigQuery = prepareMockedBigQuery(); - Mockito.when(bigQuery.getTable(Mockito.any(TableId.class))).thenReturn(table); - - TestWriter writer = new TestWriter(); - writer.setBigQuery(bigQuery); - writer.setMarshaller(new GsonJsonObjectMarshaller<>()); - - writer.setWriteChannelConfig(WriteChannelConfiguration.newBuilder(TABLE_ID).setAutodetect(true).setFormatOptions(formatOptions).build()); - IllegalArgumentException actual = Assertions.assertThrows(IllegalArgumentException.class, writer::testPerformFormatSpecificChecks); - Assertions.assertEquals("Only %s format is allowed".formatted(FormatOptions.json().getType()), actual.getMessage()); - } - - static Stream<FormatOptions> invalidFormats() { - return Stream.of( - FormatOptions.parquet(), - FormatOptions.avro(), - FormatOptions.bigtable(), - FormatOptions.datastoreBackup(), - FormatOptions.googleSheets(), - FormatOptions.iceberg(), - FormatOptions.orc(), - FormatOptions.csv() - ); - } - - private static final class TestWriter extends BigQueryLoadJobJsonItemWriter<PersonDto> { - - public List<byte[]> testConvert(List<PersonDto> items) { - return convertObjectsToByteArrays(items); - } - - public void testPerformFormatSpecificChecks() { - performFormatSpecificChecks(); - } - } + private static final TableId TABLE_ID = TableId.of(TestConstants.DATASET, TestConstants.JSON); + + @Test + void testSetMarshaller() throws IllegalAccessException, NoSuchFieldException { + BigQueryLoadJobJsonItemWriter<PersonDto> reader = new BigQueryLoadJobJsonItemWriter<>(); + MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryLoadJobJsonItemWriter.class, + MethodHandles.lookup()); + JsonObjectMarshaller<PersonDto> expected = new JacksonJsonObjectMarshaller<>(); + + reader.setMarshaller(expected); + + 
JsonObjectMarshaller<PersonDto> actual = (JsonObjectMarshaller<PersonDto>) handle + .findVarHandle(BigQueryLoadJobJsonItemWriter.class, "marshaller", JsonObjectMarshaller.class) + .get(reader); + + Assertions.assertEquals(expected, actual); + } + + @Test + void testConvertObjectsToByteArrays() { + TestWriter writer = new TestWriter(); + writer.setMarshaller(new JacksonJsonObjectMarshaller<>()); + + // Empty + Assertions.assertTrue(writer.testConvert(List.of()).isEmpty()); + + // Not empty + writer.setMarshaller(Record::toString); + List<byte[]> actual = writer.testConvert(TestConstants.CHUNK.getItems()); + + List<byte[]> expected = TestConstants.CHUNK.getItems() + .stream() + .map(PersonDto::toString) + .map(s -> s.concat("\n")) + .map(String::getBytes) + .toList(); + + Assertions.assertEquals(expected.size(), actual.size()); + + for (int i = 0; i < actual.size(); i++) { + Assertions.assertArrayEquals(expected.get(i), actual.get(i)); + } + } + + @Test + void testPerformFormatSpecificChecks() { + TestWriter writer = new TestWriter(); + + Table table = Mockito.mock(Table.class); + StandardTableDefinition tableDefinition = StandardTableDefinition.newBuilder() + .setSchema(Schema.of(Field.of(TestConstants.AGE, StandardSQLTypeName.STRING))) + .build(); + Mockito.when(table.getDefinition()).thenReturn(tableDefinition); + + BigQuery bigQuery = prepareMockedBigQuery(); + Mockito.when(bigQuery.getTable(Mockito.any(TableId.class))).thenReturn(table); + + // marshaller + IllegalArgumentException actual = Assertions.assertThrows(IllegalArgumentException.class, + writer::testPerformFormatSpecificChecks); + Assertions.assertEquals("Marshaller must be provided", actual.getMessage()); + + // schema + writer.setMarshaller(new JacksonJsonObjectMarshaller<>()); + writer.setBigQuery(bigQuery); + writer.setWriteChannelConfig(WriteChannelConfiguration.of(TABLE_ID, FormatOptions.csv())); + actual = Assertions.assertThrows(IllegalArgumentException.class, writer::testPerformFormatSpecificChecks); + Assertions.assertEquals("Schema must be provided", actual.getMessage()); + + // schema equality + WriteChannelConfiguration channelConfig = WriteChannelConfiguration.newBuilder(TABLE_ID) + .setSchema(Schema.of(Field.of(TestConstants.NAME, StandardSQLTypeName.STRING))) + .setFormatOptions(FormatOptions.json()) + .build(); + writer.setWriteChannelConfig(channelConfig); + actual = Assertions.assertThrows(IllegalArgumentException.class, writer::testPerformFormatSpecificChecks); + Assertions.assertEquals("Schema must be the same", actual.getMessage()); + } + + @ParameterizedTest + @MethodSource("invalidFormats") + void testPerformFormatSpecificChecks_Format(FormatOptions formatOptions) { + Table table = Mockito.mock(Table.class); + StandardTableDefinition tableDefinition = StandardTableDefinition.newBuilder() + .setSchema(Schema.of(Field.of(TestConstants.AGE, StandardSQLTypeName.STRING))) + .build(); + Mockito.when(table.getDefinition()).thenReturn(tableDefinition); + + BigQuery bigQuery = prepareMockedBigQuery(); + Mockito.when(bigQuery.getTable(Mockito.any(TableId.class))).thenReturn(table); + + TestWriter writer = new TestWriter(); + writer.setBigQuery(bigQuery); + writer.setMarshaller(new GsonJsonObjectMarshaller<>()); + + writer.setWriteChannelConfig(WriteChannelConfiguration.newBuilder(TABLE_ID) + .setAutodetect(true) + .setFormatOptions(formatOptions) + .build()); + IllegalArgumentException actual = Assertions.assertThrows(IllegalArgumentException.class, + writer::testPerformFormatSpecificChecks); + Assertions.assertEquals("Only %s format is 
allowed".formatted(FormatOptions.json().getType()), + actual.getMessage()); + } + + static Stream<FormatOptions> invalidFormats() { + return Stream.of(FormatOptions.parquet(), FormatOptions.avro(), FormatOptions.bigtable(), + FormatOptions.datastoreBackup(), FormatOptions.googleSheets(), FormatOptions.iceberg(), + FormatOptions.orc(), FormatOptions.csv()); + } + + private static final class TestWriter extends BigQueryLoadJobJsonItemWriter<PersonDto> { + + public List<byte[]> testConvert(List<PersonDto> items) { + return convertObjectsToByteArrays(items); + } + + public void testPerformFormatSpecificChecks() { + performFormatSpecificChecks(); + } + + } + } diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/json/builder/BigQueryLoadJobJsonItemWriterBuilderTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/json/builder/BigQueryLoadJobJsonItemWriterBuilderTest.java new file mode 100644 index 0000000..b41aa56 --- /dev/null +++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/json/builder/BigQueryLoadJobJsonItemWriterBuilderTest.java @@ -0,0 +1,96 @@ +/* + * Copyright 2002-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.springframework.batch.extensions.bigquery.unit.writer.loadjob.json.builder; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.DatasetInfo; +import com.google.cloud.bigquery.FormatOptions; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.WriteChannelConfiguration; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.springframework.batch.extensions.bigquery.common.PersonDto; +import org.springframework.batch.extensions.bigquery.common.TestConstants; +import org.springframework.batch.extensions.bigquery.unit.base.AbstractBigQueryTest; +import org.springframework.batch.extensions.bigquery.writer.loadjob.BigQueryLoadJobBaseItemWriter; +import org.springframework.batch.extensions.bigquery.writer.loadjob.json.BigQueryLoadJobJsonItemWriter; +import org.springframework.batch.extensions.bigquery.writer.loadjob.json.builder.BigQueryLoadJobJsonItemWriterBuilder; +import org.springframework.batch.item.json.JacksonJsonObjectMarshaller; +import org.springframework.batch.item.json.JsonObjectMarshaller; + +import java.lang.invoke.MethodHandles; +import java.util.function.Consumer; + +class BigQueryLoadJobJsonItemWriterBuilderTest extends AbstractBigQueryTest { + + @Test + void testBuild() throws IllegalAccessException, NoSuchFieldException { + MethodHandles.Lookup jsonWriterHandle = MethodHandles.privateLookupIn(BigQueryLoadJobJsonItemWriter.class, + MethodHandles.lookup()); + MethodHandles.Lookup baseWriterHandle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, + MethodHandles.lookup()); + + JsonObjectMarshaller marshaller = new JacksonJsonObjectMarshaller<>(); + DatasetInfo datasetInfo = DatasetInfo.newBuilder(TestConstants.DATASET).setLocation("europe-west-2").build(); + Consumer jobConsumer = job -> { + }; + BigQuery mockedBigQuery = prepareMockedBigQuery(); + + WriteChannelConfiguration writeConfiguration = WriteChannelConfiguration + .newBuilder(TableId.of(datasetInfo.getDatasetId().getDataset(), TestConstants.JSON)) + .setFormatOptions(FormatOptions.json()) + .build(); + + BigQueryLoadJobJsonItemWriter writer = new BigQueryLoadJobJsonItemWriterBuilder() + .marshaller(marshaller) + .writeChannelConfig(writeConfiguration) + .jobConsumer(jobConsumer) + .bigQuery(mockedBigQuery) + .datasetInfo(datasetInfo) + .build(); + + Assertions.assertNotNull(writer); + + JsonObjectMarshaller actualMarshaller = (JsonObjectMarshaller) jsonWriterHandle + .findVarHandle(BigQueryLoadJobJsonItemWriter.class, "marshaller", JsonObjectMarshaller.class) + .get(writer); + + WriteChannelConfiguration actualWriteChannelConfig = (WriteChannelConfiguration) jsonWriterHandle + .findVarHandle(BigQueryLoadJobJsonItemWriter.class, "writeChannelConfig", WriteChannelConfiguration.class) + .get(writer); + + Consumer actualJobConsumer = (Consumer) baseWriterHandle + .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "jobConsumer", Consumer.class) + .get(writer); + + BigQuery actualBigQuery = (BigQuery) baseWriterHandle + .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "bigQuery", BigQuery.class) + .get(writer); + + DatasetInfo actualDatasetInfo = (DatasetInfo) baseWriterHandle + .findVarHandle(BigQueryLoadJobJsonItemWriter.class, "datasetInfo", DatasetInfo.class) + .get(writer); + + Assertions.assertEquals(marshaller, actualMarshaller); + Assertions.assertEquals(writeConfiguration, actualWriteChannelConfig); + Assertions.assertEquals(jobConsumer, 
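+        // Editor's note: privateLookupIn(..) plus findVarHandle(..) gives this test
+        // read access to the private fields the builder populates, so no getters
+        // have to be added to production code purely for assertions.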
+        JsonObjectMarshaller<PersonDto> marshaller = new JacksonJsonObjectMarshaller<>();
+        DatasetInfo datasetInfo = DatasetInfo.newBuilder(TestConstants.DATASET).setLocation("europe-west-2").build();
+        Consumer<Job> jobConsumer = job -> {
+        };
+        BigQuery mockedBigQuery = prepareMockedBigQuery();
+
+        WriteChannelConfiguration writeConfiguration = WriteChannelConfiguration
+            .newBuilder(TableId.of(datasetInfo.getDatasetId().getDataset(), TestConstants.JSON))
+            .setFormatOptions(FormatOptions.json())
+            .build();
+
+        BigQueryLoadJobJsonItemWriter<PersonDto> writer = new BigQueryLoadJobJsonItemWriterBuilder<PersonDto>()
+            .marshaller(marshaller)
+            .writeChannelConfig(writeConfiguration)
+            .jobConsumer(jobConsumer)
+            .bigQuery(mockedBigQuery)
+            .datasetInfo(datasetInfo)
+            .build();
+
+        Assertions.assertNotNull(writer);
+
+        JsonObjectMarshaller<PersonDto> actualMarshaller = (JsonObjectMarshaller<PersonDto>) jsonWriterHandle
+            .findVarHandle(BigQueryLoadJobJsonItemWriter.class, "marshaller", JsonObjectMarshaller.class)
+            .get(writer);
+
+        WriteChannelConfiguration actualWriteChannelConfig = (WriteChannelConfiguration) jsonWriterHandle
+            .findVarHandle(BigQueryLoadJobJsonItemWriter.class, "writeChannelConfig", WriteChannelConfiguration.class)
+            .get(writer);
+
+        Consumer<Job> actualJobConsumer = (Consumer<Job>) baseWriterHandle
+            .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "jobConsumer", Consumer.class)
+            .get(writer);
+
+        BigQuery actualBigQuery = (BigQuery) baseWriterHandle
+            .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "bigQuery", BigQuery.class)
+            .get(writer);
+
+        DatasetInfo actualDatasetInfo = (DatasetInfo) baseWriterHandle
+            .findVarHandle(BigQueryLoadJobJsonItemWriter.class, "datasetInfo", DatasetInfo.class)
+            .get(writer);
+
+        Assertions.assertEquals(marshaller, actualMarshaller);
+        Assertions.assertEquals(writeConfiguration, actualWriteChannelConfig);
+        Assertions.assertEquals(jobConsumer, actualJobConsumer);
+        Assertions.assertEquals(mockedBigQuery, actualBigQuery);
+        Assertions.assertEquals(datasetInfo, actualDatasetInfo);
+    }
+
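+    // Editor's note: a minimal, hypothetical wiring of this builder outside the
+    // test; dataset/table names are illustrative and not part of this change:
+    //
+    //   BigQueryLoadJobJsonItemWriter<PersonDto> itemWriter =
+    //           new BigQueryLoadJobJsonItemWriterBuilder<PersonDto>()
+    //               .bigQuery(BigQueryOptions.getDefaultInstance().getService())
+    //               .writeChannelConfig(WriteChannelConfiguration
+    //                   .newBuilder(TableId.of("my_dataset", "persons"))
+    //                   .setFormatOptions(FormatOptions.json())
+    //                   .build())
+    //               .marshaller(new JacksonJsonObjectMarshaller<>())
+    //               .build();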
+}
\ No newline at end of file
diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/json/builder/BigQueryLoadJobJsonItemWriterBuilderTests.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/json/builder/BigQueryLoadJobJsonItemWriterBuilderTests.java
deleted file mode 100644
index 0a2f317..0000000
--- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/loadjob/json/builder/BigQueryLoadJobJsonItemWriterBuilderTests.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright 2002-2025 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.springframework.batch.extensions.bigquery.unit.writer.loadjob.json.builder;
-
-import com.google.cloud.bigquery.BigQuery;
-import com.google.cloud.bigquery.DatasetInfo;
-import com.google.cloud.bigquery.FormatOptions;
-import com.google.cloud.bigquery.Job;
-import com.google.cloud.bigquery.TableId;
-import com.google.cloud.bigquery.WriteChannelConfiguration;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Test;
-import org.springframework.batch.extensions.bigquery.common.PersonDto;
-import org.springframework.batch.extensions.bigquery.common.TestConstants;
-import org.springframework.batch.extensions.bigquery.unit.base.AbstractBigQueryTest;
-import org.springframework.batch.extensions.bigquery.writer.loadjob.BigQueryLoadJobBaseItemWriter;
-import org.springframework.batch.extensions.bigquery.writer.loadjob.json.BigQueryLoadJobJsonItemWriter;
-import org.springframework.batch.extensions.bigquery.writer.loadjob.json.builder.BigQueryLoadJobJsonItemWriterBuilder;
-import org.springframework.batch.item.json.JacksonJsonObjectMarshaller;
-import org.springframework.batch.item.json.JsonObjectMarshaller;
-
-import java.lang.invoke.MethodHandles;
-import java.util.function.Consumer;
-
-class BigQueryLoadJobJsonItemWriterBuilderTests extends AbstractBigQueryTest {
-
-    @Test
-    void testBuild() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup jsonWriterHandle = MethodHandles.privateLookupIn(BigQueryLoadJobJsonItemWriter.class, MethodHandles.lookup());
-        MethodHandles.Lookup baseWriterHandle = MethodHandles.privateLookupIn(BigQueryLoadJobBaseItemWriter.class, MethodHandles.lookup());
-
-        JsonObjectMarshaller<PersonDto> marshaller = new JacksonJsonObjectMarshaller<>();
-        DatasetInfo datasetInfo = DatasetInfo.newBuilder(TestConstants.DATASET).setLocation("europe-west-2").build();
-        Consumer<Job> jobConsumer = job -> {};
-        BigQuery mockedBigQuery = prepareMockedBigQuery();
-
-        WriteChannelConfiguration writeConfiguration = WriteChannelConfiguration
-                .newBuilder(TableId.of(datasetInfo.getDatasetId().getDataset(), TestConstants.JSON))
-                .setFormatOptions(FormatOptions.json())
-                .build();
-
-        BigQueryLoadJobJsonItemWriter<PersonDto> writer = new BigQueryLoadJobJsonItemWriterBuilder<PersonDto>()
-                .marshaller(marshaller)
-                .writeChannelConfig(writeConfiguration)
-                .jobConsumer(jobConsumer)
-                .bigQuery(mockedBigQuery)
-                .datasetInfo(datasetInfo)
-                .build();
-
-        Assertions.assertNotNull(writer);
-
-        JsonObjectMarshaller<PersonDto> actualMarshaller = (JsonObjectMarshaller<PersonDto>) jsonWriterHandle
-                .findVarHandle(BigQueryLoadJobJsonItemWriter.class, "marshaller", JsonObjectMarshaller.class)
-                .get(writer);
-
-        WriteChannelConfiguration actualWriteChannelConfig = (WriteChannelConfiguration) jsonWriterHandle
-                .findVarHandle(BigQueryLoadJobJsonItemWriter.class, "writeChannelConfig", WriteChannelConfiguration.class)
-                .get(writer);
-
-        Consumer<Job> actualJobConsumer = (Consumer<Job>) baseWriterHandle
-                .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "jobConsumer", Consumer.class)
-                .get(writer);
-
-        BigQuery actualBigQuery = (BigQuery) baseWriterHandle
-                .findVarHandle(BigQueryLoadJobBaseItemWriter.class, "bigQuery", BigQuery.class)
-                .get(writer);
-
-        DatasetInfo actualDatasetInfo = (DatasetInfo) baseWriterHandle
-                .findVarHandle(BigQueryLoadJobJsonItemWriter.class, "datasetInfo", DatasetInfo.class)
-                .get(writer);
-
-        Assertions.assertEquals(marshaller, actualMarshaller);
-        Assertions.assertEquals(writeConfiguration, actualWriteChannelConfig);
-        Assertions.assertEquals(jobConsumer, actualJobConsumer);
-        Assertions.assertEquals(mockedBigQuery, actualBigQuery);
-        Assertions.assertEquals(datasetInfo, actualDatasetInfo);
-    }
-
-}
\ No newline at end of file
diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/BigQueryWriteApiCommitedJsonItemWriterTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/BigQueryWriteApiCommitedJsonItemWriterTest.java
index bd28155..aaf49b0 100644
--- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/BigQueryWriteApiCommitedJsonItemWriterTest.java
+++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/BigQueryWriteApiCommitedJsonItemWriterTest.java
@@ -29,155 +29,183 @@ class BigQueryWriteApiCommitedJsonItemWriterTest {
-    private static final TableName TABLE_NAME = TableName.of(TestConstants.PROJECT, TestConstants.DATASET, TestConstants.JSON);
-
-    @Test
-    void testWrite_Empty() throws Exception {
-        BigQueryWriteClient writeClient = Mockito.mock(BigQueryWriteClient.class);
-        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
-        writer.setBigQueryWriteClient(writeClient);
-
-        writer.write(Chunk.of());
-
-        Mockito.verifyNoInteractions(writeClient);
-    }
-
-    @Test
-    void testWrite_Exception() {
-        BigQueryItemWriterException ex = Assertions.assertThrows(
-                BigQueryItemWriterException.class, () -> new BigQueryWriteApiCommitedJsonItemWriter<>().write(TestConstants.CHUNK)
-        );
-        Assertions.assertEquals("Error on write happened", ex.getMessage());
-    }
-
-    @Test
-    void testWrite() throws Exception {
-        WriteStreamName streamName = WriteStreamName.of(TABLE_NAME.getProject(), TABLE_NAME.getDataset(), TABLE_NAME.getTable(), "test-stream-1");
-
-        WriteStream writeStream = WriteStream.newBuilder().setType(WriteStream.Type.COMMITTED).build();
-        CreateWriteStreamRequest streamRequest = CreateWriteStreamRequest.newBuilder().setParent(TABLE_NAME.toString()).setWriteStream(writeStream).build();
-
-        BigQueryWriteClient writeClient = Mockito.mock(BigQueryWriteClient.class);
-        WriteStream generatedWriteStream = WriteStream.newBuilder().setName(streamName.toString()).setTableSchema(PersonDto.getWriteApiSchema()).build();
-        Mockito.when(writeClient.createWriteStream(streamRequest)).thenReturn(generatedWriteStream);
-        Mockito.when(writeClient.getWriteStream(Mockito.any(GetWriteStreamRequest.class))).thenReturn(generatedWriteStream);
-        Mockito.when(writeClient.getSettings()).thenReturn(BigQueryWriteSettings.newBuilder().setCredentialsProvider(NoCredentialsProvider.create()).build());
-        Mockito.when(writeClient.finalizeWriteStream(streamName.toString())).thenReturn(FinalizeWriteStreamResponse.newBuilder().build());
-
-        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
-        writer.setTableName(TABLE_NAME);
-        writer.setBigQueryWriteClient(writeClient);
-        writer.setMarshaller(new JacksonJsonObjectMarshaller<>());
-
-        writer.write(TestConstants.CHUNK);
-
-        Mockito.verify(writeClient).createWriteStream(streamRequest);
-        Mockito.verify(writeClient).finalizeWriteStream(streamName.toString());
-    }
-
-    @Test
-    void testAfterPropertiesSet() {
-        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
-
-        // bigQueryWriteClient
-        IllegalArgumentException ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
-        Assertions.assertEquals("BigQuery write client must be provided", ex.getMessage());
-
-        // tableName
-        writer.setBigQueryWriteClient(Mockito.mock(BigQueryWriteClient.class));
-        ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
-        Assertions.assertEquals("Table name must be provided", ex.getMessage());
-
-        // marshaller
-        writer.setTableName(TABLE_NAME);
-        ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
-        Assertions.assertEquals("Marshaller must be provided", ex.getMessage());
-
-        // executor
-        writer.setApiFutureCallback(new TestCallback());
-        writer.setMarshaller(new GsonJsonObjectMarshaller<>());
-        ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
-        Assertions.assertEquals("Executor must be provided", ex.getMessage());
-
-        // All good
-        writer.setExecutor(Executors.newSingleThreadExecutor());
-        Assertions.assertDoesNotThrow(writer::afterPropertiesSet);
-    }
-
-    @Test
-    void testSetBigQueryWriteClient() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriter.class, MethodHandles.lookup());
-        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
-        BigQueryWriteClient expected = Mockito.mock(BigQueryWriteClient.class);
-
-        writer.setBigQueryWriteClient(expected);
-
-        BigQueryWriteClient actual = (BigQueryWriteClient) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "bigQueryWriteClient", BigQueryWriteClient.class)
-                .get(writer);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testSetTableName() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriter.class, MethodHandles.lookup());
-        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
-
-        writer.setTableName(TABLE_NAME);
-
-        TableName actual = (TableName) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "tableName", TableName.class)
-                .get(writer);
-        Assertions.assertEquals(TABLE_NAME, actual);
-    }
-
-    @Test
-    void testSetMarshaller() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriter.class, MethodHandles.lookup());
-        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
-        JsonObjectMarshaller<PersonDto> expected = new JacksonJsonObjectMarshaller<>();
-
-        writer.setMarshaller(expected);
-
-        JsonObjectMarshaller<PersonDto> actual = (JsonObjectMarshaller<PersonDto>) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "marshaller", JsonObjectMarshaller.class)
-                .get(writer);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testSetApiFutureCallback() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriter.class, MethodHandles.lookup());
-        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
-        ApiFutureCallback<AppendRowsResponse> expected = new TestCallback();
-
-        writer.setApiFutureCallback(expected);
-
-        ApiFutureCallback<AppendRowsResponse> actual = (ApiFutureCallback<AppendRowsResponse>) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "apiFutureCallback", ApiFutureCallback .class)
-                .get(writer);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testSetExecutor() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriter.class, MethodHandles.lookup());
-        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
-        Executor expected = Executors.newSingleThreadExecutor();
-
-        writer.setExecutor(expected);
-
-        Executor actual = (Executor) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "executor", Executor.class)
-                .get(writer);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    private static final class TestCallback implements ApiFutureCallback<AppendRowsResponse> {
-        @Override
-        public void onFailure(Throwable t) {}
-
-        @Override
-        public void onSuccess(AppendRowsResponse result) {}
-    }
+    private static final TableName TABLE_NAME = TableName.of(TestConstants.PROJECT, TestConstants.DATASET,
+            TestConstants.JSON);
+
+    @Test
+    void testWrite_Empty() throws Exception {
+        BigQueryWriteClient writeClient = Mockito.mock(BigQueryWriteClient.class);
+        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
+        writer.setBigQueryWriteClient(writeClient);
+
+        writer.write(Chunk.of());
+
+        Mockito.verifyNoInteractions(writeClient);
+    }
+
+    @Test
+    void testWrite_Exception() {
+        BigQueryItemWriterException ex = Assertions.assertThrows(BigQueryItemWriterException.class,
+                () -> new BigQueryWriteApiCommitedJsonItemWriter<>().write(TestConstants.CHUNK));
+        Assertions.assertEquals("Error on write happened", ex.getMessage());
+    }
+
+    @Test
+    void testWrite() throws Exception {
+        WriteStreamName streamName = WriteStreamName.of(TABLE_NAME.getProject(), TABLE_NAME.getDataset(),
+                TABLE_NAME.getTable(), "test-stream-1");
+
+        WriteStream writeStream = WriteStream.newBuilder().setType(WriteStream.Type.COMMITTED).build();
+        CreateWriteStreamRequest streamRequest = CreateWriteStreamRequest.newBuilder()
+            .setParent(TABLE_NAME.toString())
+            .setWriteStream(writeStream)
+            .build();
+
+        BigQueryWriteClient writeClient = Mockito.mock(BigQueryWriteClient.class);
+        WriteStream generatedWriteStream = WriteStream.newBuilder()
+            .setName(streamName.toString())
+            .setTableSchema(PersonDto.getWriteApiSchema())
+            .build();
+        Mockito.when(writeClient.createWriteStream(streamRequest)).thenReturn(generatedWriteStream);
+        Mockito.when(writeClient.getWriteStream(Mockito.any(GetWriteStreamRequest.class)))
+            .thenReturn(generatedWriteStream);
+        Mockito.when(writeClient.getSettings())
+            .thenReturn(
+                    BigQueryWriteSettings.newBuilder().setCredentialsProvider(NoCredentialsProvider.create()).build());
+        Mockito.when(writeClient.finalizeWriteStream(streamName.toString()))
+            .thenReturn(FinalizeWriteStreamResponse.newBuilder().build());
+
+        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
+        writer.setTableName(TABLE_NAME);
+        writer.setBigQueryWriteClient(writeClient);
+        writer.setMarshaller(new JacksonJsonObjectMarshaller<>());
+
+        writer.write(TestConstants.CHUNK);
+
+        Mockito.verify(writeClient).createWriteStream(streamRequest);
+        Mockito.verify(writeClient).finalizeWriteStream(streamName.toString());
+    }
+
+    @Test
+    void testAfterPropertiesSet() {
+        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
+
+        // bigQueryWriteClient
+        IllegalArgumentException ex = Assertions.assertThrows(IllegalArgumentException.class,
+                writer::afterPropertiesSet);
+        Assertions.assertEquals("BigQuery write client must be provided", ex.getMessage());
+
+        // tableName
+        writer.setBigQueryWriteClient(Mockito.mock(BigQueryWriteClient.class));
+        ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
+        Assertions.assertEquals("Table name must be provided", ex.getMessage());
+
+        // marshaller
+        writer.setTableName(TABLE_NAME);
+        ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
+        Assertions.assertEquals("Marshaller must be provided", ex.getMessage());
+
+        // executor
+        writer.setApiFutureCallback(new TestCallback());
+        writer.setMarshaller(new GsonJsonObjectMarshaller<>());
+        ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
+        Assertions.assertEquals("Executor must be provided", ex.getMessage());
+
+        // All good
+        writer.setExecutor(Executors.newSingleThreadExecutor());
+        Assertions.assertDoesNotThrow(writer::afterPropertiesSet);
+    }
+
+    @Test
+    void testSetBigQueryWriteClient() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriter.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
+        BigQueryWriteClient expected = Mockito.mock(BigQueryWriteClient.class);
+
+        writer.setBigQueryWriteClient(expected);
+
+        BigQueryWriteClient actual = (BigQueryWriteClient) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "bigQueryWriteClient",
+                    BigQueryWriteClient.class)
+            .get(writer);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testSetTableName() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriter.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
+
+        writer.setTableName(TABLE_NAME);
+
+        TableName actual = (TableName) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "tableName", TableName.class)
+            .get(writer);
+        Assertions.assertEquals(TABLE_NAME, actual);
+    }
+
+    @Test
+    void testSetMarshaller() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriter.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
+        JsonObjectMarshaller<PersonDto> expected = new JacksonJsonObjectMarshaller<>();
+
+        writer.setMarshaller(expected);
+
+        JsonObjectMarshaller<PersonDto> actual = (JsonObjectMarshaller<PersonDto>) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "marshaller", JsonObjectMarshaller.class)
+            .get(writer);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testSetApiFutureCallback() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriter.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
+        ApiFutureCallback<AppendRowsResponse> expected = new TestCallback();
+
+        writer.setApiFutureCallback(expected);
+
+        ApiFutureCallback<AppendRowsResponse> actual = (ApiFutureCallback<AppendRowsResponse>) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "apiFutureCallback", ApiFutureCallback.class)
+            .get(writer);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testSetExecutor() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriter.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriter<>();
+        Executor expected = Executors.newSingleThreadExecutor();
+
+        writer.setExecutor(expected);
+
+        Executor actual = (Executor) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "executor", Executor.class)
+            .get(writer);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    private static final class TestCallback implements ApiFutureCallback<AppendRowsResponse> {
+
+        @Override
+        public void onFailure(Throwable t) {
+        }
+
+        @Override
+        public void onSuccess(AppendRowsResponse result) {
+        }
+
+    }
+}
diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/BigQueryWriteApiPendingJsonItemWriterTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/BigQueryWriteApiPendingJsonItemWriterTest.java
index a7eabec..b8baed3 100644
--- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/BigQueryWriteApiPendingJsonItemWriterTest.java
+++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/BigQueryWriteApiPendingJsonItemWriterTest.java
@@ -21,162 +21,189 @@ class BigQueryWriteApiPendingJsonItemWriterTest {
-    private static final TableName TABLE_NAME = TableName.of(TestConstants.PROJECT, TestConstants.DATASET, TestConstants.JSON);
-
-    @Test
-    void testWrite_Empty() throws Exception {
-        BigQueryWriteClient writeClient = Mockito.mock(BigQueryWriteClient.class);
-        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
-        writer.setBigQueryWriteClient(writeClient);
-
-        writer.write(Chunk.of());
-
-        Mockito.verifyNoInteractions(writeClient);
-    }
-
-    @Test
-    void testWrite_Exception() {
-        BigQueryItemWriterException ex = Assertions.assertThrows(
-                BigQueryItemWriterException.class, () -> new BigQueryWriteApiPendingJsonItemWriter<>().write(TestConstants.CHUNK)
-        );
-        Assertions.assertEquals("Error on write happened", ex.getMessage());
-    }
-
-    @Test
-    void testWrite() throws Exception {
-        WriteStreamName streamName = WriteStreamName.of(TABLE_NAME.getProject(), TABLE_NAME.getDataset(), TABLE_NAME.getTable(), "test-stream-1");
-
-        WriteStream writeStream = WriteStream.newBuilder().setType(WriteStream.Type.PENDING).build();
-        CreateWriteStreamRequest streamRequest = CreateWriteStreamRequest.newBuilder().setParent(TABLE_NAME.toString()).setWriteStream(writeStream).build();
-
-        BigQueryWriteClient writeClient = Mockito.mock(BigQueryWriteClient.class);
-        WriteStream generatedWriteStream = WriteStream.newBuilder().setName(streamName.toString()).setTableSchema(PersonDto.getWriteApiSchema()).build();
-        Mockito.when(writeClient.createWriteStream(streamRequest)).thenReturn(generatedWriteStream);
-        Mockito.when(writeClient.getWriteStream(Mockito.any(GetWriteStreamRequest.class))).thenReturn(generatedWriteStream);
-        Mockito.when(writeClient.getSettings()).thenReturn(BigQueryWriteSettings.newBuilder().setCredentialsProvider(NoCredentialsProvider.create()).build());
-        Mockito.when(writeClient.finalizeWriteStream(streamName.toString())).thenReturn(FinalizeWriteStreamResponse.newBuilder().build());
-
-        BatchCommitWriteStreamsResponse batchResponse = Mockito.mock(BatchCommitWriteStreamsResponse.class);
-        Mockito.when(batchResponse.hasCommitTime()).thenReturn(true);
-
-        Mockito
-                .when(writeClient.batchCommitWriteStreams(Mockito.any(BatchCommitWriteStreamsRequest.class)))
-                .thenReturn(batchResponse);
-
-        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
-        writer.setTableName(TABLE_NAME);
-        writer.setBigQueryWriteClient(writeClient);
-        writer.setMarshaller(new JacksonJsonObjectMarshaller<>());
-
-        writer.write(TestConstants.CHUNK);
-
-        Mockito.verify(writeClient).createWriteStream(streamRequest);
-        Mockito.verify(writeClient).finalizeWriteStream(streamName.toString());
-    }
-
-    @Test
-    void testAfterPropertiesSet() {
-        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
-
-        // bigQueryWriteClient
-        IllegalArgumentException ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
-        Assertions.assertEquals("BigQuery write client must be provided", ex.getMessage());
-
-        // tableName
-        writer.setBigQueryWriteClient(Mockito.mock(BigQueryWriteClient.class));
-        ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
-        Assertions.assertEquals("Table name must be provided", ex.getMessage());
-
-        // marshaller
-        writer.setTableName(TABLE_NAME);
-        ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
-        Assertions.assertEquals("Marshaller must be provided", ex.getMessage());
-
-        // executor
-        writer.setApiFutureCallback(new TestCallback());
-        writer.setMarshaller(new GsonJsonObjectMarshaller<>());
-        ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
-        Assertions.assertEquals("Executor must be provided", ex.getMessage());
-
-        // All good
-        writer.setExecutor(Executors.newSingleThreadExecutor());
-        Assertions.assertDoesNotThrow(writer::afterPropertiesSet);
-    }
-
-    @Test
-    void testSetBigQueryWriteClient() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriter.class, MethodHandles.lookup());
-        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
-        BigQueryWriteClient expected = Mockito.mock(BigQueryWriteClient.class);
-
-        writer.setBigQueryWriteClient(expected);
-
-        BigQueryWriteClient actual = (BigQueryWriteClient) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "bigQueryWriteClient", BigQueryWriteClient.class)
-                .get(writer);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testSetTableName() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriter.class, MethodHandles.lookup());
-        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
-
-        writer.setTableName(TABLE_NAME);
-
-        TableName actual = (TableName) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "tableName", TableName.class)
-                .get(writer);
-        Assertions.assertEquals(TABLE_NAME, actual);
-    }
-
-    @Test
-    void testSetMarshaller() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriter.class, MethodHandles.lookup());
-        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
-        JsonObjectMarshaller<PersonDto> expected = new JacksonJsonObjectMarshaller<>();
-
-        writer.setMarshaller(expected);
-
-        JsonObjectMarshaller<PersonDto> actual = (JsonObjectMarshaller<PersonDto>) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "marshaller", JsonObjectMarshaller.class)
-                .get(writer);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testSetApiFutureCallback() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriter.class, MethodHandles.lookup());
-        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
-        ApiFutureCallback<AppendRowsResponse> expected = new TestCallback();
-
-        writer.setApiFutureCallback(expected);
-
-        ApiFutureCallback<AppendRowsResponse> actual = (ApiFutureCallback<AppendRowsResponse>) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "apiFutureCallback", ApiFutureCallback .class)
-                .get(writer);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testSetExecutor() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriter.class, MethodHandles.lookup());
-        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
-        Executor expected = Executors.newSingleThreadExecutor();
-
-        writer.setExecutor(expected);
-
-        Executor actual = (Executor) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "executor", Executor.class)
-                .get(writer);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    private static final class TestCallback implements ApiFutureCallback<AppendRowsResponse> {
-        @Override
-        public void onFailure(Throwable t) {}
-
-        @Override
-        public void onSuccess(AppendRowsResponse result) {}
-    }
+    private static final TableName TABLE_NAME = TableName.of(TestConstants.PROJECT, TestConstants.DATASET,
+            TestConstants.JSON);
+
+    @Test
+    void testWrite_Empty() throws Exception {
+        BigQueryWriteClient writeClient = Mockito.mock(BigQueryWriteClient.class);
+        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
+        writer.setBigQueryWriteClient(writeClient);
+
+        writer.write(Chunk.of());
+
+        Mockito.verifyNoInteractions(writeClient);
+    }
+
+    @Test
+    void testWrite_Exception() {
+        BigQueryItemWriterException ex = Assertions.assertThrows(BigQueryItemWriterException.class,
+                () -> new BigQueryWriteApiPendingJsonItemWriter<>().write(TestConstants.CHUNK));
+        Assertions.assertEquals("Error on write happened", ex.getMessage());
+    }
+
+    @Test
+    void testWrite() throws Exception {
+        WriteStreamName streamName = WriteStreamName.of(TABLE_NAME.getProject(), TABLE_NAME.getDataset(),
+                TABLE_NAME.getTable(), "test-stream-1");
+
+        WriteStream writeStream = WriteStream.newBuilder().setType(WriteStream.Type.PENDING).build();
+        CreateWriteStreamRequest streamRequest = CreateWriteStreamRequest.newBuilder()
+            .setParent(TABLE_NAME.toString())
+            .setWriteStream(writeStream)
+            .build();
+
+        BigQueryWriteClient writeClient = Mockito.mock(BigQueryWriteClient.class);
+        WriteStream generatedWriteStream = WriteStream.newBuilder()
+            .setName(streamName.toString())
+            .setTableSchema(PersonDto.getWriteApiSchema())
+            .build();
+        Mockito.when(writeClient.createWriteStream(streamRequest)).thenReturn(generatedWriteStream);
+        Mockito.when(writeClient.getWriteStream(Mockito.any(GetWriteStreamRequest.class)))
+            .thenReturn(generatedWriteStream);
+        Mockito.when(writeClient.getSettings())
+            .thenReturn(
+                    BigQueryWriteSettings.newBuilder().setCredentialsProvider(NoCredentialsProvider.create()).build());
+        Mockito.when(writeClient.finalizeWriteStream(streamName.toString()))
+            .thenReturn(FinalizeWriteStreamResponse.newBuilder().build());
+
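+        // Editor's note: unlike a COMMITTED stream, a PENDING stream only exposes
+        // its rows once BatchCommitWriteStreams succeeds, hence the extra
+        // batch-commit stubbing below.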
+        BatchCommitWriteStreamsResponse batchResponse = Mockito.mock(BatchCommitWriteStreamsResponse.class);
+        Mockito.when(batchResponse.hasCommitTime()).thenReturn(true);
+
+        Mockito.when(writeClient.batchCommitWriteStreams(Mockito.any(BatchCommitWriteStreamsRequest.class)))
+            .thenReturn(batchResponse);
+
+        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
+        writer.setTableName(TABLE_NAME);
+        writer.setBigQueryWriteClient(writeClient);
+        writer.setMarshaller(new JacksonJsonObjectMarshaller<>());
+
+        writer.write(TestConstants.CHUNK);
+
+        Mockito.verify(writeClient).createWriteStream(streamRequest);
+        Mockito.verify(writeClient).finalizeWriteStream(streamName.toString());
+    }
+
+    @Test
+    void testAfterPropertiesSet() {
+        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
+
+        // bigQueryWriteClient
+        IllegalArgumentException ex = Assertions.assertThrows(IllegalArgumentException.class,
+                writer::afterPropertiesSet);
+        Assertions.assertEquals("BigQuery write client must be provided", ex.getMessage());
+
+        // tableName
+        writer.setBigQueryWriteClient(Mockito.mock(BigQueryWriteClient.class));
+        ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
+        Assertions.assertEquals("Table name must be provided", ex.getMessage());
+
+        // marshaller
+        writer.setTableName(TABLE_NAME);
+        ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
+        Assertions.assertEquals("Marshaller must be provided", ex.getMessage());
+
+        // executor
+        writer.setApiFutureCallback(new TestCallback());
+        writer.setMarshaller(new GsonJsonObjectMarshaller<>());
+        ex = Assertions.assertThrows(IllegalArgumentException.class, writer::afterPropertiesSet);
+        Assertions.assertEquals("Executor must be provided", ex.getMessage());
+
+        // All good
+        writer.setExecutor(Executors.newSingleThreadExecutor());
+        Assertions.assertDoesNotThrow(writer::afterPropertiesSet);
+    }
+
+    @Test
+    void testSetBigQueryWriteClient() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriter.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
+        BigQueryWriteClient expected = Mockito.mock(BigQueryWriteClient.class);
+
+        writer.setBigQueryWriteClient(expected);
+
+        BigQueryWriteClient actual = (BigQueryWriteClient) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "bigQueryWriteClient",
+                    BigQueryWriteClient.class)
+            .get(writer);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testSetTableName() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriter.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
+
+        writer.setTableName(TABLE_NAME);
+
+        TableName actual = (TableName) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "tableName", TableName.class)
+            .get(writer);
+        Assertions.assertEquals(TABLE_NAME, actual);
+    }
+
+    @Test
+    void testSetMarshaller() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriter.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
+        JsonObjectMarshaller<PersonDto> expected = new JacksonJsonObjectMarshaller<>();
+
+        writer.setMarshaller(expected);
+
+        JsonObjectMarshaller<PersonDto> actual = (JsonObjectMarshaller<PersonDto>) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "marshaller", JsonObjectMarshaller.class)
+            .get(writer);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testSetApiFutureCallback() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriter.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
+        ApiFutureCallback<AppendRowsResponse> expected = new TestCallback();
+
+        writer.setApiFutureCallback(expected);
+
+        ApiFutureCallback<AppendRowsResponse> actual = (ApiFutureCallback<AppendRowsResponse>) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "apiFutureCallback", ApiFutureCallback.class)
+            .get(writer);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testSetExecutor() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriter.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriter<>();
+        Executor expected = Executors.newSingleThreadExecutor();
+
+        writer.setExecutor(expected);
+
+        Executor actual = (Executor) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "executor", Executor.class)
+            .get(writer);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    private static final class TestCallback implements ApiFutureCallback<AppendRowsResponse> {
+
+        @Override
+        public void onFailure(Throwable t) {
+        }
+
+        @Override
+        public void onSuccess(AppendRowsResponse result) {
+        }
+
+    }
+}
diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/builder/BigQueryWriteApiCommitedJsonItemWriterBuilderTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/builder/BigQueryWriteApiCommitedJsonItemWriterBuilderTest.java
index 82b6f9a..0d37c7d 100644
--- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/builder/BigQueryWriteApiCommitedJsonItemWriterBuilderTest.java
+++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/builder/BigQueryWriteApiCommitedJsonItemWriterBuilderTest.java
@@ -38,135 +38,149 @@ class BigQueryWriteApiCommitedJsonItemWriterBuilderTest {
-    @Test
-    void testBigQueryWriteClient() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, MethodHandles.lookup());
-        BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiCommitedJsonItemWriterBuilder<>();
-        BigQueryWriteClient expected = Mockito.mock(BigQueryWriteClient.class);
-
-        builder.bigQueryWriteClient(expected);
-
-        BigQueryWriteClient actual = (BigQueryWriteClient) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, "bigQueryWriteClient", BigQueryWriteClient.class)
-                .get(builder);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testTableName() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, MethodHandles.lookup());
-        BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiCommitedJsonItemWriterBuilder<>();
-        TableName expected = TableName.of(TestConstants.PROJECT, TestConstants.DATASET, TestConstants.JSON);
-
-        builder.tableName(expected);
-
-        TableName actual = (TableName) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, "tableName", TableName.class)
-                .get(builder);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testMarshaller() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, MethodHandles.lookup());
-        BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiCommitedJsonItemWriterBuilder<>();
-        JsonObjectMarshaller<PersonDto> expected = new GsonJsonObjectMarshaller<>();
-
-        builder.marshaller(expected);
-
-        JsonObjectMarshaller<PersonDto> actual = (JsonObjectMarshaller<PersonDto>) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, "marshaller", JsonObjectMarshaller.class)
-                .get(builder);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testApiFutureCallback() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, MethodHandles.lookup());
-        BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiCommitedJsonItemWriterBuilder<>();
-
-        ApiFutureCallback<AppendRowsResponse> expected = new ApiFutureCallback<>() {
-            @Override
-            public void onFailure(Throwable t) {}
-
-            @Override
-            public void onSuccess(AppendRowsResponse result) {}
-        };
-
-        builder.apiFutureCallback(expected);
-
-        ApiFutureCallback<AppendRowsResponse> actual = (ApiFutureCallback<AppendRowsResponse>) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, "apiFutureCallback", ApiFutureCallback.class)
-                .get(builder);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testExecutor() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, MethodHandles.lookup());
-        BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiCommitedJsonItemWriterBuilder<>();
-        Executor expected = Executors.newSingleThreadExecutor();
-
-        builder.executor(expected);
-
-        Executor actual = (Executor) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, "executor", Executor.class)
-                .get(builder);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testBuild() throws IOException, IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriter.class, MethodHandles.lookup());
-        JsonObjectMarshaller<PersonDto> expectedMarshaller = new JacksonJsonObjectMarshaller<>();
-        BigQueryWriteClient expectedWriteClient = Mockito.mock(BigQueryWriteClient.class);
-        Executor expectedExecutor = Executors.newCachedThreadPool();
-        TableName expectedTableName = TableName.of(TestConstants.PROJECT, TestConstants.DATASET, TestConstants.JSON);
-
-        ApiFutureCallback<AppendRowsResponse> expectedCallback = new ApiFutureCallback<>() {
-            @Override
-            public void onFailure(Throwable t) {
-            }
-
-            @Override
-            public void onSuccess(AppendRowsResponse result) {
-            }
-        };
-
-        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto>()
-                .marshaller(expectedMarshaller)
-                .bigQueryWriteClient(expectedWriteClient)
-                .apiFutureCallback(expectedCallback)
-                .executor(expectedExecutor)
-                .tableName(expectedTableName)
-                .build();
-
-        Assertions.assertNotNull(writer);
-
-        JsonObjectMarshaller<PersonDto> actualMarshaller = (JsonObjectMarshaller<PersonDto>) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "marshaller", JsonObjectMarshaller.class)
-                .get(writer);
-
-        BigQueryWriteClient actualWriteClient = (BigQueryWriteClient) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "bigQueryWriteClient", BigQueryWriteClient.class)
-                .get(writer);
-
-        ApiFutureCallback<AppendRowsResponse> actualCallback = (ApiFutureCallback<AppendRowsResponse>) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "apiFutureCallback", ApiFutureCallback.class)
-                .get(writer);
-
-        Executor actualExecutor = (Executor) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "executor", Executor.class)
-                .get(writer);
-
-        TableName actualTableName = (TableName) handle
-                .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "tableName", TableName.class)
-                .get(writer);
-
-        Assertions.assertEquals(expectedMarshaller, actualMarshaller);
-        Assertions.assertEquals(expectedWriteClient, actualWriteClient);
-        Assertions.assertEquals(expectedCallback, actualCallback);
-        Assertions.assertEquals(expectedExecutor, actualExecutor);
-        Assertions.assertEquals(expectedTableName, actualTableName);
-    }
+    @Test
+    void testBigQueryWriteClient() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriterBuilder.class,
+                MethodHandles.lookup());
+        BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiCommitedJsonItemWriterBuilder<>();
+        BigQueryWriteClient expected = Mockito.mock(BigQueryWriteClient.class);
+
+        builder.bigQueryWriteClient(expected);
+
+        BigQueryWriteClient actual = (BigQueryWriteClient) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, "bigQueryWriteClient",
+                    BigQueryWriteClient.class)
+            .get(builder);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testTableName() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriterBuilder.class,
+                MethodHandles.lookup());
+        BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiCommitedJsonItemWriterBuilder<>();
+        TableName expected = TableName.of(TestConstants.PROJECT, TestConstants.DATASET, TestConstants.JSON);
+
+        builder.tableName(expected);
+
+        TableName actual = (TableName) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, "tableName", TableName.class)
+            .get(builder);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testMarshaller() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriterBuilder.class,
+                MethodHandles.lookup());
+        BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiCommitedJsonItemWriterBuilder<>();
+        JsonObjectMarshaller<PersonDto> expected = new GsonJsonObjectMarshaller<>();
+
+        builder.marshaller(expected);
+
+        JsonObjectMarshaller<PersonDto> actual = (JsonObjectMarshaller<PersonDto>) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, "marshaller",
+                    JsonObjectMarshaller.class)
+            .get(builder);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testApiFutureCallback() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriterBuilder.class,
+                MethodHandles.lookup());
+        BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiCommitedJsonItemWriterBuilder<>();
+
+        ApiFutureCallback<AppendRowsResponse> expected = new ApiFutureCallback<>() {
+            @Override
+            public void onFailure(Throwable t) {
+            }
+
+            @Override
+            public void onSuccess(AppendRowsResponse result) {
+            }
+        };
+
+        builder.apiFutureCallback(expected);
+
+        ApiFutureCallback<AppendRowsResponse> actual = (ApiFutureCallback<AppendRowsResponse>) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, "apiFutureCallback",
+                    ApiFutureCallback.class)
+            .get(builder);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testExecutor() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriterBuilder.class,
+                MethodHandles.lookup());
+        BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiCommitedJsonItemWriterBuilder<>();
+        Executor expected = Executors.newSingleThreadExecutor();
+
+        builder.executor(expected);
+
+        Executor actual = (Executor) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriterBuilder.class, "executor", Executor.class)
+            .get(builder);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testBuild() throws IOException, IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiCommitedJsonItemWriter.class,
+                MethodHandles.lookup());
+
+        JsonObjectMarshaller<PersonDto> expectedMarshaller = new JacksonJsonObjectMarshaller<>();
+        BigQueryWriteClient expectedWriteClient = Mockito.mock(BigQueryWriteClient.class);
+        Executor expectedExecutor = Executors.newCachedThreadPool();
+        TableName expectedTableName = TableName.of(TestConstants.PROJECT, TestConstants.DATASET, TestConstants.JSON);
+
+        ApiFutureCallback<AppendRowsResponse> expectedCallback = new ApiFutureCallback<>() {
+            @Override
+            public void onFailure(Throwable t) {
+            }
+
+            @Override
+            public void onSuccess(AppendRowsResponse result) {
+            }
+        };
+
+        BigQueryWriteApiCommitedJsonItemWriter<PersonDto> writer = new BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto>()
+            .marshaller(expectedMarshaller)
+            .bigQueryWriteClient(expectedWriteClient)
+            .apiFutureCallback(expectedCallback)
+            .executor(expectedExecutor)
+            .tableName(expectedTableName)
+            .build();
+
+        Assertions.assertNotNull(writer);
+
+        JsonObjectMarshaller<PersonDto> actualMarshaller = (JsonObjectMarshaller<PersonDto>) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "marshaller", JsonObjectMarshaller.class)
+            .get(writer);
+
+        BigQueryWriteClient actualWriteClient = (BigQueryWriteClient) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "bigQueryWriteClient",
+                    BigQueryWriteClient.class)
+            .get(writer);
+
+        ApiFutureCallback<AppendRowsResponse> actualCallback = (ApiFutureCallback<AppendRowsResponse>) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "apiFutureCallback", ApiFutureCallback.class)
+            .get(writer);
+
+        Executor actualExecutor = (Executor) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "executor", Executor.class)
+            .get(writer);
+
+        TableName actualTableName = (TableName) handle
+            .findVarHandle(BigQueryWriteApiCommitedJsonItemWriter.class, "tableName", TableName.class)
+            .get(writer);
+
+        Assertions.assertEquals(expectedMarshaller, actualMarshaller);
+        Assertions.assertEquals(expectedWriteClient, actualWriteClient);
+        Assertions.assertEquals(expectedCallback, actualCallback);
+        Assertions.assertEquals(expectedExecutor, actualExecutor);
+        Assertions.assertEquals(expectedTableName, actualTableName);
+    }
+
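+    // Editor's note: a minimal, hypothetical production wiring of this builder;
+    // the client creation call and table coordinates are illustrative only:
+    //
+    //   BigQueryWriteApiCommitedJsonItemWriter<PersonDto> itemWriter =
+    //           new BigQueryWriteApiCommitedJsonItemWriterBuilder<PersonDto>()
+    //               .bigQueryWriteClient(BigQueryWriteClient.create())
+    //               .tableName(TableName.of("my-project", "my_dataset", "persons"))
+    //               .marshaller(new JacksonJsonObjectMarshaller<>())
+    //               .build();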
+}
diff --git a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/builder/BigQueryWriteApiPendingJsonItemWriterBuilderTest.java b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/builder/BigQueryWriteApiPendingJsonItemWriterBuilderTest.java
index 0e67871..ca268d5 100644
--- a/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/builder/BigQueryWriteApiPendingJsonItemWriterBuilderTest.java
+++ b/spring-batch-bigquery/src/test/java/org/springframework/batch/extensions/bigquery/unit/writer/writeapi/json/builder/BigQueryWriteApiPendingJsonItemWriterBuilderTest.java
@@ -38,135 +38,153 @@ class BigQueryWriteApiPendingJsonItemWriterBuilderTest {
-    @Test
-    void testBigQueryWriteClient() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriterBuilder.class, MethodHandles.lookup());
-        BigQueryWriteApiPendingJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiPendingJsonItemWriterBuilder<>();
-        BigQueryWriteClient expected = Mockito.mock(BigQueryWriteClient.class);
-
-        builder.bigQueryWriteClient(expected);
-
-        BigQueryWriteClient actual = (BigQueryWriteClient) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriterBuilder.class, "bigQueryWriteClient", BigQueryWriteClient.class)
-                .get(builder);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testTableName() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriterBuilder.class, MethodHandles.lookup());
-        BigQueryWriteApiPendingJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiPendingJsonItemWriterBuilder<>();
-        TableName expected = TableName.of(TestConstants.PROJECT, TestConstants.DATASET, TestConstants.JSON);
-
-        builder.tableName(expected);
-
-        TableName actual = (TableName) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriterBuilder.class, "tableName", TableName.class)
-                .get(builder);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testMarshaller() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriterBuilder.class, MethodHandles.lookup());
-        BigQueryWriteApiPendingJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiPendingJsonItemWriterBuilder<>();
-        JsonObjectMarshaller<PersonDto> expected = new GsonJsonObjectMarshaller<>();
-
-        builder.marshaller(expected);
-
-        JsonObjectMarshaller<PersonDto> actual = (JsonObjectMarshaller<PersonDto>) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriterBuilder.class, "marshaller", JsonObjectMarshaller.class)
-                .get(builder);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testApiFutureCallback() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriterBuilder.class, MethodHandles.lookup());
-        BigQueryWriteApiPendingJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiPendingJsonItemWriterBuilder<>();
-
-        ApiFutureCallback<AppendRowsResponse> expected = new ApiFutureCallback<>() {
-            @Override
-            public void onFailure(Throwable t) {}
-
-            @Override
-            public void onSuccess(AppendRowsResponse result) {}
-        };
-
-        builder.apiFutureCallback(expected);
-
-        ApiFutureCallback<AppendRowsResponse> actual = (ApiFutureCallback<AppendRowsResponse>) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriterBuilder.class, "apiFutureCallback", ApiFutureCallback.class)
-                .get(builder);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testExecutor() throws IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriterBuilder.class, MethodHandles.lookup());
-        BigQueryWriteApiPendingJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiPendingJsonItemWriterBuilder<>();
-        Executor expected = Executors.newSingleThreadExecutor();
-
-        builder.executor(expected);
-
-        Executor actual = (Executor) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriterBuilder.class, "executor", Executor.class)
-                .get(builder);
-        Assertions.assertEquals(expected, actual);
-    }
-
-    @Test
-    void testBuild() throws IOException, IllegalAccessException, NoSuchFieldException {
-        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriter.class, MethodHandles.lookup());
-        JsonObjectMarshaller<PersonDto> expectedMarshaller = new JacksonJsonObjectMarshaller<>();
-        BigQueryWriteClient expectedWriteClient = Mockito.mock(BigQueryWriteClient.class);
-        Executor expectedExecutor = Executors.newCachedThreadPool();
-        TableName expectedTableName = TableName.of(TestConstants.PROJECT, TestConstants.DATASET, TestConstants.JSON);
-
-        ApiFutureCallback<AppendRowsResponse> expectedCallback = new ApiFutureCallback<>() {
-            @Override
-            public void onFailure(Throwable t) {
-            }
-
-            @Override
-            public void onSuccess(AppendRowsResponse result) {
-            }
-        };
-
-        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriterBuilder<PersonDto>()
-                .marshaller(expectedMarshaller)
-                .bigQueryWriteClient(expectedWriteClient)
-                .apiFutureCallback(expectedCallback)
-                .executor(expectedExecutor)
-                .tableName(expectedTableName)
-                .build();
-
-        Assertions.assertNotNull(writer);
-
-        JsonObjectMarshaller<PersonDto> actualMarshaller = (JsonObjectMarshaller<PersonDto>) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "marshaller", JsonObjectMarshaller.class)
-                .get(writer);
-
-        BigQueryWriteClient actualWriteClient = (BigQueryWriteClient) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "bigQueryWriteClient", BigQueryWriteClient.class)
-                .get(writer);
-
-        ApiFutureCallback<AppendRowsResponse> actualCallback = (ApiFutureCallback<AppendRowsResponse>) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "apiFutureCallback", ApiFutureCallback.class)
-                .get(writer);
-
-        Executor actualExecutor = (Executor) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "executor", Executor.class)
-                .get(writer);
-
-        TableName actualTableName = (TableName) handle
-                .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "tableName", TableName.class)
-                .get(writer);
-
-        Assertions.assertEquals(expectedMarshaller, actualMarshaller);
-        Assertions.assertEquals(expectedWriteClient, actualWriteClient);
-        Assertions.assertEquals(expectedCallback, actualCallback);
-        Assertions.assertEquals(expectedExecutor, actualExecutor);
-        Assertions.assertEquals(expectedTableName, actualTableName);
-    }
+    @Test
+    void testBigQueryWriteClient() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriterBuilder.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiPendingJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiPendingJsonItemWriterBuilder<>();
+        BigQueryWriteClient expected = Mockito.mock(BigQueryWriteClient.class);
+
+        builder.bigQueryWriteClient(expected);
+
+        BigQueryWriteClient actual = (BigQueryWriteClient) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriterBuilder.class, "bigQueryWriteClient",
+                    BigQueryWriteClient.class)
+            .get(builder);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testTableName() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriterBuilder.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiPendingJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiPendingJsonItemWriterBuilder<>();
+        TableName expected = TableName.of(TestConstants.PROJECT, TestConstants.DATASET, TestConstants.JSON);
+
+        builder.tableName(expected);
+
+        TableName actual = (TableName) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriterBuilder.class, "tableName", TableName.class)
+            .get(builder);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testMarshaller() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriterBuilder.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiPendingJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiPendingJsonItemWriterBuilder<>();
+        JsonObjectMarshaller<PersonDto> expected = new GsonJsonObjectMarshaller<>();
+
+        builder.marshaller(expected);
+
+        JsonObjectMarshaller<PersonDto> actual = (JsonObjectMarshaller<PersonDto>) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriterBuilder.class, "marshaller",
+                    JsonObjectMarshaller.class)
+            .get(builder);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testApiFutureCallback() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriterBuilder.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiPendingJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiPendingJsonItemWriterBuilder<>();
+
+        ApiFutureCallback<AppendRowsResponse> expected = new ApiFutureCallback<>() {
+            @Override
+            public void onFailure(Throwable t) {
+            }
+
+            @Override
+            public void onSuccess(AppendRowsResponse result) {
+            }
+        };
+
+        builder.apiFutureCallback(expected);
+
+        ApiFutureCallback<AppendRowsResponse> actual = (ApiFutureCallback<AppendRowsResponse>) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriterBuilder.class, "apiFutureCallback",
+                    ApiFutureCallback.class)
+            .get(builder);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testExecutor() throws IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriterBuilder.class,
+                MethodHandles.lookup());
+
+        BigQueryWriteApiPendingJsonItemWriterBuilder<PersonDto> builder = new BigQueryWriteApiPendingJsonItemWriterBuilder<>();
+        Executor expected = Executors.newSingleThreadExecutor();
+
+        builder.executor(expected);
+
+        Executor actual = (Executor) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriterBuilder.class, "executor", Executor.class)
+            .get(builder);
+        Assertions.assertEquals(expected, actual);
+    }
+
+    @Test
+    void testBuild() throws IOException, IllegalAccessException, NoSuchFieldException {
+        MethodHandles.Lookup handle = MethodHandles.privateLookupIn(BigQueryWriteApiPendingJsonItemWriter.class,
+                MethodHandles.lookup());
+
+        JsonObjectMarshaller<PersonDto> expectedMarshaller = new JacksonJsonObjectMarshaller<>();
+        BigQueryWriteClient expectedWriteClient = Mockito.mock(BigQueryWriteClient.class);
+        Executor expectedExecutor = Executors.newCachedThreadPool();
+        TableName expectedTableName = TableName.of(TestConstants.PROJECT, TestConstants.DATASET, TestConstants.JSON);
+
+        ApiFutureCallback<AppendRowsResponse> expectedCallback = new ApiFutureCallback<>() {
+            @Override
+            public void onFailure(Throwable t) {
+            }
+
+            @Override
+            public void onSuccess(AppendRowsResponse result) {
+            }
+        };
+
+        BigQueryWriteApiPendingJsonItemWriter<PersonDto> writer = new BigQueryWriteApiPendingJsonItemWriterBuilder<PersonDto>()
+            .marshaller(expectedMarshaller)
+            .bigQueryWriteClient(expectedWriteClient)
+            .apiFutureCallback(expectedCallback)
+            .executor(expectedExecutor)
+            .tableName(expectedTableName)
+            .build();
+
+        Assertions.assertNotNull(writer);
+
+        JsonObjectMarshaller<PersonDto> actualMarshaller = (JsonObjectMarshaller<PersonDto>) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "marshaller", JsonObjectMarshaller.class)
+            .get(writer);
+
+        BigQueryWriteClient actualWriteClient = (BigQueryWriteClient) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "bigQueryWriteClient",
+                    BigQueryWriteClient.class)
+            .get(writer);
+
+        ApiFutureCallback<AppendRowsResponse> actualCallback = (ApiFutureCallback<AppendRowsResponse>) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "apiFutureCallback", ApiFutureCallback.class)
+            .get(writer);
+
+        Executor actualExecutor = (Executor) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "executor", Executor.class)
+            .get(writer);
+
+        TableName actualTableName = (TableName) handle
+            .findVarHandle(BigQueryWriteApiPendingJsonItemWriter.class, "tableName", TableName.class)
+            .get(writer);
+ + Assertions.assertEquals(expectedMarshaller, actualMarshaller); + Assertions.assertEquals(expectedWriteClient, actualWriteClient); + Assertions.assertEquals(expectedCallback, actualCallback); + Assertions.assertEquals(expectedExecutor, actualExecutor); + Assertions.assertEquals(expectedTableName, actualTableName); + } + }
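
Note: the tests in this hunk read private builder and writer fields through MethodHandles.privateLookupIn plus a VarHandle, which lets them assert on internal state without adding getters purely for testing. The following is a minimal, self-contained sketch of that same technique, not code from this module; the Holder class, its name field, and PrivateLookupSketch are hypothetical names chosen for illustration.

import java.lang.invoke.MethodHandles;
import java.lang.invoke.VarHandle;

class Holder {

	// Private state with no getter, analogous to the builder fields under test.
	private String name = "initial";

}

class PrivateLookupSketch {

	public static void main(String[] args) throws IllegalAccessException, NoSuchFieldException {
		// A private lookup "teleports" into Holder, granting access to its private
		// members; the caller must be in the same module, or Holder's module must
		// be open to the caller.
		MethodHandles.Lookup lookup = MethodHandles.privateLookupIn(Holder.class, MethodHandles.lookup());

		// Bind a VarHandle to the private instance field, then read it from a
		// concrete instance; get(...) returns Object, hence the cast.
		VarHandle nameHandle = lookup.findVarHandle(Holder.class, "name", String.class);
		String actual = (String) nameHandle.get(new Holder());

		System.out.println(actual); // prints "initial"
	}

}

One caveat with this style of testing: it couples the tests to private field names (the string passed to findVarHandle), so a rename inside the production class fails the test at runtime rather than at compile time.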