From e2cc6ac4f65186f7a8885df0f1f9a3366000bf68 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Thu, 2 Jan 2025 14:39:45 +0530 Subject: [PATCH 1/3] Added task files and dtos --- .../com/scalar/db/common/error/CoreError.java | 14 +++ .../core/dataimport/ImportOptions.java | 38 ++++++ .../core/dataimport/log/LogMode.java | 6 + .../dataimport/task/ImportTaskConstants.java | 17 +++ .../task/mapping/ImportDataMapping.java | 28 +++++ .../ImportSourceRecordValidationResult.java | 48 ++++++++ .../ImportSourceRecordValidator.java | 112 ++++++++++++++++++ .../task/mapping/ImportDataMappingTest.java | 49 ++++++++ .../ImportSourceRecordValidatorTest.java | 86 ++++++++++++++ 9 files changed, 398 insertions(+) create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportOptions.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogMode.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskConstants.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMapping.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidationResult.java create mode 100644 data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java create mode 100644 data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMappingTest.java create mode 100644 data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidatorTest.java diff --git a/core/src/main/java/com/scalar/db/common/error/CoreError.java b/core/src/main/java/com/scalar/db/common/error/CoreError.java index 7786cedf5..c05eed9cd 100644 --- a/core/src/main/java/com/scalar/db/common/error/CoreError.java +++ 
b/core/src/main/java/com/scalar/db/common/error/CoreError.java @@ -735,6 +735,20 @@ public enum CoreError implements ScalarDbError { "Multiple data mappings found for column '%s' in table '%s'", "", ""), + DATA_LOADER_MISSING_CLUSTERING_KEY_COLUMN( + Category.USER_ERROR, + "0166", + "Missing required field or column mapping for clustering key %s", + "", + ""), + DATA_LOADER_MISSING_PARTITION_KEY_COLUMN( + Category.USER_ERROR, + "0167", + "Missing required field or column mapping for partition key %s", + "", + ""), + DATA_LOADER_MISSING_COLUMN( + Category.USER_ERROR, "0168", "Missing field or column mapping for %s", "", ""), // // Errors for the concurrency error category // diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportOptions.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportOptions.java new file mode 100644 index 000000000..9cb6225d3 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/ImportOptions.java @@ -0,0 +1,38 @@ +package com.scalar.db.dataloader.core.dataimport; + +import com.scalar.db.dataloader.core.FileFormat; +import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFile; +import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileValidationLevel; +import com.scalar.db.dataloader.core.dataimport.log.LogMode; +import lombok.Builder; +import lombok.Data; + +/** Import options to import data into one or more ScalarDB tables */ +@Builder +@Data +public class ImportOptions { + + @Builder.Default private final ImportMode importMode = ImportMode.UPSERT; + @Builder.Default private final boolean requireAllColumns = false; + @Builder.Default private final FileFormat fileFormat = FileFormat.JSON; + @Builder.Default private final boolean prettyPrint = false; + @Builder.Default private final boolean ignoreNullValues = false; + @Builder.Default private final LogMode logMode = LogMode.SPLIT_BY_DATA_CHUNK; + + @Builder.Default 
+ private final ControlFileValidationLevel controlFileValidationLevel = + ControlFileValidationLevel.MAPPED; + + @Builder.Default private final char delimiter = ','; + + @Builder.Default private final boolean logSuccessRecords = false; + @Builder.Default private final boolean logRawRecord = false; + + private final int dataChunkSize; + private final int transactionBatchSize; + private final ControlFile controlFile; + private final String namespace; + private final String tableName; + private final int maxThreads; + private final String customHeaderRow; +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogMode.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogMode.java new file mode 100644 index 000000000..48eac32e6 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/log/LogMode.java @@ -0,0 +1,6 @@ +package com.scalar.db.dataloader.core.dataimport.log; + +public enum LogMode { + SINGLE_FILE, + SPLIT_BY_DATA_CHUNK +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskConstants.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskConstants.java new file mode 100644 index 000000000..eb30211a0 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/ImportTaskConstants.java @@ -0,0 +1,17 @@ +package com.scalar.db.dataloader.core.dataimport.task; + +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class ImportTaskConstants { + public static final String ERROR_COULD_NOT_FIND_PARTITION_KEY = + "could not find the partition key"; + public static final String ERROR_UPSERT_INSERT_MISSING_COLUMNS = + "the source record needs to contain all fields if the UPSERT turns into an INSERT"; + public static final String ERROR_DATA_ALREADY_EXISTS = "record already 
exists"; + public static final String ERROR_DATA_NOT_FOUND = "record was not found"; + public static final String ERROR_COULD_NOT_FIND_CLUSTERING_KEY = + "could not find the clustering key"; + public static final String ERROR_TABLE_METADATA_MISSING = "No table metadata found"; +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMapping.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMapping.java new file mode 100644 index 000000000..7f7524d26 --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMapping.java @@ -0,0 +1,28 @@ +package com.scalar.db.dataloader.core.dataimport.task.mapping; + +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileTable; +import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileTableFieldMapping; + +public class ImportDataMapping { + + /** + * Updates the source data, replacing each source column name with the target column name + * according to the control file table data + * + * @param source source data + * @param controlFileTable control file table to map source data + */ + public static void apply(ObjectNode source, ControlFileTable controlFileTable) { + // Move the source field data to the target column if the target is missing + for (ControlFileTableFieldMapping mapping : controlFileTable.getMappings()) { + String sourceField = mapping.getSourceField(); + String targetColumn = mapping.getTargetColumn(); + + if (source.has(sourceField) && !source.has(targetColumn)) { + source.set(targetColumn, source.get(sourceField)); + source.remove(sourceField); + } + } + } +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidationResult.java 
b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidationResult.java new file mode 100644 index 000000000..30b878b9e --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidationResult.java @@ -0,0 +1,48 @@ +package com.scalar.db.dataloader.core.dataimport.task.validation; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import javax.annotation.concurrent.Immutable; + +/** The validation result for a data source record */ +@Immutable +public final class ImportSourceRecordValidationResult { + + private final List errorMessages; + private final Set columnsWithErrors; + + /** Constructor */ + public ImportSourceRecordValidationResult() { + this.errorMessages = new ArrayList<>(); + this.columnsWithErrors = new HashSet<>(); + } + + /** + * Add a validation error message for a column. Also marking the column as containing an error. 
+ * + * @param columnName column name + * @param errorMessage error message + */ + public void addErrorMessage(String columnName, String errorMessage) { + this.columnsWithErrors.add(columnName); + this.errorMessages.add(errorMessage); + } + + /** @return Immutable list of validation error messages */ + public List getErrorMessages() { + return Collections.unmodifiableList(this.errorMessages); + } + + /** @return Immutable set of columns that had errors */ + public Set getColumnsWithErrors() { + return Collections.unmodifiableSet(this.columnsWithErrors); + } + + /** @return Validation is valid or not */ + public boolean isValid() { + return this.errorMessages.isEmpty(); + } +} diff --git a/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java new file mode 100644 index 000000000..38938ea8b --- /dev/null +++ b/data-loader/core/src/main/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidator.java @@ -0,0 +1,112 @@ +package com.scalar.db.dataloader.core.dataimport.task.validation; + +import com.fasterxml.jackson.databind.JsonNode; +import com.scalar.db.common.error.CoreError; +import com.scalar.db.dataloader.core.DatabaseKeyType; +import com.scalar.db.dataloader.core.util.TableMetadataUtil; +import java.util.Set; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; + +@NoArgsConstructor(access = AccessLevel.PRIVATE) +public class ImportSourceRecordValidator { + + /** + * Create list for validation error messages. Validate everything and not return when one single + * error is found. 
Avoiding trial and error imports where every time a new error appears + * + * @param partitionKeyNames List of partition keys in table + * @param clusteringKeyNames List of clustering keys in table + * @param columnNames List of all column names in table + * @param sourceRecord source data + * @param allColumnsRequired If true treat missing columns as an error + * @return Source record validation result + */ + public static ImportSourceRecordValidationResult validateSourceRecord( + Set partitionKeyNames, + Set clusteringKeyNames, + Set columnNames, + JsonNode sourceRecord, + boolean allColumnsRequired) { + ImportSourceRecordValidationResult validationResult = new ImportSourceRecordValidationResult(); + + // check if partition keys are found + checkMissingKeys(DatabaseKeyType.PARTITION, partitionKeyNames, sourceRecord, validationResult); + + // check if clustering keys are found + checkMissingKeys( + DatabaseKeyType.CLUSTERING, clusteringKeyNames, sourceRecord, validationResult); + + // Check if the record is missing any columns + if (allColumnsRequired) { + checkMissingColumns( + sourceRecord, columnNames, validationResult, validationResult.getColumnsWithErrors()); + } + + return validationResult; + } + + /** + * Check if the required keys are found in the data file. + * + * @param keyType Type of key to validate + * @param keyColumnNames List of required column names + * @param sourceRecord source data + * @param validationResult Source record validation result + */ + public static void checkMissingKeys( + DatabaseKeyType keyType, + Set keyColumnNames, + JsonNode sourceRecord, + ImportSourceRecordValidationResult validationResult) { + for (String columnName : keyColumnNames) { + if (!sourceRecord.has(columnName)) { + String errorMessageFormat = + keyType == DatabaseKeyType.PARTITION + ? 
CoreError.DATA_LOADER_MISSING_PARTITION_KEY_COLUMN.buildMessage(columnName) + : CoreError.DATA_LOADER_MISSING_CLUSTERING_KEY_COLUMN.buildMessage(columnName); + validationResult.addErrorMessage(columnName, errorMessageFormat); + } + } + } + + /** + * Make sure the json object is not missing any columns. Error added to validation errors lists + * + * @param sourceRecord Source json object + * @param columnNames List of column names for a table + * @param validationResult Source record validation result + * @param ignoreColumns Columns that can be ignored in the check + */ + public static void checkMissingColumns( + JsonNode sourceRecord, + Set columnNames, + ImportSourceRecordValidationResult validationResult, + Set ignoreColumns) { + Set metadataColumns = TableMetadataUtil.getMetadataColumns(); + for (String columnName : columnNames) { + // If the field is not a metadata column and is missing and should not be ignored + if ((ignoreColumns == null || !ignoreColumns.contains(columnName)) + && !TableMetadataUtil.isMetadataColumn(columnName, metadataColumns, columnNames) + && !sourceRecord.has(columnName)) { + validationResult.addErrorMessage( + columnName, CoreError.DATA_LOADER_MISSING_COLUMN.buildMessage(columnName)); + } + } + } + + /** + * Make sure the json object is not missing any columns. 
Error added to validation errors lists + * + * @param sourceRecord Source json object + * @param columnNames List of column names for a table + * @param validationResult Source record validation result + */ + public static void checkMissingColumns( + JsonNode sourceRecord, + Set columnNames, + ImportSourceRecordValidationResult validationResult) { + ImportSourceRecordValidator.checkMissingColumns( + sourceRecord, columnNames, validationResult, null); + } +} diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMappingTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMappingTest.java new file mode 100644 index 000000000..2589dacf8 --- /dev/null +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMappingTest.java @@ -0,0 +1,49 @@ +package com.scalar.db.dataloader.core.dataimport.task.mapping; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.scalar.db.api.TableMetadata; +import com.scalar.db.dataloader.core.UnitTestUtils; +import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileTable; +import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileTableFieldMapping; +import java.util.ArrayList; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class ImportDataMappingTest { + + TableMetadata mockMetadata; + ControlFileTable controlFilTable; + + @BeforeEach + void setup() { + mockMetadata = UnitTestUtils.createTestTableMetadata(); + controlFilTable = new ControlFileTable("namespace", "table"); + ControlFileTableFieldMapping m1 = new ControlFileTableFieldMapping("source_id", "target_id"); + ControlFileTableFieldMapping m2 = + new ControlFileTableFieldMapping("source_name", 
"target_name"); + ControlFileTableFieldMapping m3 = + new ControlFileTableFieldMapping("source_email", "target_email"); + ArrayList mappingArrayList = new ArrayList<>(); + mappingArrayList.add(m1); + mappingArrayList.add(m2); + mappingArrayList.add(m3); + controlFilTable.getMappings().addAll(mappingArrayList); + } + + @Test + void apply_withValidData_shouldUpdateSourceData() throws JsonProcessingException { + ObjectMapper objectMapper = new ObjectMapper(); + ObjectNode source = objectMapper.createObjectNode(); + source.put("source_id", "111"); + source.put("source_name", "abc"); + source.put("source_email", "sam@dsd.com"); + ImportDataMapping.apply(source, controlFilTable); + // Assert changes + Assertions.assertEquals("111", source.get("target_id").asText()); + Assertions.assertEquals("abc", source.get("target_name").asText()); + Assertions.assertEquals("sam@dsd.com", source.get("target_email").asText()); + } +} diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidatorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidatorTest.java new file mode 100644 index 000000000..f065bcb69 --- /dev/null +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidatorTest.java @@ -0,0 +1,86 @@ +package com.scalar.db.dataloader.core.dataimport.task.validation; + +import com.fasterxml.jackson.databind.JsonNode; +import com.scalar.db.api.TableMetadata; +import com.scalar.db.dataloader.core.UnitTestUtils; +import java.util.HashSet; +import java.util.Set; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class ImportSourceRecordValidatorTest { + + TableMetadata mockMetadata = UnitTestUtils.createTestTableMetadata(); + + @Test + void + validateSourceRecord_withValidData_shouldReturnValidImportSourceRecordValidationResultWithoutErrors() { + Set 
partitionKeyNames = mockMetadata.getPartitionKeyNames(); + Set clusteringKeyNames = mockMetadata.getClusteringKeyNames(); + Set columnNames = mockMetadata.getColumnNames(); + JsonNode sourceRecord = UnitTestUtils.getOutputDataWithoutMetadata(); + ImportSourceRecordValidationResult result = + ImportSourceRecordValidator.validateSourceRecord( + partitionKeyNames, clusteringKeyNames, columnNames, sourceRecord, false); + Assertions.assertTrue(result.getColumnsWithErrors().isEmpty()); + } + + @Test + void + validateSourceRecord_withValidDataWithAllColumnsRequired_shouldReturnValidImportSourceRecordValidationResultWithoutErrors() { + Set partitionKeyNames = mockMetadata.getPartitionKeyNames(); + Set clusteringKeyNames = mockMetadata.getClusteringKeyNames(); + Set columnNames = mockMetadata.getColumnNames(); + JsonNode sourceRecord = UnitTestUtils.getOutputDataWithoutMetadata(); + ImportSourceRecordValidationResult result = + ImportSourceRecordValidator.validateSourceRecord( + partitionKeyNames, clusteringKeyNames, columnNames, sourceRecord, true); + Assertions.assertTrue(result.getColumnsWithErrors().isEmpty()); + } + + @Test + void + validateSourceRecord_withInValidPartitionKey_shouldReturnValidImportSourceRecordValidationResultWithErrors() { + Set partitionKeyNames = new HashSet<>(); + partitionKeyNames.add("id1"); + Set clusteringKeyNames = mockMetadata.getClusteringKeyNames(); + Set columnNames = mockMetadata.getColumnNames(); + JsonNode sourceRecord = UnitTestUtils.getOutputDataWithoutMetadata(); + ImportSourceRecordValidationResult result = + ImportSourceRecordValidator.validateSourceRecord( + partitionKeyNames, clusteringKeyNames, columnNames, sourceRecord, false); + Assertions.assertFalse(result.getColumnsWithErrors().isEmpty()); + } + + @Test + void + validateSourceRecord_withInValidPartitionKeyWithAllColumnsRequired_shouldReturnValidImportSourceRecordValidationResultWithErrors() { + Set partitionKeyNames = new HashSet<>(); + partitionKeyNames.add("id1"); + Set 
clusteringKeyNames = mockMetadata.getClusteringKeyNames(); + Set columnNames = mockMetadata.getColumnNames(); + JsonNode sourceRecord = UnitTestUtils.getOutputDataWithoutMetadata(); + ImportSourceRecordValidationResult result = + ImportSourceRecordValidator.validateSourceRecord( + partitionKeyNames, clusteringKeyNames, columnNames, sourceRecord, true); + Assertions.assertFalse(result.getColumnsWithErrors().isEmpty()); + Assertions.assertEquals(1, result.getErrorMessages().size()); + } + + @Test + void + validateSourceRecord_withInValidClusteringKey_shouldReturnValidImportSourceRecordValidationResultWithErrors() { + Set partitionKeyNames = mockMetadata.getPartitionKeyNames(); + Set clusteringKeyNames = new HashSet<>(); + clusteringKeyNames.add("id1"); + Set columnNames = mockMetadata.getColumnNames(); + JsonNode sourceRecord = UnitTestUtils.getOutputDataWithoutMetadata(); + ImportSourceRecordValidationResult result = + ImportSourceRecordValidator.validateSourceRecord( + partitionKeyNames, clusteringKeyNames, columnNames, sourceRecord, false); + Assertions.assertFalse(result.getColumnsWithErrors().isEmpty()); + Assertions.assertEquals( + "missing required field or column mapping for clustering key id1", + result.getErrorMessages().get(0)); + } +} From 8c75b791a24a1dfe65f02a7de33be26d108278ae Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Thu, 2 Jan 2025 14:58:08 +0530 Subject: [PATCH 2/3] Fix unit test failure --- .../task/validation/ImportSourceRecordValidatorTest.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidatorTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidatorTest.java index f065bcb69..5d33b2622 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidatorTest.java +++ 
b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/validation/ImportSourceRecordValidatorTest.java @@ -2,6 +2,7 @@ import com.fasterxml.jackson.databind.JsonNode; import com.scalar.db.api.TableMetadata; +import com.scalar.db.common.error.CoreError; import com.scalar.db.dataloader.core.UnitTestUtils; import java.util.HashSet; import java.util.Set; @@ -80,7 +81,7 @@ class ImportSourceRecordValidatorTest { partitionKeyNames, clusteringKeyNames, columnNames, sourceRecord, false); Assertions.assertFalse(result.getColumnsWithErrors().isEmpty()); Assertions.assertEquals( - "missing required field or column mapping for clustering key id1", + CoreError.DATA_LOADER_MISSING_CLUSTERING_KEY_COLUMN.buildMessage("id1"), result.getErrorMessages().get(0)); } } From 98618aa63b5bef6e4d44645681cac45d8eefe903 Mon Sep 17 00:00:00 2001 From: Jishnu J Date: Thu, 2 Jan 2025 15:16:38 +0530 Subject: [PATCH 3/3] Fix spot bugs failure --- .../core/dataimport/task/mapping/ImportDataMappingTest.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMappingTest.java b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMappingTest.java index 2589dacf8..e2b9364ff 100644 --- a/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMappingTest.java +++ b/data-loader/core/src/test/java/com/scalar/db/dataloader/core/dataimport/task/mapping/ImportDataMappingTest.java @@ -3,8 +3,6 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; -import com.scalar.db.api.TableMetadata; -import com.scalar.db.dataloader.core.UnitTestUtils; import com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileTable; import 
com.scalar.db.dataloader.core.dataimport.controlfile.ControlFileTableFieldMapping; import java.util.ArrayList; @@ -14,12 +12,10 @@ public class ImportDataMappingTest { - TableMetadata mockMetadata; ControlFileTable controlFilTable; @BeforeEach void setup() { - mockMetadata = UnitTestUtils.createTestTableMetadata(); controlFilTable = new ControlFileTable("namespace", "table"); ControlFileTableFieldMapping m1 = new ControlFileTableFieldMapping("source_id", "target_id"); ControlFileTableFieldMapping m2 =