Skip to content

Commit

Permalink
Resolve NPE when bulk copy operation is performed on computed column (#2612)

Browse files Browse the repository at this point in the history

* Resolve NPE when bulk copy operation is performed on computed column

* Updated test

* Updated the test

* Ran formatter

---------

Co-authored-by: Jeff Wasty <jeff.wasty@gmail.com>
  • Loading branch information
muskan124947 and Jeffery-Wasty authored Mar 6, 2025
1 parent 2691a7f commit f0f59f6
Show file tree
Hide file tree
Showing 3 changed files with 121 additions and 15 deletions.
25 changes: 12 additions & 13 deletions src/main/java/com/microsoft/sqlserver/jdbc/SQLServerBulkCopy.java
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,6 @@
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.sql.Connection;
import java.sql.Date;
import java.sql.ResultSet;
Expand Down Expand Up @@ -1764,16 +1762,16 @@ private void getDestinationMetadata() throws SQLServerException {
}

if (loggerExternal.isLoggable(Level.FINER)) {
loggerExternal.finer(this.toString() + " Acquiring existing destination column metadata " +
"from cache for bulk copy");
loggerExternal.finer(this.toString() + " Acquiring existing destination column metadata "
+ "from cache for bulk copy");
}

} else {
setDestinationColumnMetadata(escapedDestinationTableName);

if (loggerExternal.isLoggable(Level.FINER)) {
loggerExternal.finer(this.toString() + " cacheBulkCopyMetadata=false - Querying server " +
"for destination column metadata");
loggerExternal.finer(this.toString() + " cacheBulkCopyMetadata=false - Querying server "
+ "for destination column metadata");
}
}
}
Expand Down Expand Up @@ -1913,7 +1911,7 @@ private void validateColumnMappings() throws SQLServerException {

// Generate default column mappings
ColumnMapping cm;
for (int i = 1; i <= srcColumnCount; ++i) {
for (Integer i : destColumnMetadata.keySet()) {
// Only skip identity column mapping if KEEP IDENTITY OPTION is FALSE
if (!(destColumnMetadata.get(i).isIdentity && !copyOptions.isKeepIdentity())) {
cm = new ColumnMapping(i, i);
Expand Down Expand Up @@ -1967,7 +1965,7 @@ private void validateColumnMappings() throws SQLServerException {
if (-1 == cm.destinationColumnOrdinal) {
boolean foundColumn = false;

for (int j = 1; j <= destColumnCount; ++j) {
for (Integer j : destColumnMetadata.keySet()) {
if (destColumnMetadata.get(j).columnName.equals(cm.destinationColumnName)) {
foundColumn = true;
cm.destinationColumnOrdinal = j;
Expand Down Expand Up @@ -2128,7 +2126,8 @@ private void writeNullToTdsWriter(TDSWriter tdsWriter, int srcJdbcType,

private void writeColumnToTdsWriter(TDSWriter tdsWriter, int bulkPrecision, int bulkScale, int bulkJdbcType,
boolean bulkNullable, // should it be destNullable instead?
int srcColOrdinal, int destColOrdinal, boolean isStreaming, Object colValue, Calendar cal) throws SQLServerException {
int srcColOrdinal, int destColOrdinal, boolean isStreaming, Object colValue,
Calendar cal) throws SQLServerException {
SSType destSSType = destColumnMetadata.get(destColOrdinal).ssType;

bulkPrecision = validateSourcePrecision(bulkPrecision, bulkJdbcType,
Expand Down Expand Up @@ -3045,8 +3044,8 @@ private Object readColumnFromResultSet(int srcColOrdinal, int srcJdbcType, boole
/**
* Reads the given column from the result set current row and writes the data to tdsWriter.
*/
private void writeColumn(TDSWriter tdsWriter, int srcColOrdinal, int destColOrdinal,
Object colValue, Calendar cal) throws SQLServerException {
private void writeColumn(TDSWriter tdsWriter, int srcColOrdinal, int destColOrdinal, Object colValue,
Calendar cal) throws SQLServerException {
String destName = destColumnMetadata.get(destColOrdinal).columnName;
int srcPrecision, srcScale, destPrecision, srcJdbcType;
SSType destSSType = null;
Expand Down Expand Up @@ -3698,8 +3697,8 @@ private boolean writeBatchData(TDSWriter tdsWriter, TDSCommand command,
// Loop for each destination column. The mappings is a many to one mapping
// where multiple source columns can be mapped to one destination column.
for (ColumnMapping columnMapping : columnMappings) {
writeColumn(tdsWriter, columnMapping.sourceColumnOrdinal, columnMapping.destinationColumnOrdinal, null,
null // cell
writeColumn(tdsWriter, columnMapping.sourceColumnOrdinal, columnMapping.destinationColumnOrdinal,
null, null // cell
// value is
// retrieved
// inside
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,13 @@

import static org.junit.Assert.fail;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.StackOverflowError;
import java.net.URL;
import java.sql.Connection;
import java.sql.ResultSet;
Expand All @@ -37,6 +37,7 @@
import com.microsoft.sqlserver.jdbc.RandomUtil;
import com.microsoft.sqlserver.jdbc.SQLServerBulkCSVFileRecord;
import com.microsoft.sqlserver.jdbc.SQLServerBulkCopy;
import com.microsoft.sqlserver.jdbc.SQLServerBulkCopyOptions;
import com.microsoft.sqlserver.jdbc.SQLServerException;
import com.microsoft.sqlserver.jdbc.TestUtils;
import com.microsoft.sqlserver.testframework.AbstractSQLGenerator;
Expand Down Expand Up @@ -68,6 +69,7 @@ public class BulkCopyCSVTest extends AbstractTest {
static String inputFileDelimiterEscape = "BulkCopyCSVTestInputDelimiterEscape.csv";
static String inputFileDelimiterEscapeNoNewLineAtEnd = "BulkCopyCSVTestInputDelimiterEscapeNoNewLineAtEnd.csv";
static String inputFileMultipleDoubleQuotes = "BulkCopyCSVTestInputMultipleDoubleQuotes.csv";
static String computeColumnCsvFile = "BulkCopyCSVTestInputComputeColumn.csv";
static String encoding = "UTF-8";
static String delimiter = ",";

Expand Down Expand Up @@ -261,7 +263,7 @@ public void testEscapeColumnDelimitersCSVNoNewLineAtEnd() throws Exception {
}
}
}

/**
* test simple csv file for bulkcopy, for GitHub issue 1391 Tests to ensure that the set returned by
* getColumnOrdinals doesn't have to be ordered
Expand Down Expand Up @@ -446,6 +448,108 @@ public void testCSV2400() {
}
}

/**
 * Test to perform bulk copy with a computed column as the last column in the table.
 *
 * Creates a destination table whose final column is a PERSISTED computed column, bulk
 * copies a 4-column CSV into the non-computed columns via explicit column mappings, and
 * verifies that rows were inserted. Regression test for the NPE raised when the computed
 * column appears last (GitHub issue #2612).
 */
@Test
@DisplayName("Test bulk copy with computed column as last column")
public void testBulkCopyWithComputedColumnAsLastColumn() {
    String tableName = AbstractSQLGenerator.escapeIdentifier(RandomUtil.getIdentifier("BulkEscape"));
    String fileName = filePath + computeColumnCsvFile;

    try (Connection con = getConnection(); Statement stmt = con.createStatement();
            SQLServerBulkCopy bulkCopy = new SQLServerBulkCopy(con);
            SQLServerBulkCSVFileRecord fileRecord = new SQLServerBulkCSVFileRecord(fileName, encoding, ",", true)) {

        String createTableSQL = "CREATE TABLE " + tableName + " (" + "[NAME] varchar(50) NOT NULL,"
                + "[AGE] int NULL," + "[CAL_COL] numeric(17, 2) NULL," + "[ORIGINAL] varchar(50) NOT NULL,"
                + "[COMPUTED_COL] AS (right([NAME], 8)) PERSISTED" + ")";
        stmt.executeUpdate(createTableSQL);

        // Cleanup must cover every step after table creation (mappings, writeToServer,
        // verification) — not just the verification query — so a bulk copy failure does
        // not leak the table.
        try {
            fileRecord.addColumnMetadata(1, "NAME", java.sql.Types.VARCHAR, 50, 0);
            fileRecord.addColumnMetadata(2, "AGE", java.sql.Types.INTEGER, 0, 0);
            fileRecord.addColumnMetadata(3, "CAL_COL", java.sql.Types.NUMERIC, 17, 2);
            fileRecord.addColumnMetadata(4, "ORIGINAL", java.sql.Types.VARCHAR, 50, 0);

            bulkCopy.setDestinationTableName(tableName);

            // Map only the non-computed columns; COMPUTED_COL is populated by the server.
            bulkCopy.addColumnMapping("NAME", "NAME");
            bulkCopy.addColumnMapping("AGE", "AGE");
            bulkCopy.addColumnMapping("CAL_COL", "CAL_COL");
            bulkCopy.addColumnMapping("ORIGINAL", "ORIGINAL");

            SQLServerBulkCopyOptions options = new SQLServerBulkCopyOptions();
            options.setKeepIdentity(false);
            options.setTableLock(true);
            bulkCopy.setBulkCopyOptions(options);

            bulkCopy.writeToServer(fileRecord);

            try (ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM " + tableName)) {
                // An empty result set must fail the test rather than silently pass.
                assertTrue(rs.next());
                assertTrue(rs.getInt(1) > 0);
            }
        } finally {
            TestUtils.dropTableIfExists(tableName, stmt);
        }
    } catch (Exception e) {
        fail(e.getMessage());
    }
}

/**
 * Test to perform bulk copy with a computed column not as the last column in the table.
 *
 * Creates a destination table where a PERSISTED computed column sits between regular
 * columns, bulk copies a 5-column CSV into the non-computed columns via explicit column
 * mappings, and verifies that rows were inserted. Companion to the last-column variant
 * for GitHub issue #2612.
 */
@Test
@DisplayName("Test bulk copy with computed column not as last column")
public void testBulkCopyWithComputedColumnNotAsLastColumn() {
    String tableName = AbstractSQLGenerator.escapeIdentifier(RandomUtil.getIdentifier("BulkEscape"));
    String fileName = filePath + computeColumnCsvFile;

    try (Connection con = getConnection(); Statement stmt = con.createStatement();
            SQLServerBulkCopy bulkCopy = new SQLServerBulkCopy(con);
            SQLServerBulkCSVFileRecord fileRecord = new SQLServerBulkCSVFileRecord(fileName, encoding, ",", true)) {

        String createTableSQL = "CREATE TABLE " + tableName + " (" + "[NAME] varchar(50) NOT NULL,"
                + "[AGE] int NULL," + "[CAL_COL] numeric(17, 2) NULL," + "[ORIGINAL] varchar(50) NOT NULL,"
                + "[COMPUTED_COL] AS (right([NAME], 8)) PERSISTED," + "[LAST_COL] varchar(50) NULL" + ")";
        stmt.executeUpdate(createTableSQL);

        // Cleanup must cover every step after table creation (mappings, writeToServer,
        // verification) — not just the verification query — so a bulk copy failure does
        // not leak the table.
        try {
            fileRecord.addColumnMetadata(1, "NAME", java.sql.Types.VARCHAR, 50, 0);
            fileRecord.addColumnMetadata(2, "AGE", java.sql.Types.INTEGER, 0, 0);
            fileRecord.addColumnMetadata(3, "CAL_COL", java.sql.Types.NUMERIC, 17, 2);
            fileRecord.addColumnMetadata(4, "ORIGINAL", java.sql.Types.VARCHAR, 50, 0);
            fileRecord.addColumnMetadata(5, "LAST_COL", java.sql.Types.VARCHAR, 50, 0);

            bulkCopy.setDestinationTableName(tableName);

            // Map only the non-computed columns; COMPUTED_COL is populated by the server.
            bulkCopy.addColumnMapping("NAME", "NAME");
            bulkCopy.addColumnMapping("AGE", "AGE");
            bulkCopy.addColumnMapping("CAL_COL", "CAL_COL");
            bulkCopy.addColumnMapping("ORIGINAL", "ORIGINAL");
            bulkCopy.addColumnMapping("LAST_COL", "LAST_COL");

            SQLServerBulkCopyOptions options = new SQLServerBulkCopyOptions();
            options.setKeepIdentity(false);
            options.setTableLock(true);
            bulkCopy.setBulkCopyOptions(options);

            bulkCopy.writeToServer(fileRecord);

            try (ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM " + tableName)) {
                // An empty result set must fail the test rather than silently pass.
                assertTrue(rs.next());
                assertTrue(rs.getInt(1) > 0);
            }
        } finally {
            TestUtils.dropTableIfExists(tableName, stmt);
        }
    } catch (Exception e) {
        fail(e.getMessage());
    }
}

/**
* drop source table after testing bulk copy
*
Expand Down
3 changes: 3 additions & 0 deletions src/test/resources/BulkCopyCSVTestInputComputeColumn.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
NAME,AGE,CAL_COL,ORIGINAL,LAST_COL
John Doe,30,12345.67,Original,Last
Jane Smith,25,54321.00,Original,Last

0 comments on commit f0f59f6

Please sign in to comment.