Skip to content

Commit

Permalink
Node.js issue #183: crash when using full table path on array binding (#466)
Browse files Browse the repository at this point in the history
* issue 183: fix failure on array binding when using the full table path

* add try catch on write file

* add error handle on write file for array binding

* update the connection string

* add longer timeout for testArrayBind - full path

* fix test case
  • Loading branch information
sfc-gh-ext-simba-dl authored Apr 15, 2023
1 parent 7c52609 commit ca2f5e6
Show file tree
Hide file tree
Showing 3 changed files with 97 additions and 17 deletions.
10 changes: 9 additions & 1 deletion lib/connection/bind_uploader.js
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,15 @@ function BindUploader(options, services, connectionConfig, requestId)
}

var putStmt = "PUT file://" + fileName + "'" + stageName + "' overwrite=true auto_compress=false source_compression=gzip";
fs.writeFileSync(fileName, data);
try
{
fs.writeFileSync(fileName, data);
}
catch(e)
{
Logger.getInstance().debug('Failed to write file: %s', fileName);
throw e;
}
this.files.push(fileName);
this.datas.push(data);
this.puts.push(putStmt);
Expand Down
47 changes: 31 additions & 16 deletions lib/connection/statement.js
Original file line number Diff line number Diff line change
Expand Up @@ -91,24 +91,30 @@ exports.createStatementPreExec = function (
}
Logger.getInstance().debug('threshold = %d', threshold);

var isUsingBindUpload = false;
// check array binding,
if(numBinds > threshold)
{
var bindUploaderRequestId = uuidv4();
var bind = new Bind.BindUploader(options, services, connectionConfig, bindUploaderRequestId);
var bindData = bind.Upload(context.binds);

var bindData;
try
{
bindData = bind.Upload(context.binds);
}
catch(e)
{
Logger.getInstance().debug('bind upload error, use normal binding');
return createRowStatementPreExec(
options, context, services, connectionConfig);
}
if(bindData != null)
{
isUsingBindUpload = true;
context.bindStage = Bind.GetStageName(bindUploaderRequestId);
Logger.getInstance().debug('context.bindStage = %s', context.bindStage);
createStage(services, connectionConfig, bindData, options, context);
context.bindStage = Bind.GetStageName(bindUploaderRequestId);
Logger.getInstance().debug('context.bindStage = %s', context.bindStage);
return createStage(services, connectionConfig, bindData, options, context);
}
}

if (!isUsingBindUpload)
else
{
return createRowStatementPreExec(
options, context, services, connectionConfig);
Expand All @@ -123,12 +129,21 @@ function createStage(services, connectionConfig, bindData, options, context)
{
Logger.getInstance().debug('stream');
Logger.getInstance().debug('err '+err);
var stream = stmt.streamRows();
stream.on('data', function (rows)
if(err)
{
Logger.getInstance().debug('stream on data');
uploadFiles(services, connectionConfig, bindData, options, context);
});
context.bindStage = null;
return createRowStatementPreExec(
options, context, services, connectionConfig);
}
else
{
var stream = stmt.streamRows();
stream.on('data', function (rows)
{
Logger.getInstance().debug('stream on data');
return uploadFiles(services, connectionConfig, bindData, options, context);
});
}
}
}
Logger.getInstance().debug('CREATE_STAGE_STMT = %s', Bind.GetCreateStageStmt());
Expand All @@ -144,8 +159,8 @@ function uploadFiles(services, connectionConfig, bindData, options, context, cur
sqlText: bindData.puts[curIndex],
complete: function (err, stmt, rows) {
if (err) {
Logger.getInstance().debug('err ' + err);
throw err;
return createRowStatementPreExec(
options, context, services, connectionConfig);
}
Logger.getInstance().debug('uploadFiles done ');
var stream = stmt.streamRows();
Expand Down
57 changes: 57 additions & 0 deletions test/integration/testArrayBind.js
Original file line number Diff line number Diff line change
Expand Up @@ -428,3 +428,60 @@ describe('Test Array Bind', function ()
);
});
});

// Regression suite for issue #183: array binding crashed when the target table
// was referenced by its fully-qualified path (database.schema.table), because
// the stage upload path mishandled the full table name.
describe('testArrayBind - full path', function ()
{
  // Stage-based array binding does a real PUT upload; allow a generous timeout.
  this.timeout(600000);
  var connection;
  // Fully-qualified table name is the point of this test (see issue #183).
  var createABTable = `create or replace table ${DATABASE_NAME}.${SCHEMA_NAME}.testAB(colA string, colB number, colC date, colD time, colE TIMESTAMP_NTZ, colF TIMESTAMP_TZ)`;
  var insertAB = `insert into ${DATABASE_NAME}.${SCHEMA_NAME}.testAB values(?, ?, ?, ?, ?, ?)`;

  before(function (done)
  {
    connection = snowflake.createConnection({
      accessUrl: connOption.valid.accessUrl,
      account: connOption.valid.account,
      username: connOption.valid.username,
      password: connOption.valid.password,
      warehouse: connOption.valid.warehouse,
      role: connOption.valid.role,
      // Low threshold so 100 rows is guaranteed to trigger the stage-based
      // (bulk upload) array-binding code path under test.
      arrayBindingThreshold: 3
    });
    testUtil.connect(connection, function ()
    {
      connection.execute({
        sqlText: createABTable,
        complete: function (err)
        {
          testUtil.checkError(err);
          done();
        }
      });
    });
  });

  // BUG FIX: the original test neither declared `done` nor defined `callback`,
  // so mocha finished the test before the async insert completed and the
  // `complete` handler then threw a ReferenceError calling `callback()`.
  it('Full path array bind', function (done)
  {
    var arrBind = [];
    var count = 100;
    for(var i = 0; i<count; i++)
    {
      // First column intentionally null to exercise null handling in the upload.
      arrBind.push([null, i, "2020-05-11", "12:35:41.3333333", "2022-04-01 23:59:59", "2022-07-08 12:05:30.9999999"]);
    }

    connection.execute({
      sqlText: insertAB,
      binds: arrBind,
      complete: function (err, stmt) {
        testUtil.checkError(err);
        assert.strictEqual(stmt.getNumUpdatedRows(), count);
        done();
      }
    });
  });
  after(function (done)
  {
    testUtil.destroyConnection(connection, done);
  });

});

0 comments on commit ca2f5e6

Please sign in to comment.