
batch insert operation hangs with 3.3.0 #281

Open
markddrake opened this issue Apr 26, 2024 · 7 comments

markddrake commented Apr 26, 2024

Given the following environment (Windows 11) and the following code:


C:\Development\YADAMU\src\scratch\mariadb>type batchInsert.js
"use strict"
import mariadb from 'mariadb';
import fs from 'fs';

async function main() {

    const connectionDetails = {
            host      : "yadamu-db1"
           ,user      : "root"
           ,password  : "oracle"
           ,port      : 3307
           ,database  : "mysql"
           ,multipleStatements: true
           ,typeCast:true
                   ,bigNumberStrings:true
                   ,dateStrings:true
                   ,rowsAsArray:true}

  let results;

  try {

    const pool = mariadb.createPool(connectionDetails);
    const conn = await pool.getConnection();
    results = await conn.query(`SET AUTOCOMMIT = 0, TIME_ZONE = '+00:00',SESSION INTERACTIVE_TIMEOUT = 600000, WAIT_TIMEOUT = 600000, SQL_MODE='ANSI_QUOTES,PAD_CHAR_TO_FULL_LENGTH', GROUP_CONCAT_MAX_LEN = 1024000, GLOBAL LOCAL_INFILE = 'ON';`);
    results = await conn.query(`drop schema if exists "YADAMU_TEST";`);
    results = await conn.query(`create schema "YADAMU_TEST";`);


    results = await conn.query(`USE "YADAMU_TEST"`);
    results = await conn.query(`create table  if not exists "YADAMU_TEST"."character_types"(
  "character_col" char(1) ,"character_varying_col" longtext ,"character_max_col" mediumtext ,"character_varying_max_col" mediumtext ,"character_4000_col" varchar(4000) ,"character_varying_4000_col" varchar(4000) ,"character_64k_col" text ,"character_varying_64k_col" text ,"text_col" longtext)`);

    const testData = fs.readFileSync('TEST_DATA.json');
        const data = JSON.parse(testData)
        console.log('ROWS',data.length)

    const sql = `insert into "YADAMU_TEST"."character_types" ("character_col","character_varying_col","character_max_col","character_varying_max_col","character_4000_col","character_varying_4000_col","character_64k_col","character_varying_64k_col","text_col") values (?,?,?,?,?,?,?,?,?)`;

    try {

          results = await conn.beginTransaction()
          results = await conn.query(`SAVEPOINT YADAMU_INSERT; `)

          console.log('Insert')
      results = await conn.batch(sql,data);

          console.log('Commit')
          results = await conn.commit()
          console.log('Success')
    } catch (e) {
          console.log('Failed')
      console.log(e)
    }

   conn.end()
   pool.end();
 } catch(e) {
   console.log(e);
 }
}

main().then(() => {console.log("Complete")}).catch((e) => {console.log(e)})

The batch insert operation hangs

C:\Development\YADAMU\src\scratch\mariadb>node batchInsert.js
ROWS 4
Insert
^C
C:\Development\YADAMU\src\scratch\mariadb>

TEST_DATA.zip
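
(For context on what conn.batch(sql, data) receives here: with the nine-placeholder insert above, data must be an array of rows, each row an array of nine values. The sketch below only illustrates that shape; it is not the attached TEST_DATA.json, which presumably contains much larger text values, hence the relevance of max_allowed_packet.)

// Illustration only: the shape of the parameter sets passed to conn.batch().
// The values are placeholders, not the contents of TEST_DATA.json.
const data = [
  // one inner array per row, one element per "?" placeholder (9 here)
  ['A', 'longtext value', 'mediumtext value', 'mediumtext value', 'varchar value', 'varchar value', 'text value', 'text value', 'longtext value'],
  ['B', 'longtext value', 'mediumtext value', 'mediumtext value', 'varchar value', 'varchar value', 'text value', 'text value', 'longtext value']
];
// conn and sql as declared in the script above
results = await conn.batch(sql, data);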

rusher (Collaborator) commented Apr 26, 2024

Thanks for your detailed example.
This will be corrected in the next version. As a workaround, you can set the option maxAllowedPacket to 16777216.

markddrake (Author) commented:

I had already set it (actually to 1G).

The modified test case below checks, and if necessary sets, MAX_ALLOWED_PACKET:

C:\Development\YADAMU\src\scratch\mariadb>node -v
v20.7.0

C:\Development\YADAMU\src\scratch\mariadb>npm ls
[email protected] C:\Development\YADAMU\src
+-- @aws-sdk/[email protected]
+-- @aws-sdk/[email protected]
+-- @azure/[email protected]
+-- @electron/[email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- ibm_db_electron@npm:[email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
`-- [email protected]


C:\Development\YADAMU\src\scratch\mariadb>type batchInsert.js
"use strict"
import mariadb from 'mariadb';
import fs from 'fs';

const MAX_ALLOWED_PACKET             = 1 * 1024 * 1024 * 1024;

const SQL_GET_MAX_ALLOWED_PACKET = `SELECT @@MAX_ALLOWED_PACKET`

const SQL_SET_MAX_ALLOWED_PACKET = `SET GLOBAL MAX_ALLOWED_PACKET=${MAX_ALLOWED_PACKET}`

async function main() {

    const connectionDetails = {
            host      : "yadamu-db1"
           ,user      : "root"
           ,password  : "oracle"
           ,port      : 3307
           ,database  : "mysql"
           ,multipleStatements: true
           ,typeCast:true
                   ,bigNumberStrings:true
                   ,dateStrings:true
                   ,rowsAsArray:true}

  let results;

  try {

    const pool = mariadb.createPool(connectionDetails);

        try {
      const conn = await pool.getConnection();
      let results = await conn.query(SQL_GET_MAX_ALLOWED_PACKET)
          const maxAllowedPacket = parseInt(results[0][0])
      if (maxAllowedPacket <  MAX_ALLOWED_PACKET) {

            console.log(`Increasing MAX_ALLOWED_PACKET to ${MAX_ALLOWED_PACKET}.`)
        results = await conn.query(SQL_SET_MAX_ALLOWED_PACKET)

          }
      conn.end()
    } catch (e) {
          console.log('MAX_ALLOWED_PACKET','Failed')
      console.log(e)
          return
    }

    const conn = await pool.getConnection();
    let results = await conn.query(SQL_GET_MAX_ALLOWED_PACKET)
    const maxAllowedPacket = parseInt(results[0][0])
    console.log(`MAX_ALLOWED_PACKET`,maxAllowedPacket)

    results = await conn.query(`SET AUTOCOMMIT = 0, TIME_ZONE = '+00:00',SESSION INTERACTIVE_TIMEOUT = 600000, WAIT_TIMEOUT = 600000, SQL_MODE='ANSI_QUOTES,PAD_CHAR_TO_FULL_LENGTH', GROUP_CONCAT_MAX_LEN = 1024000, GLOBAL LOCAL_INFILE = 'ON';`);
    results = await conn.query(`drop schema if exists "YADAMU_TEST";`);
    results = await conn.query(`create schema "YADAMU_TEST";`);


    results = await conn.query(`USE "YADAMU_TEST"`);
    results = await conn.query(`create table  if not exists "YADAMU_TEST"."character_types"(
  "character_col" char(1) ,"character_varying_col" longtext ,"character_max_col" mediumtext ,"character_varying_max_col" mediumtext ,"character_4000_col" varchar(4000) ,"character_varying_4000_col" varchar(4000) ,"character_64k_col" text ,"character_varying_64k_col" text ,"text_col" longtext)`);

    const testData = fs.readFileSync('TEST_DATA.json');
        const data = JSON.parse(testData)
        console.log('ROWS',data.length)

    const sql = `insert into "YADAMU_TEST"."character_types" ("character_col","character_varying_col","character_max_col","character_varying_max_col","character_4000_col","character_varying_4000_col","character_64k_col","character_varying_64k_col","text_col") values (?,?,?,?,?,?,?,?,?)`;

    try {

          results = await conn.beginTransaction()
          results = await conn.query(`SAVEPOINT YADAMU_INSERT; `)

          console.log('Insert')
      results = await conn.batch(sql,data);

          console.log('Commit')
          results = await conn.commit()
          console.log('Success')
    } catch (e) {
          console.log('Failed')
      console.log(e)
    }

   conn.end()
   pool.end();
 } catch(e) {
   console.log(e);
 }
}

main().then(() => {console.log("Complete")}).catch((e) => {console.log(e)})






C:\Development\YADAMU\src\scratch\mariadb>node batchInsert.js
MAX_ALLOWED_PACKET 1073741824
ROWS 4
Insert
^C
C:\Development\YADAMU\src\scratch\mariadb>

The hang is still observed.

rusher (Collaborator) commented Apr 26, 2024

I mean something like:

    const connectionDetails = {
      host               : "yadamu-db1"
     ,user               : "root"
     ,password           : "oracle"
     ,port               : 3307
     ,database           : "mysql"
     ,multipleStatements : true
     ,typeCast           : true
     ,bigNumberStrings   : true
     ,dateStrings        : true
     ,rowsAsArray        : true
     ,maxAllowedPacket   : 16777216
    }

(Set the right amount for your data; 16777216 is just the default used as an example, but the option must be at least that large to work around the issue until the next release. This has no other impact and resolves the hang.)

markddrake (Author) commented Apr 27, 2024

Set to 1G, and it appears to work. However, with a slightly different data set I got a broken connection:

C:\Development\YADAMU\src\scratch\mariadb>node -v
v20.7.0

C:\Development\YADAMU\src\scratch\mariadb>npm ls
[email protected] C:\Development\YADAMU\src
+-- @aws-sdk/[email protected]
+-- @aws-sdk/[email protected]
+-- @azure/[email protected]
+-- @electron/[email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- ibm_db_electron@npm:[email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
+-- [email protected]
`-- [email protected]


C:\Development\YADAMU\src\scratch\mariadb>type batchInsert.js
"use strict"
import mariadb from 'mariadb';
import fs from 'fs';

const MAX_ALLOWED_PACKET             = 1 * 1024 * 1024 * 1024;

const SQL_GET_MAX_ALLOWED_PACKET = `SELECT @@MAX_ALLOWED_PACKET`

const SQL_SET_MAX_ALLOWED_PACKET = `SET GLOBAL MAX_ALLOWED_PACKET=${MAX_ALLOWED_PACKET}`

async function main() {

    const connectionDetails = {
            host      : "yadamu-db1"
           ,user      : "root"
           ,password  : "oracle"
           ,port      : 3307
           ,database  : "mysql"
           ,multipleStatements: true
           ,typeCast:true
                   ,bigNumberStrings:true
                   ,dateStrings:true
                   ,rowsAsArray:true
                   ,maxAllowedPacket:MAX_ALLOWED_PACKET}

  let results;

  try {

    const pool = mariadb.createPool(connectionDetails);

        try {
      const conn = await pool.getConnection();
      let results = await conn.query(SQL_GET_MAX_ALLOWED_PACKET)
          const maxAllowedPacket = parseInt(results[0][0])
      if (maxAllowedPacket <  MAX_ALLOWED_PACKET) {

            console.log(`Increasing MAX_ALLOWED_PACKET to ${MAX_ALLOWED_PACKET}.`)
        results = await conn.query(SQL_SET_MAX_ALLOWED_PACKET)

          }
      conn.end()
    } catch (e) {
          console.log('MAX_ALLOWED_PACKET','Failed')
      console.log(e)
          return
    }

    const conn = await pool.getConnection();
    let results = await conn.query(SQL_GET_MAX_ALLOWED_PACKET)
    const maxAllowedPacket = parseInt(results[0][0])
    console.log(`MAX_ALLOWED_PACKET`,maxAllowedPacket)

    results = await conn.query(`SET AUTOCOMMIT = 0, TIME_ZONE = '+00:00',SESSION INTERACTIVE_TIMEOUT = 600000, WAIT_TIMEOUT = 600000, SQL_MODE='ANSI_QUOTES,PAD_CHAR_TO_FULL_LENGTH', GROUP_CONCAT_MAX_LEN = 1024000, GLOBAL LOCAL_INFILE = 'ON';`);
    results = await conn.query(`drop schema if exists "YADAMU_TEST";`);
    results = await conn.query(`create schema "YADAMU_TEST";`);


    results = await conn.query(`USE "YADAMU_TEST"`);
    results = await conn.query(`create table  if not exists "YADAMU_TEST"."character_types"(
  "character_col" char(1) ,"character_varying_col" longtext ,"character_max_col" mediumtext ,"character_varying_max_col" mediumtext ,"character_4000_col" varchar(4000) ,"character_varying_4000_col" varchar(4000) ,"character_64k_col" text ,"character_varying_64k_col" text ,"text_col" longtext)`);

    const testData = fs.readFileSync('data.json');
        const data = JSON.parse(testData)
        console.log('ROWS',data.length)

    const sql = `insert into "YADAMU_TEST"."character_types" ("character_col","character_varying_col","character_max_col","character_varying_max_col","character_4000_col","character_varying_4000_col","character_64k_col","character_varying_64k_col","text_col") values (?,?,?,?,?,?,?,?,?)`;

    try {

          results = await conn.beginTransaction()
          results = await conn.query(`SAVEPOINT YADAMU_INSERT; `)

          console.log('Insert')
      results = await conn.batch(sql,data);

          console.log('Commit')
          results = await conn.commit()
          console.log('Success')
    } catch (e) {
          console.log('Failed')
      console.log(e)
    }

   conn.end()
   pool.end();
 } catch(e) {
   console.log(e);
 }
}

main().then(() => {console.log("Complete")}).catch((e) => {console.log(e)})

C:\Development\YADAMU\src\scratch\mariadb>node batchInsert.js
MAX_ALLOWED_PACKET 1073741824
ROWS 4
Insert
Failed
Error: write ECONNRESET
    at afterWriteDispatched (node:internal/stream_base_commons:160:15)
    at writeGeneric (node:internal/stream_base_commons:151:3)
    at Socket._writeGeneric (node:net:952:11)
    at Socket._write (node:net:964:8)
    at writeOrBuffer (node:internal/streams/writable:399:12)
    at _write (node:internal/streams/writable:340:10)
    at Writable.write (node:internal/streams/writable:344:10)
    at socket.writeBuf (C:\Development\YADAMU\src\node_modules\mariadb\lib\connection.js:884:49)
    at PacketOutputStream.flushBufferBasic (C:\Development\YADAMU\src\node_modules\mariadb\lib\io\packet-output-stream.js:698:17)
    at PacketOutputStream.flushBufferStopAtMark (C:\Development\YADAMU\src\node_modules\mariadb\lib\io\packet-output-stream.js:685:10) {
  errno: -4077,
  code: 'ECONNRESET',
  syscall: 'write',
  fatal: true,
  sqlState: 'HY000'
}
Complete

data.zip

markddrake (Author) commented:

The ECONNRESET appears to be Windows-specific.

rusher (Collaborator) commented Apr 30, 2024

Thank you for the clear test case; I was able to reproduce it.
The exchange flows are nevertheless correct, which is interesting. I need to study this in detail.

Orel-A commented Jun 16, 2024

@rusher
Hi, after this commit V3.3.1 I have a regression in my code resulting in Error: write ECONNRESET.
My code is inserting a batch of data.
How can I resolve this?
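
(For reference, the interim workaround described earlier in this thread is to pass the connector option maxAllowedPacket in the connection configuration. A minimal sketch follows; the host, credentials, table, and the 16777216 value are illustrative, and this is not a confirmed fix for the 3.3.1 regression.)

import mariadb from 'mariadb';

// Sketch of the client-side workaround suggested earlier in this thread.
// Set maxAllowedPacket no higher than the server's max_allowed_packet value.
const pool = mariadb.createPool({
  host: 'localhost',           // placeholder
  user: 'root',                // placeholder
  password: 'secret',          // placeholder
  maxAllowedPacket: 16777216   // the value suggested above
});

const conn = await pool.getConnection();
await conn.batch('insert into t (a, b) values (?, ?)', [[1, 'x'], [2, 'y']]);
await conn.end();
await pool.end();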
