pool.js
import mysql from 'mysql2'
import { dbusername, dbpassword, dbname, host } from './config'
const pool = mysql.createPool({
host: host,
user: dbusername,
password: dbpassword,
database: dbname,
connectionLimit: 214000,
waitForConnections: true,
})
export const promisePool = pool.promise()
home-route.js
import { promisePool } from '../pool'
router.post('/', async (request, res, next) => {
const data = request.body
const loop = await data.map(async (i, index) => {
try {
const row = await promisePool.execute(
'SELECT * FROM sheet'
)
} catch (error) {
console.log('mysql error', error)
}
})
res.json({ message: 'ok' })
})
export { router as default }
When I did it like this, I got an error "too many connections". What do I do to avoid this?
214000 is way too high for a connection limit. Stick with 10 until you can test the application on your deployment environment and tweak from there.
Your issue is map is not really compatible with async functions. If you inspect loop you will find it full of promises. Each promise is probably grabbing a connection from the pool to execute the sql statement.
Instead of using map just use a for loop which is compatible with await.
for(const d of data) {
try {
const row = await promisePool.execute('SELECT * FROM sheet');
..do something with the row
} catch (error) {
console.log('mysql error', error)
}
}
This does mean each SQL statement will execute in serial instead of parallel, but this is usually a better pattern than allowing one http request to eat multiple db connections.