I am new to the Parse platform and I'm trying to insert 81,000 rows of data into the Parse DB. Here is the code:
const uri = "/the.json"
const res = await axios.get(uri)
const dataresult = Object.keys(res.data)
if (dataresult.length > 0) {
  res.data.forEach(function (datakp) {
    var kp = new Parse.Object("theClass");
    kp.save(datakp)
      .then((res) => {
        console.log('oke ' + res.id)
      }),
      (error) => {
        console.log('err : ' + error.message)
      }
  })
}
There is no error in the console log and no data is saved to the Parse DB, but if I insert only 1,000 rows, they are saved to the database.
E.g.:
if (dataresult.length > 0) {
  res.data.forEach(function (datakp, index) {
    if (index < 1000) {
      var kp = new Parse.Object("theClass");
      kp.save(datakp)
        .then((res) => {
          console.log('oke ' + res.id)
        }),
        (error) => {
          console.log('err : ' + error.message)
        }
    }
  })
}
Thank You
UPDATE
I fixed this case based on the answer from @davi-macêdo. Here is the complete code:
const uri = "/the.json"
const res = await axios.get(uri)
const dataresult = Object.keys(res.data)
const objs = [];
const theKP = Parse.Object.extend("theClass")
if (dataresult.length > 0) {
  res.data.forEach(function (datakp) {
    var thekp = new theKP()
    thekp.set(datakp) // set() accepts an object of key/value pairs
    objs.push(thekp);
  })
}
Parse.Object.saveAll(objs)
  .then((saved) => {
    console.log('oke updated ' + saved.length)
  }, (error) => {
    console.log('err : ' + error.message)
  })
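Note that if this runs inside an async function (for example a Cloud Code function), the saveAll promise should be awaited or returned so the request does not end before the save finishes and so failures actually surface. Here is a minimal sketch under that assumption; the importAll name is hypothetical and "theClass" is taken from the code above:

// Hypothetical helper: awaits saveAll so failures surface via try/catch
// instead of being silently dropped.
async function importAll(rows) {
  const objs = rows.map((row) => new Parse.Object("theClass", row));
  try {
    const saved = await Parse.Object.saveAll(objs);
    console.log('saved ' + saved.length + ' objects');
  } catch (error) {
    console.log('err : ' + error.message);
  }
}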
The most efficient way is to use the Parse.Object.saveAll function. Something like this:
const uri = "/the.json"
const res = await axios.get(uri)
const dataresult = Object.keys(res.data)
const objs = [];
if (dataresult.length > 0) {
  res.data.forEach(function (datakp) {
    objs.push(new Parse.Object("theClass", datakp));
  })
}
Parse.Object.saveAll(objs)
  .then((saved) => {
    console.log('oke ' + saved.length + ' objects saved')
  }, (error) => {
    console.log('err : ' + error.message)
  })
Anyway, since you get no error and no data is currently being saved, you might be hitting some memory limit, so that's something you also need to be aware of.
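If memory does turn out to be the problem with 81,000 rows, one option is to save in smaller chunks and await each batch before building the next, so only one slice of Parse.Object instances is in memory and in flight at a time. This is a sketch of that idea, not part of the original answer; the saveInChunks name and the chunk size of 1,000 are assumptions:

// Hypothetical chunking sketch: build and save 1,000 objects at a time.
async function saveInChunks(rows, chunkSize = 1000) {
  for (let i = 0; i < rows.length; i += chunkSize) {
    const slice = rows.slice(i, i + chunkSize);
    const objs = slice.map((row) => new Parse.Object("theClass", row));
    await Parse.Object.saveAll(objs);
    console.log('saved rows ' + i + ' to ' + (i + slice.length - 1));
  }
}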