I am facing a challenge while trying to upload a large JSON file containing at least 400,000 objects into my database. When I attempt to post only 20,000 objects at a time, everything works smoothly, indicating that the issue lies with the size of the JSON.
To overcome this hurdle, I have split the data into 20 smaller chunks with the intention of uploading one chunk at a time. However, my current code still ends up sending the entire chunked array in a single request, and I cannot work out how to post the chunks one by one.
Below is the code snippet I am currently using:
var rows = {};
Papa.parse(content, {
  header: false,
  delimiter: '|',
  worker: true,
  encoding: "utf16le",
  dynamicTyping: true,
  skipEmptyLines: true,
  complete: function (results) {
    rows = results.data;
    let obj = [];
    for (var i = 0; i < rows.length; i++) {
      obj.push(rows[i]);
    }
    let result = [];
    for (let i in obj) {
      let temp = {};
      if (i > 0) { // skip the header row
        temp["id"] = obj[i][0];
        temp["name"] = obj[i][1];
        temp["tel"] = obj[i][2];
        temp["email"] = obj[i][3];
        temp["status"] = obj[i][5];
        result.push(temp);
      }
    }
    var array1 = result.map((e) => {
      return {
        id: e.id,
        name: e.name,
        email: e.email
      };
    });
    let chunked = [];
    let size = 20000;
    Array.from({ length: Math.ceil(array1.length / size) }, (val, i) => {
      chunked.push(array1.slice(i * size, i * size + size));
    });
    console.log(chunked); // at this point I have my array divided into chunks of 20000
    axios({
      url: 'url',
      method: 'post',
      data: chunked // this still sends all the chunks in one request
    })
      .then(function (response) {
        // your action after success
        console.log(response);
      })
      .catch(function (error) {
        // your action on error
        if (error.response) {
          console.log(error);
        }
      });
  }
});
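What I think I need instead is to send each chunk in its own request, waiting for one request to finish before starting the next. Below is a rough, untested sketch of what I have in mind ('url' is still a placeholder for my real endpoint, and uploadChunks is just a helper name I made up):

// Rough idea (untested): post the chunks one at a time instead of all at once.
async function uploadChunks(chunks) {
  for (let i = 0; i < chunks.length; i++) {
    try {
      const response = await axios({
        url: 'url',       // placeholder endpoint
        method: 'post',
        data: chunks[i]   // only the current chunk of up to 20,000 objects
      });
      console.log('chunk ' + i + ' uploaded:', response.status);
    } catch (error) {
      console.log('chunk ' + i + ' failed:', error);
    }
  }
}

// called from the complete callback instead of the single axios() call above:
// uploadChunks(chunked);

Is something like this the right way to post the chunks one at a time, or is there a better pattern for this?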