var address = [ "data/somedata1.json", "data/somedata2.json", "data/somedata3.json", "data/somedata4.json", "data/somedata5.json"];
There is also a function that loads these files:
var jsonData = []; // holds the parsed contents of each file

function readData()
{
    var loadFile = function (filePath, done)
    {
        var xhr = new XMLHttpRequest();
        xhr.open("GET", filePath, true);
        xhr.setRequestHeader("X-Requested-With", "XMLHttpRequest");
        xhr.onload = function () { return done(this.responseText); };
        xhr.send();
    };

    address.forEach(function (file, i)
    {
        loadFile(file, function (responseText)
        {
            jsonData[i] = JSON.parse(responseText);
            // Once the callback for the last URL in the array fires,
            // use the collected data.
            if (i === 4)
            {
                fill(jsonData);
                document.getElementById("el").innerHTML = jsonData[2].title3;
                Dosomething(jsonData[0]);
            }
        });
    });
}
Each JSON file is exactly 150 kB. However, when this code runs on the website, jsonData[0] sometimes turns out to be undefined. What might be causing this inconsistency, and how can the code be changed so that all files are guaranteed to have loaded before the data is used?
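
One possible approach, shown only as a sketch, is to count how many responses have arrived instead of keying off the index of the last URL; the forEach block inside readData could be replaced with something like the following, reusing loadFile, address, jsonData, fill, and Dosomething from the code above:

    var loadedCount = 0;

    address.forEach(function (file, i)
    {
        loadFile(file, function (responseText)
        {
            jsonData[i] = JSON.parse(responseText);
            loadedCount++;
            // Proceed only after every request has called back,
            // regardless of the order in which the responses arrive.
            if (loadedCount === address.length)
            {
                fill(jsonData);
                document.getElementById("el").innerHTML = jsonData[2].title3;
                Dosomething(jsonData[0]);
            }
        });
    });

Because the callbacks can return in any order, the counter only reaches address.length once every file has actually been fetched and parsed, whereas checking i === 4 only tells you that the request for the last URL has finished.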