When working with an array of data for 5000 users, my code's performance degrades significantly. The goal is to extract unique values while preferring records that have a phone number over records where the phone field is empty.
The code snippet provided below implements two conditions:
Firstly, extracting unique values
Secondly, checking whether the phone number is null or not, giving preference to records with existing phone numbers.
// Sample input: one row per record, shaped [name, phones].
// `phones` is a comma-separated string of phone numbers, or "" when missing.
// Note the same name can appear several times with different phone values.
var arr = [
["Wong", ""],
["Wong", "0143213123"],
["Ali", "0177213123"],
["Ali", "0177213123, 0124545345"],
["Ali", ""],
["Imran", "0133454335"]
];
/**
 * Deduplicates [name, phones] rows by name, preferring rows that carry a
 * phone number.
 *
 * Keep rule (identical to the original nested-loop version): once a name is
 * seen, a later row overwrites the stored phone string only when the stored
 * value is blank or contains a single number (no comma) AND the new value is
 * non-blank; the overwriting value is stored trimmed. First occurrence order
 * of names is preserved.
 *
 * Fixes over the original:
 *  - `i`/`x` were implicit globals (missing `var`) — now declared.
 *  - The inner scan over `seen` made it O(n^2), the reported slowdown at
 *    5000 rows; a name -> result-index dictionary makes it O(n).
 *  - `seen.push(arr[i])` stored a reference into the caller's array, so
 *    updates mutated the input rows; we now push a copy.
 *
 * @param {Array<[string, string]>} arr - rows of [name, phones].
 * @returns {Array<[string, string]>} new array of deduplicated rows.
 */
function uniq(arr) {
  // Null-prototype object: a plain dictionary with no inherited keys,
  // giving O(1) lookup of "have we seen this name, and where?".
  var indexByName = Object.create(null);
  var result = [];
  for (var i = 0; i < arr.length; i++) {
    var name = arr[i][0];
    var phone = arr[i][1];
    var idx = indexByName[name];
    if (idx === undefined) {
      // First occurrence: remember where it landed and keep a COPY of the
      // row so later updates never mutate the caller's data.
      indexByName[name] = result.length;
      result.push([name, phone]);
    } else {
      var stored = result[idx][1];
      // Overwrite only when the stored value is blank or a single number,
      // and the candidate actually contains something.
      if ((stored.trim() === '' || stored.indexOf(',') === -1) && phone.trim() !== '') {
        result[idx][1] = phone.trim();
      }
    }
  }
  return result;
}
Executing the code produces the following result:
// Deduplicate the sample data and show the outcome.
var deduped = uniq(arr);
console.log(deduped);
[
["Wong", "0143213123"],
["Ali", "0177213123, 0124545345"],
["Imran", "0133454335"]
];
A runnable version of this snippet is on JSFiddle (the link was omitted from this copy).