The following code snippet shows a function that uses Insertion Sort to sort an array of heights. The values being passed in are 4, 1, 9, 14, 6, and 8, and the sorted order should be 1, 4, 6, 8, 9, 14.
// Counts how many positions differ between the input order and sorted order.
// For [4, 1, 9, 14, 6, 8] the sorted order is [1, 4, 6, 8, 9, 14] and the
// answer is 3 (positions 0, 1, and 3 change... index 2 also moves: 3 mismatches).
//
// IMPORTANT: insertionSort sorts its argument IN PLACE and returns the same
// array reference. We therefore pass it a spread COPY of heights, so the
// caller's array — and our basis for comparison — is never mutated.
var heightChecker = function(heights) {
var sortedCopy = insertionSort([...heights]);
var count = 0;
for(var i = 0; i < heights.length; i++) {
// heights still holds the original order; sortedCopy holds sorted order.
if(sortedCopy[i] !== heights[i]) {
count++;
}
}
return count;
}
When using the line var sorted = [...heights];
, the expected answer is 3. However, changing the code to:
// BUGGY version: this always returns 0, regardless of input order.
var heightChecker = function(heights) {
// insertionSort sorts `heights` IN PLACE and then returns that SAME array
// reference — so after this line, newHeight and heights are two names for
// one (already sorted) array. No unsorted copy of the input survives.
var newHeight = insertionSort(heights);
var count = 0;
for(var i = 0; i < heights.length; i++) {
// Because newHeight === heights, this compares the array with itself:
// every element trivially matches and count never increments.
if(newHeight[i] !== heights[i]) {
count++;
}
}
return count;
}
the function returns an answer of 0 instead. It's puzzling why the two implementations yield different results despite appearing similar. The key difference: insertionSort sorts its argument in place and returns the same array reference, so in the second version newHeight and heights are the same (sorted) array, and no unsorted copy remains to compare against.
Below is the Insertion Sort algorithm used in the code:
// Insertion sort. NOTE: sorts inputArr IN PLACE and returns the very same
// array reference — callers that need the original order must copy first
// (e.g. insertionSort([...arr])).
function insertionSort(inputArr) {
let length = inputArr.length;
for (let index = 1; index < length; index++) {
// The value being inserted into the sorted prefix [0, index).
let value = inputArr[index];
let scan = index - 1;
// Shift every larger element one slot to the right to open a gap.
while (scan >= 0 && inputArr[scan] > value) {
inputArr[scan + 1] = inputArr[scan];
scan--;
}
// Drop the value into the gap left behind.
inputArr[scan + 1] = value;
}
return inputArr;
}