I am trying to visualize UTM/WGS84 coordinates in three.js and have run into a problem with the fine granularity of the trajectories: the movements are so subtle that it is hard to see any real change in behavior. My goal is a Space-Time Cube where X and Y represent space and Z represents time, but I am struggling to project the trajectory data in a way that clearly shows the location changes. Normalizing the data gave some results, yet I would like to explore more robust techniques. The trajectory comes from a CSV file stored in the variable 'data' and contains 1500 tuples of LAT, LON (EPSG:4326) and ascending seconds. The object moves across roughly the length of four football fields:
12.4309352,48.4640973,0
12.4301431,48.4655268,15
12.4288555,48.4658138,30
12.4266812,48.4653488,45
12.4245049,48.4648678,60
12.4228305,48.4639438,75
12.4217859,48.4625038,90
... ... ...
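For scale: four football fields is only about 400 m, i.e. a few thousandths of a degree in either coordinate, so plotting the raw degrees collapses the whole track. One idea I have been sketching is to convert every point to metre offsets from the first sample with a simple equirectangular approximation (toLocalMeters is only a sketch, not part of my code below):
//Sketch: convert LAT/LON (EPSG:4326) to metre offsets from the first sample,
//so a ~400 m track no longer collapses into sub-pixel coordinate differences
function toLocalMeters(rows) {
    var lat0 = parseFloat(rows[0][0]);
    var lon0 = parseFloat(rows[0][1]);
    var metersPerDegLat = 111320;                                  //rough approximation
    var metersPerDegLon = 111320 * Math.cos(lat0 * Math.PI / 180); //shrinks with latitude
    return rows.map(function (row) {
        return {
            x: (parseFloat(row[1]) - lon0) * metersPerDegLon, //east-west offset in m
            y: (parseFloat(row[0]) - lat0) * metersPerDegLat, //north-south offset in m
            t: parseFloat(row[2])                             //elapsed seconds
        };
    });
}
With metre offsets in hand, one uniform scale factor (for example terrainSize divided by the largest offset) would keep the geometry undistorted, which normalizing each axis independently does not.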
Here is my code so far, with comments:
var data = $.csv.toArrays(csv);
var renderer,
    scene,
    camera,
    controls;
//terrainSize was an experimental parameter that did not yield significant changes
var terrainSize = 60;
if (!Detector.webgl) Detector.addGetWebGLMessage();
renderer = new THREE.WebGLRenderer({ antialias: true });
document.body.appendChild(renderer.domElement);
renderer.setSize(window.innerWidth, window.innerHeight);
renderer.setClearColorHex(0xeeeeee, 1.0);
scene = new THREE.Scene();
var material = new THREE.LineBasicMaterial({
color: 0xff00cc,
fog: true
});
var geometry = new THREE.Geometry();
var x = [];
var y = [];
var z = [];
for (var i = 0; i < data.length; i++) {
    x.push(parseFloat(data[i][0]));
    y.push(parseFloat(data[i][1]));
    //the third column already holds the elapsed seconds, so it can be used directly
    z.push(parseFloat(data[i][2]));
}
//Normalization of seconds for enhanced visibility on the map
var z_stretch = stretch_array(z, 10, 1);
function stretch_array(my_stretched_array, given_stretch, multiplier) {
    //scale so the maximum becomes given_stretch, then apply the multiplier
    var ratio = Math.max.apply(null, my_stretched_array) / given_stretch;
    for (var i = 0; i < my_stretched_array.length; i++) {
        my_stretched_array[i] = multiplier * my_stretched_array[i] / ratio;
    }
    return my_stretched_array;
}
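An alternative I have been considering for the time axis is a plain min-max rescale onto a fixed cube height, so the Z (time) axis always spans the same height even if the first timestamp is not zero. This is only a sketch; rescale and cubeHeight are not part of my current code:
//Alternative sketch (not in my code yet): min-max rescale of the time column
//onto [0, cubeHeight], e.g. rescale(z, 10) instead of stretch_array(z, 10, 1)
function rescale(values, cubeHeight) {
    var min = Math.min.apply(null, values);
    var max = Math.max.apply(null, values);
    var range = (max - min) || 1; //guard against a zero-length track
    return values.map(function (v) {
        return (v - min) / range * cubeHeight;
    });
}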
//Reformation of the data post-normalization
var data_stretched = [];
for (var i = 0; i < data.length; i++) {
    data_stretched.push([x[i], y[i], z_stretch[i]]);
}
//Attempt to project the coordinates with d3.js (not yet wired into the stretching above)
var projection = d3.geo.transverseMercator()
.translate([terrainSize / 2, terrainSize / 2])
.scale(10)
.center([12.4309352,48.4640973]);
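What I suspect I actually need is to derive the projection scale from the bounding box of the data instead of the fixed .scale(10). Below is a rough sketch of that idea, assuming d3 v3's d3.geo API and the same [LON, LAT] argument order as in the loop further down; fittedProjection is only a hypothetical replacement for the projection above:
//Sketch: project every point once with a unit scale, measure the extent in
//projected units, then derive a scale that makes the track span ~terrainSize
var unit = d3.geo.transverseMercator()
    .scale(1)
    .translate([0, 0]);
var pts = data_stretched.map(function (d) { return unit([d[1], d[0]]); });
var xs = pts.map(function (p) { return p[0]; });
var ys = pts.map(function (p) { return p[1]; });
var extent = Math.max(
    Math.max.apply(null, xs) - Math.min.apply(null, xs),
    Math.max.apply(null, ys) - Math.min.apply(null, ys)) || 1;
var fittedProjection = d3.geo.transverseMercator()
    .scale(terrainSize / extent)                   //track now spans roughly terrainSize
    .translate([terrainSize / 2, terrainSize / 2])
    .center([12.4309352, 48.4640973]);             //same centre as above
If this is sound, I would simply use fittedProjection instead of projection in the loop below.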
//Iteration through the data for translation and geometry addition
for (var i = 0; i < data_stretched.length; i++) {
    var lat = data_stretched[i][0];
    var lon = data_stretched[i][1];
    var t = data_stretched[i][2];
    var coord = translate(projection([lon, lat])); //d3 expects [longitude, latitude]
    geometry.vertices.push(new THREE.Vector3(coord[0], t, coord[1]));
}
//Additional experiment involving translation functions
function translate(point) {
return [point[0] - (terrainSize / 2), (terrainSize / 2) - point[1]];
}
//Rendering the plotted line
var line = new THREE.Line(geometry, material);
scene.add(line);
//Configuration settings for camera and controls
camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 0.1, 1000);
camera.position.set(0, -terrainSize / 2, terrainSize / 2);
controls = new THREE.TrackballControls(camera);
controls.rotateSpeed = 1.0;
controls.zoomSpeed = 0.2;
controls.panSpeed = 0.8;
controls.noZoom = false;
controls.noPan = false;
controls.staticMoving = true;
controls.dynamicDampingFactor = 0.3;
animate();
function animate() {
requestAnimationFrame(animate);
controls.update();
renderer.render(scene, camera);
}
The desired output should look something like this (stretched values applied purely for illustration):