I'm attempting to port the following XTEA implementation from JavaScript to Go:
function decipherXTea(v, k) {
const ROUNDS = 32;
const DELTA = 0x9E3779B9;
let y = v[0];
let z = v[1];
let sum = DELTA * ROUNDS;
for (let round = 0; round < ROUNDS; round++) {
z -= ((y << 4 ^ y >> 5) + y) ^ (sum + k[sum >> 11 & 3]);
sum -= DELTA;
y -= ((z << 4 ^ z >> 5) + z) ^ (sum + k[sum & 3]);
}
return [
y,
z
];
}
Strangely, my Go code produces identical initial values for y and z, but after completing the rounds the final result does not match what the JavaScript code computes at all.
This is my Go version:
// decrypt runs the 32-round XTEA decipher over one 8-byte big-endian block,
// reproducing the JavaScript reference exactly: JS bitwise operators coerce
// their operands to signed 32 bits (ToInt32), which maps to Go int32
// truncation/wrap-around, while additions and subtractions are carried in
// int64 (exact, like JS doubles) and truncated only where JS would coerce.
//
// k is the key as four words; block must hold at least 8 bytes.
// Returns the 8-byte deciphered block.
func decrypt(k [4]int64, block []byte) []byte {
	y := int64(int32(block[0])<<24 | int32(block[1])<<16 | int32(block[2])<<8 | int32(block[3])<<0)
	z := int64(int32(block[4])<<24 | int32(block[5])<<16 | int32(block[6])<<8 | int32(block[7])<<0)
	fmt.Printf("y: %d, z: %d\n", y, z)
	const (
		rounds       = 32
		delta  int64 = 0x9E3779B9
	)
	// JS computes DELTA * ROUNDS without 32-bit wrap-around; int64 holds the
	// exact product just as a JS Number does, and int32(sum) below matches
	// the ToInt32 coercion JS applies inside the round.
	sum := delta * rounds
	for i := 0; i < rounds; i++ {
		// Shifts must act on the 32-bit residue int32(y)/int32(z), never on the
		// drifted int64 value: after the first round y and z exceed the int32
		// range, and int32(z>>5) != int32(z)>>5 once that happens. The original
		// y-update shifted the full int64 z (int32(z<<4^z>>5)), which is why the
		// results diverged after matching initially; it now mirrors the z-update.
		z -= int64(int32(int64(int32(y)<<4^int32(y)>>5)+y) ^ int32(sum+k[int32(sum)>>11&3]))
		sum -= delta
		y -= int64(int32(int64(int32(z)<<4^int32(z)>>5)+z) ^ int32(sum+k[int32(sum)&3]))
	}
	fmt.Printf("result: %d, %d\n", y, z)
	end := make([]byte, 8)
	end[0] = byte(y >> 24 & 255)
	end[1] = byte(y >> 16 & 255)
	end[2] = byte(y >> 8 & 255)
	end[3] = byte(y & 255)
	end[4] = byte(z >> 24 & 255)
	end[5] = byte(z >> 16 & 255)
	end[6] = byte(z >> 8 & 255)
	end[7] = byte(z & 255)
	return end
}