// NOTE(review): this is the buggy version quoted in the question — kept as-is for context.
let multiply = function(a, b) {
// BUG: c's slots start as undefined, so `undefined += x` produces NaN for every cell.
let c = [];
for(let row = 0; row < 4; row++){
for(let col = 0; col < 4;col++){
for(let i = 0; i < 4; i++){
// BUG: indices are transposed — the row-by-column dot product needs a[row][i] * b[i][col].
c[col + (4*row)] += a[i][row] * b[col][i];
}
}
}
// BUG: missing `return c;` and the function's closing brace.
The function takes two four-by-four matrices and outputs an array of length 16. It does not seem to be giving me the correct array.
This should do the trick
/**
 * Multiplies two 4x4 matrices.
 *
 * @param {number[][]} a - Left operand: 4 rows of 4 numbers each.
 * @param {number[][]} b - Right operand: 4 rows of 4 numbers each.
 * @returns {number[]} Flat array of 16 numbers in row-major order,
 *   i.e. the product cell (row, col) is at index `col + 4 * row`.
 */
let multiply = function(a, b) {
  // Start every cell at 0 so `+=` accumulates from a number, not undefined.
  const c = new Array(16).fill(0);
  for (let row = 0; row < 4; row++) {
    for (let col = 0; col < 4; col++) {
      for (let i = 0; i < 4; i++) {
        // Dot product of row `row` of a with column `col` of b.
        c[col + 4 * row] += a[row][i] * b[i][col];
      }
    }
  }
  return c;
};
Notice that you need to initialize the output array and your row x col
multiplication was wrong; you need:
a[row][i] * b[i][col]
instead of a[i][row] * b[col][i]
The function should also return the resulting array.
You can give it a try using JSFiddle: https://jsfiddle.net/1fxLpucg/
The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address.Any question please contact:yoyou2525@163.com.