I'm running into a problem with some matrix multiplication code in JavaScript. I run the function below with these two matrices:
var m1 = [ [ 1, 0, 0 ],
           [ 0, 1, 0 ],
           [ 1, 1, 0 ],
           [ 0, 0, 1 ],
           [ 1, 0, 1 ],
           [ 0, 1, 1 ],
           [ 1, 1, 1 ] ];   // 7 rows, 3 columns
var m2 = [ [ '0', '1', '1', '0', '0', '1', '1' ] ];   // 1 row, 7 columns (string entries)
var matrixMult = function (m1, m2) {
    console.log(m1);
    console.log(m2);
    console.log("m1 length: %d, m2[0].length: %d", m1.length, m2[0].length);
    if (m1.length != m2[0].length) {
        console.error("Incompatible matrix dimensions for multiplication.");
        return false;
    }
    var result = [];
    for (var i = 0; i < m1[0].length; i++) {
        result[i] = [];
        for (var j = 0; j < m2.length; j++) {
            var sum = 0;
            for (var k = 0; k < m1.length; k++) {
                sum += m1[i][k] * m2[k][j];
            }
            result[i][j] = sum;
        }
    }
    return result;
};
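For reference, this is roughly how I call it (a minimal reproduction using the variables defined above):

var product = matrixMult(m1, m2);  // the dimension check passes (7 == 7), then it crashes in the inner loop
console.log(product);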
I get this error:
/path/to/file.js:58
sum += m1[i][k] * m2[k][j];
^
TypeError: Cannot read property '0' of undefined
at matrixMult (...)
What is going wrong? Could the problem be that m2.length is only 1?
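In case it helps, here is a quick check of the shapes I think I'm working with (assuming the outer array holds the rows):

console.log(m1.length, m1[0].length); // 7 3  -> m1 is 7 x 3
console.log(m2.length, m2[0].length); // 1 7  -> m2 is 1 x 7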