这将是一堵巨大的代码墙,但我希望有人有时间和耐心来帮助我。
我目前正在尝试为我的 HTML Tic-Tac-Toe 游戏创建一个 AI 播放器。我正在使用这个资源,其中包含使用 minimax 算法在 Java 中编写的工作 AI 播放器代码:https ://www3.ntu.edu.sg/home/ehchua/programming/java/JavaGame_TicTacToe_AI.html (第 1.5 节)
我想把这个给定的 Java 代码翻译成 JavaScript。资源代码和我的 HTML/JS 代码之间的一个主要区别是资源使用二维数组作为游戏板,而我使用一维数组。
这意味着资源的数组如下所示:Cell[][] cells;
其中第一个索引代表行,第二个索引代表列;我的数组看起来像这样:let board_array = [];
它的长度为 9,表示从左上角到右下角读取的棋盘,例如,左上角的单元格位于索引 0 处,而中间右侧的单元格位于索引处 5。
另一个小的区别是资源使用玩家种子存储在单元格数组中,而我只'X'
为人类和'O'
人工智能使用字符串。
我花了很多时间尝试将给定的 Java 函数转换为我的 JS 脚本,但无论我尝试什么,minimax
总是返回-1
作为最终的最佳单元格,据我了解,只有在没有更多可用单元格的情况下才会发生,这表明游戏已经结束。我已经进行了一些console.log 调试,我可以看到在递归过程中,实际上返回了合法的最佳单元格(例如0 或4),但在最终返回时它始终为-1。
我敢打赌,我的错误要么在于从二维数组到一维数组的有缺陷的转换,要么与 JS 在特定行中所做的与 Java 完全不同的事情有关。我可以想象WINNING_PATTERNS
数组中的二进制值或hasWon
函数中对它们的操作可能会造成麻烦。我不知道它在 JS 中是否像这样工作,但它不会抛出任何错误,所以我无法自己弄清楚。
无论如何,这是我的代码:
let board_array = ['X', '', '', '', '', '', '', '', '']; // X at a random index because the human player always makes the first turn
const MY_SEED = 'O';
const OPP_SEED = 'X';
function moveAI() {
let result = minimax(2, MY_SEED);
let best_cell = result[1];
console.log(best_cell); // always returns -1
// if best_cell would be properly returned I could do "board_array[best_cell] = 'O'" here
}
function minimax(depth, player) {
let next_moves = generateMoves();
let best_score = (player === MY_SEED) ? Number.MIN_VALUE : Number.MAX_VALUE;
let current_score;
let best_cell = -1;
if (next_moves.length === 0 || depth === 0) {
best_score = evaluate();
}
else {
for (let move in next_moves) {
move = parseInt(move);
board_array[move] = player;
if (player === MY_SEED) {
current_score = minimax(depth - 1, OPP_SEED)[0];
if (current_score > best_score) {
best_score = current_score;
best_cell = move;
}
}
else {
current_score = minimax(depth - 1, MY_SEED)[0];
if (current_score < best_score) {
best_score = current_score;
best_cell = move;
}
}
board_array[move] = '';
}
}
return [best_score, best_cell];
}
function generateMoves() {
let next_moves = [];
if (hasWon(MY_SEED) || hasWon(OPP_SEED)) {
return next_moves;
}
for (let i = 0; i < board_array.length; i++) {
if (board_array[i] === '') { next_moves.push(i); }
}
return next_moves;
}
function evaluate() {
let score = 0;
score += evaluateLine(0, 1, 2);
score += evaluateLine(3, 4, 5);
score += evaluateLine(6, 7, 8);
score += evaluateLine(0, 3, 6);
score += evaluateLine(1, 4, 7);
score += evaluateLine(2, 5, 8);
score += evaluateLine(0, 4, 8);
score += evaluateLine(2, 4, 6);
return score;
}
function evaluateLine(c1, c2, c3) {
let score = 0;
if (board_array[c1] === MY_SEED) { score = 1; }
else if (board_array[c1] === OPP_SEED) { score = -1; }
if (board_array[c2] === MY_SEED) {
if (score === 1) { score = 10; }
else if (score === -1) { return 0; }
else { score = 1; }
}
else if (board_array[c2] === OPP_SEED) {
if (score === -1) { score = -10; }
else if (score === 1) { return 0; }
else { score = -1; }
}
if (board_array[c3] === MY_SEED) {
if (score > 0) { score *= 10; }
else if (score < 0) { return 0; }
else { score = 1; }
}
else if (board_array[c3] === OPP_SEED) {
if (score < 0) { score *= 10; }
else if (score > 1) { return 0; }
else { score = -1; }
}
return score;
}
const WINNING_PATTERNS = [
0b111000000, 0b000111000, 0b000000111,
0b100100100, 0b010010010, 0b001001001,
0b100010001, 0b001010100
];
function hasWon(player) {
let pattern = 0b000000000; // does JS recognize this as a binary number like Java does?
for (let i = 0; i < board_array.length; i++) {
if (board_array[i] === player) {
pattern |= (1 << (9 - i)); // does this actually do what I think it does? (compare to Java code)
}
}
for (let winning_pattern in WINNING_PATTERNS) {
if ((pattern & winning_pattern) === winning_pattern) { return true; }
}
return false;
}
moveAI(); // usually called after the human player sets their X