Computer.java
import java.util.List;
/**
* @author Tony
*
* This class is the artificial intelligence that plays against the human.
* Because the starting player is chosen at random, the AI is not always the
* alpha/max or the beta/min player, and the same applies to the human.
*/
public class Computer {
public int nodesExpanded; //Number of nodes expanded.
private GameBoard gameBoard; //the state of the current gameboard.
public Computer() {
//Default constructor
}
public GameBoard getBoard() {
return gameBoard;
}
/**
* The alpha-beta pruning algorithm the AI follows to play the best possible
* move against the human. The strength of the AI depends on the depth, i.e.
* the number of moves the computer can look ahead. (A usage sketch follows
* this method.)
*
* @param depth number of moves the computer can look ahead.
* @param gameBoard the game board state to search from.
* @param alphaPlayer true if the current player is the alpha/max player.
* @param alpha the best heuristic score found so far for the alpha player.
* @param beta the best heuristic score found so far for the beta player.
* @return an array {heuristicValue, row, col, block, dir} describing the best move found.
*/
public int[] alphaBetaPrun(int depth, GameBoard gameBoard,
boolean alphaPlayer, int alpha, int beta) {
int heuristicValue = 0;
int bestRow = -1;
int bestCol = -1;
int bestBlock = -1;
int bestDir = -1;
int row, col, block, dir;
int compCount = 1;
int humCount = 1;
List<int[]> nextMoves = gameBoard.getChildren(alphaPlayer);
if(depth == 0 || nextMoves.isEmpty()) {
nodesExpanded++;
heuristicValue = gameBoard.getHeuristicValue(alphaPlayer);
// Leaf node: return the heuristic value with placeholder move fields so
// the array shape matches the non-leaf returns below.
return new int[] {heuristicValue, bestRow, bestCol, bestBlock, bestDir};
} else {
for (int[] move : nextMoves) {
row = move[0];
col = move[1];
block = move[2];
dir = move[3];
if (alphaPlayer) { // max player (alpha)
// System.out.print("Human(MAX) Move: "+"("+row+","+col+")"+block+"/"+dir+" " + humCount++ + "/" + nextMoves.size() + " | ");
gameBoard.addToBoard(row, col, gameBoard.player1color);
gameBoard.rotateBoard(block, dir);
heuristicValue = alphaBetaPrun(depth - 1, gameBoard, false, alpha, beta)[0];
nodesExpanded++;
// System.out.println("Depth: "+ depth+ " Heuristic Value: " + heuristicValue + " ");
if (heuristicValue > alpha) {
alpha = heuristicValue;
bestRow = row;
bestCol = col;
bestBlock = block;
bestDir = dir;
}
//undo move: rotate the block back in the opposite direction, then clear the placed piece
if (dir == 0) {
gameBoard.rotateBoard(block, 1);
gameBoard.addToBoard(row, col, '\u0000');
} else {
gameBoard.rotateBoard(block, 0);
gameBoard.addToBoard(row, col, '\u0000');
}
gameBoard.clearHeuristic();
} else { // min player (beta)
// System.out.println("Computer(MIN) Move: "+"("+row+","+col+")"+block+"/"+dir+" " + humCount++ + "/" + nextMoves.size() + " | ");
gameBoard.addToBoard(row, col, gameBoard.player2color);
gameBoard.rotateBoard(block, dir);
heuristicValue = alphaBetaPrun(depth - 1, gameBoard, true, alpha, beta)[0];
nodesExpanded++;
// System.out.println("Depth: "+ depth+ " Heuristic Value: " + heuristicValue + " ");
if (heuristicValue < beta) {
beta = heuristicValue;
bestRow = row;
bestCol = col;
bestBlock = block;
bestDir = dir;
}
//undo move
if (dir == 0) {
gameBoard.rotateBoard(block, 1);
gameBoard.addToBoard(row, col, '\u0000');
} else {
gameBoard.rotateBoard(block, 0);
gameBoard.addToBoard(row, col, '\u0000');
}
gameBoard.clearHeuristic();
}
//prune off branch: the opponent already has a better option elsewhere,
//so the remaining siblings cannot affect the result
if (alpha >= beta) {
break;
}
}
if (alphaPlayer) {
return new int[]{alpha, bestRow, bestCol, bestBlock, bestDir};
} else {
return new int[]{beta, bestRow, bestCol, bestBlock, bestDir};
}
}
}
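/**
* Illustrative convenience wrapper (a sketch, not part of the original class):
* runs alpha-beta pruning to the given depth and returns only the chosen move
* as {row, col, block, dir}. It assumes, for illustration, that the computer
* plays as the beta/min player; if the computer is the alpha/max player in a
* given game, pass true instead of false.
*/
public int[] bestComputerMove(GameBoard board, int depth) {
    int[] result = alphaBetaPrun(depth, board, false,
            Integer.MIN_VALUE, Integer.MAX_VALUE);
    return new int[]{result[1], result[2], result[3], result[4]};
}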
/**
* The minimax algorithm the AI follows to find the best possible move
* against the human, without pruning. The strength of the AI depends on
* the depth. (A comparison sketch follows this method.)
*
* @param depth number of moves the computer can look ahead.
* @param gameBoard the game board state to search from.
* @param alphaPlayer true if the current player is the alpha/max player.
* @return an array {bestValue, row, col, block, dir} describing the best move found.
*/
public int[] minMax(int depth, GameBoard gameBoard, boolean alphaPlayer) {
int heuristicValue = 0;
// Track the best value seen so far: the max player starts from the lowest
// possible value, the min player from the highest.
int bestValue = alphaPlayer ? Integer.MIN_VALUE : Integer.MAX_VALUE;
int bestRow = -1;
int bestCol = -1;
int bestBlock = -1;
int bestDir = -1;
int row, col, block, dir;
int compCount = 1;
int humCount = 1;
List<int[]> nextMoves = gameBoard.getChildren(alphaPlayer);
if(depth == 0 || nextMoves.isEmpty()) {
nodesExpanded++;
bestValue = gameBoard.getHeuristicValue(alphaPlayer);
// Leaf node: return the heuristic value with placeholder move fields so
// the array shape matches the non-leaf returns below.
return new int[] {bestValue, bestRow, bestCol, bestBlock, bestDir};
} else {
for (int[] move : nextMoves) {
row = move[0];
col = move[1];
block = move[2];
dir = move[3];
if (alphaPlayer) { // max player
// System.out.print("Human(MAX) Move: "+"("+row+","+col+")"+block+"/"+dir+" " + humCount++ + "/" + nextMoves.size() + " | ");
gameBoard.addToBoard(row, col, gameBoard.player1color);
gameBoard.rotateBoard(block, dir);
heuristicValue = minMax(depth - 1, gameBoard, false)[0];
nodesExpanded++;
// System.out.println("Depth: "+ depth+ " Heuristic Value: " + heuristicValue + " ");
if (heuristicValue > bestValue) {
bestValue = heuristicValue;
bestRow = row;
bestCol = col;
bestBlock = block;
bestDir = dir;
}
//undo move
if (dir == 0) {
gameBoard.rotateBoard(block, 1);
gameBoard.addToBoard(row, col, '\u0000');
} else {
gameBoard.rotateBoard(block, 0);
gameBoard.addToBoard(row, col, '\u0000');
}
gameBoard.clearHeuristic();
} else { // min player
// System.out.println("Computer(MIN) Move: "+"("+row+","+col+")"+block+"/"+dir+" " + humCount++ + "/" + nextMoves.size() + " | ");
gameBoard.addToBoard(row, col, gameBoard.player2color);
gameBoard.rotateBoard(block, dir);
heuristicValue = minMax(depth - 1, gameBoard, true)[0];
nodesExpanded++;
// System.out.println("Depth: "+ depth+ " Heuristic Value: " + heuristicValue + " ");
if (heuristicValue < bestValue) {
bestValue = heuristicValue;
bestRow = row;
bestCol = col;
bestBlock = block;
bestDir = dir;
}
//undo move
if (dir == 0) {
gameBoard.rotateBoard(block, 1);
gameBoard.addToBoard(row, col, '\u0000');
} else {
gameBoard.rotateBoard(block, 0);
gameBoard.addToBoard(row, col, '\u0000');
}
gameBoard.clearHeuristic();
}
}
return new int[]{bestValue, bestRow, bestCol, bestBlock, bestDir};
}
}
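/**
* Illustrative sketch (not part of the original class): compares the work done
* by plain minimax and by alpha-beta pruning at the same depth by reading the
* nodesExpanded counter. It assumes the board is in its current position and,
* for illustration, that the computer moves as the beta/min player; both
* searches undo their trial moves, so the board is left unchanged.
*/
public void compareSearchEffort(GameBoard board, int depth) {
    nodesExpanded = 0;
    minMax(depth, board, false);
    int minMaxNodes = nodesExpanded;

    nodesExpanded = 0;
    alphaBetaPrun(depth, board, false, Integer.MIN_VALUE, Integer.MAX_VALUE);
    int alphaBetaNodes = nodesExpanded;

    System.out.println("Minimax expanded " + minMaxNodes
            + " nodes; alpha-beta expanded " + alphaBetaNodes + " nodes.");
}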
}