15 #include "tactics/util.h"
17 #include "uct/internal.h"
18 #include "uct/prior.h"
20 #include "uct/slave.h"
/* Allocate tree node(s). The returned nodes are initialized with zeroes.
 * Returns NULL if not enough memory.
 * This function may be called by multiple threads in parallel. */
/* NOTE(review): lossy extract — the function braces, the branch selecting
 * between the fast_alloc arena path and the heap path, the overflow-return
 * and the final return are missing from this view; recover from upstream. */
static struct tree_node *
tree_alloc_node(struct tree *t, int count, bool fast_alloc)
	/* Node(s) being allocated; stays NULL on failure. */
	struct tree_node *n = NULL;
	size_t nsize = count * sizeof(*n);
	/* Atomically reserve nsize bytes of the shared arena accounting;
	 * old_size is the offset at which our reservation starts. */
	unsigned long old_size = __sync_fetch_and_add(&t->nodes_size, nsize);
	/* Arena exhausted? (branch body not visible in this extract) */
	if (old_size + nsize > t->max_tree_size)
	assert(t->nodes != NULL);
	/* Arena path: carve the node(s) out of the pre-allocated buffer. */
	n = (struct tree_node *)(t->nodes + old_size);
	/* Heap path: zeroed allocation via the project's checked calloc. */
	n = calloc2(count, sizeof(*n));
/* Initialize a node at a given place in memory.
 * This function may be called by multiple threads in parallel. */
/* NOTE(review): lossy extract — the return-type line, braces and the
 * assignments of coord/depth into the node are missing from this view. */
tree_setup_node(struct tree *t, struct tree_node *n, coord_t coord, int depth)
	/* Monotonic counter mixed into per-node debug hashes; shared by all
	 * threads without synchronization (occasional duplicates tolerated,
	 * see comment below). */
	static volatile unsigned int hash = 0;
	/* n->hash is used only for debugging. It is very likely (but not
	 * guaranteed) to be unique. */
	hash_t h = n - (struct tree_node *)0;	/* pointer value as an integer */
	n->hash = (h << 32) + (hash++ & 0xffffffff);
	/* Track the deepest node ever created (update body not visible here). */
	if (depth > t->max_depth)
/* Allocate and initialize a node. Returns NULL (fast_alloc mode)
 * or exits the main program if not enough memory.
 * This function may be called by multiple threads in parallel. */
/* NOTE(review): lossy extract — braces, the declaration of n and the
 * NULL-check/return between the two calls are missing from this view. */
static struct tree_node *
tree_init_node(struct tree *t, coord_t coord, int depth, bool fast_alloc)
	n = tree_alloc_node(t, 1, fast_alloc);
	tree_setup_node(t, n, coord, depth);
/* Create a tree structure. Pre-allocate all nodes if max_tree_size is > 0. */
/* NOTE(review): lossy extract — the return-type line, braces, the
 * t->board assignment and the final `return t;` are missing here. */
tree_init(struct board *board, enum stone color, unsigned long max_tree_size,
	unsigned long max_pruned_size, unsigned long pruning_threshold, floating_t ltree_aging, int hbits)
	struct tree *t = calloc2(1, sizeof(*t));
	t->max_tree_size = max_tree_size;
	t->max_pruned_size = max_pruned_size;
	t->pruning_threshold = pruning_threshold;
	if (max_tree_size != 0) {
		/* Single arena backing all nodes in fast_alloc mode. */
		t->nodes = malloc2(max_tree_size);
		/* The nodes buffer doesn't need initialization. This is currently
		 * done by tree_init_node to spread the load. Doing a memset for the
		 * entire buffer here would be too slow for large trees (>10 GB). */
	/* The root PASS move is only virtual, we never play it. */
	/* NOTE(review): t->nodes (a pointer) is passed as the bool fast_alloc
	 * argument — presumably deliberate "arena present => fast_alloc";
	 * confirm against upstream. */
	t->root = tree_init_node(t, pass, 0, t->nodes);
	t->root_symmetry = board->symmetry;
	t->root_color = stone_other(color); // to research black moves, root will be white
	/* Local trees (one rooted per color); never arena-allocated. */
	t->ltree_black = tree_init_node(t, pass, 0, false);
	t->ltree_white = tree_init_node(t, pass, 0, false);
	t->ltree_aging = ltree_aging;
	if (hbits) t->htable = uct_htable_alloc(hbits);
/* This function may be called by multiple threads in parallel on the
 * same tree, but not on node n. n may be detached from the tree but
 * must have been created in this tree originally.
 * It returns the remaining size of the tree after n has been freed. */
/* NOTE(review): lossy extract — the return-type line, the loop walking
 * the children list and the free of n itself are missing from this view. */
tree_done_node(struct tree *t, struct tree_node *n)
	struct tree_node *ni = n->children;
	/* Remember the sibling before recursing; the recursion frees ni. */
	struct tree_node *nj = ni->sibling;
	tree_done_node(t, ni);
	/* Atomically account for the freed node and report the remaining size. */
	unsigned long old_size = __sync_fetch_and_sub(&t->nodes_size, sizeof(*n));
	return old_size - sizeof(*n);
/* Worker thread for tree_done_node_detached(). Only for fast_alloc=false. */
/* NOTE(review): lossy extract — the return-type line, braces, any debug
 * guard around the fprintf and the cleanup of str/ctx are missing here. */
tree_done_node_worker(void *ctx_)
	struct subtree_ctx *ctx = ctx_;
	/* Stringify the coordinate up front for the log line below. */
	char *str = coord2str(node_coord(ctx->n), ctx->t->board);
	/* Free the whole subtree; tree_done_node reports the remaining size. */
	unsigned long tree_size = tree_done_node(ctx->t, ctx->n);
	fprintf(stderr, "done freeing node at %s, tree size %lu\n", str, tree_size);
/* Asynchronously free the subtree of nodes rooted at n. If the tree becomes
 * empty free the tree also. Only for fast_alloc=false. */
/* NOTE(review): lossy extract — the thread/attr declarations, the ctx
 * field initialization and the small-tree branch body are missing here. */
tree_done_node_detached(struct tree *t, struct tree_node *n)
	if (n->u.playouts < 1000) { // no thread for small tree
		if (!tree_done_node(t, n))
	/* Fire-and-forget worker thread: created detached so that nobody
	 * ever has to join it. */
	pthread_attr_init(&attr);
	pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
	/* Context handing t and n over to tree_done_node_worker(). */
	struct subtree_ctx *ctx = malloc2(sizeof(struct subtree_ctx));
	pthread_create(&thread, &attr, tree_done_node_worker, ctx);
	pthread_attr_destroy(&attr);
/* Tear down the whole tree structure. */
/* NOTE(review): lossy extract — the return-type line, braces and the
 * branch freeing the fast_alloc arena (and t itself) are missing here. */
tree_done(struct tree *t)
	/* Local trees are always heap-allocated; free them first. */
	tree_done_node(t, t->ltree_black);
	tree_done_node(t, t->ltree_white);
	if (t->htable) free(t->htable);
	} else if (!tree_done_node(t, t->root)) {
		/* A tree_done_node_worker might still be running on this tree but
		 * it will free the tree later. It is also freeing nodes faster than
		 * we will create new ones. */
/* Recursively print node and its busiest children, indented by depth.
 * NOTE(review): lossy extract — the return-type line, braces, the loop
 * counting `children`, the nbox fill, and the best-selection loop wrapper
 * (incl. the `best` declaration and nbox[best] reset) are missing here. */
tree_node_dump(struct tree *tree, struct tree_node *node, int treeparity, int l, int thres)
	/* Indent proportionally to tree depth l. */
	for (int i = 0; i < l; i++) fputc(' ', stderr);
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
	/* We use 1 as parity, since for all nodes we want to know the
	 * win probability of _us_, not the node color. */
	fprintf(stderr, "[%s] %.3f/%d [prior %.3f/%d amaf %.3f/%d crit %.3f vloss %d] h=%x c#=%d <%"PRIhash">\n",
		coord2sstr(node_coord(node), tree->board),
		tree_node_get_value(tree, treeparity, node->u.value), node->u.playouts,
		tree_node_get_value(tree, treeparity, node->prior.value), node->prior.playouts,
		tree_node_get_value(tree, treeparity, node->amaf.value), node->amaf.playouts,
		tree_node_criticality(tree, node), node->descents,
		node->hints, children, node->hash);

	/* Print nodes sorted by #playouts. */
	/* Fixed-size scratch array of candidate children above the threshold. */
	struct tree_node *nbox[1000]; int nboxl = 0;
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		if (ni->u.playouts > thres)
	/* Selection-sort style: repeatedly pick and recurse into the child
	 * with the most playouts. */
	for (int i = 0; i < nboxl; i++)
		if (nbox[i] && (best < 0 || nbox[i]->u.playouts > nbox[best]->u.playouts))
	tree_node_dump(tree, nbox[best], treeparity, l + 1, /* node->u.value < 0.1 ? 0 : */ thres);
/* Dump the whole tree (and, at high debug levels, both local trees).
 * NOTE(review): lossy extract — return-type line and braces missing. */
tree_dump(struct tree *tree, double thres)
	/* thres > 0 is a fraction of root playouts; otherwise it is taken
	 * as an absolute count. */
	int thres_abs = thres > 0 ? tree->root->u.playouts * thres : thres;
	fprintf(stderr, "(UCT tree; root %s; extra komi %f; max depth %d)\n",
		stone2str(tree->root_color), tree->extra_komi,
		tree->max_depth - tree->root->depth);
	tree_node_dump(tree, tree->root, 1, 0, thres_abs);

	if (DEBUGL(3) && tree->ltree_black) {
		fprintf(stderr, "B local tree:\n");
		/* Parity flips with the local tree's color vs. root color. */
		tree_node_dump(tree, tree->ltree_black, tree->root_color == S_WHITE ? 1 : -1, 0, thres_abs);
		fprintf(stderr, "W local tree:\n");
		tree_node_dump(tree, tree->ltree_white, tree->root_color == S_BLACK ? 1 : -1, 0, thres_abs);
/* Build the opening-book filename for this board size/komi/handicap.
 * NOTE(review): lossy extract — the return-type line, braces, the else
 * between the two sprintf calls and the `return buf;` are missing here. */
tree_book_name(struct board *b)
	/* Static buffer: returned pointer stays valid after return, but the
	 * function is not reentrant or thread-safe. */
	static char buf[256];
	if (b->handicap > 0) {
		sprintf(buf, "ucttbook-%d-%02.01f-h%d.pachitree", b->size - 2, b->komi, b->handicap);
	sprintf(buf, "ucttbook-%d-%02.01f.pachitree", b->size - 2, b->komi);
/* Serialize node (and, above the playout threshold, its subtree) to f.
 * NOTE(review): lossy extract — the return-type line, braces, the tail
 * of the fwrite argument list and any record delimiters are missing. */
tree_node_save(FILE *f, struct tree_node *node, int thres)
	/* Children are persisted only for sufficiently-visited nodes. */
	bool save_children = node->u.playouts >= thres;
	/* Clear temporarily so the serialized flag matches what is saved. */
	node->is_expanded = 0;
	/* Dump the statistics portion of the node, i.e. everything from
	 * member u onward (layout mirrored by tree_node_load()). */
	fwrite(((void *) node) + offsetof(struct tree_node, u),
		sizeof(struct tree_node) - offsetof(struct tree_node, u),
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		tree_node_save(f, ni, thres);
	/* Restore the flag for the in-memory tree. */
	node->is_expanded = 1;
/* Save the tree as an opening book file for this board.
 * NOTE(review): lossy extract — the return-type line, braces, the fopen
 * failure handling and the fclose are missing from this view. */
tree_save(struct tree *tree, struct board *b, int thres)
	char *filename = tree_book_name(b);
	FILE *f = fopen(filename, "wb");
	tree_node_save(f, tree->root, thres);
/* Deserialize one node (and recursively its children) from f; *num counts
 * loaded nodes.
 * NOTE(review): lossy extract — the return-type line, braces, the fread
 * tail, the child-reading loop header and the `node->children` head
 * linking are missing from this view. */
tree_node_load(FILE *f, struct tree_node *node, int *num)
	/* Read back the statistics portion of the node (from member u
	 * onward), mirroring tree_node_save(). */
	fread(((void *) node) + offsetof(struct tree_node, u),
		sizeof(struct tree_node) - offsetof(struct tree_node, u),
	/* Keep values in sane scale, otherwise we start overflowing. */
#define MAX_PLAYOUTS 10000000
	if (node->u.playouts > MAX_PLAYOUTS) {
		node->u.playouts = MAX_PLAYOUTS;
	if (node->amaf.playouts > MAX_PLAYOUTS) {
		node->amaf.playouts = MAX_PLAYOUTS;
	/* Seed the "previous" stats snapshot from the freshly loaded stats. */
	memcpy(&node->pu, &node->u, sizeof(node->u));

	/* ni walks the children being read; ni_prev is the previous child
	 * so new ones can be chained onto the sibling list in order. */
	struct tree_node *ni = NULL, *ni_prev = NULL;
	ni_prev = ni; ni = calloc2(1, sizeof(*ni));
	ni_prev->sibling = ni;
	tree_node_load(f, ni, num);
/* Load an opening book for this board into the tree, if one exists.
 * NOTE(review): lossy extract — the return-type line, braces, the fopen
 * failure handling, the declaration of num and the fclose are missing. */
tree_load(struct tree *tree, struct board *b)
	char *filename = tree_book_name(b);
	FILE *f = fopen(filename, "rb");
	fprintf(stderr, "Loading opening tbook %s...\n", filename);
	/* num accumulates the total node count across the recursion. */
	tree_node_load(f, tree->root, &num);
	fprintf(stderr, "Loaded %d nodes.\n", num);
/* Copy the subtree rooted at node: all nodes at or below depth
 * or with at least threshold playouts. Only for fast_alloc.
 * The code is destructive on src. The relative order of children of
 * a given node is preserved (assumed by tree_get_node in particular).
 * Returns the copy of node in the destination tree, or NULL
 * if we could not copy it. */
/* NOTE(review): lossy extract — braces, the copy of node's payload into
 * n2, the children iteration loop header, the prev2 linking and the
 * returns are missing from this view. */
static struct tree_node *
tree_prune(struct tree *dest, struct tree *src, struct tree_node *node,
	int threshold, int depth)
	assert(dest->nodes && node);
	/* Allocate the copy out of dest's arena (fast_alloc). */
	struct tree_node *n2 = tree_alloc_node(dest, 1, true);
	if (n2->depth > dest->max_depth)
		dest->max_depth = n2->depth;
	n2->is_expanded = false;
	/* Prune: deep and rarely-visited nodes are not copied further. */
	if (node->depth >= depth && node->u.playouts < threshold)
	/* For deep nodes with many playouts, we must copy all children,
	 * even those with zero playouts, because partially expanded
	 * nodes are not supported. Considering them as fully expanded
	 * would degrade the playing strength. The only exception is
	 * when dest becomes full, but this should never happen in practice
	 * if threshold is chosen to limit the number of nodes traversed. */
	struct tree_node *ni = node->children;
	/* prev2 chains copied children in their original relative order. */
	struct tree_node **prev2 = &(n2->children);
	struct tree_node *ni2 = tree_prune(dest, src, ni, threshold, depth);
	prev2 = &(ni2->sibling);
	n2->is_expanded = true;
	n2->children = NULL; // avoid partially expanded nodes
389 /* The following constants are used for garbage collection of nodes.
390 * A tree is considered large if the top node has >= 40K playouts.
391 * For such trees, we copy deep nodes only if they have enough
392 * playouts, with a gradually increasing threshold up to 40.
393 * These constants define how much time we're willing to spend
394 * scanning the source tree when promoting a move. The chosen values
395 * make worst case pruning in about 3s for 20 GB ram, and this
396 * is only for long thinking time (>1M playouts). For fast games the
397 * trees don't grow large. For small ram or fast game we copy the
398 * entire tree. These values do not degrade playing strength and are
399 * necessary to avoid losing on time; increasing DEEP_PLAYOUTS_THRESHOLD
400 * or decreasing LARGE_TREE_PLAYOUTS will make the program faster but
402 #define LARGE_TREE_PLAYOUTS 40000LL
403 #define DEEP_PLAYOUTS_THRESHOLD 40
405 /* Garbage collect the tree early if the top node has < 5K playouts,
406 * to avoid having to do it later on a large subtree.
407 * This guarantees garbage collection in < 1s. */
408 #define SMALL_TREE_PLAYOUTS 5000
/* Free all the tree, keeping only the subtree rooted at node.
 * Prune the subtree if necessary to fit in memory or
 * to save time scanning the tree.
 * Returns the moved node. Only for fast_alloc. */
/* NOTE(review): lossy extract — the return-type line, braces, the
 * max_nodes declaration/accumulation, parts of the depth-estimation
 * loop, the fprintf opening of the stats message and the final
 * `return new_node;` are missing from this view. */
tree_garbage_collect(struct tree *tree, struct tree_node *node)
	/* node must already be detached: a root-to-be. */
	assert(tree->nodes && !node->parent && !node->sibling);
	double start_time = time_now();
	unsigned long orig_size = tree->nodes_size;

	/* Scratch tree used as the bounce buffer for the two-pass prune. */
	struct tree *temp_tree = tree_init(tree->board, tree->root_color,
		tree->max_pruned_size, 0, 0, tree->ltree_aging, 0);
	temp_tree->nodes_size = 0; // We do not want the dummy pass node
	struct tree_node *temp_node;

	/* Find the maximum depth at which we can copy all nodes. */
	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
	unsigned long nodes_size = max_nodes * sizeof(*node);
	int max_depth = node->depth;
	/* Estimate tree growth per level; stop once the estimate would
	 * no longer fit in the pruned-size budget. */
	while (nodes_size < tree->max_pruned_size && max_nodes > 1) {
		nodes_size += max_nodes * nodes_size;

	/* Copy all nodes for small trees. For large trees, copy all nodes
	 * with depth <= max_depth, and all nodes with enough playouts.
	 * Avoiding going too deep (except for nodes with many playouts) is mostly
	 * to save time scanning the source tree. It can take over 20s to traverse
	 * completely a large source tree (20 GB) even without copying because
	 * the traversal is not friendly at all with the memory cache. */
	int threshold = (node->u.playouts - LARGE_TREE_PLAYOUTS) * DEEP_PLAYOUTS_THRESHOLD / LARGE_TREE_PLAYOUTS;
	if (threshold < 0) threshold = 0;
	if (threshold > DEEP_PLAYOUTS_THRESHOLD) threshold = DEEP_PLAYOUTS_THRESHOLD;
	/* Pass 1: prune into the scratch tree. */
	temp_node = tree_prune(temp_tree, tree, node, threshold, max_depth);

	/* Now copy back to original tree. */
	tree->nodes_size = 0;
	/* Pass 2: copy everything back, no further pruning (threshold 0). */
	struct tree_node *new_node = tree_prune(tree, temp_tree, temp_node, 0, temp_tree->max_depth);

	double now = time_now();
	static double prev_time;
	if (!prev_time) prev_time = start_time;
		"tree pruned in %0.6g s, prev %0.3g s ago, dest depth %d wanted %d,"
		" size %lu->%lu/%lu, playouts %d\n",
		now - start_time, start_time - prev_time, temp_tree->max_depth, max_depth,
		orig_size, temp_tree->nodes_size, tree->max_pruned_size, new_node->u.playouts);
	prev_time = start_time;

	if (temp_tree->nodes_size >= temp_tree->max_tree_size) {
		fprintf(stderr, "temp tree overflow, max_tree_size %lu, pruning_threshold %lu\n",
			tree->max_tree_size, tree->pruning_threshold);
		/* This is not a serious problem, we will simply recompute the discarded nodes
		 * at the next move if necessary. This is better than frequently wasting memory. */
	/* Both passes must have copied exactly the same set of nodes. */
	assert(tree->nodes_size == temp_tree->nodes_size);
	assert(tree->max_depth == temp_tree->max_depth);
	tree_done(temp_tree);
/* Get a node of given coordinate from within parent, possibly creating it
 * if necessary - in a very raw form (no .d, priors, ...). */
/* FIXME: Adjust for board symmetry. */
/* NOTE(review): lossy extract — the return-type line, braces, the
 * `create` guards/returns and the loop `break` are missing from this
 * view. The children list is kept sorted by coordinate (the >= / ==
 * comparisons below rely on that ordering). */
tree_get_node(struct tree *t, struct tree_node *parent, coord_t c, bool create)
	if (!parent->children || node_coord(parent->children) >= c) {
		/* Special case: Insertion at the beginning. */
		if (parent->children && node_coord(parent->children) == c)
			return parent->children;
		/* Create and prepend a fresh node. */
		struct tree_node *nn = tree_init_node(t, c, parent->depth + 1, false);
		nn->parent = parent; nn->sibling = parent->children;
		parent->children = nn;

	/* No candidate at the beginning, look through all the children. */
	struct tree_node *ni;
	for (ni = parent->children; ni->sibling; ni = ni->sibling)
		if (node_coord(ni->sibling) >= c)
	/* Exact match just after ni? */
	if (ni->sibling && node_coord(ni->sibling) == c)
	assert(node_coord(ni) < c);
	/* Insert the new node between ni and ni->sibling, keeping order. */
	struct tree_node *nn = tree_init_node(t, c, parent->depth + 1, false);
	nn->parent = parent; nn->sibling = ni->sibling; ni->sibling = nn;
/* Get local tree node corresponding to given node, given local node child
 * iterator @lni (which points either at the corresponding node, or at the
 * nearest local tree node after @ni). */
/* NOTE(review): lossy extract — the return-type line, braces and several
 * returns (pass case, exact-match case, NULL fallthrough) are missing. */
tree_lnode_for_node(struct tree *tree, struct tree_node *ni, struct tree_node *lni, int tenuki_d)
	/* Now set up lnode, which is the actual local node
	 * corresponding to ni - either lni if it is an
	 * exact match and ni is not tenuki, <pass> local
	 * node if ni is tenuki, or NULL if there is no
	 * corresponding node available. */
	if (is_pass(node_coord(ni))) {
		/* Also, for sanity reasons we never use local
		 * tree for passes. (Maybe we could, but it's
		 * too hard to think about.) */
	if (node_coord(lni) == node_coord(ni)) {
		/* We don't consider tenuki a sequence play
		 * that we have in local tree even though
		 * ni->d is too high; this can happen if this
		 * occured in different board topology. */
	if (ni->d >= tenuki_d) {
		/* Tenuki, pick a pass lsibling if available. */
		assert(lni->parent && lni->parent->children);
		if (is_pass(node_coord(lni->parent->children))) {
			return lni->parent->children;
	/* No corresponding local node, lnode stays NULL. */
559 /* Tree symmetry: When possible, we will localize the tree to a single part
560 * of the board in tree_expand_node() and possibly flip along symmetry axes
561 * to another part of the board in tree_promote_at(). We follow b->symmetry
562 * guidelines here. */
/* This function must be thread safe, given that board b is only modified by the calling thread. */
/* NOTE(review): lossy extract — the return-type line, braces, the
 * debug-level guards around both fprintf calls, the NULL-check after
 * allocation, the `child` counter declaration, the symmetry-drop
 * condition and several loop/struct closers are missing from this view. */
tree_expand_node(struct tree *t, struct tree_node *node, struct board *b, enum stone color, struct uct *u, int parity)
	/* Get a Common Fate Graph distance map from parent node. */
	int distances[board_size2(b)];
	if (!is_pass(b->last_move.coord) && !is_resign(b->last_move.coord)) {
		cfg_distances(b, node_coord(node), distances, TREE_NODE_D_MAX);
	// Pass or resign - everything is too far.
	foreach_point(b) { distances[c] = TREE_NODE_D_MAX + 1; } foreach_point_end;

	/* Get a map of prior values to initialize the new nodes with. */
	struct prior_map map = {
		.parity = tree_parity(t, parity),
		.distances = distances,
	// Include pass in the prior map.
	/* map.prior / map.consider are offset by one so index [pass]
	 * (i.e. -1, presumably — confirm coord encoding) is addressable. */
	struct move_stats map_prior[board_size2(b) + 1]; map.prior = &map_prior[1];
	bool map_consider[board_size2(b) + 1]; map.consider = &map_consider[1];
	memset(map_prior, 0, sizeof(map_prior));
	memset(map_consider, 0, sizeof(map_consider));
	map.consider[pass] = true;
	int child_count = 1; // for pass
	foreach_free_point(b) {
		assert(board_at(b, c) == S_NONE);
		if (!board_is_valid_play_no_suicide(b, color, c))
		map.consider[c] = true;
	} foreach_free_point_end;
	uct_prior(u, node, &map);

	/* Now, create the nodes (all at once if fast_alloc) */
	struct tree_node *ni = t->nodes ? tree_alloc_node(t, child_count, true) : tree_alloc_node(t, 1, false);
	/* In fast_alloc mode we might temporarily run out of nodes but this should be rare. */
	node->is_expanded = false;

	/* First child is always the pass move. */
	tree_setup_node(t, ni, pass, node->depth + 1);
	struct tree_node *first_child = ni;
	ni->prior = map.prior[pass]; ni->d = TREE_NODE_D_MAX + 1;

	/* The loop considers only the symmetry playground. */
	fprintf(stderr, "expanding %s within [%d,%d],[%d,%d] %d-%d\n",
		coord2sstr(node_coord(node), b),
		b->symmetry.x1, b->symmetry.y1,
		b->symmetry.x2, b->symmetry.y2,
		b->symmetry.type, b->symmetry.d);
	for (int j = b->symmetry.y1; j <= b->symmetry.y2; j++) {
		for (int i = b->symmetry.x1; i <= b->symmetry.x2; i++) {
			/* Diagonal symmetry mirrors the x coordinate. */
			int x = b->symmetry.type == SYM_DIAG_DOWN ? board_size(b) - 1 - i : i;
				fprintf(stderr, "drop %d,%d\n", i, j);
			coord_t c = coord_xy(t->board, i, j);
			if (!map.consider[c]) // Filter out invalid moves
			assert(c != node_coord(node)); // I have spotted "C3 C3" in some sequence...
			/* fast_alloc: children are consecutive in the arena;
			 * otherwise each is heap-allocated individually. */
			struct tree_node *nj = t->nodes ? first_child + child++ : tree_alloc_node(t, 1, false);
			tree_setup_node(t, nj, c, node->depth + 1);
			nj->parent = node; ni->sibling = nj; ni = nj;
			ni->prior = map.prior[c];
			ni->d = distances[c];
	node->children = first_child; // must be done at the end to avoid race
/* Map coordinate c through the requested board flips (diagonal swap
 * first, then horizontal and vertical mirroring).
 * NOTE(review): lossy extract — the return-type line, braces and the
 * three `if (flip_…)` condition lines guarding the transforms below
 * are missing from this view. */
flip_coord(struct board *b, coord_t c,
	bool flip_horiz, bool flip_vert, int flip_diag)
	int x = coord_x(c, b), y = coord_y(c, b);
	/* Diagonal flip: swap x and y. */
	int z = x; x = y; y = z;
	/* Horizontal mirror. */
	x = board_size(b) - 1 - x;
	/* Vertical mirror. */
	y = board_size(b) - 1 - y;
	return coord_xy(b, x, y);
/* Recursively rewrite the coordinates of node and its whole subtree
 * through the given board flips (pass nodes are left untouched).
 * NOTE(review): lossy extract — the return-type line and braces are
 * missing from this view. */
tree_fix_node_symmetry(struct board *b, struct tree_node *node,
	bool flip_horiz, bool flip_vert, int flip_diag)
	if (!is_pass(node_coord(node)))
		node->coord = flip_coord(b, node_coord(node), flip_horiz, flip_vert, flip_diag);

	for (struct tree_node *ni = node->children; ni; ni = ni->sibling)
		tree_fix_node_symmetry(b, ni, flip_horiz, flip_vert, flip_diag);
/* If playing c would leave the root symmetry playground, flip the whole
 * tree so that c maps back inside it.
 * NOTE(review): lossy extract — the return-type line, braces, the early
 * full-symmetry bailout, the flip_diag declaration/assignment, the debug
 * guard around the fprintf and parts of the normalization comment block
 * are missing from this view. */
tree_fix_symmetry(struct tree *tree, struct board *b, coord_t c)
	struct board_symmetry *s = &tree->root_symmetry;
	int cx = coord_x(c, b), cy = coord_y(c, b);

	/* playground X->h->v->d normalization
	 * (diagram dropped by the extraction — see upstream source). */
	/* Out-of-playground on either axis forces the matching mirror. */
	bool flip_horiz = cx < s->x1 || cx > s->x2;
	bool flip_vert = cy < s->y1 || cy > s->y2;
	bool dir = (s->type == SYM_DIAG_DOWN);
	int x = dir ^ flip_horiz ^ flip_vert ? board_size(b) - 1 - cx : cx;
	if (flip_vert ? x < cy : x > cy) {
	fprintf(stderr, "%s [%d,%d -> %d,%d;%d,%d] will flip %d %d %d -> %s, sym %d (%d) -> %d (%d)\n",
		cx, cy, s->x1, s->y1, s->x2, s->y2,
		flip_horiz, flip_vert, flip_diag,
		coord2sstr(flip_coord(b, c, flip_horiz, flip_vert, flip_diag), b),
		s->type, s->d, b->symmetry.type, b->symmetry.d);
	if (flip_horiz || flip_vert || flip_diag)
		tree_fix_node_symmetry(b, tree->root, flip_horiz, flip_vert, flip_diag);
/* Detach node from its parent's children list (the subtree under node
 * is kept intact).
 * NOTE(review): lossy extract — the return-type line, braces, the else
 * branch walking to the predecessor sibling and the loop body are
 * missing from this view. */
tree_unlink_node(struct tree_node *node)
	struct tree_node *ni = node->parent;
	if (ni->children == node) {
		/* node is the first child: pop it off the head. */
		ni->children = node->sibling;
	/* Otherwise find the predecessor and splice node out. */
	while (ni->sibling != node)
	ni->sibling = node->sibling;
	node->sibling = NULL;
/* Reduce weight of statistics on promotion. Remove nodes that
 * get reduced to zero playouts; returns next node to consider
 * in the children list (@node may get deleted). */
/* NOTE(review): lossy extract — braces and the `return sibling;` of the
 * deletion branch are missing from this view. */
static struct tree_node *
tree_age_node(struct tree *tree, struct tree_node *node)
	/* Integer division: small playout counts decay to zero. */
	node->u.playouts /= tree->ltree_aging;
	if (node->parent && !node->u.playouts) {
		/* Capture the sibling before node is destroyed. */
		struct tree_node *sibling = node->sibling;
		/* Delete node, no playouts. */
		tree_unlink_node(node);
		tree_done_node(tree, node);

	/* Age the whole children list; tree_age_node returns the next
	 * child to visit (accounting for deletions). */
	struct tree_node *ni = node->children;
	while (ni) ni = tree_age_node(tree, ni);
	return node->sibling;
/* Promotes the given node as the root of the tree. In the fast_alloc
 * mode, the node may be moved and some of its subtree may be pruned. */
/* NOTE(review): lossy extract — the return-type line, braces, the
 * assignment installing *node as tree->root and the non-fast_alloc
 * branch are missing from this view. */
tree_promote_node(struct tree *tree, struct tree_node **node)
	/* Only direct children of the current root can be promoted. */
	assert((*node)->parent == tree->root);
	tree_unlink_node(*node);

	/* Freeing the rest of the tree can take several seconds on large
	 * trees, so we must do it asynchronously: */
	tree_done_node_detached(tree, tree->root);

	/* Garbage collect if we run out of memory, or it is cheap to do so now: */
	if (tree->nodes_size >= tree->pruning_threshold
	    || (tree->nodes_size >= tree->max_tree_size / 10 && (*node)->u.playouts < SMALL_TREE_PLAYOUTS))
		*node = tree_garbage_collect(tree, *node);

	/* The new root is one ply deeper, so its color flips. */
	tree->root_color = stone_other(tree->root_color);
	board_symmetry_update(tree->board, &tree->root_symmetry, node_coord(*node));
	/* Reset running score statistics for the new root. */
	tree->avg_score.playouts = 0;

	/* If the tree deepest node was under node, or if we called tree_garbage_collect,
	 * tree->max_depth is correct. Otherwise we could traverse the tree
	 * to recompute max_depth but it's not worth it: it's just for debugging
	 * and soon the tree will grow and max_depth will become correct again. */

	if (tree->ltree_aging != 1.0f) { // XXX: != should work here even with the floating_t
		tree_age_node(tree, tree->ltree_black);
		tree_age_node(tree, tree->ltree_white);
790 tree_promote_at(struct tree
*tree
, struct board
*b
, coord_t c
)
792 tree_fix_symmetry(tree
, b
, c
);
794 for (struct tree_node
*ni
= tree
->root
->children
; ni
; ni
= ni
->sibling
) {
795 if (node_coord(ni
) == c
) {
796 tree_promote_node(tree
, &ni
);