-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathengine-tt.h
More file actions
142 lines (124 loc) · 3.53 KB
/
engine-tt.h
File metadata and controls
142 lines (124 loc) · 3.53 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
#pragma once
#include "engine-types.h"
/* One transposition-table entry: the cached result of searching the position
 * identified by the full zobrist key `hash`. Bit-fields keep the tail small. */
struct search_option {
	uint64_t hash;    /* full 64-bit zobrist key, used to detect index collisions */
	Score16 score;    /* score found for this position */
	struct move move; /* best move found (useful for move ordering on a hit) */
	/* Plain `int` bit-fields have implementation-defined signedness
	 * (C11 6.7.2.1), and a *signed* 1-bit field cannot portably store the
	 * value 1 — so spell the signedness out explicitly. */
	signed int depth : 4;  /* remaining search depth; NOTE(review): range is only
	                          -8..7 — confirm the search never stores depth > 7 */
	unsigned int init : 1; /* 1 once this slot has been written */
	enum tt_flag {TT_EXACT, TT_LOWER, TT_UPPER} flag : 3; /* bound type of `score` */
};
/* Table geometry: 2^24 (~16.7M) direct-mapped entries; TT_MASK extracts the
 * slot index from a 64-bit hash. 1ULL keeps the shift and mask 64-bit wide. */
#define TT_ADDRESS_BITS 24
#define TT_ENTRIES (1ULL<<TT_ADDRESS_BITS)
#define TT_MASK (TT_ENTRIES-1)
/* Transposition table: a direct-mapped, power-of-two-sized cache of search
 * results keyed by zobrist hash. Caller owns the allocation: `entries` must
 * point at the slot array and `mask` must be slot_count - 1 before use. */
struct tt {
struct search_option* entries/*[TT_ENTRIES]*/; /* must be initialized somewhere */
size_t mask; /* slot index mask; table size must be a power of two */
/* stats — maintained only when NSTATS is not defined */
uint64_t collisions;  /* probes hitting an occupied slot with a different hash */
uint64_t hits;        /* probes whose stored hash matched */
uint64_t probes;      /* total tt_get calls */
uint64_t overwritten; /* NOTE(review): never incremented in this file — confirm intended */
uint64_t insertions;  /* total stores via tt_insert / tt_insert_maybe */
};
/* Zobrist hashing key material: one random 64-bit key per
 * (square, side, piece) combination, per en-passant target square, and per
 * (side, castle direction) right. XORing keys in/out updates a position hash. */
struct zobrist {
uint64_t piece_keys[SQ_COUNT][SIDE_COUNT][PIECE_COUNT];
uint64_t ep_targets[SQ_COUNT];
uint64_t castling_keys[SIDE_COUNT][CASTLE_COUNT];
bool init; /* set by init_zobrist(); the tables are filled lazily on first use */
};
/* Lazily-initialized key table shared by the tt_hash_* helpers below.
 * NOTE(review): `static` at file scope in a header gives every including
 * translation unit its OWN copy with independent random keys — hashes must
 * never cross TU boundaries, or the keys must be made `extern`. Confirm. */
static struct zobrist zobrist;
/* Fill every zobrist key with a fresh pseudo-random 64-bit value from
 * my_rand64() and mark the table initialized. Invoked lazily by the
 * tt_hash_* helpers on first use.
 * Note: `(void)` rather than `()` — an empty parameter list declares an
 * unprototyped function in C before C23. */
static void init_zobrist(void)
{
	for (Sq8 sq = SQ_BEGIN; sq < SQ_COUNT; ++sq) {
		for (Side8 pl = SIDE_BEGIN; pl < SIDE_COUNT; ++pl) {
			for (Piece8 piece = PIECE_BEGIN; piece < PIECE_COUNT; ++piece) {
				zobrist.piece_keys[sq][pl][piece] = my_rand64();
			}
		}
		zobrist.ep_targets[sq] = my_rand64();
	}
	for (Side8 pl = SIDE_BEGIN; pl < SIDE_COUNT; ++pl) {
		for (enum castle_direction d = CASTLE_BEGIN; d < CASTLE_COUNT; ++d) {
			zobrist.castling_keys[pl][d] = my_rand64();
		}
	}
	zobrist.init = true;
}
/* Toggle the key for `piece` of side `us` on square `sq` in or out of `hash`.
 * XOR is self-inverse, so the same call both places and removes a piece. */
static inline uint64_t tt_hash_update(uint64_t hash,
                                      Sq8 sq,
                                      Side8 us,
                                      Piece8 piece)
{
	if (!zobrist.init) {
		init_zobrist();
	}
	uint64_t const key = zobrist.piece_keys[sq][us][piece];
	return hash ^ key;
}
/* Toggle the castling-right key for side `us` in direction `dir`. */
static inline uint64_t tt_hash_update_castling_rights(uint64_t hash,
                                                      Side8 us,
                                                      enum castle_direction dir)
{
	if (!zobrist.init) {
		init_zobrist();
	}
	uint64_t const key = zobrist.castling_keys[us][dir];
	return hash ^ key;
}
/* Toggle the en-passant target key for square `sq` in or out of `hash`. */
static inline uint64_t tt_hash_update_ep_targets(uint64_t hash, Sq8 sq)
{
	if (!zobrist.init) {
		init_zobrist();
	}
	assuming(sq < SQ_COUNT); /* project-side bounds check on the array index */
	uint64_t const key = zobrist.ep_targets[sq];
	return hash ^ key;
}
/* Flip the side to move by complementing the whole hash (an involution, like
 * the XOR updates). No zobrist key is consumed, but the lazy init is kept so
 * the key tables are ready after any tt_hash_* call. */
static inline uint64_t tt_hash_switch_side(uint64_t hash)
{
	if (!zobrist.init) {
		init_zobrist();
	}
	uint64_t const flipped = ~hash;
	return flipped;
}
static inline struct search_option tt_get(struct tt* tt, uint64_t hash)
{
uint64_t const addr = hash & tt->mask;
struct search_option tte = tt->entries[addr];
#ifndef NSTATS
tt->probes += 1;
if (tte.init && tte.hash == hash) {
tt->hits += 1;
} else if (tte.init && tte.hash != hash) {
tt->collisions += 1;
}
#endif
return tte;
}
/* Unconditionally store `so` (marked initialized) in the slot for `hash`,
 * replacing whatever entry was there before. */
static inline void tt_insert(struct tt* tt, uint64_t hash, struct search_option so)
{
	so.init = true;
	tt->entries[hash & tt->mask] = so;
#ifndef NSTATS
	tt->insertions += 1;
#endif
}
/* tt_insert_maybe inserts only if heuristics say it's a good idea. There are
* two considerations:
* - higher depth saves more work per probe hit
* - entries closer to the leaves are more likely to be searched multiple time
*/
static inline void tt_insert_maybe(struct tt* tt, uint64_t hash, struct search_option so)
{
uint64_t const addr = hash & tt->mask;
#if 0
struct search_option* tte = &tt->entries[addr];
if (so.depth < tte->depth) {
*tte = so;
}
#endif
so.init = true;
tt->entries[addr] = so;
#ifndef NSTATS
tt->insertions += 1;
#endif
}