mirror of
https://github.com/sockspls/badfish
synced 2025-04-30 08:43:09 +00:00
Further tweak accurate pv
It is a non-functional change, but because we now skip copying of pv[] in SpNode, the patch has been tested for regression with 3 threads: STC LLR: 2.96 (-2.94, 2.94) [-3.00, 1.00] Total: 54668 W: 9873 L: 9809 D: 34986. No functional change.
This commit is contained in:
parent
0a1f54975f
commit
79232be02a
2 changed files with 16 additions and 18 deletions
|
@ -87,7 +87,7 @@ namespace {
|
|||
void id_loop(Position& pos);
|
||||
Value value_to_tt(Value v, int ply);
|
||||
Value value_from_tt(Value v, int ply);
|
||||
void update_pv(Move* pv, Move move, Move* child);
|
||||
void update_pv(Move* pv, Move move, Move* childPv);
|
||||
void update_stats(const Position& pos, Stack* ss, Move move, Depth depth, Move* quiets, int quietsCnt);
|
||||
string uci_pv(const Position& pos, Depth depth, Value alpha, Value beta);
|
||||
|
||||
|
@ -864,8 +864,9 @@ moves_loop: // When in check and at SpNode search starts from here
|
|||
// parent node fail low with value <= alpha and to try another move.
|
||||
if (PvNode && (moveCount == 1 || (value > alpha && (RootNode || value < beta))))
|
||||
{
|
||||
pv[0] = MOVE_NONE;
|
||||
(ss+1)->pv = pv;
|
||||
(ss+1)->pv[0] = MOVE_NONE;
|
||||
|
||||
value = newDepth < ONE_PLY ?
|
||||
givesCheck ? -qsearch<PV, true>(pos, ss+1, -beta, -alpha, DEPTH_ZERO)
|
||||
: -qsearch<PV, false>(pos, ss+1, -beta, -alpha, DEPTH_ZERO)
|
||||
|
@ -900,8 +901,11 @@ moves_loop: // When in check and at SpNode search starts from here
|
|||
{
|
||||
rm.score = value;
|
||||
rm.pv.resize(1);
|
||||
for (int i = 0; (ss+1)->pv && (ss+1)->pv[i] != MOVE_NONE; ++i)
|
||||
rm.pv.push_back((ss+1)->pv[i]);
|
||||
|
||||
assert((ss+1)->pv);
|
||||
|
||||
for (Move* m = (ss+1)->pv; *m != MOVE_NONE; ++m)
|
||||
rm.pv.push_back(*m);
|
||||
|
||||
// We record how often the best move has been changed in each
|
||||
// iteration. This information is used for time management: When
|
||||
|
@ -924,12 +928,8 @@ moves_loop: // When in check and at SpNode search starts from here
|
|||
{
|
||||
bestMove = SpNode ? splitPoint->bestMove = move : move;
|
||||
|
||||
if (PvNode && !RootNode)
|
||||
{
|
||||
update_pv(ss->pv, move, (ss+1)->pv);
|
||||
if (SpNode)
|
||||
update_pv(splitPoint->ss->pv, move, (ss+1)->pv);
|
||||
}
|
||||
if (PvNode && !RootNode) // Update pv even in fail-high case
|
||||
update_pv(SpNode ? splitPoint->ss->pv : ss->pv, move, (ss+1)->pv);
|
||||
|
||||
if (PvNode && value < beta) // Update alpha! Always alpha < beta
|
||||
alpha = SpNode ? splitPoint->alpha = value : value;
|
||||
|
@ -1179,7 +1179,7 @@ moves_loop: // When in check and at SpNode search starts from here
|
|||
|
||||
if (value > alpha)
|
||||
{
|
||||
if (PvNode)
|
||||
if (PvNode) // Update pv even in fail-high case
|
||||
update_pv(ss->pv, move, (ss+1)->pv);
|
||||
|
||||
if (PvNode && value < beta) // Update alpha here! Always alpha < beta
|
||||
|
@ -1238,12 +1238,12 @@ moves_loop: // When in check and at SpNode search starts from here
|
|||
}
|
||||
|
||||
|
||||
// update_pv() copies child node pv[] adding current move
|
||||
// update_pv() adds current move and appends child pv[]
|
||||
|
||||
void update_pv(Move* pv, Move move, Move* child) {
|
||||
void update_pv(Move* pv, Move move, Move* childPv) {
|
||||
|
||||
for (*pv++ = move; child && *child != MOVE_NONE; )
|
||||
*pv++ = *child++;
|
||||
for (*pv++ = move; childPv && *childPv != MOVE_NONE; )
|
||||
*pv++ = *childPv++;
|
||||
*pv = MOVE_NONE;
|
||||
}
|
||||
|
||||
|
|
|
@ -56,9 +56,7 @@ struct Stack {
|
|||
/// all non-pv moves.
|
||||
struct RootMove {
|
||||
|
||||
RootMove(Move m) : score(-VALUE_INFINITE), prevScore(-VALUE_INFINITE) {
|
||||
pv.push_back(m); pv.push_back(MOVE_NONE);
|
||||
}
|
||||
RootMove(Move m) : score(-VALUE_INFINITE), prevScore(-VALUE_INFINITE), pv(1, m) {}
|
||||
|
||||
bool operator<(const RootMove& m) const { return score > m.score; } // Ascending sort
|
||||
bool operator==(const Move& m) const { return pv[0] == m; }
|
||||
|
|
Loading…
Add table
Reference in a new issue