Use node counting to early stop search
This introduces a form of node counting which can be used to further tweak the
usage of our search time. The current approach stops the search when almost all
nodes are searched on a single move. The idea originally came from Koivisto,
but the implementation is a bit different: Koivisto scales the optimal time by
the node effort and then decides whether the search should be stopped, whereas
we just scale down the `totalTime` and stop the search if we exceed it and the
effort is large enough.

Passed STC:
https://tests.stockfishchess.org/tests/view/65c8e0661d8e83c78bfcd5ec
LLR: 2.97 (-2.94,2.94) <0.00,2.00>
Total: 88672 W: 22907 L: 22512 D: 43253
Ptnml(0-2): 310, 10163, 23041, 10466, 356

Passed LTC:
https://tests.stockfishchess.org/tests/view/65ca632b1d8e83c78bfcf554
LLR: 2.95 (-2.94,2.94) <0.50,2.50>
Total: 170856 W: 42910 L: 42320 D: 85626
Ptnml(0-2): 104, 18337, 47960, 18919, 108

closes https://github.com/official-stockfish/Stockfish/pull/5053

Bench: 1198939
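A minimal, self-contained C++ sketch of the idea follows, not the actual Stockfish code: SearchState and early_stop() are hypothetical stand-ins for the real Worker and time-management machinery, while the 64x64 from/to-square effort table, the 95% effort threshold, the completedDepth >= 10 guard and the 3/4 * totalTime scaling are taken from the diff below.

    // Minimal sketch (not the Stockfish implementation): SearchState is a
    // hypothetical stand-in for the real Worker / time-management state.
    #include <algorithm>
    #include <array>
    #include <cstdint>

    constexpr int SQUARE_NB = 64;

    struct SearchState {
        // Nodes spent below each root move, indexed by the move's from/to squares
        std::array<std::array<uint64_t, SQUARE_NB>, SQUARE_NB> effort{};
        uint64_t nodes          = 0;      // total nodes searched so far
        int      completedDepth = 0;      // last fully completed iteration
        double   totalTime      = 0.0;    // time budget from time management (ms)
        double   elapsed        = 0.0;    // time already spent (ms)
        bool     ponder         = false;  // pondering disables early exits
    };

    // Stop early when almost all nodes were spent on the current best move,
    // some depth has been reached, and most of the time budget is already gone.
    inline bool early_stop(const SearchState& s, int bestFrom, int bestTo) {
        int nodesEffort = int(s.effort[bestFrom][bestTo] * 100
                              / std::max<uint64_t>(1, s.nodes));

        return s.completedDepth >= 10 && nodesEffort >= 95
            && s.elapsed > s.totalTime * 3 / 4 && !s.ponder;
    }

In the actual patch, the per-move counters are filled in the move loop: nodes are snapshotted before do_move() and the difference is accumulated into effort[from][to] after undo_move(), as the search.cpp hunks below show.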
parent f4f0b32d55
commit bf2c7306ac

3 changed files with 21 additions and 0 deletions

src/search.cpp

@@ -23,6 +23,7 @@
 #include <atomic>
 #include <cassert>
 #include <cmath>
+#include <cstdint>
 #include <cstdlib>
 #include <initializer_list>
 #include <iostream>

@@ -418,6 +419,10 @@ void Search::Worker::iterative_deepening() {
         // Do we have time for the next iteration? Can we stop searching now?
         if (limits.use_time_management() && !threads.stop && !mainThread->stopOnPonderhit)
         {
+            auto bestmove = rootMoves[0].pv[0];
+            int  nodesEffort = effort[bestmove.from_sq()][bestmove.to_sq()] * 100
+                             / std::max(size_t(1), size_t(nodes));
+
             double fallingEval = (66 + 14 * (mainThread->bestPreviousAverageScore - bestValue)
                                   + 6 * (mainThread->iterValue[iterIdx] - bestValue))
                                / 616.6;

@@ -435,6 +440,13 @@ void Search::Worker::iterative_deepening() {
             if (rootMoves.size() == 1)
                 totalTime = std::min(500.0, totalTime);

+            if (completedDepth >= 10 && nodesEffort >= 95
+                && mainThread->tm.elapsed(threads.nodes_searched()) > totalTime * 3 / 4
+                && !mainThread->ponder)
+            {
+                threads.stop = true;
+            }
+
             // Stop the search if we have exceeded the totalTime
             if (mainThread->tm.elapsed(threads.nodes_searched()) > totalTime)
             {

@@ -1087,6 +1099,8 @@ moves_loop: // When in check, search starts here
         ss->continuationHistory =
           &thisThread->continuationHistory[ss->inCheck][capture][movedPiece][move.to_sq()];

+        uint64_t nodeCount = rootNode ? uint64_t(nodes) : 0;
+
         // Step 16. Make the move
         thisThread->nodes.fetch_add(1, std::memory_order_relaxed);
         pos.do_move(move, st, givesCheck);

@@ -1186,6 +1200,9 @@ moves_loop: // When in check, search starts here
         // Step 19. Undo move
         pos.undo_move(move);

+        if (rootNode)
+            effort[move.from_sq()][move.to_sq()] += nodes - nodeCount;
+
         assert(value > -VALUE_INFINITE && value < VALUE_INFINITE);

         // Step 20. Check for a new best move

src/search.h

@@ -219,6 +219,8 @@ class Worker {
         return static_cast<SearchManager*>(manager.get());
     }

+    std::array<std::array<uint64_t, SQUARE_NB>, SQUARE_NB> effort;
+
     LimitsType limits;

     size_t pvIdx, pvLast;

src/thread.cpp

@@ -24,6 +24,7 @@
 #include <memory>
 #include <unordered_map>
 #include <utility>
+#include <array>

 #include "misc.h"
 #include "movegen.h"

@@ -203,6 +204,7 @@ void ThreadPool::start_thinking(const OptionsMap& options,
         th->worker->rootPos.set(pos.fen(), pos.is_chess960(), &th->worker->rootState);
         th->worker->rootState = setupStates->back();
         th->worker->tbConfig  = tbConfig;
+        th->worker->effort    = {};
     }

     main_thread()->start_searching();