
[NNUE] More cleanup in nnue folder

No functional change.
Dariusz Orzechowski 2020-08-01 17:55:39 +02:00 committed by Joost VandeVondele
parent aa339506db
commit 122c78b521
15 changed files with 67 additions and 277 deletions

src/nnue/architectures/halfkp_256x2-32-32.h

@@ -21,7 +21,7 @@ constexpr IndexType kTransformedFeatureDimensions = 256;
  namespace Layers {
- // define network structure
+ // Define network structure
  using InputLayer = InputSlice<kTransformedFeatureDimensions * 2>;
  using HiddenLayer1 = ClippedReLU<AffineTransform<InputLayer, 32>>;
  using HiddenLayer2 = ClippedReLU<AffineTransform<HiddenLayer1, 32>>;
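For context, the remainder of this architecture header (outside the hunk) stacks these aliases into the final network. A sketch, assuming the usual halfkp_256x2-32-32 layout of this period:

    // How the aliases above compose; OutputLayer and Network sit below this hunk.
    using OutputLayer = AffineTransform<HiddenLayer2, 1>;  // 32 -> 1, the final score
    using Network = OutputLayer;                           // overall: 512 -> 32 -> 32 -> 1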

src/nnue/evaluate_nnue.cpp

@@ -44,13 +44,6 @@ namespace Eval::NNUE {
  // Evaluation function file name
  std::string fileName;
- // Get a string that represents the structure of the evaluation function
- std::string GetArchitectureString() {
-   return "Features=" + FeatureTransformer::GetStructureString() +
-       ",Network=" + Network::GetStructureString();
- }
  namespace Detail {
  // Initialize the evaluation function parameters
@@ -61,7 +54,7 @@ namespace Eval::NNUE {
    std::memset(pointer.get(), 0, sizeof(T));
  }
- // read evaluation function parameters
+ // Read evaluation function parameters
  template <typename T>
  bool ReadParameters(std::istream& stream, const AlignedPtr<T>& pointer) {
@@ -80,7 +73,7 @@ namespace Eval::NNUE {
    Detail::Initialize(network);
  }
- // read the header
+ // Read network header
  bool ReadHeader(std::istream& stream,
      std::uint32_t* hash_value, std::string* architecture) {
@@ -94,7 +87,7 @@ namespace Eval::NNUE {
    return !stream.fail();
  }
- // read evaluation function parameters
+ // Read network parameters
  bool ReadParameters(std::istream& stream) {
    std::uint32_t hash_value;
@@ -106,7 +99,7 @@ namespace Eval::NNUE {
    return stream && stream.peek() == std::ios::traits_type::eof();
  }
- // proceed if you can calculate the difference
+ // Proceed with the difference calculation if possible
  static void UpdateAccumulatorIfPossible(const Position& pos) {
    feature_transformer->UpdateAccumulatorIfPossible(pos);
@@ -133,9 +126,7 @@ namespace Eval::NNUE {
    return accumulator.score;
  }
- // read the evaluation function file
- // Save and restore Options with bench command etc., so EvalFile is changed at this time,
- // This function may be called twice to flag that the evaluation function needs to be reloaded.
+ // Load the evaluation function file
  void load_eval(const std::string& evalFile) {
    Initialize();
@@ -163,7 +154,7 @@ namespace Eval::NNUE {
    return ComputeScore(pos, true);
  }
- // proceed if you can calculate the difference
+ // Proceed with the difference calculation if possible
  void update_eval(const Position& pos) {
    UpdateAccumulatorIfPossible(pos);
  }
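Taken together, ReadHeader and ReadParameters consume the network file front to back: header first, then the feature transformer, then each network layer. A minimal standalone sketch of the header parse (ReadHeaderSketch is a hypothetical name; the field order is assumed from this version of the code):

    #include <cstdint>
    #include <istream>
    #include <string>

    bool ReadHeaderSketch(std::istream& stream, std::uint32_t* hash_value) {
      std::uint32_t version, size;
      stream.read(reinterpret_cast<char*>(&version), sizeof(version));       // file version
      stream.read(reinterpret_cast<char*>(hash_value), sizeof(*hash_value)); // network hash
      stream.read(reinterpret_cast<char*>(&size), sizeof(size));             // string length
      std::string architecture(size, '\0');                                  // description
      stream.read(&architecture[0], size);
      return !stream.fail() && version == 0x7AF32F16u;  // kVersion from nnue_common.h
    }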

src/nnue/evaluate_nnue.h

@@ -9,7 +9,7 @@
  namespace Eval::NNUE {
- // hash value of evaluation function structure
+ // Hash value of evaluation function structure
  constexpr std::uint32_t kHashValue =
      FeatureTransformer::GetHashValue() ^ Network::GetHashValue();

src/nnue/features/feature_set.h

@@ -8,7 +8,7 @@
  namespace Eval::NNUE::Features {
- // A class template that represents a list of values
+ // Class template that represents a list of values
  template <typename T, T... Values>
  struct CompileTimeList;
@@ -26,7 +26,7 @@ namespace Eval::NNUE::Features {
  class FeatureSetBase {
   public:
-   // Get a list of indices with a value of 1 among the features
+   // Get a list of indices for active features
    template <typename IndexListType>
    static void AppendActiveIndices(
        const Position& pos, TriggerEvent trigger, IndexListType active[2]) {
@@ -37,7 +37,7 @@ namespace Eval::NNUE::Features {
      }
    }
-   // Get a list of indices whose values have changed from the previous one in the feature quantity
+   // Get a list of indices for recently changed features
    template <typename PositionType, typename IndexListType>
    static void AppendChangedIndices(
        const PositionType& pos, TriggerEvent trigger,
@@ -70,30 +70,24 @@ namespace Eval::NNUE::Features {
  };
  // Class template that represents the feature set
  // Specialization with one template argument
  template <typename FeatureType>
  class FeatureSet<FeatureType> : public FeatureSetBase<FeatureSet<FeatureType>> {
   public:
-   // Hash value embedded in the evaluation function file
+   // Hash value embedded in the evaluation file
    static constexpr std::uint32_t kHashValue = FeatureType::kHashValue;
-   // number of feature dimensions
+   // Number of feature dimensions
    static constexpr IndexType kDimensions = FeatureType::kDimensions;
-   // The maximum value of the number of indexes whose value is 1 at the same time among the feature values
+   // Maximum number of simultaneously active features
    static constexpr IndexType kMaxActiveDimensions =
        FeatureType::kMaxActiveDimensions;
-   // List of timings to perform all calculations instead of difference calculation
+   // Trigger for full calculation instead of difference calculation
    using SortedTriggerSet =
        CompileTimeList<TriggerEvent, FeatureType::kRefreshTrigger>;
    static constexpr auto kRefreshTriggers = SortedTriggerSet::kValues;
-   // Get the feature quantity name
-   static std::string GetName() {
-     return FeatureType::kName;
-   }
   private:
-   // Get a list of indices with a value of 1 among the features
+   // Get a list of indices for active features
    static void CollectActiveIndices(
        const Position& pos, const TriggerEvent trigger, const Color perspective,
        IndexList* const active) {
@@ -102,7 +96,7 @@ namespace Eval::NNUE::Features {
      }
    }
-   // Get a list of indices whose values have changed from the previous one in the feature quantity
+   // Get a list of indices for recently changed features
    static void CollectChangedIndices(
        const Position& pos, const TriggerEvent trigger, const Color perspective,
        IndexList* const removed, IndexList* const added) {
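For orientation, a hypothetical caller-side snippet showing how the two entry points divide the work (one IndexList per perspective, as in the feature transformer; the incremental path reports removed and added features separately):

    IndexList active[2];
    RawFeatures::AppendActiveIndices(pos, trigger, active);           // full-refresh path

    IndexList removed[2], added[2];
    RawFeatures::AppendChangedIndices(pos, trigger, removed, added);  // incremental path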

src/nnue/features/features_common.h

@@ -8,21 +8,18 @@
  namespace Eval::NNUE::Features {
  // Index list type
  class IndexList;
  // Class template that represents the feature set
  template <typename... FeatureTypes>
  class FeatureSet;
- // Type of timing to perform all calculations instead of difference calculation
+ // Trigger to perform full calculations instead of difference only
  enum class TriggerEvent {
-   kFriendKingMoved // calculate all when own king moves
+   kFriendKingMoved // calculate full evaluation when own king moves
  };
- // turn side or other side
  enum class Side {
-   kFriend // turn side
+   kFriend // side to move
  };
  } // namespace Eval::NNUE::Features

src/nnue/features/half_kp.cpp

@@ -11,7 +11,7 @@ namespace Eval::NNUE::Features {
    return static_cast<IndexType>(PS_END) * static_cast<IndexType>(sq_k) + p;
  }
- // Get the piece information
+ // Get pieces information
  template <Side AssociatedKing>
  inline void HalfKP<AssociatedKing>::GetPieces(
      const Position& pos, Color perspective,
@@ -26,12 +26,12 @@ namespace Eval::NNUE::Features {
    *sq_target_k = static_cast<Square>(((*pieces)[target] - PS_W_KING) % SQUARE_NB);
  }
- // Get a list of indices with a value of 1 among the features
+ // Get a list of indices for active features
  template <Side AssociatedKing>
  void HalfKP<AssociatedKing>::AppendActiveIndices(
      const Position& pos, Color perspective, IndexList* active) {
-   // do nothing if array size is small to avoid compiler warning
+   // Do nothing if array size is small to avoid compiler warning
    if (RawFeatures::kMaxActiveDimensions < kMaxActiveDimensions) return;
    PieceSquare* pieces;
@@ -44,7 +44,7 @@ namespace Eval::NNUE::Features {
    }
  }
- // Get a list of indices whose values have changed from the previous one in the feature quantity
+ // Get a list of indices for recently changed features
  template <Side AssociatedKing>
  void HalfKP<AssociatedKing>::AppendChangedIndices(
      const Position& pos, Color perspective,
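A worked example of the index formula at the top of this file, assuming the PS_END of this era of the code (10 * SQUARE_NB + 1 = 641 piece-square ids per king square):

    // MakeIndex(sq_k, p) = PS_END * sq_k + p
    // king on square 4, piece-square id p = 100:  641 * 4 + 100 = 2664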

src/nnue/features/half_kp.h

@@ -8,39 +8,38 @@
  namespace Eval::NNUE::Features {
- // Feature HalfKP: Combination of the position of own king or enemy king
+ // Feature HalfKP: Combination of the position of own king
  // and the position of pieces other than kings
  template <Side AssociatedKing>
  class HalfKP {
   public:
-   // feature quantity name
-   static constexpr const char* kName =
-       (AssociatedKing == Side::kFriend) ? "HalfKP(Friend)" : "HalfKP(Enemy)";
-   // Hash value embedded in the evaluation function file
+   // Feature name
+   static constexpr const char* kName = "HalfKP(Friend)";
+   // Hash value embedded in the evaluation file
    static constexpr std::uint32_t kHashValue =
        0x5D69D5B9u ^ (AssociatedKing == Side::kFriend);
-   // number of feature dimensions
+   // Number of feature dimensions
    static constexpr IndexType kDimensions =
        static_cast<IndexType>(SQUARE_NB) * static_cast<IndexType>(PS_END);
-   // The maximum value of the number of indexes whose value is 1 at the same time among the feature values
+   // Maximum number of simultaneously active features
    static constexpr IndexType kMaxActiveDimensions = PIECE_ID_KING;
-   // Timing of full calculation instead of difference calculation
+   // Trigger for full calculation instead of difference calculation
    static constexpr TriggerEvent kRefreshTrigger = TriggerEvent::kFriendKingMoved;
-   // Get a list of indices with a value of 1 among the features
+   // Get a list of indices for active features
    static void AppendActiveIndices(const Position& pos, Color perspective,
        IndexList* active);
-   // Get a list of indices whose values have changed from the previous one in the feature quantity
+   // Get a list of indices for recently changed features
    static void AppendChangedIndices(const Position& pos, Color perspective,
        IndexList* removed, IndexList* added);
-   // Find the index of the feature quantity from the king position and PieceSquare
+   // Index of a feature for a given king position and another piece on some square
    static IndexType MakeIndex(Square sq_k, PieceSquare p);
   private:
-   // Get the piece information
+   // Get pieces information
    static void GetPieces(const Position& pos, Color perspective,
        PieceSquare** pieces, Square* sq_target_k);
  };
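Rough magnitudes behind these constants, assuming the usual values of this era (PS_END = 641, PIECE_ID_KING = 30): kDimensions = 64 * 641 = 41024 inputs, of which at most 30 (the non-king pieces) are active at once, and any move of the friendly king triggers a full accumulator refresh instead of an incremental update.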

src/nnue/layers/affine_transform.h

@@ -8,7 +8,7 @@
  namespace Eval::NNUE::Layers {
- // affine transformation layer
+ // Affine transformation layer
  template <typename PreviousLayer, IndexType OutputDimensions>
  class AffineTransform {
   public:
@@ -17,7 +17,7 @@ namespace Eval::NNUE::Layers {
    using OutputType = std::int32_t;
    static_assert(std::is_same<InputType, std::uint8_t>::value, "");
-   // number of input/output dimensions
+   // Number of input/output dimensions
    static constexpr IndexType kInputDimensions =
        PreviousLayer::kOutputDimensions;
    static constexpr IndexType kOutputDimensions = OutputDimensions;
@@ -32,7 +32,7 @@ namespace Eval::NNUE::Layers {
    static constexpr std::size_t kBufferSize =
        PreviousLayer::kBufferSize + kSelfBufferSize;
-   // Hash value embedded in the evaluation function file
+   // Hash value embedded in the evaluation file
    static constexpr std::uint32_t GetHashValue() {
      std::uint32_t hash_value = 0xCC03DAE4u;
      hash_value += kOutputDimensions;
@@ -41,15 +41,7 @@ namespace Eval::NNUE::Layers {
      return hash_value;
    }
-   // A string that represents the structure from the input layer to this layer
-   static std::string GetStructureString() {
-     return "AffineTransform[" +
-         std::to_string(kOutputDimensions) + "<-" +
-         std::to_string(kInputDimensions) + "](" +
-         PreviousLayer::GetStructureString() + ")";
-   }
-   // read parameters
+   // Read network parameters
    bool ReadParameters(std::istream& stream) {
      if (!previous_layer_.ReadParameters(stream)) return false;
      stream.read(reinterpret_cast<char*>(biases_),
@@ -60,7 +52,7 @@ namespace Eval::NNUE::Layers {
      return !stream.fail();
    }
-   // forward propagation
+   // Forward propagation
    const OutputType* Propagate(
        const TransformedFeatureType* transformed_features, char* buffer) const {
      const auto input = previous_layer_.Propagate(
@@ -190,17 +182,11 @@ namespace Eval::NNUE::Layers {
    }
   private:
-   // parameter type
    using BiasType = OutputType;
    using WeightType = std::int8_t;
-   // Make the learning class a friend
-   friend class Trainer<AffineTransform>;
-   // the layer immediately before this layer
    PreviousLayer previous_layer_;
-   // parameter
    alignas(kCacheLineSize) BiasType biases_[kOutputDimensions];
    alignas(kCacheLineSize)
        WeightType weights_[kOutputDimensions * kPaddedInputDimensions];
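For reference, a scalar sketch of what Propagate computes; the shipped code uses SIMD paths, but the arithmetic is the same (input, output, and kPaddedInputDimensions as in this class):

    for (IndexType i = 0; i < kOutputDimensions; ++i) {
      const IndexType offset = i * kPaddedInputDimensions;
      OutputType sum = biases_[i];                    // start from the bias
      for (IndexType j = 0; j < kInputDimensions; ++j)
        sum += weights_[offset + j] * input[j];       // int8 weights, uint8 inputs
      output[i] = sum;                                // int32 accumulator
    }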

src/nnue/layers/clipped_relu.h

@@ -16,7 +16,7 @@ namespace Eval::NNUE::Layers {
    using OutputType = std::uint8_t;
    static_assert(std::is_same<InputType, std::int32_t>::value, "");
-   // number of input/output dimensions
+   // Number of input/output dimensions
    static constexpr IndexType kInputDimensions =
        PreviousLayer::kOutputDimensions;
    static constexpr IndexType kOutputDimensions = kInputDimensions;
@@ -29,26 +29,19 @@ namespace Eval::NNUE::Layers {
    static constexpr std::size_t kBufferSize =
        PreviousLayer::kBufferSize + kSelfBufferSize;
-   // Hash value embedded in the evaluation function file
+   // Hash value embedded in the evaluation file
    static constexpr std::uint32_t GetHashValue() {
      std::uint32_t hash_value = 0x538D24C7u;
      hash_value += PreviousLayer::GetHashValue();
      return hash_value;
    }
-   // A string that represents the structure from the input layer to this layer
-   static std::string GetStructureString() {
-     return "ClippedReLU[" +
-         std::to_string(kOutputDimensions) + "](" +
-         PreviousLayer::GetStructureString() + ")";
-   }
-   // read parameters
+   // Read network parameters
    bool ReadParameters(std::istream& stream) {
      return previous_layer_.ReadParameters(stream);
    }
-   // forward propagation
+   // Forward propagation
    const OutputType* Propagate(
        const TransformedFeatureType* transformed_features, char* buffer) const {
      const auto input = previous_layer_.Propagate(
@@ -167,10 +160,6 @@ namespace Eval::NNUE::Layers {
    }
   private:
-   // Make the learning class a friend
-   friend class Trainer<ClippedReLU>;
-   // the layer immediately before this layer
    PreviousLayer previous_layer_;
  };
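For reference, a scalar sketch of the activation; the shipped code uses SIMD, and kWeightScaleBits (6 in nnue_common.h of this era) rescales the int32 affine output before clamping to the uint8 range:

    for (IndexType i = 0; i < kInputDimensions; ++i)
      output[i] = static_cast<OutputType>(
          std::max(0, std::min(127, input[i] >> kWeightScaleBits)));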

src/nnue/layers/input_slice.h

@@ -7,42 +7,35 @@
  namespace Eval::NNUE::Layers {
- // input layer
+ // Input layer
  template <IndexType OutputDimensions, IndexType Offset = 0>
  class InputSlice {
   public:
-   // need to maintain alignment
+   // Need to maintain alignment
    static_assert(Offset % kMaxSimdWidth == 0, "");
-   // output type
+   // Output type
    using OutputType = TransformedFeatureType;
-   // output dimensionality
+   // Output dimensionality
    static constexpr IndexType kOutputDimensions = OutputDimensions;
-   // Size of the forward propagation buffer used from the input layer to this layer
+   // Size of forward propagation buffer used from the input layer to this layer
    static constexpr std::size_t kBufferSize = 0;
-   // Hash value embedded in the evaluation function file
+   // Hash value embedded in the evaluation file
    static constexpr std::uint32_t GetHashValue() {
      std::uint32_t hash_value = 0xEC42E90Du;
      hash_value ^= kOutputDimensions ^ (Offset << 10);
      return hash_value;
    }
-   // A string that represents the structure from the input layer to this layer
-   static std::string GetStructureString() {
-     return "InputSlice[" + std::to_string(kOutputDimensions) + "(" +
-         std::to_string(Offset) + ":" +
-         std::to_string(Offset + kOutputDimensions) + ")]";
-   }
-   // read parameters
+   // Read network parameters
    bool ReadParameters(std::istream& /*stream*/) {
      return true;
    }
-   // forward propagation
+   // Forward propagation
    const OutputType* Propagate(
        const TransformedFeatureType* transformed_features,
        char* /*buffer*/) const {
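The body of Propagate here (outside the hunk) amounts to pointer arithmetic into the shared transformed-feature buffer, roughly:

    return transformed_features + Offset;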

src/nnue/layers/sum.h (deleted)

@@ -1,141 +0,0 @@
- // Definition of layer Sum of NNUE evaluation function
- #ifndef NNUE_LAYERS_SUM_H_INCLUDED
- #define NNUE_LAYERS_SUM_H_INCLUDED
- #include "../nnue_common.h"
- namespace Eval::NNUE::Layers {
- // Layer that sums the output of multiple layers
- template <typename FirstPreviousLayer, typename... RemainingPreviousLayers>
- class Sum : public Sum<RemainingPreviousLayers...> {
-  private:
-   using Head = FirstPreviousLayer;
-   using Tail = Sum<RemainingPreviousLayers...>;
-  public:
-   // Input/output type
-   using InputType = typename Head::OutputType;
-   using OutputType = InputType;
-   static_assert(std::is_same<InputType, typename Tail::InputType>::value, "");
-   // number of input/output dimensions
-   static constexpr IndexType kInputDimensions = Head::kOutputDimensions;
-   static constexpr IndexType kOutputDimensions = kInputDimensions;
-   static_assert(kInputDimensions == Tail::kInputDimensions ,"");
-   // Size of forward propagation buffer used in this layer
-   static constexpr std::size_t kSelfBufferSize =
-       CeilToMultiple(kOutputDimensions * sizeof(OutputType), kCacheLineSize);
-   // Size of the forward propagation buffer used from the input layer to this layer
-   static constexpr std::size_t kBufferSize =
-       std::max(Head::kBufferSize + kSelfBufferSize, Tail::kBufferSize);
-   // Hash value embedded in the evaluation function file
-   static constexpr std::uint32_t GetHashValue() {
-     std::uint32_t hash_value = 0xBCE400B4u;
-     hash_value ^= Head::GetHashValue() >> 1;
-     hash_value ^= Head::GetHashValue() << 31;
-     hash_value ^= Tail::GetHashValue() >> 2;
-     hash_value ^= Tail::GetHashValue() << 30;
-     return hash_value;
-   }
-   // A string that represents the structure from the input layer to this layer
-   static std::string GetStructureString() {
-     return "Sum[" +
-         std::to_string(kOutputDimensions) + "](" + GetSummandsString() + ")";
-   }
-   // read parameters
-   bool ReadParameters(std::istream& stream) {
-     if (!Tail::ReadParameters(stream)) return false;
-     return previous_layer_.ReadParameters(stream);
-   }
-   // forward propagation
-   const OutputType* Propagate(
-       const TransformedFeatureType* transformed_features, char* buffer) const {
-     Tail::Propagate(transformed_features, buffer);
-     const auto head_output = previous_layer_.Propagate(
-         transformed_features, buffer + kSelfBufferSize);
-     const auto output = reinterpret_cast<OutputType*>(buffer);
-     for (IndexType i = 0; i <kOutputDimensions; ++i) {
-       output[i] += head_output[i];
-     }
-     return output;
-   }
-  protected:
-   // A string that represents the list of layers to be summed
-   static std::string GetSummandsString() {
-     return Head::GetStructureString() + "," + Tail::GetSummandsString();
-   }
-   // Make the learning class a friend
-   friend class Trainer<Sum>;
-   // the layer immediately before this layer
-   FirstPreviousLayer previous_layer_;
- };
- // Layer that sums the output of multiple layers (when there is one template argument)
- template <typename PreviousLayer>
- class Sum<PreviousLayer> {
-  public:
-   // Input/output type
-   using InputType = typename PreviousLayer::OutputType;
-   using OutputType = InputType;
-   // number of input/output dimensions
-   static constexpr IndexType kInputDimensions =
-       PreviousLayer::kOutputDimensions;
-   static constexpr IndexType kOutputDimensions = kInputDimensions;
-   // Size of the forward propagation buffer used from the input layer to this layer
-   static constexpr std::size_t kBufferSize = PreviousLayer::kBufferSize;
-   // Hash value embedded in the evaluation function file
-   static constexpr std::uint32_t GetHashValue() {
-     std::uint32_t hash_value = 0xBCE400B4u;
-     hash_value ^= PreviousLayer::GetHashValue() >> 1;
-     hash_value ^= PreviousLayer::GetHashValue() << 31;
-     return hash_value;
-   }
-   // A string that represents the structure from the input layer to this layer
-   static std::string GetStructureString() {
-     return "Sum[" +
-         std::to_string(kOutputDimensions) + "](" + GetSummandsString() + ")";
-   }
-   // read parameters
-   bool ReadParameters(std::istream& stream) {
-     return previous_layer_.ReadParameters(stream);
-   }
-   // forward propagation
-   const OutputType* Propagate(
-       const TransformedFeatureType* transformed_features, char* buffer) const {
-     return previous_layer_.Propagate(transformed_features, buffer);
-   }
-  protected:
-   // A string that represents the list of layers to be summed
-   static std::string GetSummandsString() {
-     return PreviousLayer::GetStructureString();
-   }
-   // Make the learning class a friend
-   friend class Trainer<Sum>;
-   // the layer immediately before this layer
-   PreviousLayer previous_layer_;
- };
- } // namespace Eval::NNUE::Layers
- #endif // #ifndef NNUE_LAYERS_SUM_H_INCLUDED

src/nnue/nnue_accumulator.h

@@ -8,7 +8,6 @@
  namespace Eval::NNUE {
  // Class that holds the result of affine transformation of input features
- // Keep the evaluation value that is the final output together
  struct alignas(32) Accumulator {
    std::int16_t
        accumulation[2][kRefreshTriggers.size()][kTransformedFeatureDimensions];

src/nnue/nnue_architecture.h

@@ -3,7 +3,7 @@
  #ifndef NNUE_ARCHITECTURE_H_INCLUDED
  #define NNUE_ARCHITECTURE_H_INCLUDED
- // include a header that defines the input features and network structure
+ // Defines the network structure
  #include "architectures/halfkp_256x2-32-32.h"
  namespace Eval::NNUE {
@@ -12,7 +12,7 @@ namespace Eval::NNUE {
  static_assert(Network::kOutputDimensions == 1, "");
  static_assert(std::is_same<Network::OutputType, std::int32_t>::value, "");
- // List of timings to perform all calculations instead of difference calculation
+ // Trigger for full calculation instead of difference calculation
  constexpr auto kRefreshTriggers = RawFeatures::kRefreshTriggers;
  } // namespace Eval::NNUE

src/nnue/nnue_common.h

@@ -18,7 +18,7 @@
  namespace Eval::NNUE {
- // A constant that represents the version of the evaluation function file
+ // Version of the evaluation file
  constexpr std::uint32_t kVersion = 0x7AF32F16u;
  // Constant used in evaluation value calculation
@@ -43,15 +43,9 @@ namespace Eval::NNUE {
  // Type of input feature after conversion
  using TransformedFeatureType = std::uint8_t;
- // index type
  using IndexType = std::uint32_t;
- // Forward declaration of learning class template
- template <typename Layer>
- class Trainer;
- // find the smallest multiple of n and above
+ // Round n up to be a multiple of base
  template <typename IntType>
  constexpr IntType CeilToMultiple(IntType n, IntType base) {
    return (n + base - 1) / base * base;
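A quick check of the rounding formula with concrete numbers: padding 70 inputs to a multiple of 64 gives (70 + 64 - 1) / 64 * 64 = 133 / 64 * 64 = 2 * 64 = 128 under integer division, which the following assertion would confirm:

    static_assert(CeilToMultiple<std::uint32_t>(70, 64) == 128, "");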

src/nnue/nnue_feature_transformer.h

@@ -15,34 +15,27 @@ namespace Eval::NNUE {
  class FeatureTransformer {
   private:
-   // number of output dimensions for one side
+   // Number of output dimensions for one side
    static constexpr IndexType kHalfDimensions = kTransformedFeatureDimensions;
   public:
-   // output type
+   // Output type
    using OutputType = TransformedFeatureType;
-   // number of input/output dimensions
+   // Number of input/output dimensions
    static constexpr IndexType kInputDimensions = RawFeatures::kDimensions;
    static constexpr IndexType kOutputDimensions = kHalfDimensions * 2;
-   // size of forward propagation buffer
+   // Size of forward propagation buffer
    static constexpr std::size_t kBufferSize =
        kOutputDimensions * sizeof(OutputType);
-   // Hash value embedded in the evaluation function file
+   // Hash value embedded in the evaluation file
    static constexpr std::uint32_t GetHashValue() {
      return RawFeatures::kHashValue ^ kOutputDimensions;
    }
-   // a string representing the structure
-   static std::string GetStructureString() {
-     return RawFeatures::GetName() + "[" +
-         std::to_string(kInputDimensions) + "->" +
-         std::to_string(kHalfDimensions) + "x2]";
-   }
-   // read parameters
+   // Read network parameters
    bool ReadParameters(std::istream& stream) {
      stream.read(reinterpret_cast<char*>(biases_),
          kHalfDimensions * sizeof(BiasType));
@@ -51,7 +44,7 @@ namespace Eval::NNUE {
      return !stream.fail();
    }
-   // proceed with the difference calculation if possible
+   // Proceed with the difference calculation if possible
    bool UpdateAccumulatorIfPossible(const Position& pos) const {
      const auto now = pos.state();
      if (now->accumulator.computed_accumulation) {
@@ -65,7 +58,7 @@ namespace Eval::NNUE {
      return false;
    }
-   // convert input features
+   // Convert input features
    void Transform(const Position& pos, OutputType* output, bool refresh) const {
      if (refresh || !UpdateAccumulatorIfPossible(pos)) {
        RefreshAccumulator(pos);
@@ -259,10 +252,11 @@ namespace Eval::NNUE {
        if (reset[perspective]) {
          std::memcpy(accumulator.accumulation[perspective][i], biases_,
              kHalfDimensions * sizeof(BiasType));
-       } else {// Difference calculation for the feature amount changed from 1 to 0
+       } else {
          std::memcpy(accumulator.accumulation[perspective][i],
              prev_accumulator.accumulation[perspective][i],
              kHalfDimensions * sizeof(BiasType));
+         // Difference calculation for the deactivated features
          for (const auto index : removed_indices[perspective]) {
            const IndexType offset = kHalfDimensions * index;
@@ -293,7 +287,7 @@ namespace Eval::NNUE {
        }
      }
-     {// Difference calculation for features that changed from 0 to 1
+     { // Difference calculation for the activated features
        for (const auto index : added_indices[perspective]) {
          const IndexType offset = kHalfDimensions * index;
@@ -330,14 +324,9 @@ namespace Eval::NNUE {
      accumulator.computed_score = false;
    }
-   // parameter type
    using BiasType = std::int16_t;
    using WeightType = std::int16_t;
-   // Make the learning class a friend
-   friend class Trainer<FeatureTransformer>;
-   // parameter
    alignas(kCacheLineSize) BiasType biases_[kHalfDimensions];
    alignas(kCacheLineSize)
        WeightType weights_[kHalfDimensions * kInputDimensions];
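For reference, a scalar sketch of the incremental update seen in the hunks above; the shipped code uses SIMD, but the idea is the same: start from the previous accumulator, subtract the weight rows of deactivated features, and add the rows of activated ones.

    for (const auto index : removed_indices[perspective]) {
      const IndexType offset = kHalfDimensions * index;
      for (IndexType j = 0; j < kHalfDimensions; ++j)
        accumulator.accumulation[perspective][i][j] -= weights_[offset + j];
    }
    for (const auto index : added_indices[perspective]) {
      const IndexType offset = kHalfDimensions * index;
      for (IndexType j = 0; j < kHalfDimensions; ++j)
        accumulator.accumulation[perspective][i][j] += weights_[offset + j];
    }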