Skip to content

Commit

Permalink
bench 10561225
Browse files Browse the repository at this point in the history
  • Loading branch information
rn5f107s2 committed Sep 12, 2024
1 parent 49c14f7 commit a868fe1
Show file tree
Hide file tree
Showing 4 changed files with 38 additions and 18 deletions.
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ OBJ_DIR=build
MOLY_DIR=src
CXX=clang++

DEFAULT_NET_NAME=moly_20240526.nnue
DEFAULT_NET_NAME=params.bin

DEFAULT_EXE = $(OBJ_DIR)/Molybdenum
DATAGEN_EXE = $(OBJ_DIR)/Datagen
Expand Down
Binary file added src/Nets/params.bin
Binary file not shown.
28 changes: 20 additions & 8 deletions src/nnue.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,16 @@ INCBIN(network, EVALFILE);
const Weights defaultWeights = *reinterpret_cast<const Weights*>(gnetworkData);

void Net::loadDefaultNet() {
    // Load the embedded float network into this Net instance.
    // Layer 0 (the feature transformer) is quantised to int16 with a scale
    // of 255 so the accumulator can be updated in integer arithmetic; the
    // later layers stay in float and are copied verbatim.
    //
    // Fixes vs. the diffed text: the stale pre-quantisation assignment
    // `weights0 = defaultWeights.weights0;` (float array into int16 array)
    // is gone, and `bias1` is assigned exactly once instead of twice.
    for (size_t i = 0; i < defaultWeights.weights0.size(); i++)
        weights0[i] = int16_t(defaultWeights.weights0[i] * 255);

    for (size_t i = 0; i < defaultWeights.bias0.size(); i++)
        bias0[i] = int16_t(defaultWeights.bias0[i] * 255);

    weights1 = defaultWeights.weights1;
    bias1 = defaultWeights.bias1;
    weights2 = defaultWeights.weights2;
    bias2 = defaultWeights.bias2;
}

void Net::initAccumulator(std::array<u64, 13> &bitboards) {
Expand All @@ -52,12 +58,18 @@ void Net::initAccumulator(std::array<u64, 13> &bitboards) {
}

int Net::calculate(Color c) {
    // Forward pass through the two small dense layers on top of the
    // accumulator, from the side-to-move perspective `c`.
    //
    // Fixes vs. the diffed text: the leftover single-layer int path
    // (`int output = 0;` and its loop) conflicted with the new float path
    // (redeclared `output`, mismatched types); only the two-layer float
    // evaluation remains.
    float out = bias2[0];
    std::array<float, 8> output = bias1;  // L2 pre-activations, seeded with bias1

    for (int n = 0; n < L1_SIZE; n++) {
        for (int m = 0; m < L2_SIZE; m++) {
            // Accumulator values are int16 scaled by 255 (see loadDefaultNet),
            // so divide by 255 to recover the float activation before SCReLU.
            output[m] += screlu(float(accumulator[ c][n]) / 255.0f) * weights1[n * L2_SIZE + m             ];
            output[m] += screlu(float(accumulator[!c][n]) / 255.0f) * weights1[n * L2_SIZE + m + L1_SIZE * L2_SIZE];
        }
    }

    for (int n = 0; n < L2_SIZE; n++)
        out += screlu(output[n]) * weights2[n];

    // 133 is the engine's eval scale; presumably chosen during training — TODO confirm.
    return int(out * 133.0f);
}
26 changes: 17 additions & 9 deletions src/nnue.h
Original file line number Diff line number Diff line change
Expand Up @@ -14,23 +14,26 @@ enum Toggle {

// Network architecture dimensions.
static const int INPUT_SIZE = 12 * 64;  // 12 piece types x 64 squares of input features
static const int L1_SIZE = 256;         // feature-transformer (first hidden layer) width
static const int OUTPUT_SIZE = 1;       // legacy: single-output layer size (pre two-layer net)
static const int NET_SIZE = 3;          // legacy: number of layers in the old topology
static const std::array<int, NET_SIZE> LAYER_SIZE = {INPUT_SIZE, L1_SIZE, OUTPUT_SIZE};  // legacy layer table
static const int L2_SIZE = 8;           // second hidden layer width
static const int OUT_SIZE = 1;          // final output layer width

// Raw (unquantised) network parameters, memory-mapped directly from the
// embedded net file via INCBIN. The member order and sizes must match the
// training-side serialisation byte-for-byte.
//
// Fixes vs. the diffed text: the old int16_t members and the new float
// members were both present (duplicate member names); only the final
// all-float layout remains.
struct Weights {
    std::array<float, L1_SIZE * INPUT_SIZE> weights0{};  // feature transformer
    std::array<float, L1_SIZE> bias0{};
    std::array<float, L1_SIZE * L2_SIZE * 2> weights1{}; // x2: both perspectives
    std::array<float, L2_SIZE> bias1{};
    std::array<float, L2_SIZE * OUT_SIZE> weights2{};
    std::array<float, OUT_SIZE> bias2{};
};

class Net {
public:
std::array<int16_t , L1_SIZE * INPUT_SIZE> weights0{};
std::array<int16_t, L1_SIZE> bias0{};
std::array<int16_t, L1_SIZE * OUTPUT_SIZE * 2> weights1{};
std::array<int16_t, OUTPUT_SIZE> bias1{};
std::array<float, L1_SIZE * L2_SIZE * 2> weights1{};
std::array<float, L2_SIZE> bias1{};
std::array<float, L2_SIZE * OUT_SIZE> weights2{};
std::array<float, OUT_SIZE> bias2{};
std::array<std::array<int16_t, L1_SIZE>, 2> accumulator{};
Stack<std::array<std::array<int16_t, L1_SIZE>, 2>, MAXDEPTH> accumulatorStack;

Expand All @@ -50,6 +53,11 @@ inline int screlu(int16_t input) {
return clamped * clamped;
}

// Squared clipped ReLU activation: clamp the input to [0, 1], then square it.
inline float screlu(float input) {
    const float clipped = std::clamp(input, 0.0f, 1.0f);
    return clipped * clipped;
}

template<Color C> inline
int index(int pc, int sq) {
int square = C ? sq : sq ^ 56;
Expand Down

0 comments on commit a868fe1

Please sign in to comment.