1350 lines
113 KiB
Plaintext
1350 lines
113 KiB
Plaintext
{
|
|
"nbformat": 4,
|
|
"nbformat_minor": 0,
|
|
"metadata": {
|
|
"colab": {
|
|
"provenance": [],
|
|
"collapsed_sections": [
|
|
"7rz9TTaUt547",
|
|
"iPa7Uf7ltt-r",
|
|
"eBkUA_MdFwpr"
|
|
],
|
|
"gpuType": "L4",
|
|
"machine_shape": "hm",
|
|
"authorship_tag": "ABX9TyNVvTasDtJNNgNqrI7lCCAW",
|
|
"include_colab_link": true
|
|
},
|
|
"kernelspec": {
|
|
"name": "python3",
|
|
"display_name": "Python 3"
|
|
},
|
|
"language_info": {
|
|
"name": "python"
|
|
},
|
|
"accelerator": "GPU"
|
|
},
|
|
"cells": [
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {
|
|
"id": "view-in-github",
|
|
"colab_type": "text"
|
|
},
|
|
"source": [
|
|
"<a href=\"https://colab.research.google.com/github/YannAhlgrim/chess-engine/blob/main/ChessEngine.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"source": [
|
|
"# Building a Chess Engine based on CNN and one based on Vision Transformer"
|
|
],
|
|
"metadata": {
|
|
"id": "JtjQT-pmt-Ms"
|
|
}
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"source": [
|
|
"## Installs and Imports"
|
|
],
|
|
"metadata": {
|
|
"id": "7rz9TTaUt547"
|
|
}
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"!pip install python-chess"
|
|
],
|
|
"metadata": {
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"id": "QDffgdH7bar-",
|
|
"outputId": "73e2f865-7b40-4a05-e8a6-ecc8f7dfe35c"
|
|
},
|
|
"execution_count": 1,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"Collecting python-chess\n",
|
|
" Downloading python_chess-1.999-py3-none-any.whl.metadata (776 bytes)\n",
|
|
"Collecting chess<2,>=1 (from python-chess)\n",
|
|
" Downloading chess-1.11.2.tar.gz (6.1 MB)\n",
|
|
"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.1/6.1 MB\u001b[0m \u001b[31m80.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
|
|
"\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
|
|
"Downloading python_chess-1.999-py3-none-any.whl (1.4 kB)\n",
|
|
"Building wheels for collected packages: chess\n",
|
|
" Building wheel for chess (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
|
|
" Created wheel for chess: filename=chess-1.11.2-py3-none-any.whl size=147775 sha256=05ce0aec95b34740e55ad4b33d066b405abc4b38cf207f512af7ee2e090a2722\n",
|
|
" Stored in directory: /root/.cache/pip/wheels/83/1f/4e/8f4300f7dd554eb8de70ddfed96e94d3d030ace10c5b53d447\n",
|
|
"Successfully built chess\n",
|
|
"Installing collected packages: chess, python-chess\n",
|
|
"Successfully installed chess-1.11.2 python-chess-1.999\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"import torch\n",
|
|
"import torch.nn as nn\n",
|
|
"import torch.optim as optim\n",
|
|
"import torch.nn.functional as F\n",
|
|
"import chess\n",
|
|
"\n",
|
|
"device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
|
|
"print(f\"Using device: {device}\")"
|
|
],
|
|
"metadata": {
|
|
"id": "hQXTOxy4pyN6",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"outputId": "214a48d9-c691-4229-a672-52deef85f4e2"
|
|
},
|
|
"execution_count": 2,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"Using device: cuda\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"source": [
|
|
"## Build the Neural Network Class"
|
|
],
|
|
"metadata": {
|
|
"id": "iPa7Uf7ltt-r"
|
|
}
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"class ChessNet(nn.Module):\n",
|
|
"\n",
|
|
" def __init__(self):\n",
|
|
" super(ChessNet, self).__init__()\n",
|
|
" # 12 planes and 8x8 board\n",
|
|
" self.conv1 = nn.Conv2d(12, 64, 1)\n",
|
|
" self.conv2 = nn.Conv2d(64, 128, 1)\n",
|
|
" # After flattening:\n",
|
|
" self.lin1 = nn.Linear(128 * 8 * 8, 512)\n",
|
|
" self.lin2 = nn.Linear(512, 256)\n",
|
|
" self.lin3 = nn.Linear(256, 1)\n",
|
|
"\n",
|
|
" def forward(self, x):\n",
|
|
" x = F.relu(self.conv1(x))\n",
|
|
" x = F.relu(self.conv2(x))\n",
|
|
" # Flatten the output of the convolutional layers before passing to linear layers\n",
|
|
" x = x.view(-1, 128 * 8 * 8)\n",
|
|
" x = F.relu(self.lin1(x))\n",
|
|
" x = F.relu(self.lin2(x))\n",
|
|
" x = self.lin3(x)\n",
|
|
" return x\n",
|
|
"\n",
|
|
"\n",
|
|
"test_model = ChessNet()\n",
|
|
"\n",
|
|
"# One test data point with shape: 12x8x8\n",
|
|
"test_data = torch.randn(1, 12, 8, 8)\n",
|
|
"\n",
|
|
"output = test_model(test_data)\n",
|
|
"print(output)"
|
|
],
|
|
"metadata": {
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"id": "aMJG_2TUqPvW",
|
|
"outputId": "c85a69fc-0ea8-4907-cb36-f6553d8cc3dd"
|
|
},
|
|
"execution_count": 3,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"tensor([[-0.0571]], grad_fn=<AddmmBackward0>)\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"source": [
|
|
"## Create a function that turns a Board into a Tensor"
|
|
],
|
|
"metadata": {
|
|
"id": "eBkUA_MdFwpr"
|
|
}
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"def board_to_tensor(board):\n",
|
|
" tensor = torch.zeros((12, 8, 8))\n",
|
|
" pieces = board.piece_map()\n",
|
|
" for square, piece in pieces.items():\n",
|
|
" if piece.color == chess.WHITE:\n",
|
|
" tensor[0 + piece.piece_type][square // 8][square % 8] = 1\n",
|
|
" else:\n",
|
|
" tensor[5 + piece.piece_type][square // 8][square % 8] = 1\n",
|
|
" return tensor\n",
|
|
"\n",
|
|
"# test board to tensor function\n",
|
|
"test_board = chess.Board()\n",
|
|
"test_tensor = board_to_tensor(test_board)\n",
|
|
"print(test_tensor)\n",
|
|
"\n"
|
|
],
|
|
"metadata": {
|
|
"id": "2OGSEelMFwVR",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"outputId": "d9dfe85d-6a4b-4946-82ac-ed21fdba9d70"
|
|
},
|
|
"execution_count": 4,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"tensor([[[0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.]],\n",
|
|
"\n",
|
|
" [[0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [1., 1., 1., 1., 1., 1., 1., 1.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.]],\n",
|
|
"\n",
|
|
" [[0., 1., 0., 0., 0., 0., 1., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.]],\n",
|
|
"\n",
|
|
" [[0., 0., 1., 0., 0., 1., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.]],\n",
|
|
"\n",
|
|
" [[1., 0., 0., 0., 0., 0., 0., 1.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.]],\n",
|
|
"\n",
|
|
" [[0., 0., 0., 1., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.]],\n",
|
|
"\n",
|
|
" [[0., 0., 0., 0., 1., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [1., 1., 1., 1., 1., 1., 1., 1.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.]],\n",
|
|
"\n",
|
|
" [[0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 1., 0., 0., 0., 0., 1., 0.]],\n",
|
|
"\n",
|
|
" [[0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 1., 0., 0., 1., 0., 0.]],\n",
|
|
"\n",
|
|
" [[0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [1., 0., 0., 0., 0., 0., 0., 1.]],\n",
|
|
"\n",
|
|
" [[0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 1., 0., 0., 0., 0.]],\n",
|
|
"\n",
|
|
" [[0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 0., 0., 0., 0.],\n",
|
|
" [0., 0., 0., 0., 1., 0., 0., 0.]]])\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {
|
|
"id": "8a2d513f"
|
|
},
|
|
"source": [
|
|
"## Define RL Training Parameters\n",
|
|
"\n",
|
|
"Define key parameters for reinforcement learning, such as learning rate, discount factor (gamma), exploration rate (epsilon), number of training episodes, and batch size for experience replay.\n"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "26e027a2"
|
|
},
|
|
"source": [
|
|
"LEARNING_RATE = 5e-4\n",
|
|
"GAMMA = 0.99\n",
|
|
"EPSILON_START = 1.0\n",
|
|
"EPSILON_END = 0.01\n",
|
|
"EPSILON_DECAY = 0.995\n",
|
|
"NUM_EPISODES = 3000\n",
|
|
"BATCH_SIZE = 64\n",
|
|
"BUFFER_SIZE = 10000"
|
|
],
|
|
"execution_count": 5,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {
|
|
"id": "e0863229"
|
|
},
|
|
"source": [
|
|
"## Initialize Model and Optimizer\n",
|
|
"\n"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "0aa0e1b2"
|
|
},
|
|
"source": [
|
|
"model = ChessNet().to(device)\n",
|
|
"optimizer = optim.Adam(model.parameters(), lr=LEARNING_RATE)"
|
|
],
|
|
"execution_count": 6,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {
|
|
"id": "aa6a5836"
|
|
},
|
|
"source": [
|
|
"## Implement Experience Replay Buffer\n",
|
|
"\n",
|
|
"Create a simple experience replay buffer to store game transitions (state, action, reward, next_state, done) for training stability.\n"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "f6e19433"
|
|
},
|
|
"source": [
|
|
"import random\n",
|
|
"from collections import deque\n",
|
|
"\n",
|
|
"class ReplayBuffer:\n",
|
|
" def __init__(self, capacity):\n",
|
|
" self.buffer = deque(maxlen=capacity)\n",
|
|
"\n",
|
|
" def add(self, state, reward, next_state, done):\n",
|
|
" self.buffer.append((state, reward, next_state, done))\n",
|
|
"\n",
|
|
" def sample(self, batch_size):\n",
|
|
" if len(self.buffer) < batch_size:\n",
|
|
" return [] # Not enough samples to form a batch\n",
|
|
" experiences = random.sample(self.buffer, batch_size)\n",
|
|
"\n",
|
|
" # Unpack experience\n",
|
|
" states, rewards, next_states, dones = zip(*experiences)\n",
|
|
"\n",
|
|
" return states, rewards, next_states, dones\n",
|
|
"\n",
|
|
" def __len__(self):\n",
|
|
" return len(self.buffer)"
|
|
],
|
|
"execution_count": 7,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"source": [
|
|
"## Create a function where the model plays against itself"
|
|
],
|
|
"metadata": {
|
|
"id": "QQUs_Gj8dxHW"
|
|
}
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"source": [
|
|
"In this function the model will evaluate all the next moves from white's perspective and play the most advantageous move.\n",
|
|
"\n",
|
|
"We have 3 different outcomes and rewards:\n",
|
|
"- White wins: +1\n",
|
|
"- Black wins: -1\n",
|
|
"- Draw: 0\n",
|
|
"\n",
|
|
"We will add one policy of playing maximum 100 moves otherwise it is a draw --> Reward = 0"
|
|
],
|
|
"metadata": {
|
|
"id": "hG-M_1leeRBu"
|
|
}
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "075de970"
|
|
},
|
|
"source": [
|
|
"import random\n",
|
|
"\n",
|
|
"def play_against_itself(model, epsilon, initial_board=None, max_moves=100, device='cpu'):\n",
|
|
" if initial_board is None:\n",
|
|
" board = chess.Board()\n",
|
|
" else:\n",
|
|
" board = initial_board.copy()\n",
|
|
"\n",
|
|
" game_transitions = []\n",
|
|
"\n",
|
|
" final_game_result_value = 0\n",
|
|
"\n",
|
|
" for move_count in range(max_moves):\n",
|
|
" if board.is_game_over():\n",
|
|
" break\n",
|
|
"\n",
|
|
" current_state_tensor = board_to_tensor(board).unsqueeze(0).to(device)\n",
|
|
" legal_moves = list(board.legal_moves)\n",
|
|
"\n",
|
|
" if not legal_moves:\n",
|
|
" # Stalemate or checkmate\n",
|
|
" break\n",
|
|
"\n",
|
|
" best_move = None\n",
|
|
" # exploration\n",
|
|
" if random.random() < epsilon:\n",
|
|
" random_move = random.choice(legal_moves)\n",
|
|
" best_move = random_move\n",
|
|
" else: # Exploitation\n",
|
|
" if board.turn == chess.WHITE:\n",
|
|
" best_evaluation = -float('inf') # White wants to maximize the score\n",
|
|
" else:\n",
|
|
" best_evaluation = float('inf') # Black wants to minimize the score\n",
|
|
"\n",
|
|
" # Evaluate all legal moves\n",
|
|
" for move in legal_moves:\n",
|
|
" temp_board = board.copy()\n",
|
|
" temp_board.push(move)\n",
|
|
"\n",
|
|
" input_tensor = board_to_tensor(temp_board).unsqueeze(0).to(device)\n",
|
|
" with torch.no_grad():\n",
|
|
" evaluation = model(input_tensor).item()\n",
|
|
"\n",
|
|
" if board.turn == chess.WHITE:\n",
|
|
" if evaluation > best_evaluation:\n",
|
|
" best_evaluation = evaluation\n",
|
|
" best_move = move\n",
|
|
" else:\n",
|
|
" if evaluation < best_evaluation:\n",
|
|
" best_evaluation = evaluation\n",
|
|
" best_move = move\n",
|
|
"\n",
|
|
" if best_move is not None:\n",
|
|
" board.push(best_move)\n",
|
|
" else:\n",
|
|
" break\n",
|
|
"\n",
|
|
" next_state_tensor = board_to_tensor(board).unsqueeze(0).to(device)\n",
|
|
" is_done_after_move = board.is_game_over()\n",
|
|
"\n",
|
|
" game_transitions.append((current_state_tensor, next_state_tensor, is_done_after_move))\n",
|
|
"\n",
|
|
" if is_done_after_move:\n",
|
|
" break\n",
|
|
"\n",
|
|
" if board.is_game_over():\n",
|
|
" result = board.result()\n",
|
|
" if result == \"1-0\":\n",
|
|
" final_game_result_value = 1 # White wins\n",
|
|
" elif result == \"0-1\":\n",
|
|
" final_game_result_value = -1 # Black wins\n",
|
|
" else:\n",
|
|
" final_game_result_value = 0 # Draw\n",
|
|
"\n",
|
|
" return game_transitions, final_game_result_value"
|
|
],
|
|
"execution_count": 8,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {
|
|
"id": "271301cc"
|
|
},
|
|
"source": [
|
|
"## Implement RL Training Loop\n",
|
|
"\n",
|
|
"Create a training loop that plays multiple games, stores experiences in the replay buffer, samples from the buffer, calculates the Q-value loss, and updates the model's weights using backpropagation.\n"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "bb5cfc23",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"outputId": "08251dff-d41d-4050-b226-a530fe02fecd"
|
|
},
|
|
"source": [
|
|
"replay_buffer = ReplayBuffer(BUFFER_SIZE)\n",
|
|
"epsilon = EPSILON_START\n",
|
|
"episode_rewards = []\n",
|
|
"\n",
|
|
"# Mean Squared Error for Q-learning\n",
|
|
"criterion = nn.MSELoss()\n",
|
|
"\n",
|
|
"print(\"Starting RL training loop...\")\n",
|
|
"\n",
|
|
"for episode in range(NUM_EPISODES):\n",
|
|
" game_transitions, final_game_result = play_against_itself(model, epsilon, max_moves=100, device=device)\n",
|
|
"\n",
|
|
" # Process game_transitions to add to replay buffer\n",
|
|
" for i, (current_state_tensor, next_state_tensor, is_done_after_move) in enumerate(game_transitions):\n",
|
|
" reward = 0\n",
|
|
" if is_done_after_move:\n",
|
|
" reward = final_game_result\n",
|
|
"\n",
|
|
" state_to_buffer = current_state_tensor.squeeze(0).cpu()\n",
|
|
" next_state_to_buffer = next_state_tensor.squeeze(0).cpu()\n",
|
|
"\n",
|
|
" replay_buffer.add(state_to_buffer, reward, next_state_to_buffer, is_done_after_move)\n",
|
|
"\n",
|
|
" episode_rewards.append(final_game_result)\n",
|
|
"\n",
|
|
" # Train the model if enough experiences are in the buffer\n",
|
|
" if len(replay_buffer) >= BATCH_SIZE:\n",
|
|
" states, rewards, next_states, dones = replay_buffer.sample(BATCH_SIZE)\n",
|
|
"\n",
|
|
" states = torch.stack(states).float().to(device)\n",
|
|
" rewards = torch.tensor(rewards).float().to(device)\n",
|
|
" next_states = torch.stack(next_states).float().to(device)\n",
|
|
" dones = torch.tensor(dones).bool().to(device)\n",
|
|
"\n",
|
|
" # Get predicted Q-values for current states\n",
|
|
" current_q_values = model(states).squeeze(1)\n",
|
|
"\n",
|
|
" # Calculate target Q-values\n",
|
|
" with torch.no_grad():\n",
|
|
" next_q_values = model(next_states).squeeze(1)\n",
|
|
" max_next_q_values = next_q_values\n",
|
|
" target_q_values = rewards + GAMMA * max_next_q_values * (~dones)\n",
|
|
"\n",
|
|
" # Compute loss and perform backpropagation\n",
|
|
" optimizer.zero_grad()\n",
|
|
" loss = criterion(current_q_values, target_q_values)\n",
|
|
" loss.backward()\n",
|
|
" optimizer.step()\n",
|
|
"\n",
|
|
" # Decay epsilon\n",
|
|
" epsilon = max(EPSILON_END, epsilon * EPSILON_DECAY)\n",
|
|
"\n",
|
|
" if (episode + 1) % 100 == 0:\n",
|
|
" avg_reward = sum(episode_rewards[-100:]) / 100\n",
|
|
" print(f\"Episode {episode + 1}/{NUM_EPISODES}, Epsilon: {epsilon:.4f}, Average Reward (last 100): {avg_reward:.2f}\")\n",
|
|
"\n",
|
|
"print(\"RL training loop completed.\")\n",
|
|
"print(f\"Final Epsilon: {epsilon:.4f}\")\n",
|
|
"print(f\"Total Episodes: {len(episode_rewards)}\")\n",
|
|
"print(f\"Average reward over all episodes: {sum(episode_rewards) / len(episode_rewards):.2f}\")"
|
|
],
|
|
"execution_count": 9,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"Starting RL training loop...\n",
|
|
"Episode 100/3000, Epsilon: 0.6058, Average Reward (last 100): 0.01\n",
|
|
"Episode 200/3000, Epsilon: 0.3670, Average Reward (last 100): -0.06\n",
|
|
"Episode 300/3000, Epsilon: 0.2223, Average Reward (last 100): -0.07\n",
|
|
"Episode 400/3000, Epsilon: 0.1347, Average Reward (last 100): -0.07\n",
|
|
"Episode 500/3000, Epsilon: 0.0816, Average Reward (last 100): -0.01\n",
|
|
"Episode 600/3000, Epsilon: 0.0494, Average Reward (last 100): -0.01\n",
|
|
"Episode 700/3000, Epsilon: 0.0299, Average Reward (last 100): 0.00\n",
|
|
"Episode 800/3000, Epsilon: 0.0181, Average Reward (last 100): -0.01\n",
|
|
"Episode 900/3000, Epsilon: 0.0110, Average Reward (last 100): 0.00\n",
|
|
"Episode 1000/3000, Epsilon: 0.0100, Average Reward (last 100): -0.03\n",
|
|
"Episode 1100/3000, Epsilon: 0.0100, Average Reward (last 100): -0.03\n",
|
|
"Episode 1200/3000, Epsilon: 0.0100, Average Reward (last 100): -0.01\n",
|
|
"Episode 1300/3000, Epsilon: 0.0100, Average Reward (last 100): 0.01\n",
|
|
"Episode 1400/3000, Epsilon: 0.0100, Average Reward (last 100): 0.05\n",
|
|
"Episode 1500/3000, Epsilon: 0.0100, Average Reward (last 100): 0.07\n",
|
|
"Episode 1600/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 1700/3000, Epsilon: 0.0100, Average Reward (last 100): -0.02\n",
|
|
"Episode 1800/3000, Epsilon: 0.0100, Average Reward (last 100): -0.06\n",
|
|
"Episode 1900/3000, Epsilon: 0.0100, Average Reward (last 100): -0.02\n",
|
|
"Episode 2000/3000, Epsilon: 0.0100, Average Reward (last 100): -0.05\n",
|
|
"Episode 2100/3000, Epsilon: 0.0100, Average Reward (last 100): -0.01\n",
|
|
"Episode 2200/3000, Epsilon: 0.0100, Average Reward (last 100): -0.08\n",
|
|
"Episode 2300/3000, Epsilon: 0.0100, Average Reward (last 100): -0.06\n",
|
|
"Episode 2400/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 2500/3000, Epsilon: 0.0100, Average Reward (last 100): -0.08\n",
|
|
"Episode 2600/3000, Epsilon: 0.0100, Average Reward (last 100): 0.01\n",
|
|
"Episode 2700/3000, Epsilon: 0.0100, Average Reward (last 100): 0.04\n",
|
|
"Episode 2800/3000, Epsilon: 0.0100, Average Reward (last 100): 0.01\n",
|
|
"Episode 2900/3000, Epsilon: 0.0100, Average Reward (last 100): 0.01\n",
|
|
"Episode 3000/3000, Epsilon: 0.0100, Average Reward (last 100): 0.07\n",
|
|
"RL training loop completed.\n",
|
|
"Final Epsilon: 0.0100\n",
|
|
"Total Episodes: 3000\n",
|
|
"Average reward over all episodes: -0.01\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"source": [
|
|
"## Save Model"
|
|
],
|
|
"metadata": {
|
|
"id": "aZryCSqb0xGB"
|
|
}
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"torch.save(model.state_dict(), './chess_net_model.pth')"
|
|
],
|
|
"metadata": {
|
|
"id": "GIaJO-ep0zGv"
|
|
},
|
|
"execution_count": 10,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"# test loading\n",
|
|
"loaded_model = ChessNet().to(device)\n",
|
|
"loaded_model.load_state_dict(torch.load('./chess_net_model.pth', map_location=device))\n",
|
|
"loaded_model.eval()\n",
|
|
"\n",
|
|
"board = chess.Board()\n",
|
|
"evaluation = loaded_model(board_to_tensor(board).unsqueeze(0).to(device))\n",
|
|
"print(evaluation)"
|
|
],
|
|
"metadata": {
|
|
"id": "1ygqRtG02fVt",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"outputId": "fbc3ac6b-ed7c-4ed4-8f82-d088af6dab0c"
|
|
},
|
|
"execution_count": 11,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"tensor([[-0.0481]], device='cuda:0', grad_fn=<AddmmBackward0>)\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {
|
|
"id": "20863772"
|
|
},
|
|
"source": [
|
|
"## Define ChessViT Model\n",
|
|
"\n",
|
|
"Create a new Python class `ChessViT` that implements a Vision Transformer structure based on the paper \"An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale\""
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "46b282ae"
|
|
},
|
|
"source": [
|
|
"class MultiHeadSelfAttention(nn.Module):\n",
|
|
" def __init__(self, embed_dim, num_heads):\n",
|
|
" super(MultiHeadSelfAttention, self).__init__()\n",
|
|
" self.embed_dim = embed_dim\n",
|
|
" self.num_heads = num_heads\n",
|
|
" self.head_dim = embed_dim // num_heads\n",
|
|
" assert self.head_dim * num_heads == self.embed_dim, \"embed_dim must be divisible by num_heads\"\n",
|
|
"\n",
|
|
" self.qkv = nn.Linear(embed_dim, embed_dim * 3) # Projects to Q, K, V\n",
|
|
" self.proj = nn.Linear(embed_dim, embed_dim)\n",
|
|
"\n",
|
|
" def forward(self, x):\n",
|
|
" batch_size, seq_len, embed_dim = x.size()\n",
|
|
"\n",
|
|
" qkv = self.qkv(x).reshape(batch_size, seq_len, 3, self.num_heads, self.head_dim)\n",
|
|
" q, k, v = qkv.permute(2, 0, 3, 1, 4) # 3, B, H, S, D_H\n",
|
|
"\n",
|
|
" # Scaled Dot-Product Attention\n",
|
|
" # (B, H, S, D_H) @ (B, H, D_H, S) -> (B, H, S, S)\n",
|
|
" attention_scores = torch.matmul(q, k.transpose(-2, -1)) / (self.head_dim**0.5)\n",
|
|
" attention_probs = F.softmax(attention_scores, dim=-1)\n",
|
|
"\n",
|
|
" # (B, H, S, S) @ (B, H, S, D_H) -> (B, H, S, D_H)\n",
|
|
" output = torch.matmul(attention_probs, v)\n",
|
|
"\n",
|
|
" # Concatenate heads and apply final projection\n",
|
|
" output = output.transpose(1, 2).reshape(batch_size, seq_len, embed_dim)\n",
|
|
" output = self.proj(output)\n",
|
|
" return output"
|
|
],
|
|
"execution_count": 12,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "28956b08"
|
|
},
|
|
"source": [
|
|
"class FeedForwardNetwork(nn.Module):\n",
|
|
" def __init__(self, embed_dim, hidden_dim):\n",
|
|
" super(FeedForwardNetwork, self).__init__()\n",
|
|
" self.net = nn.Sequential(\n",
|
|
" nn.Linear(embed_dim, hidden_dim),\n",
|
|
" nn.GELU(),\n",
|
|
" nn.Linear(hidden_dim, embed_dim)\n",
|
|
" )\n",
|
|
"\n",
|
|
" def forward(self, x):\n",
|
|
" return self.net(x)"
|
|
],
|
|
"execution_count": 13,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "dedf8bfd"
|
|
},
|
|
"source": [
|
|
"class TransformerEncoderBlock(nn.Module):\n",
|
|
" def __init__(self, embed_dim, num_heads, hidden_dim):\n",
|
|
" super(TransformerEncoderBlock, self).__init__()\n",
|
|
" self.norm1 = nn.LayerNorm(embed_dim)\n",
|
|
" self.attn = MultiHeadSelfAttention(embed_dim, num_heads)\n",
|
|
" self.norm2 = nn.LayerNorm(embed_dim)\n",
|
|
" self.ffn = FeedForwardNetwork(embed_dim, hidden_dim)\n",
|
|
"\n",
|
|
" def forward(self, x):\n",
|
|
" x = x + self.attn(self.norm1(x))\n",
|
|
" x = x + self.ffn(self.norm2(x))\n",
|
|
" return x"
|
|
],
|
|
"execution_count": 14,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "6e1472bd"
|
|
},
|
|
"source": [
|
|
"class ChessViT(nn.Module):\n",
|
|
" def __init__(self, in_channels=12, image_size=8, patch_size=2, embed_dim=128, num_layers=2, num_heads=4, hidden_dim=None):\n",
|
|
" super(ChessViT, self).__init__()\n",
|
|
"\n",
|
|
" self.patch_size = patch_size\n",
|
|
" self.embed_dim = embed_dim\n",
|
|
" self.num_layers = num_layers\n",
|
|
" self.num_heads = num_heads\n",
|
|
"\n",
|
|
" if hidden_dim is None:\n",
|
|
" hidden_dim = embed_dim * 4\n",
|
|
"\n",
|
|
" assert image_size % patch_size == 0, \"Image size must be divisible by patch size\"\n",
|
|
" self.num_patches = (image_size // patch_size)**2\n",
|
|
"\n",
|
|
" # 1. Patch Embedding\n",
|
|
" self.patch_embedding = nn.Conv2d(in_channels, embed_dim, kernel_size=patch_size, stride=patch_size)\n",
|
|
"\n",
|
|
" # 2. Class Token\n",
|
|
" self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim))\n",
|
|
"\n",
|
|
" # 3. Positional Embedding\n",
|
|
" # num_patches + 1 for the class token\n",
|
|
" self.positional_embedding = nn.Parameter(torch.zeros(1, self.num_patches + 1, embed_dim))\n",
|
|
"\n",
|
|
" # 4. Transformer Encoder Blocks\n",
|
|
" self.transformer_encoder_blocks = nn.ModuleList(\n",
|
|
" [TransformerEncoderBlock(embed_dim, num_heads, hidden_dim) for _ in range(num_layers)]\n",
|
|
" )\n",
|
|
"\n",
|
|
" # 5. MLP Head for regression (evaluation score)\n",
|
|
" self.mlp_head = nn.Sequential(\n",
|
|
" nn.LayerNorm(embed_dim),\n",
|
|
" nn.Linear(embed_dim, 1)\n",
|
|
" )\n",
|
|
"\n",
|
|
" def forward(self, x):\n",
|
|
" batch_size = x.shape[0]\n",
|
|
"\n",
|
|
" # Apply patch embedding (Conv2d output: B, C, H, W)\n",
|
|
" x = self.patch_embedding(x)\n",
|
|
" # Flatten and transpose to (B, num_patches, embed_dim)\n",
|
|
" x = x.flatten(2).transpose(1, 2)\n",
|
|
"\n",
|
|
" # Prepend class token\n",
|
|
" cls_token = self.cls_token.expand(batch_size, -1, -1) # (B, 1, embed_dim)\n",
|
|
" x = torch.cat((cls_token, x), dim=1) # (B, num_patches + 1, embed_dim)\n",
|
|
"\n",
|
|
" # Add positional embedding\n",
|
|
" x = x + self.positional_embedding # (B, num_patches + 1, embed_dim)\n",
|
|
"\n",
|
|
" # Pass through Transformer Encoder Blocks\n",
|
|
" for block in self.transformer_encoder_blocks:\n",
|
|
" x = block(x)\n",
|
|
"\n",
|
|
" # Extract the output for the class token (first element)\n",
|
|
" cls_token_output = x[:, 0]\n",
|
|
"\n",
|
|
" # Pass through MLP head for final evaluation score\n",
|
|
" evaluation_score = self.mlp_head(cls_token_output)\n",
|
|
"\n",
|
|
" return evaluation_score"
|
|
],
|
|
"execution_count": 15,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"model = ChessViT()\n",
|
|
"optimizer = optim.Adam(model.parameters(), lr=LEARNING_RATE)"
|
|
],
|
|
"metadata": {
|
|
"id": "sXBYr7-3JPWs"
|
|
},
|
|
"execution_count": 16,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "9c555432",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"outputId": "7b28938c-dda5-40a1-a92d-555deb3ba0d5"
|
|
},
|
|
"source": [
|
|
"replay_buffer = ReplayBuffer(BUFFER_SIZE)\n",
|
|
"epsilon = EPSILON_START\n",
|
|
"episode_rewards = []\n",
|
|
"\n",
|
|
"criterion = nn.MSELoss()\n",
|
|
"\n",
|
|
"print(\"Starting RL training loop...\")\n",
|
|
"\n",
|
|
"for episode in range(NUM_EPISODES):\n",
|
|
" game_transitions, final_game_result = play_against_itself(model, epsilon, max_moves=100)\n",
|
|
"\n",
|
|
" for i, (current_state_tensor, next_state_tensor, is_done_after_move) in enumerate(game_transitions):\n",
|
|
" reward = 0\n",
|
|
" if is_done_after_move:\n",
|
|
" reward = final_game_result\n",
|
|
"\n",
|
|
" state_to_buffer = current_state_tensor.squeeze(0)\n",
|
|
" next_state_to_buffer = next_state_tensor.squeeze(0)\n",
|
|
"\n",
|
|
" replay_buffer.add(state_to_buffer, reward, next_state_to_buffer, is_done_after_move)\n",
|
|
"\n",
|
|
" episode_rewards.append(final_game_result)\n",
|
|
"\n",
|
|
" if len(replay_buffer) >= BATCH_SIZE:\n",
|
|
" states, rewards, next_states, dones = replay_buffer.sample(BATCH_SIZE)\n",
|
|
"\n",
|
|
" states = torch.stack(states).float()\n",
|
|
" rewards = torch.tensor(rewards).float()\n",
|
|
" next_states = torch.stack(next_states).float()\n",
|
|
" dones = torch.tensor(dones).bool()\n",
|
|
"\n",
|
|
" current_q_values = model(states).squeeze(1)\n",
|
|
"\n",
|
|
" with torch.no_grad():\n",
|
|
" next_q_values = model(next_states).squeeze(1)\n",
|
|
" max_next_q_values = next_q_values\n",
|
|
" target_q_values = rewards + GAMMA * max_next_q_values * (~dones)\n",
|
|
"\n",
|
|
" optimizer.zero_grad()\n",
|
|
" loss = criterion(current_q_values, target_q_values)\n",
|
|
" loss.backward()\n",
|
|
" optimizer.step()\n",
|
|
"\n",
|
|
" epsilon = max(EPSILON_END, epsilon * EPSILON_DECAY)\n",
|
|
"\n",
|
|
" if (episode + 1) % 100 == 0:\n",
|
|
" avg_reward = sum(episode_rewards[-100:]) / 100\n",
|
|
" print(f\"Episode {episode + 1}/{NUM_EPISODES}, Epsilon: {epsilon:.4f}, Average Reward (last 100): {avg_reward:.2f}\")\n",
|
|
"\n",
|
|
"print(\"RL training loop completed.\")\n",
|
|
"print(f\"Final Epsilon: {epsilon:.4f}\")\n",
|
|
"print(f\"Total Episodes: {len(episode_rewards)}\")\n",
|
|
"print(f\"Average reward over all episodes: {sum(episode_rewards) / len(episode_rewards):.2f}\")"
|
|
],
|
|
"execution_count": 17,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"Starting RL training loop...\n",
|
|
"Episode 100/3000, Epsilon: 0.6058, Average Reward (last 100): -0.01\n",
|
|
"Episode 200/3000, Epsilon: 0.3670, Average Reward (last 100): -0.03\n",
|
|
"Episode 300/3000, Epsilon: 0.2223, Average Reward (last 100): -0.01\n",
|
|
"Episode 400/3000, Epsilon: 0.1347, Average Reward (last 100): -0.02\n",
|
|
"Episode 500/3000, Epsilon: 0.0816, Average Reward (last 100): 0.00\n",
|
|
"Episode 600/3000, Epsilon: 0.0494, Average Reward (last 100): 0.00\n",
|
|
"Episode 700/3000, Epsilon: 0.0299, Average Reward (last 100): 0.02\n",
|
|
"Episode 800/3000, Epsilon: 0.0181, Average Reward (last 100): 0.00\n",
|
|
"Episode 900/3000, Epsilon: 0.0110, Average Reward (last 100): 0.00\n",
|
|
"Episode 1000/3000, Epsilon: 0.0100, Average Reward (last 100): -0.01\n",
|
|
"Episode 1100/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 1200/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 1300/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 1400/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 1500/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 1600/3000, Epsilon: 0.0100, Average Reward (last 100): 0.02\n",
|
|
"Episode 1700/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 1800/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 1900/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 2000/3000, Epsilon: 0.0100, Average Reward (last 100): -0.01\n",
|
|
"Episode 2100/3000, Epsilon: 0.0100, Average Reward (last 100): -0.01\n",
|
|
"Episode 2200/3000, Epsilon: 0.0100, Average Reward (last 100): -0.01\n",
|
|
"Episode 2300/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 2400/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 2500/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 2600/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 2700/3000, Epsilon: 0.0100, Average Reward (last 100): 0.05\n",
|
|
"Episode 2800/3000, Epsilon: 0.0100, Average Reward (last 100): -0.01\n",
|
|
"Episode 2900/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"Episode 3000/3000, Epsilon: 0.0100, Average Reward (last 100): 0.00\n",
|
|
"RL training loop completed.\n",
|
|
"Final Epsilon: 0.0100\n",
|
|
"Total Episodes: 3000\n",
|
|
"Average reward over all episodes: -0.00\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "8cc8cdcd",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"outputId": "ec8c16b0-e15b-42c8-ffc5-02b9346d9035"
|
|
},
|
|
"source": [
|
|
"torch.save(model.state_dict(), './chess_vit_model.pth')\n",
|
|
"print(\"ChessViT model saved to ./chess_vit_model.pth\")"
|
|
],
|
|
"execution_count": 18,
|
|
"outputs": [
|
|
{
|
|
"metadata": {
|
|
"tags": null
|
|
},
|
|
"name": "stdout",
|
|
"output_type": "stream",
|
|
"text": [
|
|
"ChessViT model saved to ./chess_vit_model.pth\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "aff6003b",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"outputId": "d649fc7b-2c82-4f19-fd7b-4ea6386dc591"
|
|
},
|
|
"source": [
|
|
"device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
|
|
"print(f\"Using device: {device}\")\n",
|
|
"\n",
|
|
"loaded_vit_model = ChessViT().to(device)\n",
|
|
"loaded_vit_model.load_state_dict(torch.load('./chess_vit_model.pth', map_location=device))\n",
|
|
"loaded_vit_model.eval()\n",
|
|
"\n",
|
|
"board = chess.Board()\n",
|
|
"evaluation = loaded_vit_model(board_to_tensor(board).unsqueeze(0).to(device))\n",
|
|
"print(f\"Evaluation of default board by loaded ChessViT model: {evaluation.item():.4f}\")"
|
|
],
|
|
"execution_count": 19,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"Using device: cuda\n",
|
|
"Evaluation of default board by loaded ChessViT model: -0.0031\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "markdown",
|
|
"metadata": {
|
|
"id": "78ee595b"
|
|
},
|
|
"source": [
|
|
"## Let thje 2 models play against each other\n",
|
|
"\n"
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "75f2c74b"
|
|
},
|
|
"source": [
|
|
"def play_models_against_each_other(white_model, black_model, max_moves=100, device='cpu'):\n",
|
|
" board = chess.Board()\n",
|
|
" white_model.eval()\n",
|
|
" black_model.eval()\n",
|
|
"\n",
|
|
" for move_count in range(max_moves):\n",
|
|
" if board.is_game_over():\n",
|
|
" break\n",
|
|
"\n",
|
|
" legal_moves = list(board.legal_moves)\n",
|
|
" if not legal_moves:\n",
|
|
" break\n",
|
|
"\n",
|
|
" best_move = None\n",
|
|
"\n",
|
|
" if board.turn == chess.WHITE:\n",
|
|
" current_model = white_model\n",
|
|
" best_evaluation = -float('inf') # White wants to maximize the score\n",
|
|
" else:\n",
|
|
" current_model = black_model\n",
|
|
" best_evaluation = float('inf') # Black wants to minimize the score\n",
|
|
"\n",
|
|
" for move in legal_moves:\n",
|
|
" temp_board = board.copy()\n",
|
|
" temp_board.push(move)\n",
|
|
"\n",
|
|
" input_tensor = board_to_tensor(temp_board).unsqueeze(0).to(device)\n",
|
|
" with torch.no_grad():\n",
|
|
" evaluation = current_model(input_tensor).item()\n",
|
|
"\n",
|
|
" if board.turn == chess.WHITE:\n",
|
|
" if evaluation > best_evaluation:\n",
|
|
" best_evaluation = evaluation\n",
|
|
" best_move = move\n",
|
|
" else:\n",
|
|
" if evaluation < best_evaluation:\n",
|
|
" best_evaluation = evaluation\n",
|
|
" best_move = move\n",
|
|
"\n",
|
|
" if best_move is not None:\n",
|
|
" board.push(best_move)\n",
|
|
" else:\n",
|
|
" break\n",
|
|
"\n",
|
|
" return board\n"
|
|
],
|
|
"execution_count": 31,
|
|
"outputs": []
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "9c608df5",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"outputId": "850e2323-cd8b-4aab-b3e5-de8fa7eac3b1"
|
|
},
|
|
"source": [
|
|
"device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
|
|
"print(f\"Using device: {device}\")\n",
|
|
"\n",
|
|
"loaded_chessnet_model = ChessNet().to(device)\n",
|
|
"loaded_chessnet_model.load_state_dict(torch.load('./chess_net_model.pth', map_location=device))\n",
|
|
"loaded_chessnet_model.eval()\n"
|
|
],
|
|
"execution_count": 27,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"Using device: cuda\n"
|
|
]
|
|
},
|
|
{
|
|
"output_type": "execute_result",
|
|
"data": {
|
|
"text/plain": [
|
|
"ChessNet(\n",
|
|
" (conv1): Conv2d(12, 64, kernel_size=(1, 1), stride=(1, 1))\n",
|
|
" (conv2): Conv2d(64, 128, kernel_size=(1, 1), stride=(1, 1))\n",
|
|
" (lin1): Linear(in_features=8192, out_features=512, bias=True)\n",
|
|
" (lin2): Linear(in_features=512, out_features=256, bias=True)\n",
|
|
" (lin3): Linear(in_features=256, out_features=1, bias=True)\n",
|
|
")"
|
|
]
|
|
},
|
|
"metadata": {},
|
|
"execution_count": 27
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "7c92530e",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"outputId": "ca0822f5-4e12-4c1b-db6f-c1f5c8c14411"
|
|
},
|
|
"source": [
|
|
"device = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n",
|
|
"print(f\"Using device: {device}\")\n",
|
|
"\n",
|
|
"loaded_chessvit_model = ChessViT().to(device)\n",
|
|
"loaded_chessvit_model.load_state_dict(torch.load('./chess_vit_model.pth', map_location=device))\n",
|
|
"loaded_chessvit_model.eval()"
|
|
],
|
|
"execution_count": 28,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"Using device: cuda\n"
|
|
]
|
|
},
|
|
{
|
|
"output_type": "execute_result",
|
|
"data": {
|
|
"text/plain": [
|
|
"ChessViT(\n",
|
|
" (patch_embedding): Conv2d(12, 128, kernel_size=(2, 2), stride=(2, 2))\n",
|
|
" (transformer_encoder_blocks): ModuleList(\n",
|
|
" (0-1): 2 x TransformerEncoderBlock(\n",
|
|
" (norm1): LayerNorm((128,), eps=1e-05, elementwise_affine=True)\n",
|
|
" (attn): MultiHeadSelfAttention(\n",
|
|
" (qkv): Linear(in_features=128, out_features=384, bias=True)\n",
|
|
" (proj): Linear(in_features=128, out_features=128, bias=True)\n",
|
|
" )\n",
|
|
" (norm2): LayerNorm((128,), eps=1e-05, elementwise_affine=True)\n",
|
|
" (ffn): FeedForwardNetwork(\n",
|
|
" (net): Sequential(\n",
|
|
" (0): Linear(in_features=128, out_features=512, bias=True)\n",
|
|
" (1): GELU(approximate='none')\n",
|
|
" (2): Linear(in_features=512, out_features=128, bias=True)\n",
|
|
" )\n",
|
|
" )\n",
|
|
" )\n",
|
|
" )\n",
|
|
" (mlp_head): Sequential(\n",
|
|
" (0): LayerNorm((128,), eps=1e-05, elementwise_affine=True)\n",
|
|
" (1): Linear(in_features=128, out_features=1, bias=True)\n",
|
|
" )\n",
|
|
")"
|
|
]
|
|
},
|
|
"metadata": {},
|
|
"execution_count": 28
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "04067aa1",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"outputId": "7d750185-2690-4a13-c179-c891d5f7c2d1"
|
|
},
|
|
"source": [
|
|
"results = []\n",
|
|
"for i in range(100):\n",
|
|
" game_result = play_models_against_each_other(loaded_chessnet_model, loaded_chessvit_model, device=device)\n",
|
|
" results.append(game_result.result())\n",
|
|
"\n",
|
|
"print(\"ChessNet (White) vs ChessViT (Black):\")\n",
|
|
"print({result: results.count(result) for result in set(results)})\n"
|
|
],
|
|
"execution_count": 29,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"ChessNet (White) vs ChessViT (Black):\n",
|
|
"{'1/2-1/2': 100}\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"metadata": {
|
|
"id": "188527c5",
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/"
|
|
},
|
|
"outputId": "ba5138b9-a47f-4fcd-ab1b-8393649ea24b"
|
|
},
|
|
"source": [
|
|
"results = []\n",
|
|
"for i in range(100):\n",
|
|
" game_result = play_models_against_each_other(loaded_chessvit_model, loaded_chessnet_model, device=device)\n",
|
|
" results.append(game_result.result())\n",
|
|
"\n",
|
|
"print(\"ChessViT (White) vs ChessNet (Black):\")\n",
|
|
"print({result: results.count(result) for result in set(results)})"
|
|
],
|
|
"execution_count": 30,
|
|
"outputs": [
|
|
{
|
|
"output_type": "stream",
|
|
"name": "stdout",
|
|
"text": [
|
|
"ChessViT (White) vs ChessNet (Black):\n",
|
|
"{'1/2-1/2': 100}\n"
|
|
]
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"game_result = play_models_against_each_other(loaded_chessnet_model, loaded_chessvit_model, device=device)\n",
|
|
"game_result\n"
|
|
],
|
|
"metadata": {
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/",
|
|
"height": 411
|
|
},
|
|
"id": "rEyFB14ziH5u",
|
|
"outputId": "0deeccde-97e4-46b4-862f-ebc6f07d64ef"
|
|
},
|
|
"execution_count": 32,
|
|
"outputs": [
|
|
{
|
|
"output_type": "execute_result",
|
|
"data": {
|
|
"text/plain": [
|
|
"Board('2q2bn1/p2kp2r/5p2/p1p1nPp1/1rB1Pp1P/2NP4/2P3K1/R1B5 w - - 18 36')"
|
|
],
|
|
"image/svg+xml": "<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" viewBox=\"0 0 390 390\" width=\"390\" height=\"390\"><desc><pre>. . q . . b n .\np . . k p . . r\n. . . . . p . .\np . p . n P p .\n. r B . P p . P\n. . N P . . . .\n. . P . . . K .\nR . B . . . . .</pre></desc><defs><g id=\"white-pawn\" class=\"white pawn\"><path d=\"M22.5 9c-2.21 0-4 1.79-4 4 0 .89.29 1.71.78 2.38C17.33 16.5 16 18.59 16 21c0 2.03.94 3.84 2.41 5.03-3 1.06-7.41 5.55-7.41 13.47h23c0-7.92-4.41-12.41-7.41-13.47 1.47-1.19 2.41-3 2.41-5.03 0-2.41-1.33-4.5-3.28-5.62.49-.67.78-1.49.78-2.38 0-2.21-1.79-4-4-4z\" fill=\"#fff\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" /></g><g id=\"white-knight\" class=\"white knight\" fill=\"none\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M 22,10 C 32.5,11 38.5,18 38,39 L 15,39 C 15,30 25,32.5 23,18\" style=\"fill:#ffffff; stroke:#000000;\" /><path d=\"M 24,18 C 24.38,20.91 18.45,25.37 16,27 C 13,29 13.18,31.34 11,31 C 9.958,30.06 12.41,27.96 11,28 C 10,28 11.19,29.23 10,30 C 9,30 5.997,31 6,26 C 6,24 12,14 12,14 C 12,14 13.89,12.1 14,10.5 C 13.27,9.506 13.5,8.5 13.5,7.5 C 14.5,6.5 16.5,10 16.5,10 L 18.5,10 C 18.5,10 19.28,8.008 21,7 C 22,7 22,10 22,10\" style=\"fill:#ffffff; stroke:#000000;\" /><path d=\"M 9.5 25.5 A 0.5 0.5 0 1 1 8.5,25.5 A 0.5 0.5 0 1 1 9.5 25.5 z\" style=\"fill:#000000; stroke:#000000;\" /><path d=\"M 15 15.5 A 0.5 1.5 0 1 1 14,15.5 A 0.5 1.5 0 1 1 15 15.5 z\" transform=\"matrix(0.866,0.5,-0.5,0.866,9.693,-5.173)\" style=\"fill:#000000; stroke:#000000;\" /></g><g id=\"white-bishop\" class=\"white bishop\" fill=\"none\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><g fill=\"#fff\" stroke-linecap=\"butt\"><path d=\"M9 36c3.39-.97 10.11.43 13.5-2 3.39 2.43 10.11 1.03 13.5 2 0 0 1.65.54 3 2-.68.97-1.65.99-3 .5-3.39-.97-10.11.46-13.5-1-3.39 1.46-10.11.03-13.5 1-1.354.49-2.323.47-3-.5 1.354-1.94 3-2 3-2zM15 32c2.5 2.5 12.5 2.5 15 0 .5-1.5 0-2 0-2 0-2.5-2.5-4-2.5-4 5.5-1.5 6-11.5-5-15.5-11 4-10.5 14-5 15.5 0 0-2.5 1.5-2.5 4 0 0-.5.5 0 2zM25 8a2.5 2.5 0 1 1-5 0 2.5 2.5 0 1 1 5 0z\" /></g><path d=\"M17.5 26h10M15 30h15m-7.5-14.5v5M20 18h5\" stroke-linejoin=\"miter\" /></g><g id=\"white-rook\" class=\"white rook\" fill=\"#fff\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M9 39h27v-3H9v3zM12 36v-4h21v4H12zM11 14V9h4v2h5V9h5v2h5V9h4v5\" stroke-linecap=\"butt\" /><path d=\"M34 14l-3 3H14l-3-3\" /><path d=\"M31 17v12.5H14V17\" stroke-linecap=\"butt\" stroke-linejoin=\"miter\" /><path d=\"M31 29.5l1.5 2.5h-20l1.5-2.5\" /><path d=\"M11 14h23\" fill=\"none\" stroke-linejoin=\"miter\" /></g><g id=\"white-king\" class=\"white king\" fill=\"none\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M22.5 11.63V6M20 8h5\" stroke-linejoin=\"miter\" /><path d=\"M22.5 25s4.5-7.5 3-10.5c0 0-1-2.5-3-2.5s-3 2.5-3 2.5c-1.5 3 3 10.5 3 10.5\" fill=\"#fff\" stroke-linecap=\"butt\" stroke-linejoin=\"miter\" /><path d=\"M11.5 37c5.5 3.5 15.5 3.5 21 0v-7s9-4.5 6-10.5c-4-6.5-13.5-3.5-16 4V27v-3.5c-3.5-7.5-13-10.5-16-4-3 6 5 10 5 10V37z\" fill=\"#fff\" /><path d=\"M11.5 30c5.5-3 15.5-3 21 0m-21 3.5c5.5-3 15.5-3 21 0m-21 3.5c5.5-3 15.5-3 21 0\" /></g><g id=\"black-pawn\" class=\"black pawn\"><path d=\"M22.5 9c-2.21 0-4 1.79-4 4 0 .89.29 1.71.78 2.38C17.33 16.5 16 18.59 16 21c0 2.03.94 3.84 2.41 5.03-3 1.06-7.41 5.55-7.41 13.47h23c0-7.92-4.41-12.41-7.41-13.47 1.47-1.19 2.41-3 2.41-5.03 0-2.41-1.33-4.5-3.28-5.62.49-.67.78-1.49.78-2.38 0-2.21-1.79-4-4-4z\" fill=\"#000\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" /></g><g id=\"black-knight\" class=\"black knight\" fill=\"none\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M 22,10 C 32.5,11 38.5,18 38,39 L 15,39 C 15,30 25,32.5 23,18\" style=\"fill:#000000; stroke:#000000;\" /><path d=\"M 24,18 C 24.38,20.91 18.45,25.37 16,27 C 13,29 13.18,31.34 11,31 C 9.958,30.06 12.41,27.96 11,28 C 10,28 11.19,29.23 10,30 C 9,30 5.997,31 6,26 C 6,24 12,14 12,14 C 12,14 13.89,12.1 14,10.5 C 13.27,9.506 13.5,8.5 13.5,7.5 C 14.5,6.5 16.5,10 16.5,10 L 18.5,10 C 18.5,10 19.28,8.008 21,7 C 22,7 22,10 22,10\" style=\"fill:#000000; stroke:#000000;\" /><path d=\"M 9.5 25.5 A 0.5 0.5 0 1 1 8.5,25.5 A 0.5 0.5 0 1 1 9.5 25.5 z\" style=\"fill:#ececec; stroke:#ececec;\" /><path d=\"M 15 15.5 A 0.5 1.5 0 1 1 14,15.5 A 0.5 1.5 0 1 1 15 15.5 z\" transform=\"matrix(0.866,0.5,-0.5,0.866,9.693,-5.173)\" style=\"fill:#ececec; stroke:#ececec;\" /><path d=\"M 24.55,10.4 L 24.1,11.85 L 24.6,12 C 27.75,13 30.25,14.49 32.5,18.75 C 34.75,23.01 35.75,29.06 35.25,39 L 35.2,39.5 L 37.45,39.5 L 37.5,39 C 38,28.94 36.62,22.15 34.25,17.66 C 31.88,13.17 28.46,11.02 25.06,10.5 L 24.55,10.4 z \" style=\"fill:#ececec; stroke:none;\" /></g><g id=\"black-bishop\" class=\"black bishop\" fill=\"none\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M9 36c3.39-.97 10.11.43 13.5-2 3.39 2.43 10.11 1.03 13.5 2 0 0 1.65.54 3 2-.68.97-1.65.99-3 .5-3.39-.97-10.11.46-13.5-1-3.39 1.46-10.11.03-13.5 1-1.354.49-2.323.47-3-.5 1.354-1.94 3-2 3-2zm6-4c2.5 2.5 12.5 2.5 15 0 .5-1.5 0-2 0-2 0-2.5-2.5-4-2.5-4 5.5-1.5 6-11.5-5-15.5-11 4-10.5 14-5 15.5 0 0-2.5 1.5-2.5 4 0 0-.5.5 0 2zM25 8a2.5 2.5 0 1 1-5 0 2.5 2.5 0 1 1 5 0z\" fill=\"#000\" stroke-linecap=\"butt\" /><path d=\"M17.5 26h10M15 30h15m-7.5-14.5v5M20 18h5\" stroke=\"#fff\" stroke-linejoin=\"miter\" /></g><g id=\"black-rook\" class=\"black rook\" fill=\"#000\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M9 39h27v-3H9v3zM12.5 32l1.5-2.5h17l1.5 2.5h-20zM12 36v-4h21v4H12z\" stroke-linecap=\"butt\" /><path d=\"M14 29.5v-13h17v13H14z\" stroke-linecap=\"butt\" stroke-linejoin=\"miter\" /><path d=\"M14 16.5L11 14h23l-3 2.5H14zM11 14V9h4v2h5V9h5v2h5V9h4v5H11z\" stroke-linecap=\"butt\" /><path d=\"M12 35.5h21M13 31.5h19M14 29.5h17M14 16.5h17M11 14h23\" fill=\"none\" stroke=\"#fff\" stroke-width=\"1\" stroke-linejoin=\"miter\" /></g><g id=\"black-queen\" class=\"black queen\" fill=\"#000\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><g fill=\"#000\" stroke=\"none\"><circle cx=\"6\" cy=\"12\" r=\"2.75\" /><circle cx=\"14\" cy=\"9\" r=\"2.75\" /><circle cx=\"22.5\" cy=\"8\" r=\"2.75\" /><circle cx=\"31\" cy=\"9\" r=\"2.75\" /><circle cx=\"39\" cy=\"12\" r=\"2.75\" /></g><path d=\"M9 26c8.5-1.5 21-1.5 27 0l2.5-12.5L31 25l-.3-14.1-5.2 13.6-3-14.5-3 14.5-5.2-13.6L14 25 6.5 13.5 9 26zM9 26c0 2 1.5 2 2.5 4 1 1.5 1 1 .5 3.5-1.5 1-1.5 2.5-1.5 2.5-1.5 1.5.5 2.5.5 2.5 6.5 1 16.5 1 23 0 0 0 1.5-1 0-2.5 0 0 .5-1.5-1-2.5-.5-2.5-.5-2 .5-3.5 1-2 2.5-2 2.5-4-8.5-1.5-18.5-1.5-27 0z\" stroke-linecap=\"butt\" /><path d=\"M11 38.5a35 35 1 0 0 23 0\" fill=\"none\" stroke-linecap=\"butt\" /><path d=\"M11 29a35 35 1 0 1 23 0M12.5 31.5h20M11.5 34.5a35 35 1 0 0 22 0M10.5 37.5a35 35 1 0 0 24 0\" fill=\"none\" stroke=\"#fff\" /></g><g id=\"black-king\" class=\"black king\" fill=\"none\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M22.5 11.63V6\" stroke-linejoin=\"miter\" /><path d=\"M22.5 25s4.5-7.5 3-10.5c0 0-1-2.5-3-2.5s-3 2.5-3 2.5c-1.5 3 3 10.5 3 10.5\" fill=\"#000\" stroke-linecap=\"butt\" stroke-linejoin=\"miter\" /><path d=\"M11.5 37c5.5 3.5 15.5 3.5 21 0v-7s9-4.5 6-10.5c-4-6.5-13.5-3.5-16 4V27v-3.5c-3.5-7.5-13-10.5-16-4-3 6 5 10 5 10V37z\" fill=\"#000\" /><path d=\"M20 8h5\" stroke-linejoin=\"miter\" /><path d=\"M32 29.5s8.5-4 6.03-9.65C34.15 14 25 18 22.5 24.5l.01 2.1-.01-2.1C20 18 9.906 14 6.997 19.85c-2.497 5.65 4.853 9 4.853 9M11.5 30c5.5-3 15.5-3 21 0m-21 3.5c5.5-3 15.5-3 21 0m-21 3.5c5.5-3 15.5-3 21 0\" stroke=\"#fff\" /></g></defs><rect x=\"7.5\" y=\"7.5\" width=\"375\" height=\"375\" fill=\"none\" stroke=\"#212121\" stroke-width=\"15\" /><g transform=\"translate(20, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M23.328 10.016q-1.742 0-2.414.398-.672.398-.672 1.36 0 .765.5 1.218.508.445 1.375.445 1.196 0 1.914-.843.727-.852.727-2.258v-.32zm2.867-.594v4.992h-1.437v-1.328q-.492.797-1.227 1.18-.734.375-1.797.375-1.343 0-2.14-.75-.79-.758-.79-2.024 0-1.476.985-2.226.992-.75 2.953-.75h2.016V8.75q0-.992-.656-1.531-.649-.547-1.829-.547-.75 0-1.46.18-.711.18-1.368.539V6.062q.79-.304 1.532-.453.742-.156 1.445-.156 1.898 0 2.836.984.937.985.937 2.985z\" /></g><g transform=\"translate(20, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M23.328 10.016q-1.742 0-2.414.398-.672.398-.672 1.36 0 .765.5 1.218.508.445 1.375.445 1.196 0 1.914-.843.727-.852.727-2.258v-.32zm2.867-.594v4.992h-1.437v-1.328q-.492.797-1.227 1.18-.734.375-1.797.375-1.343 0-2.14-.75-.79-.758-.79-2.024 0-1.476.985-2.226.992-.75 2.953-.75h2.016V8.75q0-.992-.656-1.531-.649-.547-1.829-.547-.75 0-1.46.18-.711.18-1.368.539V6.062q.79-.304 1.532-.453.742-.156 1.445-.156 1.898 0 2.836.984.937.985.937 2.985z\" /></g><g transform=\"translate(65, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M24.922 10.047q0-1.586-.656-2.485-.649-.906-1.79-.906-1.14 0-1.796.906-.649.899-.649 2.485 0 1.586.649 2.492.656.898 1.797.898 1.14 0 1.789-.898.656-.906.656-2.492zm-4.89-3.055q.452-.781 1.14-1.156.695-.383 1.656-.383 1.594 0 2.586 1.266 1 1.265 1 3.328 0 2.062-1 3.328-.992 1.266-2.586 1.266-.96 0-1.656-.375-.688-.383-1.14-1.164v1.312h-1.446V2.258h1.445z\" /></g><g transform=\"translate(65, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M24.922 10.047q0-1.586-.656-2.485-.649-.906-1.79-.906-1.14 0-1.796.906-.649.899-.649 2.485 0 1.586.649 2.492.656.898 1.797.898 1.14 0 1.789-.898.656-.906.656-2.492zm-4.89-3.055q.452-.781 1.14-1.156.695-.383 1.656-.383 1.594 0 2.586 1.266 1 1.265 1 3.328 0 2.062-1 3.328-.992 1.266-2.586 1.266-.96 0-1.656-.375-.688-.383-1.14-1.164v1.312h-1.446V2.258h1.445z\" /></g><g transform=\"translate(110, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M25.96 6v1.344q-.608-.336-1.226-.5-.609-.172-1.234-.172-1.398 0-2.172.89-.773.883-.773 2.485 0 1.601.773 2.492.774.883 2.172.883.625 0 1.234-.164.618-.172 1.227-.508v1.328q-.602.281-1.25.422-.64.14-1.367.14-1.977 0-3.14-1.242-1.165-1.242-1.165-3.351 0-2.14 1.172-3.367 1.18-1.227 3.227-1.227.664 0 1.296.14.633.134 1.227.407z\" /></g><g transform=\"translate(110, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M25.96 6v1.344q-.608-.336-1.226-.5-.609-.172-1.234-.172-1.398 0-2.172.89-.773.883-.773 2.485 0 1.601.773 2.492.774.883 2.172.883.625 0 1.234-.164.618-.172 1.227-.508v1.328q-.602.281-1.25.422-.64.14-1.367.14-1.977 0-3.14-1.242-1.165-1.242-1.165-3.351 0-2.14 1.172-3.367 1.18-1.227 3.227-1.227.664 0 1.296.14.633.134 1.227.407z\" /></g><g transform=\"translate(155, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M24.973 6.992V2.258h1.437v12.156h-1.437v-1.312q-.453.78-1.149 1.164-.687.375-1.656.375-1.586 0-2.586-1.266-.992-1.266-.992-3.328 0-2.063.992-3.328 1-1.266 2.586-1.266.969 0 1.656.383.696.375 1.149 1.156zm-4.899 3.055q0 1.586.649 2.492.656.898 1.797.898 1.14 0 1.796-.898.657-.906.657-2.492 0-1.586-.657-2.485-.656-.906-1.796-.906-1.141 0-1.797.906-.649.899-.649 2.485z\" /></g><g transform=\"translate(155, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M24.973 6.992V2.258h1.437v12.156h-1.437v-1.312q-.453.78-1.149 1.164-.687.375-1.656.375-1.586 0-2.586-1.266-.992-1.266-.992-3.328 0-2.063.992-3.328 1-1.266 2.586-1.266.969 0 1.656.383.696.375 1.149 1.156zm-4.899 3.055q0 1.586.649 2.492.656.898 1.797.898 1.14 0 1.796-.898.657-.906.657-2.492 0-1.586-.657-2.485-.656-.906-1.796-.906-1.141 0-1.797.906-.649.899-.649 2.485z\" /></g><g transform=\"translate(200, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M26.555 9.68v.703h-6.61q.094 1.484.89 2.265.806.774 2.235.774.828 0 1.602-.203.781-.203 1.547-.61v1.36q-.774.328-1.586.5-.813.172-1.649.172-2.093 0-3.32-1.22-1.219-1.218-1.219-3.296 0-2.148 1.157-3.406 1.164-1.266 3.132-1.266 1.766 0 2.79 1.14 1.03 1.134 1.03 3.087zm-1.438-.422q-.015-1.18-.664-1.883-.64-.703-1.703-.703-1.203 0-1.93.68-.718.68-.828 1.914z\" /></g><g transform=\"translate(200, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M26.555 9.68v.703h-6.61q.094 1.484.89 2.265.806.774 2.235.774.828 0 1.602-.203.781-.203 1.547-.61v1.36q-.774.328-1.586.5-.813.172-1.649.172-2.093 0-3.32-1.22-1.219-1.218-1.219-3.296 0-2.148 1.157-3.406 1.164-1.266 3.132-1.266 1.766 0 2.79 1.14 1.03 1.134 1.03 3.087zm-1.438-.422q-.015-1.18-.664-1.883-.64-.703-1.703-.703-1.203 0-1.93.68-.718.68-.828 1.914z\" /></g><g transform=\"translate(245, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M25.285 2.258v1.195H23.91q-.773 0-1.078.313-.297.312-.297 1.125v.773h2.367v1.117h-2.367v7.633H21.09V6.781h-1.375V5.664h1.375v-.61q0-1.46.68-2.124.68-.672 2.156-.672z\" /></g><g transform=\"translate(245, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M25.285 2.258v1.195H23.91q-.773 0-1.078.313-.297.312-.297 1.125v.773h2.367v1.117h-2.367v7.633H21.09V6.781h-1.375V5.664h1.375v-.61q0-1.46.68-2.124.68-.672 2.156-.672z\" /></g><g transform=\"translate(290, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M24.973 9.937q0-1.562-.649-2.421-.64-.86-1.804-.86-1.157 0-1.805.86-.64.859-.64 2.421 0 1.555.64 2.415.648.859 1.805.859 1.164 0 1.804-.86.649-.859.649-2.414zm1.437 3.391q0 2.234-.992 3.32-.992 1.094-3.04 1.094-.757 0-1.429-.117-.672-.11-1.304-.344v-1.398q.632.344 1.25.508.617.164 1.257.164 1.414 0 2.118-.743.703-.734.703-2.226v-.711q-.446.773-1.141 1.156-.695.383-1.664.383-1.61 0-2.594-1.227-.984-1.226-.984-3.25 0-2.03.984-3.257.985-1.227 2.594-1.227.969 0 1.664.383t1.14 1.156V5.664h1.438z\" /></g><g transform=\"translate(290, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M24.973 9.937q0-1.562-.649-2.421-.64-.86-1.804-.86-1.157 0-1.805.86-.64.859-.64 2.421 0 1.555.64 2.415.648.859 1.805.859 1.164 0 1.804-.86.649-.859.649-2.414zm1.437 3.391q0 2.234-.992 3.32-.992 1.094-3.04 1.094-.757 0-1.429-.117-.672-.11-1.304-.344v-1.398q.632.344 1.25.508.617.164 1.257.164 1.414 0 2.118-.743.703-.734.703-2.226v-.711q-.446.773-1.141 1.156-.695.383-1.664.383-1.61 0-2.594-1.227-.984-1.226-.984-3.25 0-2.03.984-3.257.985-1.227 2.594-1.227.969 0 1.664.383t1.14 1.156V5.664h1.438z\" /></g><g transform=\"translate(335, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M26.164 9.133v5.281h-1.437V9.18q0-1.243-.485-1.86-.484-.617-1.453-.617-1.164 0-1.836.742-.672.742-.672 2.024v4.945h-1.445V2.258h1.445v4.765q.516-.789 1.211-1.18.703-.39 1.617-.39 1.508 0 2.282.938.773.93.773 2.742z\" /></g><g transform=\"translate(335, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M26.164 9.133v5.281h-1.437V9.18q0-1.243-.485-1.86-.484-.617-1.453-.617-1.164 0-1.836.742-.672.742-.672 2.024v4.945h-1.445V2.258h1.445v4.765q.516-.789 1.211-1.18.703-.39 1.617-.39 1.508 0 2.282.938.773.93.773 2.742z\" /></g><g transform=\"translate(0, 335) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M6.754 26.996h2.578v-8.898l-2.805.562v-1.437l2.79-.563h1.578v10.336h2.578v1.328h-6.72z\" /></g><g transform=\"translate(375, 335) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M6.754 26.996h2.578v-8.898l-2.805.562v-1.437l2.79-.563h1.578v10.336h2.578v1.328h-6.72z\" /></g><g transform=\"translate(0, 290) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M8.195 26.996h5.508v1.328H6.297v-1.328q.898-.93 2.445-2.492 1.555-1.57 1.953-2.024.758-.851 1.055-1.437.305-.594.305-1.164 0-.93-.657-1.516-.648-.586-1.695-.586-.742 0-1.57.258-.82.258-1.758.781v-1.593q.953-.383 1.781-.578.828-.196 1.516-.196 1.812 0 2.89.906 1.079.907 1.079 2.422 0 .72-.274 1.368-.265.64-.976 1.515-.196.227-1.243 1.313-1.046 1.078-2.953 3.023z\" /></g><g transform=\"translate(375, 290) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M8.195 26.996h5.508v1.328H6.297v-1.328q.898-.93 2.445-2.492 1.555-1.57 1.953-2.024.758-.851 1.055-1.437.305-.594.305-1.164 0-.93-.657-1.516-.648-.586-1.695-.586-.742 0-1.57.258-.82.258-1.758.781v-1.593q.953-.383 1.781-.578.828-.196 1.516-.196 1.812 0 2.89.906 1.079.907 1.079 2.422 0 .72-.274 1.368-.265.64-.976 1.515-.196.227-1.243 1.313-1.046 1.078-2.953 3.023z\" /></g><g transform=\"translate(0, 245) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M11.434 22.035q1.132.242 1.765 1.008.64.766.64 1.89 0 1.727-1.187 2.672-1.187.946-3.375.946-.734 0-1.515-.149-.774-.14-1.602-.43V26.45q.656.383 1.438.578.78.196 1.632.196 1.485 0 2.258-.586.782-.586.782-1.703 0-1.032-.727-1.61-.719-.586-2.008-.586h-1.36v-1.297h1.423q1.164 0 1.78-.46.618-.47.618-1.344 0-.899-.64-1.375-.633-.485-1.82-.485-.65 0-1.391.141-.743.14-1.633.437V16.95q.898-.25 1.68-.375.788-.125 1.484-.125 1.797 0 2.844.82 1.046.813 1.046 2.204 0 .968-.554 1.64-.555.664-1.578.922z\" /></g><g transform=\"translate(375, 245) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M11.434 22.035q1.132.242 1.765 1.008.64.766.64 1.89 0 1.727-1.187 2.672-1.187.946-3.375.946-.734 0-1.515-.149-.774-.14-1.602-.43V26.45q.656.383 1.438.578.78.196 1.632.196 1.485 0 2.258-.586.782-.586.782-1.703 0-1.032-.727-1.61-.719-.586-2.008-.586h-1.36v-1.297h1.423q1.164 0 1.78-.46.618-.47.618-1.344 0-.899-.64-1.375-.633-.485-1.82-.485-.65 0-1.391.141-.743.14-1.633.437V16.95q.898-.25 1.68-.375.788-.125 1.484-.125 1.797 0 2.844.82 1.046.813 1.046 2.204 0 .968-.554 1.64-.555.664-1.578.922z\" /></g><g transform=\"translate(0, 200) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M11.016 18.035L7.03 24.262h3.985zm-.414-1.375h1.984v7.602h1.664v1.312h-1.664v2.75h-1.57v-2.75H5.75v-1.523z\" /></g><g transform=\"translate(375, 200) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M11.016 18.035L7.03 24.262h3.985zm-.414-1.375h1.984v7.602h1.664v1.312h-1.664v2.75h-1.57v-2.75H5.75v-1.523z\" /></g><g transform=\"translate(0, 155) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M6.719 16.66h6.195v1.328h-4.75v2.86q.344-.118.688-.172.343-.063.687-.063 1.953 0 3.094 1.07 1.14 1.07 1.14 2.899 0 1.883-1.171 2.93-1.172 1.039-3.305 1.039-.735 0-1.5-.125-.758-.125-1.57-.375v-1.586q.703.383 1.453.57.75.188 1.586.188 1.351 0 2.14-.711.79-.711.79-1.93 0-1.219-.79-1.93-.789-.71-2.14-.71-.633 0-1.266.14-.625.14-1.281.438z\" /></g><g transform=\"translate(375, 155) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M6.719 16.66h6.195v1.328h-4.75v2.86q.344-.118.688-.172.343-.063.687-.063 1.953 0 3.094 1.07 1.14 1.07 1.14 2.899 0 1.883-1.171 2.93-1.172 1.039-3.305 1.039-.735 0-1.5-.125-.758-.125-1.57-.375v-1.586q.703.383 1.453.57.75.188 1.586.188 1.351 0 2.14-.711.79-.711.79-1.93 0-1.219-.79-1.93-.789-.71-2.14-.71-.633 0-1.266.14-.625.14-1.281.438z\" /></g><g transform=\"translate(0, 110) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M10.137 21.863q-1.063 0-1.688.727-.617.726-.617 1.992 0 1.258.617 1.992.625.727 1.688.727 1.062 0 1.68-.727.624-.734.624-1.992 0-1.266-.625-1.992-.617-.727-1.68-.727zm3.133-4.945v1.437q-.594-.28-1.204-.43-.601-.148-1.195-.148-1.562 0-2.39 1.055-.82 1.055-.938 3.188.46-.68 1.156-1.04.696-.367 1.531-.367 1.758 0 2.774 1.07 1.023 1.063 1.023 2.899 0 1.797-1.062 2.883-1.063 1.086-2.828 1.086-2.024 0-3.094-1.547-1.07-1.555-1.07-4.5 0-2.766 1.312-4.406 1.313-1.649 3.524-1.649.593 0 1.195.117.61.118 1.266.352z\" /></g><g transform=\"translate(375, 110) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M10.137 21.863q-1.063 0-1.688.727-.617.726-.617 1.992 0 1.258.617 1.992.625.727 1.688.727 1.062 0 1.68-.727.624-.734.624-1.992 0-1.266-.625-1.992-.617-.727-1.68-.727zm3.133-4.945v1.437q-.594-.28-1.204-.43-.601-.148-1.195-.148-1.562 0-2.39 1.055-.82 1.055-.938 3.188.46-.68 1.156-1.04.696-.367 1.531-.367 1.758 0 2.774 1.07 1.023 1.063 1.023 2.899 0 1.797-1.062 2.883-1.063 1.086-2.828 1.086-2.024 0-3.094-1.547-1.07-1.555-1.07-4.5 0-2.766 1.312-4.406 1.313-1.649 3.524-1.649.593 0 1.195.117.61.118 1.266.352z\" /></g><g transform=\"translate(0, 65) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M6.25 16.66h7.5v.672L9.516 28.324H7.867l3.985-10.336H6.25z\" /></g><g transform=\"translate(375, 65) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M6.25 16.66h7.5v.672L9.516 28.324H7.867l3.985-10.336H6.25z\" /></g><g transform=\"translate(0, 20) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M10 22.785q-1.125 0-1.773.602-.641.601-.641 1.656t.64 1.656q.649.602 1.774.602t1.773-.602q.649-.61.649-1.656 0-1.055-.649-1.656-.64-.602-1.773-.602zm-1.578-.672q-1.016-.25-1.586-.945-.563-.695-.563-1.695 0-1.399.993-2.211 1-.813 2.734-.813 1.742 0 2.734.813.993.812.993 2.21 0 1-.57 1.696-.563.695-1.571.945 1.14.266 1.773 1.04.641.773.641 1.89 0 1.695-1.04 2.602-1.03.906-2.96.906t-2.969-.906Q6 26.738 6 25.043q0-1.117.64-1.89.641-.774 1.782-1.04zm-.578-2.492q0 .906.562 1.414.57.508 1.594.508 1.016 0 1.586-.508.578-.508.578-1.414 0-.906-.578-1.414-.57-.508-1.586-.508-1.023 0-1.594.508-.562.508-.562 1.414z\" /></g><g transform=\"translate(375, 20) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M10 22.785q-1.125 0-1.773.602-.641.601-.641 1.656t.64 1.656q.649.602 1.774.602t1.773-.602q.649-.61.649-1.656 0-1.055-.649-1.656-.64-.602-1.773-.602zm-1.578-.672q-1.016-.25-1.586-.945-.563-.695-.563-1.695 0-1.399.993-2.211 1-.813 2.734-.813 1.742 0 2.734.813.993.812.993 2.21 0 1-.57 1.696-.563.695-1.571.945 1.14.266 1.773 1.04.641.773.641 1.89 0 1.695-1.04 2.602-1.03.906-2.96.906t-2.969-.906Q6 26.738 6 25.043q0-1.117.64-1.89.641-.774 1.782-1.04zm-.578-2.492q0 .906.562 1.414.57.508 1.594.508 1.016 0 1.586-.508.578-.508.578-1.414 0-.906-.578-1.414-.57-.508-1.586-.508-1.023 0-1.594.508-.562.508-.562 1.414z\" /></g><rect x=\"15\" y=\"330\" width=\"45\" height=\"45\" class=\"square dark a1\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"60\" y=\"330\" width=\"45\" height=\"45\" class=\"square light b1\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"105\" y=\"330\" width=\"45\" height=\"45\" class=\"square dark c1\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"150\" y=\"330\" width=\"45\" height=\"45\" class=\"square light d1\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"195\" y=\"330\" width=\"45\" height=\"45\" class=\"square dark e1\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"240\" y=\"330\" width=\"45\" height=\"45\" class=\"square light f1\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"285\" y=\"330\" width=\"45\" height=\"45\" class=\"square dark g1\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"330\" y=\"330\" width=\"45\" height=\"45\" class=\"square light h1\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"15\" y=\"285\" width=\"45\" height=\"45\" class=\"square light a2\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"60\" y=\"285\" width=\"45\" height=\"45\" class=\"square dark b2\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"105\" y=\"285\" width=\"45\" height=\"45\" class=\"square light c2\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"150\" y=\"285\" width=\"45\" height=\"45\" class=\"square dark d2\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"195\" y=\"285\" width=\"45\" height=\"45\" class=\"square light e2\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"240\" y=\"285\" width=\"45\" height=\"45\" class=\"square dark f2\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"285\" y=\"285\" width=\"45\" height=\"45\" class=\"square light g2\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"330\" y=\"285\" width=\"45\" height=\"45\" class=\"square dark h2\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"15\" y=\"240\" width=\"45\" height=\"45\" class=\"square dark a3\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"60\" y=\"240\" width=\"45\" height=\"45\" class=\"square light b3\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"105\" y=\"240\" width=\"45\" height=\"45\" class=\"square dark c3\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"150\" y=\"240\" width=\"45\" height=\"45\" class=\"square light d3\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"195\" y=\"240\" width=\"45\" height=\"45\" class=\"square dark e3\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"240\" y=\"240\" width=\"45\" height=\"45\" class=\"square light f3\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"285\" y=\"240\" width=\"45\" height=\"45\" class=\"square dark g3\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"330\" y=\"240\" width=\"45\" height=\"45\" class=\"square light h3\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"15\" y=\"195\" width=\"45\" height=\"45\" class=\"square light a4\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"60\" y=\"195\" width=\"45\" height=\"45\" class=\"square dark b4\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"105\" y=\"195\" width=\"45\" height=\"45\" class=\"square light c4\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"150\" y=\"195\" width=\"45\" height=\"45\" class=\"square dark d4\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"195\" y=\"195\" width=\"45\" height=\"45\" class=\"square light e4\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"240\" y=\"195\" width=\"45\" height=\"45\" class=\"square dark f4\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"285\" y=\"195\" width=\"45\" height=\"45\" class=\"square light g4\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"330\" y=\"195\" width=\"45\" height=\"45\" class=\"square dark h4\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"15\" y=\"150\" width=\"45\" height=\"45\" class=\"square dark a5\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"60\" y=\"150\" width=\"45\" height=\"45\" class=\"square light b5\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"105\" y=\"150\" width=\"45\" height=\"45\" class=\"square dark c5\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"150\" y=\"150\" width=\"45\" height=\"45\" class=\"square light d5\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"195\" y=\"150\" width=\"45\" height=\"45\" class=\"square dark e5\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"240\" y=\"150\" width=\"45\" height=\"45\" class=\"square light f5\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"285\" y=\"150\" width=\"45\" height=\"45\" class=\"square dark g5\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"330\" y=\"150\" width=\"45\" height=\"45\" class=\"square light h5\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"15\" y=\"105\" width=\"45\" height=\"45\" class=\"square light a6\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"60\" y=\"105\" width=\"45\" height=\"45\" class=\"square dark b6\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"105\" y=\"105\" width=\"45\" height=\"45\" class=\"square light c6\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"150\" y=\"105\" width=\"45\" height=\"45\" class=\"square dark d6\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"195\" y=\"105\" width=\"45\" height=\"45\" class=\"square light e6\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"240\" y=\"105\" width=\"45\" height=\"45\" class=\"square dark f6\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"285\" y=\"105\" width=\"45\" height=\"45\" class=\"square light g6\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"330\" y=\"105\" width=\"45\" height=\"45\" class=\"square dark lastmove h6\" stroke=\"none\" fill=\"#aaa23b\" /><rect x=\"15\" y=\"60\" width=\"45\" height=\"45\" class=\"square dark a7\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"60\" y=\"60\" width=\"45\" height=\"45\" class=\"square light b7\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"105\" y=\"60\" width=\"45\" height=\"45\" class=\"square dark c7\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"150\" y=\"60\" width=\"45\" height=\"45\" class=\"square light d7\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"195\" y=\"60\" width=\"45\" height=\"45\" class=\"square dark e7\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"240\" y=\"60\" width=\"45\" height=\"45\" class=\"square light f7\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"285\" y=\"60\" width=\"45\" height=\"45\" class=\"square dark g7\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"330\" y=\"60\" width=\"45\" height=\"45\" class=\"square light h7\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"15\" y=\"15\" width=\"45\" height=\"45\" class=\"square light a8\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"60\" y=\"15\" width=\"45\" height=\"45\" class=\"square dark b8\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"105\" y=\"15\" width=\"45\" height=\"45\" class=\"square light c8\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"150\" y=\"15\" width=\"45\" height=\"45\" class=\"square dark d8\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"195\" y=\"15\" width=\"45\" height=\"45\" class=\"square light e8\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"240\" y=\"15\" width=\"45\" height=\"45\" class=\"square dark lastmove f8\" stroke=\"none\" fill=\"#aaa23b\" /><rect x=\"285\" y=\"15\" width=\"45\" height=\"45\" class=\"square light g8\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"330\" y=\"15\" width=\"45\" height=\"45\" class=\"square dark h8\" stroke=\"none\" fill=\"#d18b47\" /><use href=\"#white-rook\" xlink:href=\"#white-rook\" transform=\"translate(15, 330)\" /><use href=\"#white-bishop\" xlink:href=\"#white-bishop\" transform=\"translate(105, 330)\" /><use href=\"#white-pawn\" xlink:href=\"#white-pawn\" transform=\"translate(105, 285)\" /><use href=\"#white-king\" xlink:href=\"#white-king\" transform=\"translate(285, 285)\" /><use href=\"#white-knight\" xlink:href=\"#white-knight\" transform=\"translate(105, 240)\" /><use href=\"#white-pawn\" xlink:href=\"#white-pawn\" transform=\"translate(150, 240)\" /><use href=\"#black-rook\" xlink:href=\"#black-rook\" transform=\"translate(60, 195)\" /><use href=\"#white-bishop\" xlink:href=\"#white-bishop\" transform=\"translate(105, 195)\" /><use href=\"#white-pawn\" xlink:href=\"#white-pawn\" transform=\"translate(195, 195)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(240, 195)\" /><use href=\"#white-pawn\" xlink:href=\"#white-pawn\" transform=\"translate(330, 195)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(15, 150)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(105, 150)\" /><use href=\"#black-knight\" xlink:href=\"#black-knight\" transform=\"translate(195, 150)\" /><use href=\"#white-pawn\" xlink:href=\"#white-pawn\" transform=\"translate(240, 150)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(285, 150)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(240, 105)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(15, 60)\" /><use href=\"#black-king\" xlink:href=\"#black-king\" transform=\"translate(150, 60)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(195, 60)\" /><use href=\"#black-rook\" xlink:href=\"#black-rook\" transform=\"translate(330, 60)\" /><use href=\"#black-queen\" xlink:href=\"#black-queen\" transform=\"translate(105, 15)\" /><use href=\"#black-bishop\" xlink:href=\"#black-bishop\" transform=\"translate(240, 15)\" /><use href=\"#black-knight\" xlink:href=\"#black-knight\" transform=\"translate(285, 15)\" /></svg>"
|
|
},
|
|
"metadata": {},
|
|
"execution_count": 32
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"cell_type": "code",
|
|
"source": [
|
|
"game_result = play_models_against_each_other(loaded_chessvit_model, loaded_chessnet_model, device=device)\n",
|
|
"game_result\n"
|
|
],
|
|
"metadata": {
|
|
"colab": {
|
|
"base_uri": "https://localhost:8080/",
|
|
"height": 411
|
|
},
|
|
"id": "6S1JgndCiYsV",
|
|
"outputId": "419c73a9-659c-414f-9392-38c4da335d22"
|
|
},
|
|
"execution_count": 33,
|
|
"outputs": [
|
|
{
|
|
"output_type": "execute_result",
|
|
"data": {
|
|
"text/plain": [
|
|
"Board('1nb3nr/2ppkppp/4p3/r1b5/p6q/1PPKPNPP/3PBP2/RNB1Q2R b - - 16 21')"
|
|
],
|
|
"image/svg+xml": "<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" viewBox=\"0 0 390 390\" width=\"390\" height=\"390\"><desc><pre>. n b . . . n r\n. . p p k p p p\n. . . . p . . .\nr . b . . . . .\np . . . . . . q\n. P P K P N P P\n. . . P B P . .\nR N B . Q . . R</pre></desc><defs><g id=\"white-pawn\" class=\"white pawn\"><path d=\"M22.5 9c-2.21 0-4 1.79-4 4 0 .89.29 1.71.78 2.38C17.33 16.5 16 18.59 16 21c0 2.03.94 3.84 2.41 5.03-3 1.06-7.41 5.55-7.41 13.47h23c0-7.92-4.41-12.41-7.41-13.47 1.47-1.19 2.41-3 2.41-5.03 0-2.41-1.33-4.5-3.28-5.62.49-.67.78-1.49.78-2.38 0-2.21-1.79-4-4-4z\" fill=\"#fff\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" /></g><g id=\"white-knight\" class=\"white knight\" fill=\"none\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M 22,10 C 32.5,11 38.5,18 38,39 L 15,39 C 15,30 25,32.5 23,18\" style=\"fill:#ffffff; stroke:#000000;\" /><path d=\"M 24,18 C 24.38,20.91 18.45,25.37 16,27 C 13,29 13.18,31.34 11,31 C 9.958,30.06 12.41,27.96 11,28 C 10,28 11.19,29.23 10,30 C 9,30 5.997,31 6,26 C 6,24 12,14 12,14 C 12,14 13.89,12.1 14,10.5 C 13.27,9.506 13.5,8.5 13.5,7.5 C 14.5,6.5 16.5,10 16.5,10 L 18.5,10 C 18.5,10 19.28,8.008 21,7 C 22,7 22,10 22,10\" style=\"fill:#ffffff; stroke:#000000;\" /><path d=\"M 9.5 25.5 A 0.5 0.5 0 1 1 8.5,25.5 A 0.5 0.5 0 1 1 9.5 25.5 z\" style=\"fill:#000000; stroke:#000000;\" /><path d=\"M 15 15.5 A 0.5 1.5 0 1 1 14,15.5 A 0.5 1.5 0 1 1 15 15.5 z\" transform=\"matrix(0.866,0.5,-0.5,0.866,9.693,-5.173)\" style=\"fill:#000000; stroke:#000000;\" /></g><g id=\"white-bishop\" class=\"white bishop\" fill=\"none\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><g fill=\"#fff\" stroke-linecap=\"butt\"><path d=\"M9 36c3.39-.97 10.11.43 13.5-2 3.39 2.43 10.11 1.03 13.5 2 0 0 1.65.54 3 2-.68.97-1.65.99-3 .5-3.39-.97-10.11.46-13.5-1-3.39 1.46-10.11.03-13.5 1-1.354.49-2.323.47-3-.5 1.354-1.94 3-2 3-2zM15 32c2.5 2.5 12.5 2.5 15 0 .5-1.5 0-2 0-2 0-2.5-2.5-4-2.5-4 5.5-1.5 6-11.5-5-15.5-11 4-10.5 14-5 15.5 0 0-2.5 1.5-2.5 4 0 0-.5.5 0 2zM25 8a2.5 2.5 0 1 1-5 0 2.5 2.5 0 1 1 5 0z\" /></g><path d=\"M17.5 26h10M15 30h15m-7.5-14.5v5M20 18h5\" stroke-linejoin=\"miter\" /></g><g id=\"white-rook\" class=\"white rook\" fill=\"#fff\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M9 39h27v-3H9v3zM12 36v-4h21v4H12zM11 14V9h4v2h5V9h5v2h5V9h4v5\" stroke-linecap=\"butt\" /><path d=\"M34 14l-3 3H14l-3-3\" /><path d=\"M31 17v12.5H14V17\" stroke-linecap=\"butt\" stroke-linejoin=\"miter\" /><path d=\"M31 29.5l1.5 2.5h-20l1.5-2.5\" /><path d=\"M11 14h23\" fill=\"none\" stroke-linejoin=\"miter\" /></g><g id=\"white-queen\" class=\"white queen\" fill=\"#fff\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M8 12a2 2 0 1 1-4 0 2 2 0 1 1 4 0zM24.5 7.5a2 2 0 1 1-4 0 2 2 0 1 1 4 0zM41 12a2 2 0 1 1-4 0 2 2 0 1 1 4 0zM16 8.5a2 2 0 1 1-4 0 2 2 0 1 1 4 0zM33 9a2 2 0 1 1-4 0 2 2 0 1 1 4 0z\" /><path d=\"M9 26c8.5-1.5 21-1.5 27 0l2-12-7 11V11l-5.5 13.5-3-15-3 15-5.5-14V25L7 14l2 12zM9 26c0 2 1.5 2 2.5 4 1 1.5 1 1 .5 3.5-1.5 1-1.5 2.5-1.5 2.5-1.5 1.5.5 2.5.5 2.5 6.5 1 16.5 1 23 0 0 0 1.5-1 0-2.5 0 0 .5-1.5-1-2.5-.5-2.5-.5-2 .5-3.5 1-2 2.5-2 2.5-4-8.5-1.5-18.5-1.5-27 0z\" stroke-linecap=\"butt\" /><path d=\"M11.5 30c3.5-1 18.5-1 22 0M12 33.5c6-1 15-1 21 0\" fill=\"none\" /></g><g id=\"white-king\" class=\"white king\" fill=\"none\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M22.5 11.63V6M20 8h5\" stroke-linejoin=\"miter\" /><path d=\"M22.5 25s4.5-7.5 3-10.5c0 0-1-2.5-3-2.5s-3 2.5-3 2.5c-1.5 3 3 10.5 3 10.5\" fill=\"#fff\" stroke-linecap=\"butt\" stroke-linejoin=\"miter\" /><path d=\"M11.5 37c5.5 3.5 15.5 3.5 21 0v-7s9-4.5 6-10.5c-4-6.5-13.5-3.5-16 4V27v-3.5c-3.5-7.5-13-10.5-16-4-3 6 5 10 5 10V37z\" fill=\"#fff\" /><path d=\"M11.5 30c5.5-3 15.5-3 21 0m-21 3.5c5.5-3 15.5-3 21 0m-21 3.5c5.5-3 15.5-3 21 0\" /></g><g id=\"black-pawn\" class=\"black pawn\"><path d=\"M22.5 9c-2.21 0-4 1.79-4 4 0 .89.29 1.71.78 2.38C17.33 16.5 16 18.59 16 21c0 2.03.94 3.84 2.41 5.03-3 1.06-7.41 5.55-7.41 13.47h23c0-7.92-4.41-12.41-7.41-13.47 1.47-1.19 2.41-3 2.41-5.03 0-2.41-1.33-4.5-3.28-5.62.49-.67.78-1.49.78-2.38 0-2.21-1.79-4-4-4z\" fill=\"#000\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" /></g><g id=\"black-knight\" class=\"black knight\" fill=\"none\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M 22,10 C 32.5,11 38.5,18 38,39 L 15,39 C 15,30 25,32.5 23,18\" style=\"fill:#000000; stroke:#000000;\" /><path d=\"M 24,18 C 24.38,20.91 18.45,25.37 16,27 C 13,29 13.18,31.34 11,31 C 9.958,30.06 12.41,27.96 11,28 C 10,28 11.19,29.23 10,30 C 9,30 5.997,31 6,26 C 6,24 12,14 12,14 C 12,14 13.89,12.1 14,10.5 C 13.27,9.506 13.5,8.5 13.5,7.5 C 14.5,6.5 16.5,10 16.5,10 L 18.5,10 C 18.5,10 19.28,8.008 21,7 C 22,7 22,10 22,10\" style=\"fill:#000000; stroke:#000000;\" /><path d=\"M 9.5 25.5 A 0.5 0.5 0 1 1 8.5,25.5 A 0.5 0.5 0 1 1 9.5 25.5 z\" style=\"fill:#ececec; stroke:#ececec;\" /><path d=\"M 15 15.5 A 0.5 1.5 0 1 1 14,15.5 A 0.5 1.5 0 1 1 15 15.5 z\" transform=\"matrix(0.866,0.5,-0.5,0.866,9.693,-5.173)\" style=\"fill:#ececec; stroke:#ececec;\" /><path d=\"M 24.55,10.4 L 24.1,11.85 L 24.6,12 C 27.75,13 30.25,14.49 32.5,18.75 C 34.75,23.01 35.75,29.06 35.25,39 L 35.2,39.5 L 37.45,39.5 L 37.5,39 C 38,28.94 36.62,22.15 34.25,17.66 C 31.88,13.17 28.46,11.02 25.06,10.5 L 24.55,10.4 z \" style=\"fill:#ececec; stroke:none;\" /></g><g id=\"black-bishop\" class=\"black bishop\" fill=\"none\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M9 36c3.39-.97 10.11.43 13.5-2 3.39 2.43 10.11 1.03 13.5 2 0 0 1.65.54 3 2-.68.97-1.65.99-3 .5-3.39-.97-10.11.46-13.5-1-3.39 1.46-10.11.03-13.5 1-1.354.49-2.323.47-3-.5 1.354-1.94 3-2 3-2zm6-4c2.5 2.5 12.5 2.5 15 0 .5-1.5 0-2 0-2 0-2.5-2.5-4-2.5-4 5.5-1.5 6-11.5-5-15.5-11 4-10.5 14-5 15.5 0 0-2.5 1.5-2.5 4 0 0-.5.5 0 2zM25 8a2.5 2.5 0 1 1-5 0 2.5 2.5 0 1 1 5 0z\" fill=\"#000\" stroke-linecap=\"butt\" /><path d=\"M17.5 26h10M15 30h15m-7.5-14.5v5M20 18h5\" stroke=\"#fff\" stroke-linejoin=\"miter\" /></g><g id=\"black-rook\" class=\"black rook\" fill=\"#000\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M9 39h27v-3H9v3zM12.5 32l1.5-2.5h17l1.5 2.5h-20zM12 36v-4h21v4H12z\" stroke-linecap=\"butt\" /><path d=\"M14 29.5v-13h17v13H14z\" stroke-linecap=\"butt\" stroke-linejoin=\"miter\" /><path d=\"M14 16.5L11 14h23l-3 2.5H14zM11 14V9h4v2h5V9h5v2h5V9h4v5H11z\" stroke-linecap=\"butt\" /><path d=\"M12 35.5h21M13 31.5h19M14 29.5h17M14 16.5h17M11 14h23\" fill=\"none\" stroke=\"#fff\" stroke-width=\"1\" stroke-linejoin=\"miter\" /></g><g id=\"black-queen\" class=\"black queen\" fill=\"#000\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><g fill=\"#000\" stroke=\"none\"><circle cx=\"6\" cy=\"12\" r=\"2.75\" /><circle cx=\"14\" cy=\"9\" r=\"2.75\" /><circle cx=\"22.5\" cy=\"8\" r=\"2.75\" /><circle cx=\"31\" cy=\"9\" r=\"2.75\" /><circle cx=\"39\" cy=\"12\" r=\"2.75\" /></g><path d=\"M9 26c8.5-1.5 21-1.5 27 0l2.5-12.5L31 25l-.3-14.1-5.2 13.6-3-14.5-3 14.5-5.2-13.6L14 25 6.5 13.5 9 26zM9 26c0 2 1.5 2 2.5 4 1 1.5 1 1 .5 3.5-1.5 1-1.5 2.5-1.5 2.5-1.5 1.5.5 2.5.5 2.5 6.5 1 16.5 1 23 0 0 0 1.5-1 0-2.5 0 0 .5-1.5-1-2.5-.5-2.5-.5-2 .5-3.5 1-2 2.5-2 2.5-4-8.5-1.5-18.5-1.5-27 0z\" stroke-linecap=\"butt\" /><path d=\"M11 38.5a35 35 1 0 0 23 0\" fill=\"none\" stroke-linecap=\"butt\" /><path d=\"M11 29a35 35 1 0 1 23 0M12.5 31.5h20M11.5 34.5a35 35 1 0 0 22 0M10.5 37.5a35 35 1 0 0 24 0\" fill=\"none\" stroke=\"#fff\" /></g><g id=\"black-king\" class=\"black king\" fill=\"none\" fill-rule=\"evenodd\" stroke=\"#000\" stroke-width=\"1.5\" stroke-linecap=\"round\" stroke-linejoin=\"round\"><path d=\"M22.5 11.63V6\" stroke-linejoin=\"miter\" /><path d=\"M22.5 25s4.5-7.5 3-10.5c0 0-1-2.5-3-2.5s-3 2.5-3 2.5c-1.5 3 3 10.5 3 10.5\" fill=\"#000\" stroke-linecap=\"butt\" stroke-linejoin=\"miter\" /><path d=\"M11.5 37c5.5 3.5 15.5 3.5 21 0v-7s9-4.5 6-10.5c-4-6.5-13.5-3.5-16 4V27v-3.5c-3.5-7.5-13-10.5-16-4-3 6 5 10 5 10V37z\" fill=\"#000\" /><path d=\"M20 8h5\" stroke-linejoin=\"miter\" /><path d=\"M32 29.5s8.5-4 6.03-9.65C34.15 14 25 18 22.5 24.5l.01 2.1-.01-2.1C20 18 9.906 14 6.997 19.85c-2.497 5.65 4.853 9 4.853 9M11.5 30c5.5-3 15.5-3 21 0m-21 3.5c5.5-3 15.5-3 21 0m-21 3.5c5.5-3 15.5-3 21 0\" stroke=\"#fff\" /></g></defs><rect x=\"7.5\" y=\"7.5\" width=\"375\" height=\"375\" fill=\"none\" stroke=\"#212121\" stroke-width=\"15\" /><g transform=\"translate(20, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M23.328 10.016q-1.742 0-2.414.398-.672.398-.672 1.36 0 .765.5 1.218.508.445 1.375.445 1.196 0 1.914-.843.727-.852.727-2.258v-.32zm2.867-.594v4.992h-1.437v-1.328q-.492.797-1.227 1.18-.734.375-1.797.375-1.343 0-2.14-.75-.79-.758-.79-2.024 0-1.476.985-2.226.992-.75 2.953-.75h2.016V8.75q0-.992-.656-1.531-.649-.547-1.829-.547-.75 0-1.46.18-.711.18-1.368.539V6.062q.79-.304 1.532-.453.742-.156 1.445-.156 1.898 0 2.836.984.937.985.937 2.985z\" /></g><g transform=\"translate(20, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M23.328 10.016q-1.742 0-2.414.398-.672.398-.672 1.36 0 .765.5 1.218.508.445 1.375.445 1.196 0 1.914-.843.727-.852.727-2.258v-.32zm2.867-.594v4.992h-1.437v-1.328q-.492.797-1.227 1.18-.734.375-1.797.375-1.343 0-2.14-.75-.79-.758-.79-2.024 0-1.476.985-2.226.992-.75 2.953-.75h2.016V8.75q0-.992-.656-1.531-.649-.547-1.829-.547-.75 0-1.46.18-.711.18-1.368.539V6.062q.79-.304 1.532-.453.742-.156 1.445-.156 1.898 0 2.836.984.937.985.937 2.985z\" /></g><g transform=\"translate(65, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M24.922 10.047q0-1.586-.656-2.485-.649-.906-1.79-.906-1.14 0-1.796.906-.649.899-.649 2.485 0 1.586.649 2.492.656.898 1.797.898 1.14 0 1.789-.898.656-.906.656-2.492zm-4.89-3.055q.452-.781 1.14-1.156.695-.383 1.656-.383 1.594 0 2.586 1.266 1 1.265 1 3.328 0 2.062-1 3.328-.992 1.266-2.586 1.266-.96 0-1.656-.375-.688-.383-1.14-1.164v1.312h-1.446V2.258h1.445z\" /></g><g transform=\"translate(65, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M24.922 10.047q0-1.586-.656-2.485-.649-.906-1.79-.906-1.14 0-1.796.906-.649.899-.649 2.485 0 1.586.649 2.492.656.898 1.797.898 1.14 0 1.789-.898.656-.906.656-2.492zm-4.89-3.055q.452-.781 1.14-1.156.695-.383 1.656-.383 1.594 0 2.586 1.266 1 1.265 1 3.328 0 2.062-1 3.328-.992 1.266-2.586 1.266-.96 0-1.656-.375-.688-.383-1.14-1.164v1.312h-1.446V2.258h1.445z\" /></g><g transform=\"translate(110, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M25.96 6v1.344q-.608-.336-1.226-.5-.609-.172-1.234-.172-1.398 0-2.172.89-.773.883-.773 2.485 0 1.601.773 2.492.774.883 2.172.883.625 0 1.234-.164.618-.172 1.227-.508v1.328q-.602.281-1.25.422-.64.14-1.367.14-1.977 0-3.14-1.242-1.165-1.242-1.165-3.351 0-2.14 1.172-3.367 1.18-1.227 3.227-1.227.664 0 1.296.14.633.134 1.227.407z\" /></g><g transform=\"translate(110, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M25.96 6v1.344q-.608-.336-1.226-.5-.609-.172-1.234-.172-1.398 0-2.172.89-.773.883-.773 2.485 0 1.601.773 2.492.774.883 2.172.883.625 0 1.234-.164.618-.172 1.227-.508v1.328q-.602.281-1.25.422-.64.14-1.367.14-1.977 0-3.14-1.242-1.165-1.242-1.165-3.351 0-2.14 1.172-3.367 1.18-1.227 3.227-1.227.664 0 1.296.14.633.134 1.227.407z\" /></g><g transform=\"translate(155, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M24.973 6.992V2.258h1.437v12.156h-1.437v-1.312q-.453.78-1.149 1.164-.687.375-1.656.375-1.586 0-2.586-1.266-.992-1.266-.992-3.328 0-2.063.992-3.328 1-1.266 2.586-1.266.969 0 1.656.383.696.375 1.149 1.156zm-4.899 3.055q0 1.586.649 2.492.656.898 1.797.898 1.14 0 1.796-.898.657-.906.657-2.492 0-1.586-.657-2.485-.656-.906-1.796-.906-1.141 0-1.797.906-.649.899-.649 2.485z\" /></g><g transform=\"translate(155, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M24.973 6.992V2.258h1.437v12.156h-1.437v-1.312q-.453.78-1.149 1.164-.687.375-1.656.375-1.586 0-2.586-1.266-.992-1.266-.992-3.328 0-2.063.992-3.328 1-1.266 2.586-1.266.969 0 1.656.383.696.375 1.149 1.156zm-4.899 3.055q0 1.586.649 2.492.656.898 1.797.898 1.14 0 1.796-.898.657-.906.657-2.492 0-1.586-.657-2.485-.656-.906-1.796-.906-1.141 0-1.797.906-.649.899-.649 2.485z\" /></g><g transform=\"translate(200, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M26.555 9.68v.703h-6.61q.094 1.484.89 2.265.806.774 2.235.774.828 0 1.602-.203.781-.203 1.547-.61v1.36q-.774.328-1.586.5-.813.172-1.649.172-2.093 0-3.32-1.22-1.219-1.218-1.219-3.296 0-2.148 1.157-3.406 1.164-1.266 3.132-1.266 1.766 0 2.79 1.14 1.03 1.134 1.03 3.087zm-1.438-.422q-.015-1.18-.664-1.883-.64-.703-1.703-.703-1.203 0-1.93.68-.718.68-.828 1.914z\" /></g><g transform=\"translate(200, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M26.555 9.68v.703h-6.61q.094 1.484.89 2.265.806.774 2.235.774.828 0 1.602-.203.781-.203 1.547-.61v1.36q-.774.328-1.586.5-.813.172-1.649.172-2.093 0-3.32-1.22-1.219-1.218-1.219-3.296 0-2.148 1.157-3.406 1.164-1.266 3.132-1.266 1.766 0 2.79 1.14 1.03 1.134 1.03 3.087zm-1.438-.422q-.015-1.18-.664-1.883-.64-.703-1.703-.703-1.203 0-1.93.68-.718.68-.828 1.914z\" /></g><g transform=\"translate(245, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M25.285 2.258v1.195H23.91q-.773 0-1.078.313-.297.312-.297 1.125v.773h2.367v1.117h-2.367v7.633H21.09V6.781h-1.375V5.664h1.375v-.61q0-1.46.68-2.124.68-.672 2.156-.672z\" /></g><g transform=\"translate(245, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M25.285 2.258v1.195H23.91q-.773 0-1.078.313-.297.312-.297 1.125v.773h2.367v1.117h-2.367v7.633H21.09V6.781h-1.375V5.664h1.375v-.61q0-1.46.68-2.124.68-.672 2.156-.672z\" /></g><g transform=\"translate(290, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M24.973 9.937q0-1.562-.649-2.421-.64-.86-1.804-.86-1.157 0-1.805.86-.64.859-.64 2.421 0 1.555.64 2.415.648.859 1.805.859 1.164 0 1.804-.86.649-.859.649-2.414zm1.437 3.391q0 2.234-.992 3.32-.992 1.094-3.04 1.094-.757 0-1.429-.117-.672-.11-1.304-.344v-1.398q.632.344 1.25.508.617.164 1.257.164 1.414 0 2.118-.743.703-.734.703-2.226v-.711q-.446.773-1.141 1.156-.695.383-1.664.383-1.61 0-2.594-1.227-.984-1.226-.984-3.25 0-2.03.984-3.257.985-1.227 2.594-1.227.969 0 1.664.383t1.14 1.156V5.664h1.438z\" /></g><g transform=\"translate(290, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M24.973 9.937q0-1.562-.649-2.421-.64-.86-1.804-.86-1.157 0-1.805.86-.64.859-.64 2.421 0 1.555.64 2.415.648.859 1.805.859 1.164 0 1.804-.86.649-.859.649-2.414zm1.437 3.391q0 2.234-.992 3.32-.992 1.094-3.04 1.094-.757 0-1.429-.117-.672-.11-1.304-.344v-1.398q.632.344 1.25.508.617.164 1.257.164 1.414 0 2.118-.743.703-.734.703-2.226v-.711q-.446.773-1.141 1.156-.695.383-1.664.383-1.61 0-2.594-1.227-.984-1.226-.984-3.25 0-2.03.984-3.257.985-1.227 2.594-1.227.969 0 1.664.383t1.14 1.156V5.664h1.438z\" /></g><g transform=\"translate(335, 1) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M26.164 9.133v5.281h-1.437V9.18q0-1.243-.485-1.86-.484-.617-1.453-.617-1.164 0-1.836.742-.672.742-.672 2.024v4.945h-1.445V2.258h1.445v4.765q.516-.789 1.211-1.18.703-.39 1.617-.39 1.508 0 2.282.938.773.93.773 2.742z\" /></g><g transform=\"translate(335, 375) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M26.164 9.133v5.281h-1.437V9.18q0-1.243-.485-1.86-.484-.617-1.453-.617-1.164 0-1.836.742-.672.742-.672 2.024v4.945h-1.445V2.258h1.445v4.765q.516-.789 1.211-1.18.703-.39 1.617-.39 1.508 0 2.282.938.773.93.773 2.742z\" /></g><g transform=\"translate(0, 335) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M6.754 26.996h2.578v-8.898l-2.805.562v-1.437l2.79-.563h1.578v10.336h2.578v1.328h-6.72z\" /></g><g transform=\"translate(375, 335) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M6.754 26.996h2.578v-8.898l-2.805.562v-1.437l2.79-.563h1.578v10.336h2.578v1.328h-6.72z\" /></g><g transform=\"translate(0, 290) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M8.195 26.996h5.508v1.328H6.297v-1.328q.898-.93 2.445-2.492 1.555-1.57 1.953-2.024.758-.851 1.055-1.437.305-.594.305-1.164 0-.93-.657-1.516-.648-.586-1.695-.586-.742 0-1.57.258-.82.258-1.758.781v-1.593q.953-.383 1.781-.578.828-.196 1.516-.196 1.812 0 2.89.906 1.079.907 1.079 2.422 0 .72-.274 1.368-.265.64-.976 1.515-.196.227-1.243 1.313-1.046 1.078-2.953 3.023z\" /></g><g transform=\"translate(375, 290) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M8.195 26.996h5.508v1.328H6.297v-1.328q.898-.93 2.445-2.492 1.555-1.57 1.953-2.024.758-.851 1.055-1.437.305-.594.305-1.164 0-.93-.657-1.516-.648-.586-1.695-.586-.742 0-1.57.258-.82.258-1.758.781v-1.593q.953-.383 1.781-.578.828-.196 1.516-.196 1.812 0 2.89.906 1.079.907 1.079 2.422 0 .72-.274 1.368-.265.64-.976 1.515-.196.227-1.243 1.313-1.046 1.078-2.953 3.023z\" /></g><g transform=\"translate(0, 245) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M11.434 22.035q1.132.242 1.765 1.008.64.766.64 1.89 0 1.727-1.187 2.672-1.187.946-3.375.946-.734 0-1.515-.149-.774-.14-1.602-.43V26.45q.656.383 1.438.578.78.196 1.632.196 1.485 0 2.258-.586.782-.586.782-1.703 0-1.032-.727-1.61-.719-.586-2.008-.586h-1.36v-1.297h1.423q1.164 0 1.78-.46.618-.47.618-1.344 0-.899-.64-1.375-.633-.485-1.82-.485-.65 0-1.391.141-.743.14-1.633.437V16.95q.898-.25 1.68-.375.788-.125 1.484-.125 1.797 0 2.844.82 1.046.813 1.046 2.204 0 .968-.554 1.64-.555.664-1.578.922z\" /></g><g transform=\"translate(375, 245) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M11.434 22.035q1.132.242 1.765 1.008.64.766.64 1.89 0 1.727-1.187 2.672-1.187.946-3.375.946-.734 0-1.515-.149-.774-.14-1.602-.43V26.45q.656.383 1.438.578.78.196 1.632.196 1.485 0 2.258-.586.782-.586.782-1.703 0-1.032-.727-1.61-.719-.586-2.008-.586h-1.36v-1.297h1.423q1.164 0 1.78-.46.618-.47.618-1.344 0-.899-.64-1.375-.633-.485-1.82-.485-.65 0-1.391.141-.743.14-1.633.437V16.95q.898-.25 1.68-.375.788-.125 1.484-.125 1.797 0 2.844.82 1.046.813 1.046 2.204 0 .968-.554 1.64-.555.664-1.578.922z\" /></g><g transform=\"translate(0, 200) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M11.016 18.035L7.03 24.262h3.985zm-.414-1.375h1.984v7.602h1.664v1.312h-1.664v2.75h-1.57v-2.75H5.75v-1.523z\" /></g><g transform=\"translate(375, 200) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M11.016 18.035L7.03 24.262h3.985zm-.414-1.375h1.984v7.602h1.664v1.312h-1.664v2.75h-1.57v-2.75H5.75v-1.523z\" /></g><g transform=\"translate(0, 155) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M6.719 16.66h6.195v1.328h-4.75v2.86q.344-.118.688-.172.343-.063.687-.063 1.953 0 3.094 1.07 1.14 1.07 1.14 2.899 0 1.883-1.171 2.93-1.172 1.039-3.305 1.039-.735 0-1.5-.125-.758-.125-1.57-.375v-1.586q.703.383 1.453.57.75.188 1.586.188 1.351 0 2.14-.711.79-.711.79-1.93 0-1.219-.79-1.93-.789-.71-2.14-.71-.633 0-1.266.14-.625.14-1.281.438z\" /></g><g transform=\"translate(375, 155) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M6.719 16.66h6.195v1.328h-4.75v2.86q.344-.118.688-.172.343-.063.687-.063 1.953 0 3.094 1.07 1.14 1.07 1.14 2.899 0 1.883-1.171 2.93-1.172 1.039-3.305 1.039-.735 0-1.5-.125-.758-.125-1.57-.375v-1.586q.703.383 1.453.57.75.188 1.586.188 1.351 0 2.14-.711.79-.711.79-1.93 0-1.219-.79-1.93-.789-.71-2.14-.71-.633 0-1.266.14-.625.14-1.281.438z\" /></g><g transform=\"translate(0, 110) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M10.137 21.863q-1.063 0-1.688.727-.617.726-.617 1.992 0 1.258.617 1.992.625.727 1.688.727 1.062 0 1.68-.727.624-.734.624-1.992 0-1.266-.625-1.992-.617-.727-1.68-.727zm3.133-4.945v1.437q-.594-.28-1.204-.43-.601-.148-1.195-.148-1.562 0-2.39 1.055-.82 1.055-.938 3.188.46-.68 1.156-1.04.696-.367 1.531-.367 1.758 0 2.774 1.07 1.023 1.063 1.023 2.899 0 1.797-1.062 2.883-1.063 1.086-2.828 1.086-2.024 0-3.094-1.547-1.07-1.555-1.07-4.5 0-2.766 1.312-4.406 1.313-1.649 3.524-1.649.593 0 1.195.117.61.118 1.266.352z\" /></g><g transform=\"translate(375, 110) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M10.137 21.863q-1.063 0-1.688.727-.617.726-.617 1.992 0 1.258.617 1.992.625.727 1.688.727 1.062 0 1.68-.727.624-.734.624-1.992 0-1.266-.625-1.992-.617-.727-1.68-.727zm3.133-4.945v1.437q-.594-.28-1.204-.43-.601-.148-1.195-.148-1.562 0-2.39 1.055-.82 1.055-.938 3.188.46-.68 1.156-1.04.696-.367 1.531-.367 1.758 0 2.774 1.07 1.023 1.063 1.023 2.899 0 1.797-1.062 2.883-1.063 1.086-2.828 1.086-2.024 0-3.094-1.547-1.07-1.555-1.07-4.5 0-2.766 1.312-4.406 1.313-1.649 3.524-1.649.593 0 1.195.117.61.118 1.266.352z\" /></g><g transform=\"translate(0, 65) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M6.25 16.66h7.5v.672L9.516 28.324H7.867l3.985-10.336H6.25z\" /></g><g transform=\"translate(375, 65) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M6.25 16.66h7.5v.672L9.516 28.324H7.867l3.985-10.336H6.25z\" /></g><g transform=\"translate(0, 20) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M10 22.785q-1.125 0-1.773.602-.641.601-.641 1.656t.64 1.656q.649.602 1.774.602t1.773-.602q.649-.61.649-1.656 0-1.055-.649-1.656-.64-.602-1.773-.602zm-1.578-.672q-1.016-.25-1.586-.945-.563-.695-.563-1.695 0-1.399.993-2.211 1-.813 2.734-.813 1.742 0 2.734.813.993.812.993 2.21 0 1-.57 1.696-.563.695-1.571.945 1.14.266 1.773 1.04.641.773.641 1.89 0 1.695-1.04 2.602-1.03.906-2.96.906t-2.969-.906Q6 26.738 6 25.043q0-1.117.64-1.89.641-.774 1.782-1.04zm-.578-2.492q0 .906.562 1.414.57.508 1.594.508 1.016 0 1.586-.508.578-.508.578-1.414 0-.906-.578-1.414-.57-.508-1.586-.508-1.023 0-1.594.508-.562.508-.562 1.414z\" /></g><g transform=\"translate(375, 20) scale(0.75, 0.75)\" fill=\"#e5e5e5\" stroke=\"#e5e5e5\"><path d=\"M10 22.785q-1.125 0-1.773.602-.641.601-.641 1.656t.64 1.656q.649.602 1.774.602t1.773-.602q.649-.61.649-1.656 0-1.055-.649-1.656-.64-.602-1.773-.602zm-1.578-.672q-1.016-.25-1.586-.945-.563-.695-.563-1.695 0-1.399.993-2.211 1-.813 2.734-.813 1.742 0 2.734.813.993.812.993 2.21 0 1-.57 1.696-.563.695-1.571.945 1.14.266 1.773 1.04.641.773.641 1.89 0 1.695-1.04 2.602-1.03.906-2.96.906t-2.969-.906Q6 26.738 6 25.043q0-1.117.64-1.89.641-.774 1.782-1.04zm-.578-2.492q0 .906.562 1.414.57.508 1.594.508 1.016 0 1.586-.508.578-.508.578-1.414 0-.906-.578-1.414-.57-.508-1.586-.508-1.023 0-1.594.508-.562.508-.562 1.414z\" /></g><rect x=\"15\" y=\"330\" width=\"45\" height=\"45\" class=\"square dark lastmove a1\" stroke=\"none\" fill=\"#aaa23b\" /><rect x=\"60\" y=\"330\" width=\"45\" height=\"45\" class=\"square light b1\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"105\" y=\"330\" width=\"45\" height=\"45\" class=\"square dark c1\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"150\" y=\"330\" width=\"45\" height=\"45\" class=\"square light d1\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"195\" y=\"330\" width=\"45\" height=\"45\" class=\"square dark e1\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"240\" y=\"330\" width=\"45\" height=\"45\" class=\"square light f1\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"285\" y=\"330\" width=\"45\" height=\"45\" class=\"square dark g1\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"330\" y=\"330\" width=\"45\" height=\"45\" class=\"square light h1\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"15\" y=\"285\" width=\"45\" height=\"45\" class=\"square light a2\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"60\" y=\"285\" width=\"45\" height=\"45\" class=\"square dark b2\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"105\" y=\"285\" width=\"45\" height=\"45\" class=\"square light c2\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"150\" y=\"285\" width=\"45\" height=\"45\" class=\"square dark d2\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"195\" y=\"285\" width=\"45\" height=\"45\" class=\"square light e2\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"240\" y=\"285\" width=\"45\" height=\"45\" class=\"square dark f2\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"285\" y=\"285\" width=\"45\" height=\"45\" class=\"square light g2\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"330\" y=\"285\" width=\"45\" height=\"45\" class=\"square dark h2\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"15\" y=\"240\" width=\"45\" height=\"45\" class=\"square dark lastmove a3\" stroke=\"none\" fill=\"#aaa23b\" /><rect x=\"60\" y=\"240\" width=\"45\" height=\"45\" class=\"square light b3\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"105\" y=\"240\" width=\"45\" height=\"45\" class=\"square dark c3\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"150\" y=\"240\" width=\"45\" height=\"45\" class=\"square light d3\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"195\" y=\"240\" width=\"45\" height=\"45\" class=\"square dark e3\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"240\" y=\"240\" width=\"45\" height=\"45\" class=\"square light f3\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"285\" y=\"240\" width=\"45\" height=\"45\" class=\"square dark g3\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"330\" y=\"240\" width=\"45\" height=\"45\" class=\"square light h3\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"15\" y=\"195\" width=\"45\" height=\"45\" class=\"square light a4\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"60\" y=\"195\" width=\"45\" height=\"45\" class=\"square dark b4\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"105\" y=\"195\" width=\"45\" height=\"45\" class=\"square light c4\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"150\" y=\"195\" width=\"45\" height=\"45\" class=\"square dark d4\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"195\" y=\"195\" width=\"45\" height=\"45\" class=\"square light e4\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"240\" y=\"195\" width=\"45\" height=\"45\" class=\"square dark f4\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"285\" y=\"195\" width=\"45\" height=\"45\" class=\"square light g4\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"330\" y=\"195\" width=\"45\" height=\"45\" class=\"square dark h4\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"15\" y=\"150\" width=\"45\" height=\"45\" class=\"square dark a5\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"60\" y=\"150\" width=\"45\" height=\"45\" class=\"square light b5\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"105\" y=\"150\" width=\"45\" height=\"45\" class=\"square dark c5\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"150\" y=\"150\" width=\"45\" height=\"45\" class=\"square light d5\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"195\" y=\"150\" width=\"45\" height=\"45\" class=\"square dark e5\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"240\" y=\"150\" width=\"45\" height=\"45\" class=\"square light f5\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"285\" y=\"150\" width=\"45\" height=\"45\" class=\"square dark g5\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"330\" y=\"150\" width=\"45\" height=\"45\" class=\"square light h5\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"15\" y=\"105\" width=\"45\" height=\"45\" class=\"square light a6\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"60\" y=\"105\" width=\"45\" height=\"45\" class=\"square dark b6\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"105\" y=\"105\" width=\"45\" height=\"45\" class=\"square light c6\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"150\" y=\"105\" width=\"45\" height=\"45\" class=\"square dark d6\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"195\" y=\"105\" width=\"45\" height=\"45\" class=\"square light e6\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"240\" y=\"105\" width=\"45\" height=\"45\" class=\"square dark f6\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"285\" y=\"105\" width=\"45\" height=\"45\" class=\"square light g6\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"330\" y=\"105\" width=\"45\" height=\"45\" class=\"square dark h6\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"15\" y=\"60\" width=\"45\" height=\"45\" class=\"square dark a7\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"60\" y=\"60\" width=\"45\" height=\"45\" class=\"square light b7\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"105\" y=\"60\" width=\"45\" height=\"45\" class=\"square dark c7\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"150\" y=\"60\" width=\"45\" height=\"45\" class=\"square light d7\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"195\" y=\"60\" width=\"45\" height=\"45\" class=\"square dark e7\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"240\" y=\"60\" width=\"45\" height=\"45\" class=\"square light f7\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"285\" y=\"60\" width=\"45\" height=\"45\" class=\"square dark g7\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"330\" y=\"60\" width=\"45\" height=\"45\" class=\"square light h7\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"15\" y=\"15\" width=\"45\" height=\"45\" class=\"square light a8\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"60\" y=\"15\" width=\"45\" height=\"45\" class=\"square dark b8\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"105\" y=\"15\" width=\"45\" height=\"45\" class=\"square light c8\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"150\" y=\"15\" width=\"45\" height=\"45\" class=\"square dark d8\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"195\" y=\"15\" width=\"45\" height=\"45\" class=\"square light e8\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"240\" y=\"15\" width=\"45\" height=\"45\" class=\"square dark f8\" stroke=\"none\" fill=\"#d18b47\" /><rect x=\"285\" y=\"15\" width=\"45\" height=\"45\" class=\"square light g8\" stroke=\"none\" fill=\"#ffce9e\" /><rect x=\"330\" y=\"15\" width=\"45\" height=\"45\" class=\"square dark h8\" stroke=\"none\" fill=\"#d18b47\" /><use href=\"#white-rook\" xlink:href=\"#white-rook\" transform=\"translate(15, 330)\" /><use href=\"#white-knight\" xlink:href=\"#white-knight\" transform=\"translate(60, 330)\" /><use href=\"#white-bishop\" xlink:href=\"#white-bishop\" transform=\"translate(105, 330)\" /><use href=\"#white-queen\" xlink:href=\"#white-queen\" transform=\"translate(195, 330)\" /><use href=\"#white-rook\" xlink:href=\"#white-rook\" transform=\"translate(330, 330)\" /><use href=\"#white-pawn\" xlink:href=\"#white-pawn\" transform=\"translate(150, 285)\" /><use href=\"#white-bishop\" xlink:href=\"#white-bishop\" transform=\"translate(195, 285)\" /><use href=\"#white-pawn\" xlink:href=\"#white-pawn\" transform=\"translate(240, 285)\" /><use href=\"#white-pawn\" xlink:href=\"#white-pawn\" transform=\"translate(60, 240)\" /><use href=\"#white-pawn\" xlink:href=\"#white-pawn\" transform=\"translate(105, 240)\" /><use href=\"#white-king\" xlink:href=\"#white-king\" transform=\"translate(150, 240)\" /><use href=\"#white-pawn\" xlink:href=\"#white-pawn\" transform=\"translate(195, 240)\" /><use href=\"#white-knight\" xlink:href=\"#white-knight\" transform=\"translate(240, 240)\" /><use href=\"#white-pawn\" xlink:href=\"#white-pawn\" transform=\"translate(285, 240)\" /><use href=\"#white-pawn\" xlink:href=\"#white-pawn\" transform=\"translate(330, 240)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(15, 195)\" /><use href=\"#black-queen\" xlink:href=\"#black-queen\" transform=\"translate(330, 195)\" /><use href=\"#black-rook\" xlink:href=\"#black-rook\" transform=\"translate(15, 150)\" /><use href=\"#black-bishop\" xlink:href=\"#black-bishop\" transform=\"translate(105, 150)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(195, 105)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(105, 60)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(150, 60)\" /><use href=\"#black-king\" xlink:href=\"#black-king\" transform=\"translate(195, 60)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(240, 60)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(285, 60)\" /><use href=\"#black-pawn\" xlink:href=\"#black-pawn\" transform=\"translate(330, 60)\" /><use href=\"#black-knight\" xlink:href=\"#black-knight\" transform=\"translate(60, 15)\" /><use href=\"#black-bishop\" xlink:href=\"#black-bishop\" transform=\"translate(105, 15)\" /><use href=\"#black-knight\" xlink:href=\"#black-knight\" transform=\"translate(285, 15)\" /><use href=\"#black-rook\" xlink:href=\"#black-rook\" transform=\"translate(330, 15)\" /></svg>"
|
|
},
|
|
"metadata": {},
|
|
"execution_count": 33
|
|
}
|
|
]
|
|
}
|
|
]
|
|
} |