Bitsec (subnet 60)
Back to Reports

Vulnerability History

Date High Risk Low Risk
2025-04-29 1 1

Audit Report Details

4127
Lines of Code
0
Resolved
🚨 High Risk Vulnerabilities

None found

⚠️ Low Risk Vulnerabilities

None found

Vulnerable Code:

1# Repo Tree (Python files only, excluding .gitignored files)
2
3├── contrib
4├── docs
5│ └── stream_tutorial
6│ ├── client.py
7│ ├── config.py
8│ ├── miner.py
9│ └── protocol.py
10├── neurons
11│ ├── __init__.py
12│ ├── miner.py
13│ └── validator.py
14├── node-stack
15│ ├── miner
16│ │ ├── modules
17│ │ ├── routes
18│ │ └── tests
19│ └── validator
20│ ├── modules
21│ ├── routes
22│ ├── tests
23├── scripts
24├── setup.py
25├── sybil
26│ ├── __init__.py
27│ ├── api
28│ │ ├── __init__.py
29│ │ ├── dummy.py
30│ │ └── get_query_axons.py
31│ ├── base
32│ │ ├── __init__.py
33│ │ ├── miner.py
34│ │ ├── neuron.py
35│ │ ├── utils
36│ │ │ ├── __init__.py
37│ │ │ └── weight_utils.py
38│ │ └── validator.py
39│ ├── mock.py
40│ ├── protocol.py
41│ ├── subnet_links.py
42│ ├── utils
43│ │ ├── __init__.py
44│ │ ├── config.py
45│ │ ├── logging.py
46│ │ ├── misc.py
47│ │ └── uids.py
48│ └── validator
49│ ├── __init__.py
50│ ├── forward.py
51│ ├── reward.py
52│ └── utils.py
53├── tests
54│ ├── __init__.py
55│ ├── helpers.py
56│ ├── test_mock.py
57│ └── test_sybil_validator.py
58└── verify
59 ├── generate.py
60 └── verify.py
61
62
63# Complete repo contents (files-to-prompt output)
64
65target_repo/setup.py
66---
67# The MIT License (MIT)
68# Copyright © 2023 Yuma Rao
69# TODO(developer): Set your name
70# Copyright © 2023 <your name>
71
72# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
73# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
74# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
75# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
76
77# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
78# the Software.
79
80# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
81# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
82# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
83# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
84# DEALINGS IN THE SOFTWARE.
85
86import re
87import os
88import codecs
89import pathlib
90from os import path
91from io import open
92from setuptools import setup, find_packages
93from pkg_resources import parse_requirements
94
95
def read_requirements(path):
    """Read a pip requirements file and return a list of requirement strings.

    Plain requirement lines are returned as-is.  VCS / direct-reference lines
    (e.g. ``git+https://...#egg=name``) are reduced to the package name taken
    from their ``#egg=`` fragment; VCS lines without an ``#egg=`` fragment are
    dropped, matching the original best-effort behaviour.

    BUG FIX: blank lines and ``#`` comment lines are now skipped, so they no
    longer leak into ``install_requires``.
    """
    processed_requirements = []
    with open(path, "r") as f:
        for raw in f.read().splitlines():
            req = raw.strip()
            # Blank lines and comments are not requirements.
            if not req or req.startswith("#"):
                continue
            # Git or other VCS / direct-reference links.
            if req.startswith("git+") or "@" in req:
                egg = re.search(r"#egg=([\w\-]+)", req)
                if egg:
                    processed_requirements.append(egg.group(1))
                # No #egg=<name> fragment: silently drop, as before.
            else:
                processed_requirements.append(req)
    return processed_requirements
114
115
# ---------------------------------------------------------------------------
# Package metadata assembly.  Everything below runs at import time when pip
# builds the distribution.
# ---------------------------------------------------------------------------

requirements = read_requirements("requirements.txt")
here = path.abspath(path.dirname(__file__))

# The PyPI long description is taken verbatim from the README.
with open(path.join(here, "README.md"), encoding="utf-8") as f:
    long_description = f.read()

# Load the version string from sybil/__init__.py so it is defined in exactly
# one place.  (The original comment wrongly said "from setup.py".)
with codecs.open(
    os.path.join(here, "sybil/__init__.py"), encoding="utf-8"
) as init_file:
    version_match = re.search(
        r"^__version__ = ['\"]([^'\"]*)['\"]", init_file.read(), re.M
    )
    # Fail with a clear error instead of an AttributeError on None.
    if version_match is None:
        raise RuntimeError("Unable to find __version__ in sybil/__init__.py")
    version_string = version_match.group(1)

setup(
    name="Sybil Network",  # TODO(developer): Change this value to your module subnet name.
    version=version_string,
    description="Bittensor Subnet for VPN Services",  # TODO(developer): Change this value to your module subnet description.
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/beyond-stake/sybil-network",  # TODO(developer): Change this url to your module subnet github url.
    author="mentor.eth",  # TODO(developer): Change this value to your module subnet author name.
    packages=find_packages(),
    include_package_data=True,
    author_email="",  # TODO(developer): Change this value to your module subnet author email.
    license="MIT",
    python_requires=">=3.8",
    install_requires=requirements,
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Topic :: Software Development :: Build Tools",
        # Pick your license as you wish
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Topic :: Scientific/Engineering",
        "Topic :: Scientific/Engineering :: Mathematics",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
        "Topic :: Software Development",
        "Topic :: Software Development :: Libraries",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
)
163
164
165---
166target_repo/verify/generate.py
167---
168from datetime import datetime
169
170import bittensor
171
172# Hardcode or set the environment variable WALLET_PASS to the password for the wallet
173# environ["WALLET_PASS"] = ""
174
175
def main(args):
    """Sign ``args.message`` with the coldkey of wallet ``args.name``.

    The signed message is printed and written to ``message_and_signature.txt``
    in the format that verify.py (and polkadot.js) expect.
    """
    wallet = bittensor.wallet(name=args.name)
    keypair = wallet.coldkey

    timestamp = datetime.now()
    timezone = timestamp.astimezone().tzname()

    # Ensure compatibility with polkadot.js messages, as polkadot.js always
    # wraps the signed payload in <Bytes>...</Bytes>.
    message = (
        "<Bytes>" + f"On {timestamp} {timezone} {args.message}" + "</Bytes>"
    )
    signature = keypair.sign(data=message)

    file_contents = f"{message}\n\tSigned by: {keypair.ss58_address}\n\tSignature: {signature.hex()}"
    print(file_contents)
    # BUG FIX: use a context manager so the handle is closed and the write
    # flushed deterministically; the original `open(...).write(...)` leaked it.
    with open("message_and_signature.txt", "w") as out:
        out.write(file_contents)

    print("Signature generated and saved to message_and_signature.txt")
194
195
if __name__ == "__main__":
    import argparse

    # CLI entry point: collect the message and wallet name, then sign.
    parser = argparse.ArgumentParser(description="Generate a signature")
    parser.add_argument("--message", type=str, help="The message to sign")
    parser.add_argument("--name", type=str, help="The wallet name")

    main(parser.parse_args())
205
206
207---
208target_repo/verify/verify.py
209---
210from binascii import unhexlify
211
212from substrateinterface import Keypair
213
214
def main(args):
    """Verify a message/signature file produced by generate.py.

    Raises:
        ValueError: when the message is not <Bytes>-wrapped or the signature
            does not verify against the embedded ss58 address.
    """
    # BUG FIX: close the file deterministically; the original
    # `open(args.file).read()` leaked the handle.
    with open(args.file) as f:
        file_data = f.read()
    file_split = file_data.split("\n\t")

    # Segment 2: "Signed by: <ss58 address>" (prefix optional).
    address_line = file_split[1]
    address_prefix = "Signed by: "
    if address_line.startswith(address_prefix):
        address = address_line[len(address_prefix) :]
    else:
        address = address_line

    keypair = Keypair(ss58_address=address, ss58_format=42)

    # Segment 1: the polkadot.js-style wrapped message.
    message = file_split[0]
    if not message.startswith("<Bytes>") or not message.endswith("</Bytes>"):
        raise ValueError("Message is not properly wrapped in <Bytes>.")

    # Segment 3: "Signature: <hex>" (prefix optional).
    signature_line = file_split[2]
    signature_prefix = "Signature: "
    if signature_line.startswith(signature_prefix):
        signature = signature_line[len(signature_prefix) :]
    else:
        signature = signature_line

    real_signature = unhexlify(signature.encode())

    if not keypair.verify(data=message, signature=real_signature):
        raise ValueError(f"Invalid signature for address={address}")
    else:
        print(f"Signature verified, signed by {address}")
245
246
if __name__ == "__main__":
    import argparse

    # CLI entry point: a single --file argument pointing at the signed file.
    parser = argparse.ArgumentParser(description="Verify a signature")
    parser.add_argument(
        "--file", help="The file containing the message and signature"
    )
    main(parser.parse_args())
256
257
258---
259target_repo/sybil/__init__.py
260---
261# The MIT License (MIT)
262# Copyright Β© 2023 Yuma Rao
263# TODO(developer): Set your name
264# Copyright Β© 2023 <your name>
265
266# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
267# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
268# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
269# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
270
271# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
272# the Software.
273
274# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
275# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
276# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
277# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
278# DEALINGS IN THE SOFTWARE.
279
# TODO(developer): Change this value when updating your code base.
# Template module version, plus the integer "spec version" derived from it
# as major*1000 + minor*10 + patch (the on-chain encoding).
__version__ = "0.0.0"
version_split = __version__.split(".")
__spec_version__ = sum(
    weight * int(part)
    for weight, part in zip((1000, 10, 1), version_split)
)
289
290# Import all submodules.
291from . import protocol
292from . import base
293from . import validator
294from . import api
295from .subnet_links import SUBNET_LINKS
296
297
298---
299target_repo/sybil/mock.py
300---
301import time
302
303import asyncio
304import random
305import bittensor as bt
306
307from typing import List
308
309
class MockSubtensor(bt.MockSubtensor):
    """Mock subtensor pre-populated with one subnet, an optional validator
    neuron at uid 0, and ``n`` mock miner neurons at uids 1..n."""

    def __init__(self, netuid, n=16, wallet=None, network="mock"):
        super().__init__(network=network)

        if not self.subnet_exists(netuid):
            self.create_subnet(netuid)

        def register(hotkey, coldkey):
            # Every mock neuron gets the same balance and stake.
            self.force_register_neuron(
                netuid=netuid,
                hotkey=hotkey,
                coldkey=coldkey,
                balance=100000,
                stake=100000,
            )

        # Register ourself (the validator) as a neuron at uid=0.
        if wallet is not None:
            register(wallet.hotkey.ss58_address, wallet.coldkey.ss58_address)

        # Register n mock neurons who will be miners.
        for uid in range(1, n + 1):
            register(f"miner-hotkey-{uid}", "mock-coldkey")
336
337
class MockMetagraph(bt.metagraph):
    """Metagraph mock: optionally syncs from a given subtensor, then points
    every axon at a fixed local address/port."""

    def __init__(self, netuid=1, network="mock", subtensor=None):
        super().__init__(netuid=netuid, network=network, sync=False)

        if subtensor is not None:
            self.subtensor = subtensor
            self.sync(subtensor=subtensor)

        # All mock axons answer on the same local endpoint.
        for axon in self.axons:
            axon.ip, axon.port = "127.0.0.0", 8091

        bt.logging.info(f"Metagraph: {self}")
        bt.logging.info(f"Axons: {self.axons}")
352
353
class MockDendrite(bt.dendrite):
    """
    Replaces a real bittensor network request with a mock request that just
    returns a static response for every axon passed in, after attaching some
    realistic-looking metadata and a random simulated delay.
    """

    def __init__(self, wallet):
        super().__init__(wallet)

    async def forward(
        self,
        axons: List[bt.axon],
        synapse: bt.Synapse = bt.Synapse(),
        timeout: float = 12,
        deserialize: bool = True,
        run_async: bool = True,
        streaming: bool = False,
    ):
        if streaming:
            raise NotImplementedError("Streaming not implemented yet.")

        async def single_axon_response(i, axon):
            """Build the mock response for one axon."""
            start_time = time.time()
            s = synapse.copy()
            # Attach some more required data so it looks real.
            s = self.preprocess_synapse_for_request(axon, s, timeout)
            # Simulated processing delay decides success vs. timeout.
            process_time = random.random()
            if process_time < timeout:
                s.dendrite.process_time = str(time.time() - start_time)
                # Mirror the axon's status onto the dendrite.
                # TODO (developer): replace with your own expected synapse data
                s.dummy_output = s.dummy_input * 2
                s.dendrite.status_code = 200
                s.dendrite.status_message = "OK"
                synapse.dendrite.process_time = str(process_time)
            else:
                s.dummy_output = 0
                s.dendrite.status_code = 408
                s.dendrite.status_message = "Timeout"
                synapse.dendrite.process_time = str(timeout)
            # Deserialize if requested, mirroring the real dendrite.
            return s.deserialize() if deserialize else s

        async def query_all_axons(streaming: bool):
            """Query every axon concurrently."""
            return await asyncio.gather(
                *(
                    single_axon_response(i, target_axon)
                    for i, target_axon in enumerate(axons)
                )
            )

        return await query_all_axons(streaming)

    def __str__(self) -> str:
        """
        Returns a string representation of the Dendrite object.

        Returns:
            str: The string representation in the format
            "MockDendrite(<user_wallet_address>)".
        """
        return "MockDendrite({})".format(self.keypair.ss58_address)
423
424
425---
426target_repo/sybil/protocol.py
427---
428# The MIT License (MIT)
429# Copyright Β© 2023 Yuma Rao
430# TODO(developer): Set your name
431# Copyright Β© 2023 <your name>
432
433# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
434# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
435# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
436# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
437
438# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
439# the Software.
440
441# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
442# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
443# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
444# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
445# DEALINGS IN THE SOFTWARE.
446
447import typing
448import bittensor as bt
449
450# TODO(developer): Rewrite with your protocol definition.
451
452# This is the protocol for the dummy miner and validator.
453# It is a simple request-response protocol where the validator sends a request
454# to the miner, and the miner responds with a dummy response.
455
456# ---- miner ----
457# Example usage:
458# def dummy( synapse: Dummy ) -> Dummy:
459# synapse.dummy_output = synapse.dummy_input + 1
460# return synapse
461# axon = bt.axon().attach( dummy ).serve(netuid=...).start()
462
463# ---- validator ---
464# Example usage:
465# dendrite = bt.dendrite()
466# dummy_output = dendrite.query( Dummy( dummy_input = 1 ) )
467# assert dummy_output == 2
468
469
class Dummy(bt.Synapse):
    """
    A simple dummy protocol built on bt.Synapse, handling the example
    request/response round-trip between the validator (sender) and the
    miner (responder).

    Attributes:
    - dummy_input: An integer value representing the input request sent by the validator.
    - dummy_output: An optional integer value which, when filled, represents the response from the miner.
    """

    # Required request input, filled by sending dendrite caller.
    dummy_input: int

    # Optional request output, filled by receiving axon.
    dummy_output: typing.Optional[int] = None

    def deserialize(self) -> int:
        """
        Return the miner's response (dummy_output) as the result of the
        dendrite.query() call.

        Returns:
        - int: The deserialized response, i.e. the value of dummy_output.

        Example:
            >>> dummy_instance = Dummy(dummy_input=4)
            >>> dummy_instance.dummy_output = 5
            >>> dummy_instance.deserialize()
            5
        """
        return self.dummy_output
504
505
class Challenge(bt.Synapse):
    """
    Challenge protocol built on bt.Synapse: the validator sends a challenge
    string and its URL, and the miner fills in challenge_response.
    """

    challenge: str
    challenge_url: str
    challenge_response: typing.Optional[str] = None

    def deserialize(self) -> str:
        """Return the miner's challenge_response as the result of the
        dendrite.query() call."""
        return self.challenge_response
525
526
527---
528target_repo/sybil/subnet_links.py
529---
# Canonical repository links for subnets sn0..sn37, one entry per subnet,
# in netuid order.  An empty url means the subnet has no public repo listed.
SUBNET_LINKS = [
    {"name": "sn0", "url": ""},
    {"name": "sn1", "url": "https://github.com/opentensor/prompting/"},
    {"name": "sn2", "url": "https://github.com/inference-labs-inc/omron-subnet/"},
    {"name": "sn3", "url": "https://github.com/myshell-ai/MyShell-TTS-Subnet/"},
    {"name": "sn4", "url": "https://github.com/manifold-inc/targon/"},
    {"name": "sn5", "url": "https://github.com/OpenKaito/openkaito/"},
    {"name": "sn6", "url": "https://github.com/amedeo-gigaver/infinite_games/"},
    {"name": "sn7", "url": "https://github.com/eclipsevortex/SubVortex/"},
    {"name": "sn8", "url": "https://github.com/taoshidev/proprietary-trading-network/"},
    {"name": "sn9", "url": "https://github.com/unconst/pretrain-subnet/"},
    {"name": "sn10", "url": "https://github.com/Sturdy-Subnet/sturdy-subnet/"},
    {"name": "sn11", "url": "https://github.com/impel-intelligence/dippy-bittensor-subnet/"},
    {"name": "sn12", "url": "https://github.com/backend-developers-ltd/ComputeHorde/"},
    {"name": "sn13", "url": "https://github.com/macrocosm-os/data-universe/"},
    {"name": "sn14", "url": "https://github.com/synapsec-ai/llm-defender-subnet/"},
    {"name": "sn15", "url": "https://github.com/blockchain-insights/blockchain-data-subnet/"},
    {"name": "sn16", "url": "https://github.com/eseckft/BitAds.ai/"},
    {"name": "sn17", "url": "https://github.com/404-Repo/three-gen-subnet/"},
    {"name": "sn18", "url": "https://github.com/corcel-api/cortex.t/"},
    {"name": "sn19", "url": "https://github.com/namoray/vision/"},
    {"name": "sn20", "url": "https://github.com/RogueTensor/bitagent_subnet/"},
    {"name": "sn21", "url": "https://github.com/omegalabsinc/omegalabs-anytoany-bittensor"},
    {"name": "sn22", "url": "https://github.com/Datura-ai/smart-scrape/"},
    {"name": "sn23", "url": "https://github.com/SocialTensor/SocialTensorSubnet/"},
    {"name": "sn24", "url": "https://github.com/omegalabsinc/omegalabs-bittensor-subnet/"},
    {"name": "sn25", "url": "https://github.com/macrocosm-os/folding/"},
    {"name": "sn26", "url": "https://github.com/TensorAlchemy/TensorAlchemy/"},
    {"name": "sn27", "url": "https://github.com/neuralinternet/compute-subnet/"},
    {"name": "sn28", "url": "https://github.com/foundryservices/snpOracle/"},
    {"name": "sn29", "url": "https://github.com/fractal-net/fractal/"},
    {"name": "sn30", "url": "https://github.com/Bettensor/bettensor/"},
    {"name": "sn31", "url": "https://github.com/nimaaghli/NASChain/"},
    {"name": "sn32", "url": "https://github.com/It-s-AI/llm-detection/"},
    {"name": "sn33", "url": "https://github.com/afterpartyai/bittensor-conversation-genome-project/"},
    {"name": "sn34", "url": "https://github.com/Healthi-Labs/healthi-subnet/"},
    {"name": "sn35", "url": "https://github.com/LogicNet-Subnet/LogicNet-prod/"},
    {"name": "sn36", "url": "https://github.com/HIP-Labs/HIP-Subnet/"},
    {"name": "sn37", "url": "https://github.com/macrocosm-os/finetuning/"},
]
621
622
623---
624target_repo/sybil/validator/__init__.py
625---
626from .forward import forward
627from .reward import reward
628
629
630---
631target_repo/sybil/validator/forward.py
632---
633# The MIT License (MIT)
634# Copyright Β© 2023 Yuma Rao
635# TODO(developer): Set your name
636# Copyright Β© 2023 <your name>
637
638# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
639# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
640# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
641# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
642
643# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
644# the Software.
645
646# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
647# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
648# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
649# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
650# DEALINGS IN THE SOFTWARE.
651
652import time
653import bittensor as bt
654import asyncio
655import aiohttp
656
657from sybil.protocol import Challenge
658from sybil.validator.utils import generate_challenges
659from sybil.utils.uids import get_random_uids
660from sybil.validator.reward import get_rewards
async def forward(self):
    """
    The forward function is called by the validator every time step.

    It is responsible for querying the network and scoring the responses:
    broadcast miner/validator info to the validator server (best-effort),
    sample miners, generate one challenge per miner, query them concurrently,
    score the responses and update the local scores.

    Args:
        self (:obj:`bittensor.neuron.Neuron`): The neuron object which contains all the necessary state for the validator.
    """

    # Collect miner and validator info from the metagraph.
    miners_info = []
    validators_info = []
    for uid in range(self.metagraph.n.item()):
        if self.metagraph.axons[uid].is_serving:
            miners_info.append({
                "uid": uid,
                "ip": self.metagraph.axons[uid].ip,
            })
        elif self.metagraph.validator_permit[uid]:
            validators_info.append({
                "uid": uid,
                "ip": self.metagraph.axons[uid].ip,
                "stake": self.metagraph.S[uid],
            })

    # Best-effort broadcast; failures are logged and do not abort the step.
    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(
                f"{self.validator_server_url}/protocol/broadcast/miners",
                json={"miners": miners_info},
            ) as resp:
                result = await resp.json()
            if result["success"]:
                bt.logging.info(f"Broadcasted miners info: {len(miners_info)} miners")
            else:
                bt.logging.error("Failed to broadcast miners info")

            async with session.post(
                f"{self.validator_server_url}/protocol/broadcast/validators",
                json={"validators": validators_info},
            ) as resp:
                result = await resp.json()
            if result["success"]:
                bt.logging.info(f"Broadcasted validators info: {len(validators_info)} validators")
            else:
                bt.logging.error("Failed to broadcast validators info")
    except Exception as e:
        bt.logging.error(f"Failed to broadcast miners or validators info: {e}")

    # get_random_uids is an example method, but you can replace it with your own.
    miner_uids = get_random_uids(self, k=self.config.neuron.sample_size)
    bt.logging.info(f"Miner uids: {miner_uids}")

    # Generate k challenges, one per sampled miner.
    challenges = await generate_challenges(
        miner_uids=miner_uids, validator_server_url=self.validator_server_url
    )
    # BUG FIX: check for failure BEFORE iterating the result -- the original
    # joined over `challenges` first, raising TypeError whenever it was None.
    if challenges is None:
        bt.logging.error("Failed to generate challenges")
        # BUG FIX: await instead of time.sleep(), which would block the
        # whole event loop inside this coroutine.
        await asyncio.sleep(10)
        return
    bt.logging.info(
        "Generated challenges:\n" + "\n".join(str(challenge) for challenge in challenges)
    )

    # Create concurrent queries, one for each challenge-miner pair.
    async_queries = [
        self.dendrite(
            axons=[self.metagraph.axons[uid]],
            synapse=challenge,
            deserialize=True,
        )
        for uid, challenge in zip(miner_uids, challenges)
    ]

    # Execute all queries concurrently.
    responses = await asyncio.gather(*async_queries)

    bt.logging.info(f"Received Raw responses: {responses}")
    # Flatten: each query targets a single axon, so each result is a 1-list.
    responses = [resp[0] for resp in responses]

    # Log the results for monitoring purposes.
    bt.logging.info(f"Received responses: {responses}")

    # Get scores for the responses.
    rewards = await get_rewards(
        [challenge.challenge for challenge in challenges],
        responses,
        validator_server_url=self.validator_server_url,
    )
    bt.logging.info(f"Scores: {rewards}")

    if rewards is None:
        bt.logging.error("Failed to get rewards")
        await asyncio.sleep(10)
        return

    # Update the scores based on the rewards. You may want to define your own
    # update_scores function for custom behavior.
    self.update_scores(rewards, miner_uids)

    # Non-blocking pause before the next step (was a blocking time.sleep).
    await asyncio.sleep(10)
758
759
760---
761target_repo/sybil/validator/reward.py
762---
763# The MIT License (MIT)
764# Copyright Β© 2023 Yuma Rao
765# TODO(developer): Set your name
766# Copyright Β© 2023 <your name>
767
768# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
769# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
770# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
771# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
772
773# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
774# the Software.
775
776# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
777# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
778# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
779# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
780# DEALINGS IN THE SOFTWARE.
781import numpy as np
782from typing import List
783import bittensor as bt
784import aiohttp
785import asyncio
786
def reward(query: int, response: int) -> float:
    """
    Score a miner's response to the dummy request: full credit (1.0) when the
    response equals twice the query, zero otherwise.

    Returns:
    - float: The reward value for the miner.
    """
    score = 1.0 if response == query * 2 else 0
    bt.logging.info(
        f"In rewards, query val: {query}, response val: {response}, rewards val: {score}"
    )
    return score
799
800
async def get_rewards(challenges: List[str], responses: List[str], validator_server_url: str) -> List[float]:
    """
    Fetch a score for each (challenge, response) pair from the validator
    server, concurrently.  A missing response or missing score yields 0.

    Returns:
    - List[float] of scores, or None when anything goes wrong.
    """
    try:
        async def fetch_score(challenge, response) -> float:
            bt.logging.info(f"Getting score at: {validator_server_url}/challenge/{challenge}/{response}")
            if response is None:
                return 0
            async with aiohttp.ClientSession() as session:
                async with session.get(
                    f"{validator_server_url}/challenge/{challenge}/{response}"
                ) as resp:
                    result = await resp.json()
                    # BUG FIX: the original indexed result["score"] before the
                    # '"score" in result' guard, so a payload without "score"
                    # raised KeyError into the broad except and returned None.
                    if "score" in result:
                        bt.logging.info(f"Score: {result['score']}")
                    else:
                        bt.logging.info(f"No score found in response: {result}")
                    return result.get("score", 0)

        # Concurrently fetch all scores.
        scores = await asyncio.gather(
            *[fetch_score(challenge, response) for challenge, response in zip(challenges, responses)]
        )

        # Normalize: a null score counts as 0.
        return [0 if score is None else score for score in scores]
    except Exception as e:
        # Use the configured logger (the original print() bypassed it).
        bt.logging.error(f"Error getting rewards: {e}")
        return None
833
834
835---
836target_repo/sybil/validator/utils.py
837---
838import asyncio
839import aiohttp
840from sybil.protocol import Challenge
841from typing import List
842import bittensor as bt
843
844
async def fetch(url):
    """GET ``url`` and return its JSON-decoded body."""
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            return await resp.json()
850
async def generate_challenges(miner_uids: List[int], validator_server_url: str) -> List[Challenge]:
    """
    Request one new challenge per miner uid from the validator server
    (appending ?miner_uid=<uid> to each request) and wrap each reply in a
    Challenge synapse.

    Returns:
    - List[Challenge], or None on any failure.
    """
    try:
        tasks = []
        for uid in miner_uids:
            bt.logging.info(f"Generating challenge for miner uid: {uid}")
            tasks.append(fetch(f"{validator_server_url}/challenge/new?miner_uid={uid}"))

        responses = await asyncio.gather(*tasks)

        return [
            Challenge(
                challenge=response["challenge"],
                challenge_url=response["challenge_url"],
            )
            for response in responses
        ]
    except Exception as e:
        # Use the configured logger (the original print() bypassed it).
        bt.logging.error(f"Error generating challenges: {e}")
        return None
873
874
875---
876target_repo/sybil/utils/__init__.py
877---
878from . import config
879from . import misc
880from . import uids
881
882
883---
884target_repo/sybil/utils/config.py
885---
886# The MIT License (MIT)
887# Copyright Β© 2023 Yuma Rao
888# Copyright Β© 2023 Opentensor Foundation
889
890# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
891# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
892# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
893# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
894
895# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
896# the Software.
897
898# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
899# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
900# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
901# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
902# DEALINGS IN THE SOFTWARE.
903
904import os
905import subprocess
906import argparse
907import bittensor as bt
908from .logging import setup_events_logger
909
910
def is_cuda_available():
    """Return "cuda" when an NVIDIA toolchain is detected, else "cpu".

    Probes nvidia-smi first, then nvcc; any probe failure is swallowed and
    falls through.  (Note: returns a device string, not a bool, despite the
    name.)
    """
    probes = (
        (["nvidia-smi", "-L"], "NVIDIA", subprocess.STDOUT),
        (["nvcc", "--version"], "release", None),
    )
    for cmd, marker, stderr in probes:
        try:
            output = subprocess.check_output(cmd, stderr=stderr).decode("utf-8")
        except Exception:
            continue
        if marker in output:
            return "cuda"
    return "cpu"
927
928
def check_config(cls, config: "bt.Config"):
    r"""Checks/validates the config namespace object.

    Builds the neuron's working directory path from the logging dir, wallet
    identity, netuid and neuron name, creates it if missing, and (unless
    disabled) registers the custom events logger as a primary logger.
    """
    bt.logging.check_config(config)

    full_path = os.path.expanduser(
        "{}/{}/{}/netuid{}/{}".format(
            config.logging.logging_dir,  # TODO: change from ~/.bittensor/miners to ~/.bittensor/neurons
            config.wallet.name,
            config.wallet.hotkey,
            config.netuid,
            config.neuron.name,
        )
    )
    # Fix: full_path was expanded twice (expanduser is idempotent but the
    # second call was redundant), and a stray print() was used for debugging.
    bt.logging.debug(f"Neuron full path: {full_path}")
    config.neuron.full_path = full_path
    # Fix: exist_ok=True already makes a prior os.path.exists() check
    # unnecessary (and the check/create pair was race-prone).
    os.makedirs(config.neuron.full_path, exist_ok=True)

    if not config.neuron.dont_save_events:
        # Add custom event logger for the events.
        events_logger = setup_events_logger(
            config.neuron.full_path, config.neuron.events_retention_size
        )
        bt.logging.register_primary_logger(events_logger.name)
953
954
def add_args(cls, parser):
    """
    Register the command-line arguments shared by every neuron type
    (miners and validators) on *parser*.
    """

    parser.add_argument("--netuid", type=int, default=1, help="Subnet netuid")

    # Device defaults to "cuda" when an NVIDIA toolchain is detected.
    parser.add_argument(
        "--neuron.device",
        type=str,
        default=is_cuda_available(),
        help="Device to run on.",
    )

    parser.add_argument(
        "--neuron.epoch_length",
        type=int,
        default=360,
        help="The default epoch length (how often we set weights, measured in 12 second blocks).",
    )

    parser.add_argument(
        "--mock",
        action="store_true",
        default=False,
        help="Mock neuron and all network components.",
    )

    # NOTE(review): type=str while the default is an int; a CLI-supplied
    # value stays a str — confirm downstream size handling copes with that.
    parser.add_argument(
        "--neuron.events_retention_size",
        type=str,
        default=2 * 1024 * 1024 * 1024,  # 2 GB
        help="Events retention size.",
    )

    parser.add_argument(
        "--neuron.dont_save_events",
        action="store_true",
        default=False,
        help="If set, we dont save events to a log file.",
    )

    parser.add_argument(
        "--wandb.off",
        action="store_true",
        default=False,
        help="Turn off wandb.",
    )

    parser.add_argument(
        "--wandb.offline",
        action="store_true",
        default=False,
        help="Runs wandb in offline mode.",
    )

    parser.add_argument(
        "--wandb.notes",
        type=str,
        default="",
        help="Notes to add to the wandb run.",
    )
1018
def add_miner_args(cls, parser):
    """Register miner-specific command-line arguments on *parser*."""

    parser.add_argument(
        "--neuron.name",
        type=str,
        default="miner",
        help="Trials for this neuron go in neuron.root / (wallet_cold - wallet_hot) / neuron.name. ",
    )

    parser.add_argument(
        "--blacklist.force_validator_permit",
        action="store_true",
        default=False,
        help="If set, we will force incoming requests to have a permit.",
    )

    parser.add_argument(
        "--blacklist.allow_non_registered",
        action="store_true",
        default=False,
        help="If set, miners will accept queries from non registered entities. (Dangerous!)",
    )

    parser.add_argument(
        "--wandb.project_name",
        type=str,
        help="Wandb project to log to.",
        default="sybil-miners",
    )

    parser.add_argument(
        "--wandb.entity",
        type=str,
        help="Wandb entity to log to.",
        default="opentensor-dev",
    )

    # Local HTTP server the miner forwards work to.
    parser.add_argument(
        "--miner.server",
        type=str,
        default="http://127.0.0.1:3001",
        help="The url of the miner server.",
    )
1063
1064
def add_validator_args(cls, parser):
    """Register validator-specific command-line arguments on *parser*."""

    parser.add_argument(
        "--neuron.name",
        type=str,
        default="validator",
        help="Trials for this neuron go in neuron.root / (wallet_cold - wallet_hot) / neuron.name. ",
    )

    parser.add_argument(
        "--neuron.timeout",
        type=float,
        default=60,
        help="The timeout for each forward call in seconds.",
    )

    parser.add_argument(
        "--neuron.num_concurrent_forwards",
        type=int,
        default=1,
        help="The number of concurrent forwards running at any time.",
    )

    parser.add_argument(
        "--neuron.sample_size",
        type=int,
        default=10,
        help="The number of miners to query in a single step.",
    )

    parser.add_argument(
        "--neuron.disable_set_weights",
        action="store_true",
        default=False,
        help="Disables setting weights.",
    )

    parser.add_argument(
        "--neuron.moving_average_alpha",
        type=float,
        default=0.1,
        help="Moving average alpha parameter, how much to add of the new observation.",
    )

    # Note: the validator needs to serve an Axon with their IP or they may
    # be blacklisted by the firewall of serving peers on the network.
    parser.add_argument(
        "--neuron.axon_off",
        "--axon_off",
        action="store_true",
        default=False,
        help="Set this flag to not attempt to serve an Axon.",
    )

    parser.add_argument(
        "--neuron.vpermit_tao_limit",
        type=int,
        default=4096,
        help="The maximum number of TAO allowed to query a validator with a vpermit.",
    )

    parser.add_argument(
        "--wandb.project_name",
        type=str,
        default="sybil-validators",
        help="The name of the project where you are sending the new run.",
    )

    # NOTE(review): help text duplicated from --wandb.project_name — likely a
    # copy/paste; kept verbatim to preserve --help output.
    parser.add_argument(
        "--wandb.entity",
        type=str,
        default="opentensor-dev",
        help="The name of the project where you are sending the new run.",
    )

    # Local HTTP server the validator fetches challenges from.
    parser.add_argument(
        "--validator_server_url",
        type=str,
        default="http://127.0.0.1:3000",
        help="The url of the validator server.",
    )
1147
1148
def config(cls):
    """
    Build and return the bt.Config for this neuron after registering the
    wallet, subtensor, logging, axon and neuron-specific (cls) arguments.
    """
    arg_parser = argparse.ArgumentParser()
    # Core bittensor argument groups first, then the neuron's own.
    for add in (
        bt.wallet.add_args,
        bt.subtensor.add_args,
        bt.logging.add_args,
        bt.axon.add_args,
        cls.add_args,
    ):
        add(arg_parser)
    return bt.config(arg_parser)
1160
1161
1162---
1163target_repo/sybil/utils/logging.py
1164---
1165import os
1166import logging
1167from logging.handlers import RotatingFileHandler
1168
# Custom log level for events, above WARNING (30) and below ERROR (40).
EVENTS_LEVEL_NUM = 38
# Number of rotated events.log backups to keep.
DEFAULT_LOG_BACKUP_COUNT = 10


def setup_events_logger(full_path, events_retention_size):
    """Configure and return the shared rotating "event" logger.

    Registers the custom ``EVENT`` level and a ``Logger.event`` convenience
    method, and attaches a ``RotatingFileHandler`` writing to
    ``<full_path>/events.log``.

    Args:
        full_path: Directory in which ``events.log`` is created (must exist).
        events_retention_size: Max size in bytes of each log file before rotation.

    Returns:
        logging.Logger: The configured ``"event"`` logger singleton.
    """
    logging.addLevelName(EVENTS_LEVEL_NUM, "EVENT")

    logger = logging.getLogger("event")
    logger.setLevel(EVENTS_LEVEL_NUM)

    def event(self, message, *args, **kws):
        if self.isEnabledFor(EVENTS_LEVEL_NUM):
            self._log(EVENTS_LEVEL_NUM, message, args, **kws)

    logging.Logger.event = event

    # Fix: logging.getLogger("event") returns a shared singleton, so calling
    # this function more than once previously stacked an extra file handler
    # each time, duplicating every event line. Attach the handler only once.
    if not logger.handlers:
        formatter = logging.Formatter(
            "%(asctime)s | %(levelname)s | %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S",
        )

        file_handler = RotatingFileHandler(
            os.path.join(full_path, "events.log"),
            maxBytes=events_retention_size,
            backupCount=DEFAULT_LOG_BACKUP_COUNT,
        )
        file_handler.setFormatter(formatter)
        file_handler.setLevel(EVENTS_LEVEL_NUM)
        logger.addHandler(file_handler)

    return logger
1200
1201
1202---
1203target_repo/sybil/utils/misc.py
1204---
1205# The MIT License (MIT)
1206# Copyright Β© 2023 Yuma Rao
1207# Copyright Β© 2023 Opentensor Foundation
1208
1209# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
1210# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
1211# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
1212# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
1213
1214# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
1215# the Software.
1216
1217# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
1218# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
1219# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
1220# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
1221# DEALINGS IN THE SOFTWARE.
1222
1223import time
1224import math
1225import hashlib as rpccheckhealth
1226from math import floor
1227from typing import Callable, Any
1228from functools import lru_cache, update_wrapper
1229
1230
1231# LRU Cache with TTL
# LRU Cache with TTL
def ttl_cache(maxsize: int = 128, typed: bool = False, ttl: int = -1):
    """Decorator: LRU-cache a function's results with a time-to-live.

    Entries are evicted by ``lru_cache`` LRU policy and additionally expire
    once the TTL window rolls over (the time bucket is folded into the cache
    key, so stale buckets simply stop being hit).

    Args:
        maxsize (int): Maximum number of cached entries. Defaults to 128.
        typed (bool): Cache arguments of different types separately
            (e.g. ``f(3)`` vs ``f(3.0)``). Defaults to False.
        ttl (int): Entry lifetime in seconds; non-positive means effectively
            permanent (a 65536-second bucket). Defaults to -1.

    Returns:
        Callable: A decorator caching the wrapped function's return values.

    Example:
        @ttl_cache(ttl=10)
        def get_data(param):
            # Expensive data retrieval operation
            return data
    """
    effective_ttl = 65536 if ttl <= 0 else ttl
    hash_gen = _ttl_hash_gen(effective_ttl)

    def decorator(func: Callable) -> Callable:
        # The time bucket participates in the cache key; when it advances,
        # previous entries can no longer be looked up.
        @lru_cache(maxsize, typed)
        def cached_with_bucket(time_bucket, *args, **kwargs):
            return func(*args, **kwargs)

        def inner(*args, **kwargs) -> Any:
            return cached_with_bucket(next(hash_gen), *args, **kwargs)

        return update_wrapper(inner, func)

    return decorator
1275
1276
1277def _ttl_hash_gen(seconds: int):
1278 """
1279 Internal generator function used by the `ttl_cache` decorator to generate a new hash value at regular
1280 time intervals specified by `seconds`.
1281
1282 Args:
1283 seconds (int): The number of seconds after which a new hash value will be generated.
1284
1285 Yields:
1286 int: A hash value that represents the current time interval.
1287
1288 This generator is used to create time-based hash values that enable the `ttl_cache` to determine
1289 whether cached entries are still valid or if they have expired and should be recalculated.
1290 """
1291 start_time = time.time()
1292 while True:
1293 yield floor((time.time() - start_time) / seconds)
1294
1295
# 12 seconds updating block.
@ttl_cache(maxsize=1, ttl=12)
def ttl_get_block(self) -> int:
    """
    Retrieves the current block number from the blockchain. This method is cached with a time-to-live (TTL)
    of 12 seconds, meaning that it will only refresh the block number from the blockchain at most every 12 seconds,
    reducing the number of calls to the underlying blockchain interface.

    Returns:
        int: The current block number on the blockchain.

    This method is useful for applications that need to access the current block number frequently and can
    tolerate a delay of up to 12 seconds for the latest information. By using a cache with TTL, the method
    efficiently reduces the workload on the blockchain interface.

    Example:
        current_block = ttl_get_block(self)

    Note: self here is the miner or validator instance
    """
    # maxsize=1 plus `self` in the cache key means repeated calls from the
    # same neuron within a 12s bucket return the cached block number.
    return self.subtensor.get_current_block()
1317
1318
1319---
1320target_repo/sybil/utils/uids.py
1321---
1322import random
1323import bittensor as bt
1324import numpy as np
1325from typing import List
1326
1327
def check_uid_availability(
    metagraph: "bt.metagraph.Metagraph", uid: int, vpermit_tao_limit: int
) -> bool:
    """Check if uid is available. The UID should be available if it is serving and has less than vpermit_tao_limit stake
    Args:
        metagraph (:obj: bt.metagraph.Metagraph): Metagraph object
        uid (int): uid to be checked
        vpermit_tao_limit (int): Validator permit tao limit
    Returns:
        bool: True if uid is available, False otherwise
    """
    # Only axons that are actively serving count as available.
    # NOTE: the validator-permit / stake filter implied by vpermit_tao_limit
    # is currently disabled; the parameter is kept for interface stability.
    return bool(metagraph.axons[uid].is_serving)
1349
1350
def get_random_uids(self, k: int, exclude: List[int] = None) -> np.ndarray:
    """Returns k available random uids from the metagraph.
    Args:
        k (int): Number of uids to return.
        exclude (List[int]): List of uids to exclude from the random sampling.
    Returns:
        uids (np.ndarray): Randomly sampled available uids.
    Notes:
        If `k` is larger than the number of available `uids`, set `k` to the number of available `uids`.
    """
    candidate_uids = []
    avail_uids = []

    for uid in range(self.metagraph.n.item()):
        uid_is_available = check_uid_availability(
            self.metagraph, uid, self.config.neuron.vpermit_tao_limit
        )
        uid_is_not_excluded = exclude is None or uid not in exclude

        if uid_is_available:
            avail_uids.append(uid)
            if uid_is_not_excluded:
                candidate_uids.append(uid)
    # If k is larger than the number of available uids, set k to the number of available uids.
    k = min(k, len(avail_uids))

    # Drop candidates sharing an IP with an earlier candidate, so one host
    # cannot occupy multiple sampling slots.
    unique_ips = set()
    unique_candidate_uids = []
    for uid in candidate_uids:
        ip = self.metagraph.axons[uid].ip
        if ip not in unique_ips:
            unique_ips.add(ip)
            unique_candidate_uids.append(uid)

    # If deduplication left fewer than k candidates, backfill from the other
    # available uids.
    # Fix: the original requested `k - len(candidate_uids)` (pre-dedup count)
    # here, which could be negative or exceed the backfill pool and make
    # random.sample raise ValueError; size the request from the post-dedup
    # list and clamp it to the pool.
    available_uids = list(unique_candidate_uids)
    if len(available_uids) < k:
        backfill_pool = [uid for uid in avail_uids if uid not in available_uids]
        needed = min(k - len(available_uids), len(backfill_pool))
        available_uids += random.sample(backfill_pool, needed)

    # Clamp once more in case the backfill could not fully satisfy k.
    uids = np.array(random.sample(available_uids, min(k, len(available_uids))))
    return uids
1393
1394
1395---
1396target_repo/sybil/api/__init__.py
1397---
1398
1399
1400---
1401target_repo/sybil/api/dummy.py
1402---
1403# The MIT License (MIT)
1404# Copyright Β© 2021 Yuma Rao
1405# Copyright Β© 2023 Opentensor Foundation
1406# Copyright Β© 2023 Opentensor Technologies Inc
1407
1408# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
1409# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
1410# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
1411# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
1412
1413# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
1414# the Software.
1415
1416# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
1417# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
1418# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
1419# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
1420# DEALINGS IN THE SOFTWARE.
1421
1422import bittensor as bt
1423from typing import List, Optional, Union, Any, Dict
1424from sybil.protocol import Dummy
1425from bittensor.subnets import SubnetsAPI
1426
1427
class DummyAPI(SubnetsAPI):
    """Subnet API client for the Dummy protocol.

    Builds Dummy synapses to send to the network and collects the
    ``dummy_output`` values from successful responses.
    """

    def __init__(self, wallet: "bt.wallet"):
        super().__init__(wallet)
        self.netuid = 33
        self.name = "dummy"

    def prepare_synapse(self, dummy_input: int) -> Dummy:
        """Build the Dummy synapse carrying ``dummy_input``."""
        # Fix: `synapse` was referenced before assignment (NameError on every
        # call); construct the Dummy synapse first.
        synapse = Dummy(dummy_input=dummy_input)
        return synapse

    def process_responses(
        self, responses: List[Union["bt.Synapse", Any]]
    ) -> List[int]:
        """Return dummy_output from every successful (HTTP 200) response."""
        outputs = []
        for response in responses:
            if response.dendrite.status_code != 200:
                continue
            # Fix: the original `return outputs.append(...)` returned None
            # after the first successful response; append and keep looping.
            outputs.append(response.dummy_output)
        return outputs
1447
1448
1449---
1450target_repo/sybil/api/get_query_axons.py
1451---
1452# The MIT License (MIT)
1453# Copyright Β© 2021 Yuma Rao
1454# Copyright Β© 2023 Opentensor Foundation
1455# Copyright Β© 2023 Opentensor Technologies Inc
1456
1457# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
1458# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
1459# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
1460# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
1461
1462# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
1463# the Software.
1464
1465# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
1466# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
1467# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
1468# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
1469# DEALINGS IN THE SOFTWARE.
1470import numpy as np
1471import random
1472import bittensor as bt
1473
1474
async def ping_uids(dendrite, metagraph, uids, timeout=3):
    """
    Pings a list of UIDs to check their availability on the Bittensor network.

    Args:
        dendrite (bittensor.dendrite): The dendrite instance to use for pinging nodes.
        metagraph (bittensor.metagraph): The metagraph instance containing network information.
        uids (list): A list of UIDs (unique identifiers) to ping.
        timeout (int, optional): The timeout in seconds for each ping. Defaults to 3.

    Returns:
        tuple: A tuple containing two lists:
            - The first list contains UIDs that were successfully pinged.
            - The second list contains UIDs that failed to respond.
    """
    axons = [metagraph.axons[uid] for uid in uids]
    successful_uids = []
    failed_uids = []
    try:
        responses = await dendrite(
            axons,
            bt.Synapse(),  # TODO: potentially get the synapses available back?
            deserialize=False,
            timeout=timeout,
        )
        # Classify each uid by its response status in a single pass.
        for uid, response in zip(uids, responses):
            if response.dendrite.status_code == 200:
                successful_uids.append(uid)
            else:
                failed_uids.append(uid)
    except Exception as e:
        # A dendrite-level failure marks every queried uid as failed.
        bt.logging.error(f"Dendrite ping failed: {e}")
        successful_uids = []
        failed_uids = uids
    bt.logging.debug(f"ping() successful uids: {successful_uids}")
    bt.logging.debug(f"ping() failed uids : {failed_uids}")
    return successful_uids, failed_uids
1515
1516
async def get_query_api_nodes(dendrite, metagraph, n=0.1, timeout=3):
    """
    Fetches the available API nodes to query for the particular subnet.

    Args:
        wallet (bittensor.wallet): The wallet instance to use for querying nodes.
        metagraph (bittensor.metagraph): The metagraph instance containing network information.
        n (float, optional): The fraction of top nodes to consider based on stake. Defaults to 0.1.
        timeout (int, optional): The timeout in seconds for pinging nodes. Defaults to 3.

    Returns:
        list: A list of UIDs representing the available API nodes.
    """
    bt.logging.debug(
        f"Fetching available API nodes for subnet {metagraph.netuid}"
    )
    # UIDs with non-zero validator trust.
    vtrust_uids = [
        uid.item()
        for uid in metagraph.uids
        if metagraph.validator_trust[uid] > 0
    ]
    # UIDs in the top-n stake fraction.
    stake_cutoff = np.quantile(metagraph.S, 1 - n)
    top_uids = np.where(metagraph.S > stake_cutoff)[0].tolist()
    # Candidates must have both trust and stake; keep only reachable ones.
    candidates = set(top_uids) & set(vtrust_uids)
    query_uids, _ = await ping_uids(
        dendrite, metagraph, list(candidates), timeout=timeout
    )
    bt.logging.debug(
        f"Available API node UIDs for subnet {metagraph.netuid}: {query_uids}"
    )
    # Cap at three nodes to bound fan-out.
    if len(query_uids) > 3:
        query_uids = random.sample(query_uids, 3)
    return query_uids
1551
1552
async def get_query_api_axons(
    wallet, metagraph=None, n=0.1, timeout=3, uids=None
):
    """
    Retrieves the axons of query API nodes based on their availability and stake.

    Args:
        wallet (bittensor.wallet): The wallet instance to use for querying nodes.
        metagraph (bittensor.metagraph, optional): The metagraph instance containing network information.
        n (float, optional): The fraction of top nodes to consider based on stake. Defaults to 0.1.
        timeout (int, optional): The timeout in seconds for pinging nodes. Defaults to 3.
        uids (Union[List[int], int], optional): The specific UID(s) of the API node(s) to query. Defaults to None.

    Returns:
        list: A list of axon objects for the available API nodes.
    """
    dendrite = bt.dendrite(wallet=wallet)

    if metagraph is None:
        # Default to subnet 21's metagraph when none is supplied.
        metagraph = bt.metagraph(netuid=21)

    if uids is None:
        # No explicit uids: discover reachable, trusted, high-stake nodes.
        query_uids = await get_query_api_nodes(
            dendrite, metagraph, n=n, timeout=timeout
        )
    else:
        # Accept either a single uid or a list of uids.
        query_uids = [uids] if isinstance(uids, int) else uids
    return [metagraph.axons[uid] for uid in query_uids]
1581
1582
1583---
1584target_repo/sybil/base/__init__.py
1585---
1586
1587
1588---
1589target_repo/sybil/base/miner.py
1590---
1591# The MIT License (MIT)
1592# Copyright Β© 2023 Yuma Rao
1593
1594# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
1595# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
1596# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
1597# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
1598
1599# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
1600# the Software.
1601
1602# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
1603# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
1604# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
1605# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
1606# DEALINGS IN THE SOFTWARE.
1607
1608import time
1609import asyncio
1610import threading
1611import argparse
1612import traceback
1613
1614import bittensor as bt
1615
1616from sybil.base.neuron import BaseNeuron
1617from sybil.utils.config import add_miner_args
1618
1619from typing import Union
1620
1621
class BaseMinerNeuron(BaseNeuron):
    """
    Base class for Bittensor miners.

    Owns the axon that serves validator requests and an optional background
    thread that keeps the neuron synced with the chain (metagraph resync and
    epoch-based weight setting). Subclasses must provide `forward`,
    `blacklist` and `priority` handlers, which are attached to the axon.
    """

    neuron_type: str = "MinerNeuron"

    @classmethod
    def add_args(cls, parser: argparse.ArgumentParser):
        # Register base-neuron args first, then miner-specific args.
        super().add_args(parser)
        add_miner_args(cls, parser)

    def __init__(self, config=None):
        super().__init__(config=config)

        # Warn if allowing incoming requests from anyone.
        if not self.config.blacklist.force_validator_permit:
            bt.logging.warning(
                "You are allowing non-validators to send requests to your miner. This is a security risk."
            )
        if self.config.blacklist.allow_non_registered:
            bt.logging.warning(
                "You are allowing non-registered entities to send requests to your miner. This is a security risk."
            )
        # The axon handles request processing, allowing validators to send this miner requests.
        # NOTE(review): self.config is normally a bt.Config instance by this
        # point, so the callable() guard looks purely defensive — confirm.
        self.axon = bt.axon(
            wallet=self.wallet,
            config=self.config() if callable(self.config) else self.config,
        )

        # Base URL of the local miner HTTP server (--miner.server).
        self.miner_server = self.config.miner.server

        # Attach determiners which functions are called when servicing a request.
        bt.logging.info(f"Attaching forward function to miner axon.")
        self.axon.attach(
            forward_fn=self.forward,
            blacklist_fn=self.blacklist,
            priority_fn=self.priority,
        )
        bt.logging.info(f"Axon created: {self.axon}")

        # Instantiate runners: state for the background run() thread.
        self.should_exit: bool = False
        self.is_running: bool = False
        self.thread: Union[threading.Thread, None] = None
        self.lock = asyncio.Lock()

    def run(self):
        """
        Initiates and manages the main loop for the miner on the Bittensor network. The main loop handles graceful shutdown on keyboard interrupts and logs unforeseen errors.

        This function performs the following primary tasks:
        1. Check for registration on the Bittensor network.
        2. Starts the miner's axon, making it active on the network.
        3. Periodically resynchronizes with the chain; updating the metagraph with the latest network state and setting weights.

        The miner continues its operations until `should_exit` is set to True or an external interruption occurs.
        During each epoch of its operation, the miner waits for new blocks on the Bittensor network, updates its
        knowledge of the network (metagraph), and sets its weights. This process ensures the miner remains active
        and up-to-date with the network's latest state.

        Note:
            - The function leverages the global configurations set during the initialization of the miner.
            - The miner's axon serves as its interface to the Bittensor network, handling incoming and outgoing requests.

        Raises:
            KeyboardInterrupt: If the miner is stopped by a manual interruption.
            Exception: For unforeseen errors during the miner's operation, which are logged for diagnosis.
        """

        # Check that miner is registered on the network.
        self.sync()

        # Serve passes the axon information to the network + netuid we are hosting on.
        # This will auto-update if the axon port or external ip have changed.
        bt.logging.info(
            f"Serving miner axon {self.axon} on network: {self.config.subtensor.chain_endpoint} with netuid: {self.config.netuid}"
        )
        self.axon.serve(netuid=self.config.netuid, subtensor=self.subtensor)

        # Start starts the miner's axon, making it active on the network.
        self.axon.start()

        bt.logging.info(f"Miner starting at block: {self.block}")

        # This loop maintains the miner's operations until intentionally stopped.
        try:
            while not self.should_exit:
                # Busy-wait (1s polls) until a full epoch of blocks has
                # elapsed since our last metagraph update.
                while (
                    self.block - self.metagraph.last_update[self.uid]
                    < self.config.neuron.epoch_length
                ):
                    # Wait before checking again.
                    time.sleep(1)

                # Check if we should exit.
                if self.should_exit:
                    break

                # Sync metagraph and potentially set weights.
                self.sync()
                self.step += 1

        # If someone intentionally stops the miner, it'll safely terminate operations.
        except KeyboardInterrupt:
            self.axon.stop()
            bt.logging.success("Miner killed by keyboard interrupt.")
            exit()

        # In case of unforeseen errors, log the traceback; note that run()
        # returns afterwards — the loop is NOT resumed.
        except Exception as e:
            bt.logging.error(traceback.format_exc())

    def run_in_background_thread(self):
        """
        Starts the miner's operations in a separate background thread.
        This is useful for non-blocking operations.
        """
        if not self.is_running:
            bt.logging.debug("Starting miner in background thread.")
            self.should_exit = False
            # daemon=True so the thread cannot keep the process alive on exit.
            self.thread = threading.Thread(target=self.run, daemon=True)
            self.thread.start()
            self.is_running = True
            bt.logging.debug("Started")

    def stop_run_thread(self):
        """
        Stops the miner's operations that are running in the background thread.
        """
        if self.is_running:
            bt.logging.debug("Stopping miner in background thread.")
            self.should_exit = True
            if self.thread is not None:
                # Bounded join: give the run loop up to 5s to observe
                # should_exit and wind down.
                self.thread.join(5)
            self.is_running = False
            bt.logging.debug("Stopped")

    def __enter__(self):
        """
        Starts the miner's operations in a background thread upon entering the context.
        This method facilitates the use of the miner in a 'with' statement.
        """
        self.run_in_background_thread()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        """
        Stops the miner's background operations upon exiting the context.
        This method facilitates the use of the miner in a 'with' statement.

        Args:
            exc_type: The type of the exception that caused the context to be exited.
                      None if the context was exited without an exception.
            exc_value: The instance of the exception that caused the context to be exited.
                       None if the context was exited without an exception.
            traceback: A traceback object encoding the stack trace.
                       None if the context was exited without an exception.
        """
        self.stop_run_thread()

    def resync_metagraph(self):
        """Resyncs the metagraph and updates the hotkeys and moving averages based on the new metagraph."""
        bt.logging.info("resync_metagraph()")

        # Sync the metagraph.
        self.metagraph.sync(subtensor=self.subtensor)
1789
1790
1791---
1792target_repo/sybil/base/neuron.py
1793---
1794# The MIT License (MIT)
1795# Copyright Β© 2023 Yuma Rao
1796
1797# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
1798# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
1799# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
1800# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
1801
1802# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
1803# the Software.
1804
1805# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
1806# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
1807# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
1808# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
1809# DEALINGS IN THE SOFTWARE.
1810
1811import copy
1812import typing
1813
1814import bittensor as bt
1815
1816from abc import ABC, abstractmethod
1817
1818# Sync calls set weights and also resyncs the metagraph.
1819from sybil.utils.config import check_config, add_args, config
1820from sybil.utils.misc import ttl_get_block
1821from sybil import __spec_version__ as spec_version
1822from sybil.mock import MockSubtensor, MockMetagraph
1823
1824
class BaseNeuron(ABC):
    """
    Base class for Bittensor miners. This class is abstract and should be inherited by a subclass. It contains the core logic for all neurons; validators and miners.

    In addition to creating a wallet, subtensor, and metagraph, this class also handles the synchronization of the network state via a basic checkpointing mechanism based on epoch length.
    """

    neuron_type: str = "BaseNeuron"

    @classmethod
    def check_config(cls, config: "bt.Config"):
        """Validate the merged configuration; delegates to sybil.utils.config.check_config."""
        check_config(cls, config)

    @classmethod
    def add_args(cls, parser):
        """Register this neuron's command-line arguments on ``parser``; delegates to sybil.utils.config.add_args."""
        add_args(cls, parser)

    @classmethod
    def config(cls):
        """Build and return the default configuration for this neuron class; delegates to sybil.utils.config.config."""
        return config(cls)

    # Core Bittensor objects; assigned in __init__ (mock or real variants).
    subtensor: "bt.subtensor"
    wallet: "bt.wallet"
    metagraph: "bt.metagraph"
    # Version key reported to the chain when setting weights.
    spec_version: int = spec_version

    @property
    def block(self):
        """Current chain block number, fetched through a TTL-cached helper to limit chain queries."""
        return ttl_get_block(self)

    def __init__(self, config=None):
        """Merge configuration, then construct wallet, subtensor and metagraph.

        Args:
            config: Optional ``bt.Config`` whose values override the class
                defaults produced by :meth:`config`.
        """
        base_config = copy.deepcopy(config or BaseNeuron.config())
        # NOTE: this assignment shadows the `config` classmethod with an
        # instance attribute; all later `self.config` accesses read the
        # merged configuration object.
        self.config = self.config()
        self.config.merge(base_config)
        self.check_config(self.config)

        # Set up logging with the provided configuration.
        bt.logging.set_config(config=self.config.logging)

        # If a gpu is required, set the device to cuda:N (e.g. cuda:0)
        self.device = self.config.neuron.device

        # Log the configuration for reference.
        bt.logging.info(self.config)

        # Build Bittensor objects
        # These are core Bittensor classes to interact with the network.
        bt.logging.info("Setting up bittensor objects.")

        # The wallet holds the cryptographic key pairs for the miner.
        # In mock mode every chain interaction is simulated locally.
        if self.config.mock:
            self.wallet = bt.MockWallet(config=self.config)
            self.subtensor = MockSubtensor(
                self.config.netuid, wallet=self.wallet
            )
            self.metagraph = MockMetagraph(
                self.config.netuid, subtensor=self.subtensor
            )
        else:
            self.wallet = bt.wallet(config=self.config)
            self.subtensor = bt.subtensor(config=self.config)
            self.metagraph = self.subtensor.metagraph(self.config.netuid)

        bt.logging.info(f"Wallet: {self.wallet}")
        bt.logging.info(f"Subtensor: {self.subtensor}")
        bt.logging.info(f"Metagraph: {self.metagraph}")

        # Endpoint of the external validator server, taken verbatim from
        # config; presumably consumed by subclasses — TODO confirm usage.
        self.validator_server_url = self.config.validator_server_url

        # Check if the miner is registered on the Bittensor network before proceeding further.
        self.check_registered()

        # Each miner gets a unique identity (UID) in the network for differentiation.
        self.uid = self.metagraph.hotkeys.index(
            self.wallet.hotkey.ss58_address
        )
        bt.logging.info(
            f"Running neuron on subnet: {self.config.netuid} with uid {self.uid} using network: {self.subtensor.chain_endpoint}"
        )
        # Step counter, advanced by the subclass main loop; gates weight setting.
        self.step = 0

    @abstractmethod
    async def forward(self, synapse: bt.Synapse) -> bt.Synapse:
        """Handle an incoming synapse request; must be implemented by subclasses."""
        ...

    @abstractmethod
    def run(self):
        """Main loop of the neuron; must be implemented by subclasses."""
        ...

    def sync(self):
        """
        Wrapper for synchronizing the state of the network for the given miner or validator.
        """
        # Ensure miner or validator hotkey is still registered on the network.
        self.check_registered()

        if self.should_sync_metagraph():
            self.resync_metagraph()

        if self.should_set_weights():
            self.set_weights()

        # Always save state.
        self.save_state()

    def check_registered(self):
        """Terminate the process if this wallet's hotkey is not registered on the configured netuid."""
        # --- Check for registration.
        if not self.subtensor.is_hotkey_registered(
            netuid=self.config.netuid,
            hotkey_ss58=self.wallet.hotkey.ss58_address,
        ):
            bt.logging.error(
                f"Wallet: {self.wallet} is not registered on netuid {self.config.netuid}."
                f" Please register the hotkey using `btcli subnets register` before trying again"
            )
            exit()

    def should_sync_metagraph(self):
        """
        Check if enough epoch blocks have elapsed since the last checkpoint to sync.
        """
        return (
            self.block - self.metagraph.last_update[self.uid]
        ) > self.config.neuron.epoch_length

    def should_set_weights(self) -> bool:
        """Return True when this neuron is due (and allowed) to set weights on chain."""
        # Don't set weights on initialization.
        if self.step == 0:
            return False

        # Check if enough epoch blocks have elapsed since the last epoch.
        if self.config.neuron.disable_set_weights:
            return False

        # Define appropriate logic for when set weights.
        return (
            (self.block - self.metagraph.last_update[self.uid])
            > self.config.neuron.epoch_length
            and self.neuron_type != "MinerNeuron"
        )  # don't set weights if you're a miner

    def save_state(self):
        """Persist neuron state; intentionally a no-op in the base class (subclasses override)."""
        # bt.logging.warning(
        #     "save_state() not implemented for this neuron. You can implement this function to save model checkpoints or other useful data."
        # )
        pass

    def load_state(self):
        """Load neuron state; the base implementation only warns that nothing is loaded."""
        bt.logging.warning(
            "load_state() not implemented for this neuron. You can implement this function to load model checkpoints or other useful data."
        )
1976
1977
1978---
1979target_repo/sybil/base/validator.py
1980---
1981# The MIT License (MIT)
1982# Copyright Β© 2023 Yuma Rao
1983# TODO(developer): Set your name
1984# Copyright Β© 2023 <your name>
1985
1986# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
1987# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
1988# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
1989# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
1990
1991# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
1992# the Software.
1993
1994# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
1995# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
1996# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
1997# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
1998# DEALINGS IN THE SOFTWARE.
1999
2000
2001import copy
2002import numpy as np
2003import asyncio
2004import argparse
2005import threading
2006import bittensor as bt
2007
2008from typing import List, Union
2009from traceback import print_exception
2010
2011from sybil.base.neuron import BaseNeuron
2012from sybil.base.utils.weight_utils import (
2013 process_weights_for_netuid,
2014 convert_weights_and_uids_for_emit,
2015) # TODO: Replace when bittensor switches to numpy
2016from sybil.mock import MockDendrite
2017from sybil.utils.config import add_validator_args
2018
2019
class BaseValidatorNeuron(BaseNeuron):
    """
    Base class for Bittensor validators. Your validator should inherit from this class.

    Provides dendrite construction, moving-average score keeping, periodic
    metagraph resyncs, on-chain weight setting, and a background-thread
    runner usable as a context manager (``with validator: ...``).
    """

    neuron_type: str = "ValidatorNeuron"

    @classmethod
    def add_args(cls, parser: argparse.ArgumentParser):
        """Register base-neuron arguments plus validator-specific arguments."""
        super().add_args(parser)
        add_validator_args(cls, parser)

    def __init__(self, config=None):
        super().__init__(config=config)

        # Save a copy of the hotkeys to local memory so resync_metagraph()
        # can detect uids whose hotkey was replaced on chain.
        self.hotkeys = copy.deepcopy(self.metagraph.hotkeys)

        # Dendrite lets us send messages to other nodes (axons) in the network.
        if self.config.mock:
            self.dendrite = MockDendrite(wallet=self.wallet)
        else:
            self.dendrite = bt.dendrite(wallet=self.wallet)
        bt.logging.info(f"Dendrite: {self.dendrite}")

        # Set up initial scoring weights for validation, one slot per uid.
        bt.logging.info("Building validation weights.")
        self.scores = np.zeros(self.metagraph.n, dtype=np.float32)

        # Init sync with the network. Updates the metagraph.
        self.sync()

        # # Serve axon to enable external connections.
        # if not self.config.neuron.axon_off:
        #     self.serve_axon()
        # else:
        #     bt.logging.warning("axon off, not serving ip to chain.")

        # Create asyncio event loop to manage async tasks.
        self.loop = asyncio.get_event_loop()

        # Instantiate runners
        self.should_exit: bool = False
        self.is_running: bool = False
        self.thread: Union[threading.Thread, None] = None
        self.lock = asyncio.Lock()

    def serve_axon(self):
        """Serve axon to enable external connections."""

        bt.logging.info("serving ip to chain...")
        try:
            self.axon = bt.axon(wallet=self.wallet, config=self.config)

            try:
                self.subtensor.serve_axon(
                    netuid=self.config.netuid,
                    axon=self.axon,
                )
                bt.logging.info(
                    f"Running validator {self.axon} on network: {self.config.subtensor.chain_endpoint} with netuid: {self.config.netuid}"
                )
            except Exception as e:
                # Best effort: publishing the axon may fail transiently; the
                # validator can still operate without serving an IP.
                bt.logging.error(f"Failed to serve Axon with exception: {e}")

        except Exception as e:
            bt.logging.error(
                f"Failed to create Axon initialize with exception: {e}"
            )

    async def concurrent_forward(self):
        """Run ``neuron.num_concurrent_forwards`` forward passes concurrently."""
        coroutines = [
            self.forward()
            for _ in range(self.config.neuron.num_concurrent_forwards)
        ]
        await asyncio.gather(*coroutines)

    def run(self):
        """
        Initiates and manages the main loop for the miner on the Bittensor network. The main loop handles graceful shutdown on keyboard interrupts and logs unforeseen errors.

        This function performs the following primary tasks:
        1. Check for registration on the Bittensor network.
        2. Continuously forwards queries to the miners on the network, rewarding their responses and updating the scores accordingly.
        3. Periodically resynchronizes with the chain; updating the metagraph with the latest network state and setting weights.

        The essence of the validator's operations is in the forward function, which is called every step. The forward function is responsible for querying the network and scoring the responses.

        Note:
            - The function leverages the global configurations set during the initialization of the miner.
            - The miner's axon serves as its interface to the Bittensor network, handling incoming and outgoing requests.

        Raises:
            KeyboardInterrupt: If the miner is stopped by a manual interruption.
            Exception: For unforeseen errors during the miner's operation, which are logged for diagnosis.
        """

        # Check that validator is registered on the network.
        self.sync()

        bt.logging.info(f"Validator starting at block: {self.block}")

        # This loop maintains the validator's operations until intentionally stopped.
        try:
            while True:
                bt.logging.info(f"step({self.step}) block({self.block})")

                # Run multiple forwards concurrently.
                self.loop.run_until_complete(self.concurrent_forward())

                # Check if we should exit.
                if self.should_exit:
                    break

                # Sync metagraph and potentially set weights.
                self.sync()

                self.step += 1

        # If someone intentionally stops the validator, it'll safely terminate operations.
        except KeyboardInterrupt:
            # BUGFIX: self.axon only exists if serve_axon() was called (it is
            # commented out in __init__); guard so a keyboard interrupt still
            # shuts down cleanly instead of raising AttributeError.
            if hasattr(self, "axon"):
                self.axon.stop()
            bt.logging.success("Validator killed by keyboard interrupt.")
            exit()

        # In case of unforeseen errors, the validator will log the error and continue operations.
        except Exception as err:
            bt.logging.error(f"Error during validation: {str(err)}")
            bt.logging.debug(
                str(print_exception(type(err), err, err.__traceback__))
            )

    def run_in_background_thread(self):
        """
        Starts the validator's operations in a background thread upon entering the context.
        This method facilitates the use of the validator in a 'with' statement.
        """
        if not self.is_running:
            bt.logging.debug("Starting validator in background thread.")
            self.should_exit = False
            self.thread = threading.Thread(target=self.run, daemon=True)
            self.thread.start()
            self.is_running = True
            bt.logging.debug("Started")

    def stop_run_thread(self):
        """
        Stops the validator's operations that are running in the background thread.
        """
        if self.is_running:
            bt.logging.debug("Stopping validator in background thread.")
            self.should_exit = True
            self.thread.join(5)
            self.is_running = False
            bt.logging.debug("Stopped")

    def __enter__(self):
        self.run_in_background_thread()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        """
        Stops the validator's background operations upon exiting the context.
        This method facilitates the use of the validator in a 'with' statement.

        Args:
            exc_type: The type of the exception that caused the context to be exited.
                      None if the context was exited without an exception.
            exc_value: The instance of the exception that caused the context to be exited.
                       None if the context was exited without an exception.
            traceback: A traceback object encoding the stack trace.
                       None if the context was exited without an exception.
        """
        # Delegate to stop_run_thread() rather than duplicating its shutdown
        # logic (the previous copy could drift out of sync).
        self.stop_run_thread()

    def set_weights(self):
        """
        Sets the validator weights to the metagraph hotkeys based on the scores it has received from the miners. The weights determine the trust and incentive level the validator assigns to miner nodes on the network.
        """

        # Check if self.scores contains any NaN values and log a warning if it does.
        if np.isnan(self.scores).any():
            bt.logging.warning(
                f"Scores contain NaN values. This may be due to a lack of responses from miners, or a bug in your reward functions."
            )

        # Calculate the average reward for each uid across non-zero values.
        # Replace any NaN values with 0.
        # Compute the norm of the scores
        norm = np.linalg.norm(self.scores, ord=1, axis=0, keepdims=True)

        # Check if the norm is zero or contains NaN values
        if np.any(norm == 0) or np.isnan(norm).any():
            norm = np.ones_like(norm)  # Avoid division by zero or NaN

        # Compute raw_weights safely
        raw_weights = self.scores / norm

        bt.logging.debug("raw_weights", raw_weights)
        bt.logging.debug("raw_weight_uids", str(self.metagraph.uids.tolist()))
        # Process the raw weights to final_weights via subtensor limitations.
        (
            processed_weight_uids,
            processed_weights,
        ) = process_weights_for_netuid(
            uids=self.metagraph.uids,
            weights=raw_weights,
            netuid=self.config.netuid,
            subtensor=self.subtensor,
            metagraph=self.metagraph,
        )
        bt.logging.debug("processed_weights", processed_weights)
        bt.logging.debug("processed_weight_uids", processed_weight_uids)

        # Convert to uint16 weights and uids.
        (
            uint_uids,
            uint_weights,
        ) = convert_weights_and_uids_for_emit(
            uids=processed_weight_uids, weights=processed_weights
        )
        bt.logging.debug("uint_weights", uint_weights)
        bt.logging.debug("uint_uids", uint_uids)

        # Set the weights on chain via our subtensor connection.
        # Retry 20 times if it fails
        for _ in range(20):
            result, msg = self.subtensor.set_weights(
                wallet=self.wallet,
                netuid=self.config.netuid,
                uids=uint_uids,
                weights=uint_weights,
                wait_for_finalization=False,
                wait_for_inclusion=False,
                version_key=self.spec_version,
            )
            if result is True:
                bt.logging.info("set_weights on chain successfully!")
                break
            else:
                bt.logging.error("set_weights failed. Retrying... ", msg)
        else:
            # All retries exhausted without a success; surface it loudly
            # instead of failing silently.
            bt.logging.error("set_weights failed after 20 attempts; giving up.")

    def resync_metagraph(self):
        """Resyncs the metagraph and updates the hotkeys and moving averages based on the new metagraph."""
        bt.logging.info("resync_metagraph()")

        # Copies state of metagraph before syncing.
        previous_metagraph = copy.deepcopy(self.metagraph)

        # Sync the metagraph.
        self.metagraph.sync(subtensor=self.subtensor)

        # Check if the metagraph axon info has changed.
        if previous_metagraph.axons == self.metagraph.axons:
            return

        bt.logging.info(
            "Metagraph updated, re-syncing hotkeys, dendrite pool and moving averages"
        )
        # Zero out all hotkeys that have been replaced.
        for uid, hotkey in enumerate(self.hotkeys):
            if hotkey != self.metagraph.hotkeys[uid]:
                self.scores[uid] = 0  # hotkey has been replaced

        # Check to see if the metagraph has changed size.
        # If so, we need to add new hotkeys and moving averages.
        if len(self.hotkeys) < len(self.metagraph.hotkeys):
            # Update the size of the moving average scores.
            new_moving_average = np.zeros((self.metagraph.n))
            min_len = min(len(self.hotkeys), len(self.scores))
            new_moving_average[:min_len] = self.scores[:min_len]
            self.scores = new_moving_average

        # Update the hotkeys.
        self.hotkeys = copy.deepcopy(self.metagraph.hotkeys)

    def update_scores(self, rewards: np.ndarray, uids: List[int]):
        """Performs exponential moving average on the scores based on the rewards received from the miners."""

        # Check if rewards contains NaN values.
        if np.isnan(rewards).any():
            bt.logging.warning(f"NaN values detected in rewards: {rewards}")
            # Replace any NaN values in rewards with 0.
            rewards = np.nan_to_num(rewards, nan=0)

        # Ensure rewards is a numpy array.
        rewards = np.asarray(rewards)

        # Check if `uids` is already a numpy array and copy it to avoid the warning.
        if isinstance(uids, np.ndarray):
            uids_array = uids.copy()
        else:
            uids_array = np.array(uids)

        # Handle edge case: If either rewards or uids_array is empty.
        if rewards.size == 0 or uids_array.size == 0:
            bt.logging.info(f"rewards: {rewards}, uids_array: {uids_array}")
            bt.logging.warning(
                "Either rewards or uids_array is empty. No updates will be performed."
            )
            return

        # Check if sizes of rewards and uids_array match.
        if rewards.size != uids_array.size:
            raise ValueError(
                f"Shape mismatch: rewards array of shape {rewards.shape} "
                f"cannot be broadcast to uids array of shape {uids_array.shape}"
            )

        # Compute forward pass rewards, assumes uids are mutually exclusive.
        # shape: [ metagraph.n ]
        scattered_rewards: np.ndarray = np.zeros_like(self.scores)
        scattered_rewards[uids_array] = rewards
        # BUGFIX: log the scattered array, not the raw rewards.
        bt.logging.debug(f"Scattered rewards: {scattered_rewards}")

        # Update scores with rewards produced by this step.
        # shape: [ metagraph.n ]
        alpha: float = self.config.neuron.moving_average_alpha
        self.scores: np.ndarray = (
            alpha * scattered_rewards + (1 - alpha) * self.scores
        )
        bt.logging.debug(f"Updated moving avg scores: {self.scores}")

    def save_state(self):
        """Saves the state of the validator to a file."""
        bt.logging.info("Saving validator state.")

        # Save the state of the validator to file.
        np.savez(
            self.config.neuron.full_path + "/state.npz",
            step=self.step,
            scores=self.scores,
            hotkeys=self.hotkeys,
        )

    def load_state(self):
        """Loads the state of the validator from a file."""
        bt.logging.info("Loading validator state.")

        # Load the state of the validator from file.
        state = np.load(self.config.neuron.full_path + "/state.npz")
        self.step = state["step"]
        self.scores = state["scores"]
        self.hotkeys = state["hotkeys"]
2371
2372
2373---
2374target_repo/sybil/base/utils/__init__.py
2375---
2376
2377
2378---
2379target_repo/sybil/base/utils/weight_utils.py
2380---
2381import numpy as np
2382from typing import Tuple, List, Union, Any
2383import bittensor
2384from numpy import ndarray, dtype, floating, complexfloating
2385
2386U32_MAX = 4294967295
2387U16_MAX = 65535
2388
2389
def normalize_max_weight(x: np.ndarray, limit: float = 0.1) -> np.ndarray:
    r"""Normalizes the numpy array x so that sum(x) = 1 and the max value is not greater than the limit.
    Args:
        x (:obj:`np.ndarray`):
            Array to be max_value normalized.
        limit: float:
            Max value after normalization.
    Returns:
        y (:obj:`np.ndarray`):
            Normalized x array.
    """
    epsilon = 1e-7  # For numerical stability after normalization

    weights = x.copy()
    values = np.sort(weights)

    # Degenerate cases: no mass at all, or the cap is so tight that even a
    # uniform distribution would violate it (len(x) * limit <= 1).
    # Fall back to uniform weights in both cases.
    if x.sum() == 0 or len(x) * limit <= 1:
        return np.ones_like(x) / x.size
    else:
        # First-pass sum-normalization over the *sorted* values.
        estimation = values / values.sum()

        # Already within the cap — a plain sum-normalization suffices.
        if estimation.max() <= limit:
            return weights / weights.sum()

        # Find the cumulative sum and sorted array
        cumsum = np.cumsum(estimation, 0)

        # Determine the index of cutoff
        # NOTE(review): estimation_sum[i] appears to be the mass contributed
        # by the entries above index i if each were clipped down to value i;
        # combined with cumsum it locates the smallest cutoff that keeps
        # every normalized weight under `limit` — confirm against the
        # upstream bittensor implementation.
        estimation_sum = np.array(
            [(len(values) - i - 1) * estimation[i] for i in range(len(values))]
        )
        n_values = (
            estimation / (estimation_sum + cumsum + epsilon) < limit
        ).sum()

        # Determine the cutoff based on the index
        cutoff_scale = (limit * cumsum[n_values - 1] - epsilon) / (
            1 - (limit * (len(estimation) - n_values))
        )
        cutoff = cutoff_scale * values.sum()

        # Applying the cutoff: clip every weight above it, then renormalize.
        weights[weights > cutoff] = cutoff

        y = weights / weights.sum()

        return y
2437
2438
def convert_weights_and_uids_for_emit(
    uids: np.ndarray, weights: np.ndarray
) -> Tuple[List[int], List[int]]:
    r"""Converts weights into the integer u16 fixed-point representation used on chain.

    The weights are max-upscaled so the largest weight maps to exactly
    ``U16_MAX``; entries that round to zero are dropped.

    Args:
        uids (:obj:`np.ndarray,`):
            Array of uids as destinations for passed weights.
        weights (:obj:`np.ndarray,`):
            Array of weights.
    Returns:
        weight_uids (List[int]):
            Uids as a list.
        weight_vals (List[int]):
            Weights as a list of u16 integers in [0, U16_MAX].
    Raises:
        ValueError: If the arrays differ in length, or any weight or uid is negative.
    """
    # Checks.
    uids = np.asarray(uids)
    weights = np.asarray(weights)

    # Validate lengths/emptiness before any reduction: np.min on an empty
    # array raises an unhelpful numpy error otherwise.
    if len(uids) != len(weights):
        raise ValueError(
            "Passed weights and uids must have the same length, got {} and {}".format(
                len(uids), len(weights)
            )
        )
    if weights.size == 0:
        bittensor.logging.debug("nothing to set on chain")
        return [], []  # Nothing to set on chain.
    if np.min(weights) < 0:
        raise ValueError(
            "Passed weight is negative cannot exist on chain {}".format(
                weights
            )
        )
    if np.min(uids) < 0:
        raise ValueError(
            "Passed uid is negative cannot exist on chain {}".format(uids)
        )

    # Debugging information (the non-zero views are informational only).
    non_zero_mask = weights > 0
    bittensor.logging.debug(f"weights: {weights}")
    bittensor.logging.debug(f"non_zero_weights: {weights[non_zero_mask]}")
    bittensor.logging.debug(f"uids: {uids}")
    bittensor.logging.debug(f"non_zero_weight_uids: {uids[non_zero_mask]}")

    if np.sum(weights) == 0:
        bittensor.logging.debug("nothing to set on chain")
        return [], []  # Nothing to set on chain.

    # Max-upscale so the largest weight becomes exactly 1.0 (and hence U16_MAX).
    max_weight = float(np.max(weights))
    weights = [
        float(value) / max_weight for value in weights
    ]  # max-upscale values (max_weight = 1).
    bittensor.logging.debug(
        f"setting on chain max: {max_weight} and weights: {weights}"
    )

    weight_vals: List[int] = []
    weight_uids: List[int] = []
    for weight_i, uid_i in zip(weights, uids):
        uint16_val = round(
            float(weight_i) * int(U16_MAX)
        )  # convert to int representation.

        # Filter zeros: zero weights are implicit on chain.
        if uint16_val != 0:
            weight_vals.append(uint16_val)
            weight_uids.append(uid_i)
    bittensor.logging.debug(f"final params: {weight_uids} : {weight_vals}")
    return weight_uids, weight_vals
2509
2510
def process_weights_for_netuid(
    uids,
    weights: np.ndarray,
    netuid: int,
    subtensor: "bittensor.subtensor",
    metagraph: "bittensor.metagraph" = None,
    exclude_quantile: int = 0,
) -> Union[
    tuple[
        ndarray[Any, dtype[Any]],
        Union[
            Union[
                ndarray[Any, dtype[floating[Any]]],
                ndarray[Any, dtype[complexfloating[Any, Any]]],
            ],
            Any,
        ],
    ],
    tuple[ndarray[Any, dtype[Any]], ndarray],
    tuple[Any, ndarray],
]:
    """Filter and normalize raw validator weights to satisfy on-chain constraints.

    Steps:
      1. Drop zero weights; if nothing remains (or the metagraph is smaller
         than ``min_allowed_weights``) return uniform weights over all uids.
      2. If fewer than ``min_allowed_weights`` non-zero weights remain, pad
         every uid with a tiny floor weight, then normalize under the cap.
      3. Otherwise exclude the lowest ``exclude_quantile`` fraction of
         non-zero weights (bounded so enough survive) and normalize the
         survivors under the subnet's max-weight limit.

    Args:
        uids: Array-like of uids the weights correspond to.
        weights (np.ndarray): Raw weight values; any numeric array-like.
        netuid (int): Subnet id used to look up chain constraints.
        subtensor (bittensor.subtensor): Chain connection for constraint queries.
        metagraph (bittensor.metagraph, optional): Cached metagraph; fetched
            from chain when None.
        exclude_quantile (int): Quantile to exclude, in u16 fixed point (0..U16_MAX).

    Returns:
        Tuple of (processed uids, processed normalized weights).
    """
    bittensor.logging.debug("process_weights_for_netuid()")
    bittensor.logging.debug("weights", weights)
    bittensor.logging.debug("netuid", netuid)
    bittensor.logging.debug("subtensor", subtensor)
    bittensor.logging.debug("metagraph", metagraph)

    # Get latest metagraph from chain if metagraph is None.
    if metagraph is None:
        metagraph = subtensor.metagraph(netuid)

    # Cast inputs to numpy float32.
    # BUGFIX: the previous `weights.astype(...)` crashed on plain lists even
    # though the condition explicitly allows non-ndarray input; np.asarray
    # accepts any array-like and is a no-op for float32 ndarrays.
    uids = np.asarray(uids)
    if not isinstance(weights, np.ndarray) or weights.dtype != np.float32:
        weights = np.asarray(weights, dtype=np.float32)

    # Network configuration parameters from an subtensor.
    # These parameters determine the range of acceptable weights for each neuron.
    quantile = exclude_quantile / U16_MAX
    min_allowed_weights = subtensor.min_allowed_weights(netuid=netuid)
    max_weight_limit = subtensor.max_weight_limit(netuid=netuid)
    bittensor.logging.debug("quantile", quantile)
    bittensor.logging.debug("min_allowed_weights", min_allowed_weights)
    bittensor.logging.debug("max_weight_limit", max_weight_limit)

    # Find all non zero weights.
    non_zero_weight_idx = np.argwhere(weights > 0).squeeze()
    non_zero_weight_idx = np.atleast_1d(non_zero_weight_idx)
    non_zero_weight_uids = uids[non_zero_weight_idx]
    non_zero_weights = weights[non_zero_weight_idx]
    if non_zero_weights.size == 0 or metagraph.n < min_allowed_weights:
        bittensor.logging.warning("No non-zero weights returning all ones.")
        final_weights = np.ones(metagraph.n) / metagraph.n
        bittensor.logging.debug("final_weights", final_weights)
        return np.arange(len(final_weights)), final_weights

    elif non_zero_weights.size < min_allowed_weights:
        bittensor.logging.warning(
            "No non-zero weights less than min allowed weight, returning all ones."
        )
        # Give every uid a tiny floor weight so the min-count constraint is
        # met, then add the real weights on top before normalizing.
        weights = (
            np.ones(metagraph.n) * 1e-5
        )  # creating minimum even non-zero weights
        weights[non_zero_weight_idx] += non_zero_weights
        bittensor.logging.debug("final_weights", weights)
        normalized_weights = normalize_max_weight(
            x=weights, limit=max_weight_limit
        )
        return np.arange(len(normalized_weights)), normalized_weights

    bittensor.logging.debug("non_zero_weights", non_zero_weights)

    # Compute the exclude quantile and find the weights in the lowest quantile
    max_exclude = max(0, len(non_zero_weights) - min_allowed_weights) / len(
        non_zero_weights
    )
    exclude_quantile = min([quantile, max_exclude])
    lowest_quantile = np.quantile(non_zero_weights, exclude_quantile)
    bittensor.logging.debug("max_exclude", max_exclude)
    bittensor.logging.debug("exclude_quantile", exclude_quantile)
    bittensor.logging.debug("lowest_quantile", lowest_quantile)

    # Exclude all weights below the allowed quantile.
    non_zero_weight_uids = non_zero_weight_uids[
        lowest_quantile <= non_zero_weights
    ]
    non_zero_weights = non_zero_weights[lowest_quantile <= non_zero_weights]
    bittensor.logging.debug("non_zero_weight_uids", non_zero_weight_uids)
    bittensor.logging.debug("non_zero_weights", non_zero_weights)

    # Normalize weights and return.
    normalized_weights = normalize_max_weight(
        x=non_zero_weights, limit=max_weight_limit
    )
    bittensor.logging.debug("final_weights", normalized_weights)

    return non_zero_weight_uids, normalized_weights
2607
2608
2609---
2610target_repo/tests/__init__.py
2611---
2612
2613
2614---
2615target_repo/tests/helpers.py
2616---
2617# The MIT License (MIT)
2618# Copyright Β© 2023 Opentensor Foundation
2619
2620# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
2621# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
2622# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
2623# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
2624
2625# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
2626# the Software.
2627
2628# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
2629# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
2630# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
2631# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
2632# DEALINGS IN THE SOFTWARE.
2633
2634from typing import Union
2635from bittensor import (
2636 Balance,
2637 NeuronInfo,
2638 AxonInfo,
2639 PrometheusInfo,
2640 __ss58_format__,
2641)
2642from bittensor.mock.wallet_mock import MockWallet as _MockWallet
2643from bittensor.mock.wallet_mock import get_mock_coldkey as _get_mock_coldkey
2644from bittensor.mock.wallet_mock import get_mock_hotkey as _get_mock_hotkey
2645from bittensor.mock.wallet_mock import get_mock_keypair as _get_mock_keypair
2646from bittensor.mock.wallet_mock import get_mock_wallet as _get_mock_wallet
2647
2648from rich.console import Console
2649from rich.text import Text
2650
2651
def __mock_wallet_factory__(*args, **kwargs) -> _MockWallet:
    """Returns a mock wallet object."""
    # All positional/keyword arguments are accepted for API compatibility
    # with the real wallet factory, but intentionally ignored.
    return _get_mock_wallet()
2658
2659
class CLOSE_IN_VALUE:
    """Equality helper that compares equal to any value within ``tolerance``.

    The comparison is symmetric: ``self == other`` holds when either operand
    lies inside the other's +/- tolerance window.
    """

    value: Union[float, int, Balance]
    tolerance: Union[float, int, Balance]

    def __init__(
        self,
        value: Union[float, int, Balance],
        tolerance: Union[float, int, Balance] = 0.0,
    ) -> None:
        self.value = value
        self.tolerance = tolerance

    def __eq__(self, __o: Union[float, int, Balance]) -> bool:
        # __o inside [value - tolerance, value + tolerance]?
        if (self.value - self.tolerance) <= __o <= (self.value + self.tolerance):
            return True
        # Otherwise: value inside [__o - tolerance, __o + tolerance]?
        return (__o - self.tolerance) <= self.value <= (__o + self.tolerance)
2682
2683
def get_mock_neuron(**kwargs) -> NeuronInfo:
    """
    Returns a mock neuron with the given kwargs overriding the default values.

    Special cases applied after the override:
      - If ``coldkey`` is given without an explicit ``stake``, the default
        stake is attributed to that coldkey.
      - If ``total_stake`` is not given, it is recomputed as the sum of the
        (possibly overridden) stake map.
    """

    # Plain dict literal; the previous dict({...}) wrapper was a redundant copy.
    mock_neuron_d = {
        "netuid": -1,  # mock netuid
        "axon_info": AxonInfo(
            block=0,
            version=1,
            ip=0,
            port=0,
            ip_type=0,
            protocol=0,
            placeholder1=0,
            placeholder2=0,
        ),
        "prometheus_info": PrometheusInfo(
            block=0, version=1, ip=0, port=0, ip_type=0
        ),
        "validator_permit": True,
        "uid": 1,
        "hotkey": "some_hotkey",
        "coldkey": "some_coldkey",
        "active": 0,
        "last_update": 0,
        "stake": {"some_coldkey": 1e12},
        "total_stake": 1e12,
        "rank": 0.0,
        "trust": 0.0,
        "consensus": 0.0,
        "validator_trust": 0.0,
        "incentive": 0.0,
        "dividends": 0.0,
        "emission": 0.0,
        "bonds": [],
        "weights": [],
        "stake_dict": {},
        "pruning_score": 0.0,
        "is_null": False,
    }

    mock_neuron_d.update(kwargs)  # update with kwargs

    # Attribute the default stake to an explicitly supplied coldkey.
    if kwargs.get("stake") is None and kwargs.get("coldkey") is not None:
        mock_neuron_d["stake"] = {kwargs.get("coldkey"): 1e12}

    # Keep total_stake consistent with the stake map unless overridden.
    if kwargs.get("total_stake") is None:
        mock_neuron_d["total_stake"] = sum(mock_neuron_d["stake"].values())

    mock_neuron = NeuronInfo._neuron_dict_to_namespace(mock_neuron_d)

    return mock_neuron
2739
2740
def get_mock_neuron_by_uid(uid: int, **kwargs) -> NeuronInfo:
    """Return a mock NeuronInfo whose uid, hotkey and coldkey are derived
    from ``uid`` via the bittensor mock helpers; remaining kwargs override
    other fields (passing uid/hotkey/coldkey again raises TypeError)."""
    return get_mock_neuron(
        uid=uid,
        hotkey=_get_mock_hotkey(uid),
        coldkey=_get_mock_coldkey(uid),
        **kwargs
    )
2748
2749
class MockStatus:
    """Stand-in for a rich status spinner.

    Every lifecycle hook is a no-op except ``update``, which routes the
    rendered text through ``MockConsole.print`` so it can be captured.
    """

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        pass

    def start(self):
        # Nothing to animate in tests.
        pass

    def stop(self):
        # Nothing to tear down.
        pass

    def update(self, *args, **kwargs):
        # Delegate so the output lands in MockConsole.captured_print.
        MockConsole().print(*args, **kwargs)
2765
2766
class MockConsole:
    """
    Test double for rich's console used by status/print call sites.
    The most recent print() output is stored on ``captured_print``.
    """

    # Last rendered output of print(), as a plain string (None until first print).
    captured_print = None

    def status(self, *args, **kwargs):
        # Hand back a no-op status object.
        return MockStatus()

    def print(self, *args, **kwargs):
        # Render through a real Console, wide enough to avoid truncation,
        # capturing the output instead of writing to stdout.
        capture_console = Console(
            width=1000, no_color=True, markup=False
        )
        capture_console.begin_capture()
        capture_console.print(*args, **kwargs)
        self.captured_print = capture_console.end_capture()

    def clear(self, *args, **kwargs):
        pass

    @staticmethod
    def remove_rich_syntax(text: str) -> str:
        """
        Removes rich syntax from the given text.
        Strips markup first, then any remaining ANSI escape sequences.
        """
        without_markup = Text.from_markup(text).plain
        return Text.from_ansi(without_markup).plain
2798
2799
2800---
2801target_repo/tests/test_mock.py
2802---
2803import pytest
2804import asyncio
2805import bittensor as bt
2806from prompting.mock import MockDendrite, MockMetagraph, MockSubtensor
2807from prompting.protocol import PromptingSynapse
2808
2809
@pytest.mark.parametrize("netuid", [1, 2, 3])
@pytest.mark.parametrize("n", [2, 4, 8, 16, 32, 64])
@pytest.mark.parametrize("wallet", [bt.MockWallet(), None])
def test_mock_subtensor(netuid, n, wallet):
    """MockSubtensor registers n neurons (plus the wallet hotkey, when one
    is supplied) on the given netuid."""
    subtensor = MockSubtensor(netuid=netuid, n=n, wallet=wallet)
    neurons = subtensor.neurons(netuid=netuid)
    # Check netuid
    assert subtensor.subnet_exists(netuid)
    # Check network
    assert subtensor.network == "mock"
    assert subtensor.chain_endpoint == "mock_endpoint"
    # Check number of neurons: the wallet hotkey occupies one extra slot.
    assert len(neurons) == (n + 1 if wallet is not None else n)
    # Check wallet
    if wallet is not None:
        assert subtensor.is_hotkey_registered(
            netuid=netuid, hotkey_ss58=wallet.hotkey.ss58_address
        )

    for neuron in neurons:
        # isinstance instead of exact-type comparison (idiomatic type check).
        assert isinstance(neuron, bt.NeuronInfo)
        assert subtensor.is_hotkey_registered(
            netuid=netuid, hotkey_ss58=neuron.hotkey
        )
2834
2835
@pytest.mark.parametrize("n", [16, 32, 64])
def test_mock_metagraph(n):
    """MockMetagraph exposes one axon per neuron, all at the default ip/port."""
    mock_subtensor = MockSubtensor(netuid=1, n=n)
    mock_metagraph = MockMetagraph(subtensor=mock_subtensor)
    # Check axons
    axons = mock_metagraph.axons
    assert len(axons) == n
    # Check ip and port
    for axon in axons:
        # isinstance instead of exact-type comparison (idiomatic type check).
        assert isinstance(axon, bt.AxonInfo)
        assert axon.ip == mock_metagraph.default_ip
        assert axon.port == mock_metagraph.default_port
2848
2849
def test_mock_reward_pipeline():
    """Placeholder: mock reward-pipeline behavior is not covered yet."""
    # TODO: implement a test for the mocked reward pipeline
    pass
2852
2853
def test_mock_neuron():
    """Placeholder: mock neuron construction is not covered yet."""
    # TODO: implement a test for mock neuron construction
    pass
2856
2857
@pytest.mark.parametrize("timeout", [0.1, 0.2])
@pytest.mark.parametrize("min_time", [0, 0.05, 0.1])
@pytest.mark.parametrize("max_time", [0.1, 0.15, 0.2])
@pytest.mark.parametrize("n", [4, 16, 64])
def test_mock_dendrite_timings(timeout, min_time, max_time, n):
    """Responses from MockDendrite carry timing/status metadata consistent
    with the configured [min_time, max_time] window and the query timeout."""
    mock_dendrite = MockDendrite(None)  # no wallet needed by the mock
    mock_dendrite.min_time = min_time
    mock_dendrite.max_time = max_time
    mock_subtensor = MockSubtensor(netuid=1, n=n)
    mock_metagraph = MockMetagraph(subtensor=mock_subtensor)
    axons = mock_metagraph.axons

    async def run():
        return await mock_dendrite(
            axons,
            synapse=PromptingSynapse(
                roles=["user"], messages=["What is the capital of France?"]
            ),
            timeout=timeout,
        )

    responses = asyncio.run(run())
    for synapse in responses:
        # isinstance instead of exact-type comparison (idiomatic type check).
        assert hasattr(synapse, "dendrite")
        assert isinstance(synapse.dendrite, bt.TerminalInfo)

        dendrite = synapse.dendrite
        # check synapse.dendrite has (process_time, status_code, status_message)
        for field in ("process_time", "status_code", "status_message"):
            assert (
                hasattr(dendrite, field)
                and getattr(dendrite, field) is not None
            )

        # check that the dendrite takes between min_time and max_time (+0.1s slack)
        assert min_time <= dendrite.process_time
        assert dendrite.process_time <= max_time + 0.1
        # check that responses which take longer than timeout have 408 status code
        if dendrite.process_time >= timeout + 0.1:
            assert dendrite.status_code == 408
            assert dendrite.status_message == "Timeout"
            assert synapse.dummy_output == synapse.dummy_input
        # check that responses which take less than timeout have 200 status code
        elif dendrite.process_time < timeout:
            assert dendrite.status_code == 200
            assert dendrite.status_message == "OK"
            # check that outputs are not empty for successful responses
            assert synapse.dummy_output == synapse.dummy_input * 2
        # dont check responses which take between timeout and max_time because they are not guaranteed to have a status code of 200 or 408
2910
2911
2912---
2913target_repo/tests/test_sybil_validator.py
2914---
2915# The MIT License (MIT)
2916# Copyright Β© 2023 Yuma Rao
2917# Copyright Β© 2023 Opentensor Foundation
2918
2919# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
2920# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
2921# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
2922# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
2923
2924# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
2925# the Software.
2926
2927# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
2928# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
2929# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
2930# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
2931# DEALINGS IN THE SOFTWARE.
2932
2933import sys
2934import unittest
2935
2936import bittensor as bt
2937import torch
2938
2939from neurons.validator import Validator
2940from sybil.base.validator import BaseValidatorNeuron
2941from sybil.protocol import Dummy
2942from sybil.utils.uids import get_random_uids
2943from sybil.validator.reward import get_rewards
2944
2945
class SybilValidatorNeuronTestCase(unittest.TestCase):
    """
    Unit tests for the sybil validator neuron.

    Covers validator setup, forwarding, dummy responses and reward
    computation; several cases are still TODO placeholders.
    """

    def setUp(self):
        # BUGFIX: sys.argv[0] is a str; the original `sys.argv[0] + [...]`
        # raised TypeError (str + list). Wrap argv[0] in a list first.
        sys.argv = [sys.argv[0]] + ["--config", "tests/configs/validator.json"]

        config = BaseValidatorNeuron.config()
        config.wallet._mock = True
        config.metagraph._mock = True
        config.subtensor._mock = True
        self.neuron = Validator(config)
        self.miner_uids = get_random_uids(self, k=10)

    def test_run_single_step(self):
        # TODO: Test a single step
        pass

    def test_sync_error_if_not_registered(self):
        # TODO: Test that the validator throws an error if it is not registered on metagraph
        pass

    def test_forward(self):
        # TODO: Test that the forward function returns the correct value
        pass

    def test_dummy_responses(self):
        # TODO: Test that the dummy responses are correctly constructed

        responses = self.neuron.dendrite.query(
            # Send the query to miners in the network.
            axons=[
                self.neuron.metagraph.axons[uid] for uid in self.miner_uids
            ],
            # Construct a dummy query.
            synapse=Dummy(dummy_input=self.neuron.step),
            # All responses have the deserialize function called on them before returning.
            deserialize=True,
        )

        for i, response in enumerate(responses):
            self.assertEqual(response, self.neuron.step * 2)

    def test_reward(self):
        # TODO: Test that the reward function returns the correct value
        # BUGFIX: dendrite/metagraph live on self.neuron, not on the TestCase
        # (consistent with test_dummy_responses above).
        responses = self.neuron.dendrite.query(
            # Send the query to miners in the network.
            axons=[self.neuron.metagraph.axons[uid] for uid in self.miner_uids],
            # Construct a dummy query.
            synapse=Dummy(dummy_input=self.neuron.step),
            # All responses have the deserialize function called on them before returning.
            deserialize=True,
        )

        rewards = get_rewards(self.neuron, responses)
        expected_rewards = torch.FloatTensor([1.0] * len(responses))
        self.assertEqual(rewards, expected_rewards)

    def test_reward_with_nan(self):
        # TODO: Test that NaN rewards are correctly sanitized
        # TODO: Test that a bt.logging.warning is thrown when a NaN reward is sanitized
        # BUGFIX: dendrite/metagraph live on self.neuron, not on the TestCase.
        responses = self.neuron.dendrite.query(
            # Send the query to miners in the network.
            axons=[self.neuron.metagraph.axons[uid] for uid in self.miner_uids],
            # Construct a dummy query.
            synapse=Dummy(dummy_input=self.neuron.step),
            # All responses have the deserialize function called on them before returning.
            deserialize=True,
        )

        rewards = get_rewards(self.neuron, responses)
        expected_rewards = rewards.clone()
        # Add NaN values to rewards
        rewards[0] = float("nan")

        with self.assertLogs(bt.logging, level="WARNING") as cm:
            self.neuron.update_scores(rewards, self.miner_uids)
3027
3028
3029---
3030target_repo/docs/stream_tutorial/client.py
3031---
3032import argparse
3033import asyncio
3034import bittensor as bt
3035
3036from protocol import StreamPrompting
3037
3038"""
This assumes you have:
30401. Registered your miner on the chain (finney/test)
30412. Are serving your miner on an open port (e.g. 12345)
3042
3043Steps:
3044- Instantiate your synapse subclass with the relevant information. E.g. messages, roles, etc.
3045- Instantiate your wallet and a dendrite client
3046- Query the dendrite client with your synapse object
3047- Iterate over the async generator to extract the yielded tokens on the server side
3048"""
3049
3050
async def query_synapse(my_uid, wallet_name, hotkey, network, netuid):
    """Stream a test prompt to the axon served at ``my_uid`` and print the
    yielded tokens as they arrive."""
    syn = StreamPrompting(
        roles=["user"],
        messages=[
            "hello this is a test of a streaming response. Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."
        ],
    )

    # Wallet used to sign the request.
    wallet = bt.wallet(name=wallet_name, hotkey=hotkey)

    # Full (non-lite) metagraph so axon information is populated.
    metagraph = bt.metagraph(
        netuid=netuid, network=network, sync=True, lite=False
    )

    # The axon we query is our own serving axon.
    axon = metagraph.axons[my_uid]

    # Client-side communication handler.
    dendrite = bt.dendrite(wallet=wallet)

    async def main():
        responses = await dendrite(
            [axon], syn, deserialize=False, streaming=True
        )

        for resp in responses:
            chunk_count = 0
            async for chunk in resp:
                chunk_count += 1
                # Break the printed output into groups of five chunks.
                if chunk_count % 5 == 0:
                    print()
                if isinstance(chunk, list):
                    print(chunk[0], end="", flush=True)
                else:
                    # last object yielded is the synapse itself with completion filled
                    synapse = chunk
                    break

    # Run the main function with asyncio
    await main()
3093
3094
if __name__ == "__main__":
    # CLI entry point: parse connection/wallet arguments, then stream a
    # test query against our own serving axon.
    parser = argparse.ArgumentParser(
        description="Query a Bittensor synapse with given parameters."
    )

    # Required arguments: which axon (uid) on which subnet (netuid).
    parser.add_argument(
        "--my_uid",
        type=int,
        required=True,
        help="Your unique miner ID on the chain",
    )
    parser.add_argument(
        "--netuid", type=int, required=True, help="Network Unique ID"
    )
    # Optional wallet/network arguments with sensible defaults.
    parser.add_argument(
        "--wallet_name", type=str, default="default", help="Name of the wallet"
    )
    parser.add_argument(
        "--hotkey", type=str, default="default", help="Hotkey for the wallet"
    )
    parser.add_argument(
        "--network",
        type=str,
        default="test",
        help='Network type, e.g., "test" or "mainnet"',
    )

    # Parse arguments
    args = parser.parse_args()

    # Running the async function with provided arguments
    asyncio.run(
        query_synapse(
            args.my_uid,
            args.wallet_name,
            args.hotkey,
            args.network,
            args.netuid,
        )
    )
3136
3137
3138---
3139target_repo/docs/stream_tutorial/config.py
3140---
3141import bittensor as bt
3142import argparse
3143import os
3144
3145
def check_config(cls, config: "bt.Config"):
    """Validate axon/logging config and ensure the miner's log directory exists.

    Args:
        cls: The class invoking the check (part of the hook signature; unused).
        config (bt.Config): Parsed configuration to validate; as a side
            effect ``config.miner.full_path`` is set to the expanded
            per-wallet miner directory.
    """
    bt.axon.check_config(config)
    bt.logging.check_config(config)
    full_path = os.path.expanduser(
        "{}/{}/{}/{}".format(
            config.logging.logging_dir,
            config.wallet.get("name", bt.defaults.wallet.name),
            config.wallet.get("hotkey", bt.defaults.wallet.hotkey),
            config.miner.name,
        )
    )
    # expanduser was already applied above; a second call was redundant.
    config.miner.full_path = full_path
    # exist_ok avoids a race if the directory appears between check and create
    # (consistent with get_config's makedirs usage).
    os.makedirs(config.miner.full_path, exist_ok=True)
3160
3161
def get_config() -> "bt.Config":
    """Build the miner's CLI argument parser and return the parsed config.

    Also computes ``config.full_path`` for logging output and creates the
    directory if it does not exist.

    Returns:
        bt.Config: Parsed configuration including axon, subtensor, logging
        and wallet arguments.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--axon.port", type=int, default=8098, help="Port to run the axon on."
    )
    # Subtensor network to connect to
    parser.add_argument(
        "--subtensor.network",
        default="finney",
        help="Bittensor network to connect to.",
    )
    # Chain endpoint to connect to
    parser.add_argument(
        "--subtensor.chain_endpoint",
        default="wss://entrypoint-finney.opentensor.ai:443",
        help="Chain endpoint to connect to.",
    )
    # Adds override arguments for network and netuid.
    parser.add_argument(
        "--netuid", type=int, default=1, help="The chain subnet uid."
    )

    parser.add_argument(
        "--miner.root",
        type=str,
        help="Trials for this miner go in miner.root / (wallet_cold - wallet_hot) / miner.name ",
        default="~/.bittensor/miners/",
    )
    parser.add_argument(
        "--miner.name",
        type=str,
        help="Trials for this miner go in miner.root / (wallet_cold - wallet_hot) / miner.name ",
        default="Bittensor Miner",
    )

    # Run config.
    # BUGFIX: blocks_per_epoch is compared against block-number deltas in the
    # miner loop, so it must parse as int (was type=str, which made any
    # CLI-supplied value break the `current_block - last_epoch_block < ...`
    # comparison).
    parser.add_argument(
        "--miner.blocks_per_epoch",
        type=int,
        help="Blocks until the miner repulls the metagraph from the chain",
        default=100,
    )

    # Switches.
    parser.add_argument(
        "--miner.no_serve",
        action="store_true",
        help="If True, the miner doesnt serve the axon.",
        default=False,
    )
    parser.add_argument(
        "--miner.no_start_axon",
        action="store_true",
        help="If True, the miner doesnt start the axon.",
        default=False,
    )

    # Mocks.
    parser.add_argument(
        "--miner.mock_subtensor",
        action="store_true",
        help="If True, the miner will allow non-registered hotkeys to mine.",
        default=False,
    )

    # Adds subtensor specific arguments i.e. --subtensor.chain_endpoint ... --subtensor.network ...
    bt.subtensor.add_args(parser)

    # Adds logging specific arguments i.e. --logging.debug ..., --logging.trace .. or --logging.logging_dir ...
    bt.logging.add_args(parser)

    # Adds wallet specific arguments i.e. --wallet.name ..., --wallet.hotkey ./. or --wallet.path ...
    bt.wallet.add_args(parser)

    # Adds axon specific arguments i.e. --axon.port ...
    bt.axon.add_args(parser)

    # Activating the parser to read any command-line inputs.
    # To print help message, run python3 template/miner.py --help
    config = bt.config(parser)

    # Logging captures events for diagnosis or understanding miner's behavior.
    config.full_path = os.path.expanduser(
        "{}/{}/{}/netuid{}/{}".format(
            config.logging.logging_dir,
            config.wallet.name,
            config.wallet.hotkey,
            config.netuid,
            "miner",
        )
    )
    # Ensure the directory for logging exists, else create one.
    if not os.path.exists(config.full_path):
        os.makedirs(config.full_path, exist_ok=True)
    return config
3257
3258
3259---
3260target_repo/docs/stream_tutorial/miner.py
3261---
3262import copy
3263import time
3264import asyncio
3265import argparse
3266import threading
3267import traceback
3268from abc import ABC, abstractmethod
3269from functools import partial
3270from starlette.types import Send
3271
3272import bittensor as bt
3273from transformers import GPT2Tokenizer
3274from typing import List, Dict, Tuple, Union, Callable, Awaitable
3275
3276from protocol import StreamPrompting
3277from config import get_config, check_config
3278
3279
class StreamMiner(ABC):
    """Abstract base class for a streaming miner.

    Wires up the wallet, subtensor connection, metagraph and axon, and runs
    the serving loop either in the foreground (`run`) or in a background
    thread (`run_in_background_thread` / context-manager protocol).
    """

    def __init__(self, config=None, axon=None, wallet=None, subtensor=None):
        # Setup base config from Miner.config() and merge with subclassed config.
        base_config = copy.deepcopy(config or get_config())
        self.config = self.config()
        self.config.merge(base_config)

        check_config(StreamMiner, self.config)
        bt.logging.info(self.config)  # TODO: duplicate print?

        # Cache of recent prompts: prompt -> (response, timestamp).
        # NOTE(review): never populated in this class; presumably used by
        # subclasses -- confirm before removing.
        self.prompt_cache: Dict[str, Tuple[str, int]] = {}

        # Activating Bittensor's logging with the set configurations.
        bt.logging.set_config(config=self.config.logging)

        # Wallet holds cryptographic information, ensuring secure transactions and communication.
        self.wallet = wallet or bt.wallet(config=self.config)
        bt.logging.info(f"Wallet {self.wallet}")

        # subtensor manages the blockchain connection, facilitating interaction with the Bittensor blockchain.
        self.subtensor = subtensor or bt.subtensor(config=self.config)
        bt.logging.info(f"Subtensor: {self.subtensor}")
        bt.logging.info(
            f"Running miner for subnet: {self.config.netuid} on network: {self.subtensor.chain_endpoint} with config:"
        )

        # metagraph provides the network's current state, holding state about other participants in a subnet.
        self.metagraph = self.subtensor.metagraph(self.config.netuid)
        bt.logging.info(f"Metagraph: {self.metagraph}")

        if self.wallet.hotkey.ss58_address not in self.metagraph.hotkeys:
            # BUGFIX: message previously read "if not registered".
            bt.logging.error(
                f"\nYour validator: {self.wallet} is not registered to chain connection: {self.subtensor} \nRun btcli register and try again. "
            )
            exit()
        else:
            # Each miner gets a unique identity (UID) in the network for differentiation.
            self.my_subnet_uid = self.metagraph.hotkeys.index(
                self.wallet.hotkey.ss58_address
            )
            bt.logging.info(f"Running miner on uid: {self.my_subnet_uid}")

        # The axon handles request processing, allowing validators to send this process requests.
        self.axon = axon or bt.axon(
            wallet=self.wallet, port=self.config.axon.port
        )
        # Attach determiners which functions are called when servicing a request.
        bt.logging.info(f"Attaching forward function to axon.")
        print(f"Attaching forward function to axon. {self._prompt}")
        self.axon.attach(
            forward_fn=self._prompt,
        )
        bt.logging.info(f"Axon created: {self.axon}")

        # Instantiate runners
        self.should_exit: bool = False
        self.is_running: bool = False
        self.thread: threading.Thread = None
        self.lock = asyncio.Lock()
        self.request_timestamps: Dict = {}

    @abstractmethod
    def config(self) -> "bt.Config":
        ...

    @classmethod
    @abstractmethod
    def add_args(cls, parser: argparse.ArgumentParser):
        ...

    def _prompt(self, synapse: StreamPrompting) -> StreamPrompting:
        """
        A wrapper method around the `prompt` method that will be defined by the subclass.

        This method acts as an intermediary layer to perform pre-processing before calling the
        actual `prompt` method implemented in the subclass. Specifically, it checks whether a
        prompt is in cache to avoid reprocessing recent requests. If the prompt is not in the
        cache, the subclass `prompt` method is called.

        Args:
            synapse (StreamPrompting): The incoming request object encapsulating the details of the request.

        Returns:
            StreamPrompting: The response object to be sent back in reply to the incoming request, essentially
            the filled synapse request object.

        Raises:
            ValueError: If the prompt is found in the cache indicating it was sent recently.

        Example:
            This method is not meant to be called directly but is invoked internally when a request
            is received, and it subsequently calls the `prompt` method of the subclass.
        """
        return self.prompt(synapse)

    @abstractmethod
    def prompt(self, synapse: StreamPrompting) -> StreamPrompting:
        """
        Abstract method to handle and respond to incoming requests to the miner.

        Subclasses should implement this method to define their custom logic for processing and
        responding to requests. This method is designed to be overridden, and its behavior will
        be dependent on the specific implementation provided in the subclass.

        Args:
            synapse (StreamPrompting): The incoming request object encapsulating the details
                of the request. This must contain `messages` and `roles` as fields.

        Returns:
            StreamPrompting: The response object that should be sent back in reply to the
                incoming request. This is essentially the filled synapse request object.

        Example:
            class CustomMiner(Miner):
                def prompt(self, synapse: StreamPrompting) -> StreamPrompting:
                    # Custom logic to process and respond to the request.
                    synapse.completion = "The meaning of life is 42."
                    return synapse
        """
        ...

    def run(self):
        """
        Runs the miner logic. This method starts the miner's operations, including
        listening for incoming requests and periodically updating the miner's knowledge
        of the network graph.
        """
        if not self.subtensor.is_hotkey_registered(
            netuid=self.config.netuid,
            hotkey_ss58=self.wallet.hotkey.ss58_address,
        ):
            # BUGFIX: the two implicitly concatenated f-strings had no
            # separator, producing "...netuid 1Please register...".
            bt.logging.error(
                f"Wallet: {self.wallet} is not registered on netuid {self.config.netuid}. "
                f"Please register the hotkey using `btcli subnets register` before trying again"
            )
            exit()

        # Serve passes the axon information to the network + netuid we are hosting on.
        # This will auto-update if the axon port of external ip have changed.
        bt.logging.info(
            f"Serving axon {StreamPrompting} on network: {self.config.subtensor.chain_endpoint} with netuid: {self.config.netuid}"
        )
        self.axon.serve(netuid=self.config.netuid, subtensor=self.subtensor)

        # Start starts the miner's axon, making it active on the network.
        bt.logging.info(
            f"Starting axon server on port: {self.config.axon.port}"
        )
        self.axon.start()

        # --- Run until should_exit = True.
        self.last_epoch_block = self.subtensor.get_current_block()
        bt.logging.info(f"Miner starting at block: {self.last_epoch_block}")

        # This loop maintains the miner's operations until intentionally stopped.
        bt.logging.info(f"Starting main loop")
        step = 0
        try:
            while not self.should_exit:
                start_epoch = time.time()

                # --- Wait until next epoch.
                current_block = self.subtensor.get_current_block()
                while (
                    current_block - self.last_epoch_block
                    < self.config.miner.blocks_per_epoch
                ):
                    # --- Wait for next bloc.
                    time.sleep(1)
                    current_block = self.subtensor.get_current_block()

                    # --- Check if we should exit.
                    if self.should_exit:
                        break

                # --- Update the metagraph with the latest network state.
                self.last_epoch_block = self.subtensor.get_current_block()

                metagraph = self.subtensor.metagraph(
                    netuid=self.config.netuid,
                    lite=True,
                    block=self.last_epoch_block,
                )
                log = (
                    f"Step:{step} | "
                    f"Block:{metagraph.block.item()} | "
                    f"Stake:{metagraph.S[self.my_subnet_uid]} | "
                    f"Rank:{metagraph.R[self.my_subnet_uid]} | "
                    f"Trust:{metagraph.T[self.my_subnet_uid]} | "
                    f"Consensus:{metagraph.C[self.my_subnet_uid] } | "
                    f"Incentive:{metagraph.I[self.my_subnet_uid]} | "
                    f"Emission:{metagraph.E[self.my_subnet_uid]}"
                )
                bt.logging.info(log)

                step += 1

        # If someone intentionally stops the miner, it'll safely terminate operations.
        except KeyboardInterrupt:
            self.axon.stop()
            bt.logging.success("Miner killed by keyboard interrupt.")
            exit()

        # In case of unforeseen errors, the miner will log the error and exit the loop.
        except Exception:
            bt.logging.error(traceback.format_exc())

    def run_in_background_thread(self):
        """
        Starts the miner's operations in a separate background thread.
        This is useful for non-blocking operations.
        """
        if not self.is_running:
            bt.logging.debug("Starting miner in background thread.")
            self.should_exit = False
            self.thread = threading.Thread(target=self.run, daemon=True)
            self.thread.start()
            self.is_running = True
            bt.logging.debug("Started")

    def stop_run_thread(self):
        """
        Stops the miner's operations that are running in the background thread.
        """
        if self.is_running:
            bt.logging.debug("Stopping miner in background thread.")
            self.should_exit = True
            self.thread.join(5)
            self.is_running = False
            bt.logging.debug("Stopped")

    def __enter__(self):
        """
        Starts the miner's operations in a background thread upon entering the context.
        This method facilitates the use of the miner in a 'with' statement.
        """
        self.run_in_background_thread()

    def __exit__(self, exc_type, exc_value, traceback):
        """
        Stops the miner's background operations upon exiting the context.
        This method facilitates the use of the miner in a 'with' statement.

        Args:
            exc_type: The type of the exception that caused the context to be exited.
                      None if the context was exited without an exception.
            exc_value: The instance of the exception that caused the context to be exited.
                       None if the context was exited without an exception.
            traceback: A traceback object encoding the stack trace.
                       None if the context was exited without an exception.
        """
        self.stop_run_thread()
3532
3533
class StreamingTemplateMiner(StreamMiner):
    def config(self) -> "bt.Config":
        """
        Returns the configuration object specific to this miner.

        Implement and extend this method to provide custom configurations for the miner.
        Currently, it sets up a basic configuration parser.

        Returns:
            bt.Config: A configuration object with the miner's operational parameters.
        """
        parser = argparse.ArgumentParser(description="Streaming Miner Configs")
        self.add_args(parser)
        return bt.config(parser)

    # BUGFIX: the base class declares add_args as an abstract classmethod;
    # the override was missing the @classmethod decorator.
    @classmethod
    def add_args(cls, parser: argparse.ArgumentParser):
        """
        Adds custom arguments to the command line parser.

        Developers can introduce additional command-line arguments specific to the miner's
        functionality in this method. These arguments can then be used to configure the miner's operation.

        Args:
            parser (argparse.ArgumentParser):
                The command line argument parser to which custom arguments should be added.
        """
        pass

    def prompt(self, synapse: StreamPrompting) -> StreamPrompting:
        """
        Generates a streaming response for the provided synapse.

        This function serves as the main entry point for handling streaming prompts. It takes
        the incoming synapse which contains messages to be processed and returns a streaming
        response. The function uses the GPT-2 tokenizer and a simulated model to tokenize and decode
        the incoming message, and then sends the response back to the client token by token.

        Args:
            synapse (StreamPrompting): The incoming StreamPrompting instance containing the messages to be processed.

        Returns:
            StreamPrompting: The streaming response object which can be used by other functions to
            stream back the response to the client.

        Usage:
            This function can be extended and customized based on specific requirements of the
            miner. Developers can swap out the tokenizer, model, or adjust how streaming responses
            are generated to suit their specific applications.
        """
        bt.logging.trace("HI. PROMPT()")
        tokenizer = GPT2Tokenizer.from_pretrained("gpt2")

        # Simulated function to decode token IDs into strings. In a real-world scenario,
        # this can be replaced with an actual model inference step.
        def model(ids):
            return (tokenizer.decode(id) for id in ids)

        async def _prompt(text: str, send: Send):
            """
            Asynchronously processes the input text and sends back tokens as a streaming response.

            This function takes an input text, tokenizes it using the GPT-2 tokenizer, and then
            uses the simulated model to decode token IDs into strings. It then sends each token
            back to the client as a streaming response, with a delay between tokens to simulate
            the effect of real-time streaming.

            Args:
                text (str): The input text message to be processed.
                send (Send): An asynchronous function that allows sending back the streaming response.

            Usage:
                This function can be adjusted based on the streaming requirements, speed of
                response, or the model being used. Developers can also introduce more sophisticated
                processing steps or modify how tokens are sent back to the client.
            """
            bt.logging.trace("HI. _PROMPT()")
            input_ids = tokenizer(
                text, return_tensors="pt"
            ).input_ids.squeeze()
            buffer = []
            bt.logging.debug(f"Input text: {text}")
            bt.logging.debug(f"Input ids: {input_ids}")

            N = 3  # Number of tokens to send back to the client at a time
            for token in model(input_ids):
                bt.logging.trace(f"appending token: {token}")
                buffer.append(token)
                # If buffer has N tokens, send them back to the client.
                if len(buffer) == N:
                    time.sleep(0.1)
                    joined_buffer = "".join(buffer)
                    # BUGFIX: log message typo "sedning" corrected.
                    bt.logging.debug(f"sending tokens: {joined_buffer}")
                    await send(
                        {
                            "type": "http.response.body",
                            "body": joined_buffer.encode("utf-8"),
                            "more_body": True,
                        }
                    )
                    bt.logging.debug(f"Streamed tokens: {joined_buffer}")
                    buffer = []  # Clear the buffer for next batch of tokens

            # Send any remaining tokens in the buffer
            if buffer:
                joined_buffer = "".join(buffer)
                await send(
                    {
                        "type": "http.response.body",
                        "body": joined_buffer.encode("utf-8"),
                        "more_body": False,  # No more tokens to send
                    }
                )
                bt.logging.trace(f"Streamed tokens: {joined_buffer}")

        message = synapse.messages[0]
        bt.logging.trace(f"message in _prompt: {message}")
        token_streamer = partial(_prompt, message)
        bt.logging.trace(f"token streamer: {token_streamer}")
        return synapse.create_streaming_response(token_streamer)
3653
3654
# This is the main function, which runs the miner.
if __name__ == "__main__":
    # Entering the context manager presumably starts the miner's background
    # work (TODO confirm in StreamingTemplateMiner); this loop only keeps the
    # main process alive.
    with StreamingTemplateMiner():
        while True:
            time.sleep(1)
3660
3661
3662---
3663target_repo/docs/stream_tutorial/protocol.py
3664---
3665import pydantic
3666import bittensor as bt
3667
3668from abc import ABC, abstractmethod
3669from typing import List, Union, Callable, Awaitable
3670from starlette.responses import StreamingResponse
3671
3672
class StreamPrompting(bt.StreamingSynapse):
    """
    StreamPrompting is a specialized implementation of the `StreamingSynapse` tailored for prompting functionalities within
    the Bittensor network. This class is intended to interact with a streaming response that contains a sequence of tokens,
    which represent prompts or messages in a certain scenario.

    As a developer, when using or extending the `StreamPrompting` class, you should be primarily focused on the structure
    and behavior of the prompts you are working with. The class has been designed to seamlessly handle the streaming,
    decoding, and accumulation of tokens that represent these prompts.

    Attributes:
        - `roles` (List[str]): A list of roles involved in the prompting scenario. This could represent different entities
                               or agents involved in the conversation or use-case. They are immutable to ensure consistent
                               interaction throughout the lifetime of the object.

        - `messages` (List[str]): These represent the actual prompts or messages in the prompting scenario. They are also
                                  immutable to ensure consistent behavior during processing.

        - `completion` (str): Stores the processed result of the streaming tokens. As tokens are streamed, decoded, and
                              processed, they are accumulated in the completion attribute. This represents the "final"
                              product or result of the streaming process.
        - `required_hash_fields` (List[str]): A list of fields that are required for the hash.

    Methods:
        - `process_streaming_response`: This method asynchronously processes the incoming streaming response by decoding
                                        the tokens and accumulating them in the `completion` attribute.

        - `deserialize`: Converts the `completion` attribute into its desired data format, in this case, a string.

        - `extract_response_json`: Extracts relevant JSON data from the response, useful for gaining insights on the response's
                                   metadata or for debugging purposes.

    Note: While you can directly use the `StreamPrompting` class, it's designed to be extensible. Thus, you can create
    subclasses to further customize behavior for specific prompting scenarios or requirements.
    """

    # NOTE(review): `allow_mutation=False` is pydantic-v1 syntax; on pydantic v2
    # the equivalent is `frozen=True` — confirm which pydantic version bittensor pins.
    roles: List[str] = pydantic.Field(
        ...,
        title="Roles",
        # Fixed typo in user-facing description ("Immuatable." -> "Immutable.").
        description="A list of roles in the StreamPrompting scenario. Immutable.",
        allow_mutation=False,
    )

    messages: List[str] = pydantic.Field(
        ...,
        title="Messages",
        description="A list of messages in the StreamPrompting scenario. Immutable.",
        allow_mutation=False,
    )

    required_hash_fields: List[str] = pydantic.Field(
        ["messages"],
        title="Required Hash Fields",
        description="A list of required fields for the hash.",
        allow_mutation=False,
    )

    completion: str = pydantic.Field(
        "",
        title="Completion",
        description="Completion status of the current StreamPrompting object. This attribute is mutable and can be updated.",
    )

    async def process_streaming_response(self, response: StreamingResponse):
        """
        `process_streaming_response` is an asynchronous method designed to process the incoming streaming response from the
        Bittensor network. It's the heart of the StreamPrompting class, ensuring that streaming tokens, which represent
        prompts or messages, are decoded and appropriately managed.

        As the streaming response is consumed, the tokens are decoded from their 'utf-8' encoded format, split based on
        newline characters, and concatenated into the `completion` attribute. This accumulation of decoded tokens in the
        `completion` attribute allows for a continuous and coherent accumulation of the streaming content.

        Args:
            response: The streaming response object containing the content chunks to be processed. Each chunk in this
            response is expected to be a set of tokens that can be decoded and split into individual messages or prompts.
        """
        if self.completion is None:
            self.completion = ""
        bt.logging.debug(
            "Processing streaming response (StreamingSynapse base class)."
        )
        async for chunk in response.content.iter_any():
            bt.logging.debug(f"Processing chunk: {chunk}")
            # NOTE(review): splitting on "\n" and re-concatenating drops any
            # newline characters from `completion` — confirm the miner never
            # emits meaningful newlines before relying on this.
            tokens = chunk.decode("utf-8").split("\n")
            for token in tokens:
                bt.logging.debug(f"--processing token: {token}")
                if token:
                    self.completion += token
            bt.logging.debug(f"yielding tokens {tokens}")
            yield tokens

    def deserialize(self) -> str:
        """
        Deserializes the response by returning the completion attribute.

        Returns:
            str: The completion result.
        """
        return self.completion

    def extract_response_json(self, response: StreamingResponse) -> dict:
        """
        `extract_response_json` is a method that performs the crucial task of extracting pertinent JSON data from the given
        response. The method is especially useful when you need a detailed insight into the streaming response's metadata
        or when debugging response-related issues.

        Beyond just extracting the JSON data, the method also processes and structures the data for easier consumption
        and understanding. For instance, it extracts specific headers related to dendrite and axon, offering insights
        about the Bittensor network's internal processes. The method ultimately returns a dictionary with a structured
        view of the extracted data.

        Args:
            response: The response object from which to extract the JSON data. This object typically includes headers and
            content which can be used to glean insights about the response.

        Returns:
            dict: A structured dictionary containing:
                - Basic response metadata such as name, timeout, total_size, and header_size.
                - Dendrite and Axon related information extracted from headers.
                - Roles and Messages pertaining to the current StreamPrompting instance.
                - The accumulated completion.
        """
        # Reaches into the private `_raw_headers` attribute of the response;
        # headers arrive as bytes pairs and are decoded to str here.
        headers = {
            k.decode("utf-8"): v.decode("utf-8")
            for k, v in response.__dict__["_raw_headers"]
        }

        def extract_info(prefix):
            # Collect headers sharing `prefix`, keyed by the last "_"-separated segment.
            return {
                key.split("_")[-1]: value
                for key, value in headers.items()
                if key.startswith(prefix)
            }

        return {
            "name": headers.get("name", ""),
            "timeout": float(headers.get("timeout", 0)),
            "total_size": int(headers.get("total_size", 0)),
            "header_size": int(headers.get("header_size", 0)),
            "dendrite": extract_info("bt_header_dendrite"),
            "axon": extract_info("bt_header_axon"),
            "roles": self.roles,
            "messages": self.messages,
            "completion": self.completion,
        }
3819
3820
3821---
3822target_repo/neurons/__init__.py
3823---
3824
3825
3826---
3827target_repo/neurons/miner.py
3828---
3829# The MIT License (MIT)
3830# Copyright Β© 2023 Yuma Rao
3831# TODO(developer): Set your name
3832# Copyright Β© 2023 <your name>
3833
3834# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
3835# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
3836# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
3837# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
3838
3839# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
3840# the Software.
3841
3842# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
3843# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
3844# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
3845# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
3846# DEALINGS IN THE SOFTWARE.
3847
3848import time
3849import typing
3850import asyncio
3851import aiohttp
3852import bittensor as bt
3853
3854import sybil
3855
3856# import base miner class which takes care of most of the boilerplate
3857from sybil.base.miner import BaseMinerNeuron
3858
3859
class Miner(BaseMinerNeuron):
    """
    Your miner neuron class. You should use this class to define your miner's behavior. In particular, you should replace the forward function with your own logic. You may also want to override the blacklist and priority functions according to your needs.

    This class inherits from the BaseMinerNeuron class, which in turn inherits from BaseNeuron. The BaseNeuron class takes care of routine tasks such as setting up wallet, subtensor, metagraph, logging directory, parsing config, etc. You can override any of the methods in BaseNeuron if you need to customize the behavior.

    This class provides reasonable default behavior for a miner such as blacklisting unrecognized hotkeys, prioritizing requests based on stake, and forwarding requests to the forward function. Override any of these methods if you need custom behavior.
    """

    def __init__(self, config=None):
        super(Miner, self).__init__(config=config)

        # TODO(developer): Anything specific to your use case you can do here

    async def forward(
        self, synapse: sybil.protocol.Challenge
    ) -> sybil.protocol.Challenge:
        """
        Processes the incoming 'Challenge' synapse by POSTing its URL to the local
        miner server and storing the server's answer on the synapse.

        Args:
            synapse (sybil.protocol.Challenge): The synapse object containing the 'challenge_url' data.

        Returns:
            sybil.protocol.Challenge: The same synapse, with `challenge_response`
            set on success; returned unchanged if the HTTP call fails.
        """

        bt.logging.info(f"Received challenge: {synapse.challenge_url}")

        challenge_url = synapse.challenge_url

        try:
            async with aiohttp.ClientSession() as session:
                bt.logging.info(f"Sending challenge to {self.miner_server}/challenge")
                async with session.post(
                    f"{self.miner_server}/challenge",
                    json={"url": challenge_url},
                    headers={"Content-Type": "application/json"},
                ) as http_response:
                    # Distinct name: the original rebound `response` from the
                    # aiohttp response object to the extracted payload.
                    answer = (await http_response.json())["response"]
                    synapse.challenge_response = answer
                    bt.logging.info(f"Solved challenge: {synapse.challenge_response}")
                    return synapse
        except Exception as e:
            # Best effort: log and return the synapse unanswered rather than crash.
            bt.logging.error(f"Error solving challenge: {e}")
            return synapse

    async def blacklist(
        self, synapse: sybil.protocol.Challenge
    ) -> typing.Tuple[bool, str]:
        """
        Determines whether an incoming request should be blacklisted and thus ignored. Your implementation should
        define the logic for blacklisting requests based on your needs and desired security parameters.

        Blacklist runs before the synapse data has been deserialized (i.e. before synapse.data is available).
        The synapse is instead contracted via the headers of the request. It is important to blacklist
        requests before they are deserialized to avoid wasting resources on requests that will be ignored.

        Args:
            synapse (sybil.protocol.Challenge): A synapse object constructed from the headers of the incoming request.

        Returns:
            Tuple[bool, str]: A tuple containing a boolean indicating whether the synapse's hotkey is blacklisted,
                              and a string providing the reason for the decision.

        This function is a security measure to prevent resource wastage on undesired requests. It should be enhanced
        to include checks against the metagraph for entity registration, validator status, and sufficient stake
        before deserialization of synapse data to minimize processing overhead.

        Example blacklist logic:
        - Reject if the hotkey is not a registered entity within the metagraph.
        - Consider blacklisting entities that are not validators or have insufficient stake.

        In practice it would be wise to blacklist requests from entities that are not validators, or do not have
        enough stake. This can be checked via metagraph.S and metagraph.validator_permit. You can always attain
        the uid of the sender via a metagraph.hotkeys.index( synapse.dendrite.hotkey ) call.

        Otherwise, allow the request to be processed further.
        """

        if synapse.dendrite is None or synapse.dendrite.hotkey is None:
            bt.logging.warning(
                "Received a request without a dendrite or hotkey."
            )
            return True, "Missing dendrite or hotkey"

        # TODO(developer): Define how miners should blacklist requests.
        # BUGFIX: the registration check must run BEFORE list.index() — the
        # original called hotkeys.index() first, which raises ValueError for an
        # unknown hotkey and made the "Unrecognized hotkey" branch unreachable.
        if (
            not self.config.blacklist.allow_non_registered
            and synapse.dendrite.hotkey not in self.metagraph.hotkeys
        ):
            # Ignore requests from un-registered entities.
            bt.logging.trace(
                f"Blacklisting un-registered hotkey {synapse.dendrite.hotkey}"
            )
            return True, "Unrecognized hotkey"

        if self.config.blacklist.force_validator_permit:
            # If the config is set to force validator permit, then we should only allow requests from validators.
            try:
                uid = self.metagraph.hotkeys.index(synapse.dendrite.hotkey)
            except ValueError:
                # Unregistered (admitted via allow_non_registered) — it cannot
                # hold a validator permit, so reject it here.
                uid = None
            if uid is None or not self.metagraph.validator_permit[uid]:
                bt.logging.warning(
                    f"Blacklisting a request from non-validator hotkey {synapse.dendrite.hotkey}"
                )
                return True, "Non-validator hotkey"

        bt.logging.trace(
            f"Not Blacklisting recognized hotkey {synapse.dendrite.hotkey}"
        )
        return False, "Hotkey recognized!"

    async def priority(self, synapse: sybil.protocol.Challenge) -> float:
        """
        The priority function determines the order in which requests are handled. More valuable or higher-priority
        requests are processed before others. You should design your own priority mechanism with care.

        This implementation assigns priority to incoming requests based on the calling entity's stake in the metagraph.

        Args:
            synapse (sybil.protocol.Challenge): The synapse object that contains metadata about the incoming request.

        Returns:
            float: A priority score derived from the stake of the calling entity.

        Miners may receive messages from multiple entities at once. This function determines which request should be
        processed first. Higher values indicate that the request should be processed first. Lower values indicate
        that the request should be processed later.

        Example priority logic:
        - A higher stake results in a higher priority value.
        """
        if synapse.dendrite is None or synapse.dendrite.hotkey is None:
            bt.logging.warning(
                "Received a request without a dendrite or hotkey."
            )
            return 0.0

        # TODO(developer): Define how miners should prioritize requests.
        try:
            caller_uid = self.metagraph.hotkeys.index(
                synapse.dendrite.hotkey
            )  # Get the caller index.
        except ValueError:
            # Unregistered hotkey has no stake entry; give it the lowest
            # priority instead of propagating list.index's ValueError.
            return 0.0
        priority = float(
            self.metagraph.S[caller_uid]
        )  # Return the stake as the priority.
        bt.logging.trace(
            f"Prioritizing {synapse.dendrite.hotkey} with value: {priority}"
        )
        return priority
4006
4007
# This is the main function, which runs the miner.
if __name__ == "__main__":
    # Entering the context manager presumably runs the miner in the background
    # (TODO confirm in BaseMinerNeuron); this loop keeps the process alive and
    # logs a heartbeat every 20 seconds.
    with Miner() as miner:
        while True:
            bt.logging.info(f"Miner running... {time.time()}")
            time.sleep(20)
4014
4015
4016---
4017target_repo/neurons/validator.py
4018---
4019# The MIT License (MIT)
4020# Copyright Β© 2023 Yuma Rao
4021# TODO(developer): Set your name
4022# Copyright Β© 2023 <your name>
4023
4024# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
4025# documentation files (the β€œSoftware”), to deal in the Software without restriction, including without limitation
4026# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
4027# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4028
4029# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
4030# the Software.
4031
4032# THE SOFTWARE IS PROVIDED β€œAS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
4033# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
4034# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
4035# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
4036# DEALINGS IN THE SOFTWARE.
4037
4038
4039import os
4040import time
4041import datetime
4042
4043# Bittensor
4044import bittensor as bt
4045import wandb
4046
4047# import base validator class which takes care of most of the boilerplate
4048from sybil.base.validator import BaseValidatorNeuron
4049
4050# Bittensor Validator Template:
4051from sybil.validator import forward
4052
4053
class Validator(BaseValidatorNeuron):
    """
    Your validator neuron class. You should use this class to define your validator's behavior. In particular, you should replace the forward function with your own logic.

    This class inherits from the BaseValidatorNeuron class, which in turn inherits from BaseNeuron. The BaseNeuron class takes care of routine tasks such as setting up wallet, subtensor, metagraph, logging directory, parsing config, etc. You can override any of the methods in BaseNeuron if you need to customize the behavior.

    This class provides reasonable default behavior for a validator such as keeping a moving average of the scores of the miners and using them to set weights at the end of each epoch. Additionally, the scores are reset for new hotkeys at the end of each epoch.
    """

    def __init__(self, config=None):
        super(Validator, self).__init__(config=config)

        bt.logging.info("load_state()")
        self.load_state()

        # Decide how to handle W&B: disabled by flag, enabled with a key,
        # or forcibly disabled when no API key is present.
        self.wandb_run_start = None
        if self.config.wandb.off:
            bt.logging.warning(
                "Running with --wandb.off. It is strongly recommended to run with W&B enabled."
            )
        elif os.getenv("WANDB_API_KEY"):
            self.new_wandb_run()
        else:
            bt.logging.exception(
                "WANDB_API_KEY not found. Set it with `export WANDB_API_KEY=<your API key>`. Alternatively, you can disable W&B with --wandb.off, but it is strongly recommended to run with W&B enabled."
            )
            self.config.wandb.off = True

    def new_wandb_run(self):
        """Creates a new wandb run to save information to."""
        # Timestamp both identifies the run and records when it began.
        now = datetime.datetime.now()
        self.wandb_run_start = now
        run_id = now.strftime("%Y-%m-%d_%H-%M-%S")
        name = f"validator-{self.uid}-{run_id}"
        self.wandb_run = wandb.init(
            name=name,
            project="tpn-validators",
            entity="tpn-subnet",
            config={
                "uid": self.uid,
                "hotkey": self.wallet.hotkey.ss58_address,
                "run_name": run_id,
                "type": "validator",
            },
            allow_val_change=True,
            anonymous="allow",
        )

        bt.logging.debug(f"Started a new wandb run: {name}")

    async def forward(self):
        """
        Validator forward pass. Consists of:
        - Generating the query
        - Querying the miners
        - Getting the responses
        - Rewarding the miners
        - Updating the scores
        """
        # TODO(developer): Rewrite this function based on your protocol definition.
        return await forward(self)
4117
4118
# The main function parses the configuration and runs the validator.
if __name__ == "__main__":
    # Entering the context manager presumably runs the validator in the
    # background (TODO confirm in BaseValidatorNeuron); this loop keeps the
    # process alive and logs a heartbeat every 5 seconds.
    with Validator() as validator:
        while True:
            bt.logging.info(f"Validator running... {time.time()}")
            time.sleep(5)
4125
4126
4127---