From a092b58cef73722000efbad7f0075d235697c08e Mon Sep 17 00:00:00 2001 From: Raj Karamchedu Date: Thu, 25 Jan 2024 14:11:42 +0530 Subject: [PATCH 01/23] Sixth docstrings formatting PR --- bittensor/btlogging.py | 23 +++--- bittensor/chain_data.py | 35 ++++----- bittensor/config.py | 16 ++-- bittensor/dendrite.py | 165 ++++++++++++++++++++-------------------- bittensor/keyfile.py | 128 ++++++++++++++++++------------- 5 files changed, 198 insertions(+), 169 deletions(-) diff --git a/bittensor/btlogging.py b/bittensor/btlogging.py index a42c55612e..d17a0dd00a 100644 --- a/bittensor/btlogging.py +++ b/bittensor/btlogging.py @@ -1,4 +1,4 @@ -""" Standardize logging for bittensor +""" Standardized logging for Bittensor. """ # The MIT License (MIT) # Copyright © 2021 Yuma Rao @@ -42,7 +42,7 @@ def _remove_loguru_ansi_directive(text: str) -> str: class logging: - """Standardize logging for bittensor""" + """Standardized logging for Bittensor.""" __has_been_inited__: bool = False __debug_on__: bool = False @@ -58,17 +58,18 @@ def __new__( record_log: bool = None, logging_dir: str = None, ): - r"""Instantiate bittensor logging system backend. + r"""Instantiate Bittensor logging system backend. + Args: - config (:obj:`bittensor.config`, `optional`): + config (bittensor.config, optional): bittensor.logging.config() - debug (:obj:`bool`, `optional`): + debug (bool, optional): Turn on debug. - trace (:obj:`bool`, `optional`): + trace (bool, optional): Turn on trace. - record_log (:obj:`bool`, `optional`): + record_log (bool, optional): If true, logs are saved to loggind dir. - logging_dir (:obj:`str`, `optional`): + logging_dir (str, optional): Directory where logs are sunk. """ @@ -129,8 +130,10 @@ def __new__( @classmethod def config(cls): - """Get config from the argument parser - Return: bittensor.config object + """Get config from the argument parser. 
+ + Return: + bittensor.config object """ parser = argparse.ArgumentParser() logging.add_args(parser) diff --git a/bittensor/chain_data.py b/bittensor/chain_data.py index eeb946e20f..be2486c8b7 100644 --- a/bittensor/chain_data.py +++ b/bittensor/chain_data.py @@ -203,7 +203,7 @@ def is_serving(self) -> bool: return True def ip_str(self) -> str: - """Return the whole ip as string""" + """Return the whole IP as string""" return net.ip__str__(self.ip_type, self.ip, self.port) def __eq__(self, other: "AxonInfo"): @@ -419,7 +419,7 @@ def fix_decoded_values(cls, neuron_info_decoded: Any) -> "NeuronInfo": @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> "NeuronInfo": - r"""Returns a NeuronInfo object from a vec_u8.""" + r"""Returns a NeuronInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: return NeuronInfo._null_neuron() @@ -433,7 +433,7 @@ def from_vec_u8(cls, vec_u8: List[int]) -> "NeuronInfo": @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["NeuronInfo"]: - r"""Returns a list of NeuronInfo objects from a vec_u8.""" + r"""Returns a list of NeuronInfo objects from a ``vec_u8``.""" decoded_list = from_scale_encoding( vec_u8, ChainDataType.NeuronInfo, is_vec=True @@ -589,7 +589,7 @@ def fix_decoded_values(cls, neuron_info_decoded: Any) -> "NeuronInfoLite": @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> "NeuronInfoLite": - r"""Returns a NeuronInfoLite object from a vec_u8.""" + r"""Returns a NeuronInfoLite object from a ``vec_u8``.""" if len(vec_u8) == 0: return NeuronInfoLite._null_neuron() @@ -603,7 +603,7 @@ def from_vec_u8(cls, vec_u8: List[int]) -> "NeuronInfoLite": @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["NeuronInfoLite"]: - r"""Returns a list of NeuronInfoLite objects from a vec_u8.""" + r"""Returns a list of NeuronInfoLite objects from a ``vec_u8``.""" decoded_list = from_scale_encoding( vec_u8, ChainDataType.NeuronInfoLite, is_vec=True @@ -736,7 +736,7 @@ def fix_decoded_values(cls, decoded: Any) -> "DelegateInfo": @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["DelegateInfo"]: - r"""Returns a DelegateInfo object from a vec_u8.""" + r"""Returns a DelegateInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: return None @@ -751,7 +751,7 @@ def from_vec_u8(cls, vec_u8: List[int]) -> Optional["DelegateInfo"]: @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["DelegateInfo"]: - r"""Returns a list of DelegateInfo objects from a vec_u8.""" + r"""Returns a list of DelegateInfo objects from a ``vec_u8``.""" decoded = from_scale_encoding(vec_u8, ChainDataType.DelegateInfo, is_vec=True) if decoded is None: @@ -765,7 +765,8 @@ def list_from_vec_u8(cls, vec_u8: List[int]) -> List["DelegateInfo"]: def delegated_list_from_vec_u8( cls, vec_u8: List[int] ) -> List[Tuple["DelegateInfo", Balance]]: - r"""Returns a list of Tuples of DelegateInfo objects, and Balance, from a vec_u8. + r"""Returns a list of Tuples of DelegateInfo objects, and Balance, from a ``vec_u8``. + This is the list of delegates that the user has delegated to, and the amount of stake delegated. 
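A minimal sketch of how the ``from_vec_u8`` helpers in this file are exercised; the empty-input behaviour shown is the one documented above, while real callers pass the SCALE-encoded ``Vec<u8>`` obtained from a chain query, which is outside the scope of this patch::

    from bittensor.chain_data import NeuronInfo, DelegateInfo

    empty = []  # stand-in for the SCALE-encoded ``Vec<u8>`` bytes returned by a chain query

    null_neuron = NeuronInfo.from_vec_u8(empty)    # empty input returns the "null" neuron
    no_delegate = DelegateInfo.from_vec_u8(empty)  # empty input returns ``None``
    assert no_delegate is None

    # With real chain bytes, the vector variants are used instead, e.g.:
    #   NeuronInfo.list_from_vec_u8(vec_u8)
    #   DelegateInfo.delegated_list_from_vec_u8(vec_u8)  # -> [(DelegateInfo, Balance), ...]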
""" decoded = from_scale_encoding(vec_u8, ChainDataType.DelegatedInfo, is_vec=True) @@ -802,7 +803,7 @@ def fix_decoded_values(cls, decoded: Any) -> "StakeInfo": @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["StakeInfo"]: - r"""Returns a StakeInfo object from a vec_u8.""" + r"""Returns a StakeInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: return None @@ -819,7 +820,7 @@ def from_vec_u8(cls, vec_u8: List[int]) -> Optional["StakeInfo"]: def list_of_tuple_from_vec_u8( cls, vec_u8: List[int] ) -> Dict[str, List["StakeInfo"]]: - r"""Returns a list of StakeInfo objects from a vec_u8.""" + r"""Returns a list of StakeInfo objects from a ``vec_u8``.""" decoded: Optional[ List[Tuple(str, List[object])] ] = from_scale_encoding_using_type_string( @@ -840,7 +841,7 @@ def list_of_tuple_from_vec_u8( @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["StakeInfo"]: - r"""Returns a list of StakeInfo objects from a vec_u8.""" + r"""Returns a list of StakeInfo objects from a ``vec_u8``.""" decoded = from_scale_encoding(vec_u8, ChainDataType.StakeInfo, is_vec=True) if decoded is None: @@ -878,7 +879,7 @@ class SubnetInfo: @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["SubnetInfo"]: - r"""Returns a SubnetInfo object from a vec_u8.""" + r"""Returns a SubnetInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: return None @@ -891,7 +892,7 @@ def from_vec_u8(cls, vec_u8: List[int]) -> Optional["SubnetInfo"]: @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["SubnetInfo"]: - r"""Returns a list of SubnetInfo objects from a vec_u8.""" + r"""Returns a list of SubnetInfo objects from a ``vec_u8``.""" decoded = from_scale_encoding( vec_u8, ChainDataType.SubnetInfo, is_vec=True, is_option=True ) @@ -970,7 +971,7 @@ class SubnetHyperparameters: @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["SubnetHyperparameters"]: - r"""Returns a SubnetHyperparameters object from a vec_u8.""" + r"""Returns a SubnetHyperparameters object from a ``vec_u8``.""" if len(vec_u8) == 0: return None @@ -983,7 +984,7 @@ def from_vec_u8(cls, vec_u8: List[int]) -> Optional["SubnetHyperparameters"]: @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["SubnetHyperparameters"]: - r"""Returns a list of SubnetHyperparameters objects from a vec_u8.""" + r"""Returns a list of SubnetHyperparameters objects from a ``vec_u8``.""" decoded = from_scale_encoding( vec_u8, ChainDataType.SubnetHyperparameters, is_vec=True, is_option=True ) @@ -1053,7 +1054,7 @@ def encode(self) -> Dict[str, Any]: @classmethod def from_vec_u8(cls, vec_u8: List[int]) -> Optional["IPInfo"]: - r"""Returns a IPInfo object from a vec_u8.""" + r"""Returns a IPInfo object from a ``vec_u8``.""" if len(vec_u8) == 0: return None @@ -1066,7 +1067,7 @@ def from_vec_u8(cls, vec_u8: List[int]) -> Optional["IPInfo"]: @classmethod def list_from_vec_u8(cls, vec_u8: List[int]) -> List["IPInfo"]: - r"""Returns a list of IPInfo objects from a vec_u8.""" + r"""Returns a list of IPInfo objects from a ``vec_u8``.""" decoded = from_scale_encoding(vec_u8, ChainDataType.IPInfo, is_vec=True) if decoded is None: diff --git a/bittensor/config.py b/bittensor/config.py index 8fea624830..3962615c25 100644 --- a/bittensor/config.py +++ b/bittensor/config.py @@ -1,5 +1,5 @@ """ -Implementation of the config class, which manages the config of different bittensor modules. +Implementation of the config class, which manages the configuration of different Bittensor modules. 
""" # The MIT License (MIT) # Copyright © 2021 Yuma Rao @@ -37,21 +37,22 @@ class InvalidConfigFile(Exception): class config(DefaultMunch): """ - Implementation of the config class, which manages the config of different bittensor modules. + Implementation of the config class, which manages the configuration of different Bittensor modules. """ __is_set: Dict[str, bool] r""" Translates the passed parser into a nested Bittensor config. + Args: parser (argparse.ArgumentParser): Command line parser object. strict (bool): - If true, the command line arguments are strictly parsed. + If ``true``, the command line arguments are strictly parsed. args (list of str): Command line arguments. default (Optional[Any]): - Default value for the Config. Defaults to None. + Default value for the Config. Defaults to ``None``. This default will be returned for attributes that are undefined. Returns: config (bittensor.config): @@ -98,7 +99,7 @@ def __init__( parser.add_argument( "--no_version_checking", action="store_true", - help="Set true to stop cli version checking.", + help="Set ``true`` to stop cli version checking.", default=False, ) except: @@ -110,7 +111,7 @@ def __init__( "--no_prompt", dest="no_prompt", action="store_true", - help="Set true to stop cli from prompting the user.", + help="Set ``true`` to stop cli from prompting the user.", default=False, ) except: @@ -246,13 +247,14 @@ def __parse_args__( args: List[str], parser: argparse.ArgumentParser = None, strict: bool = False ) -> argparse.Namespace: """Parses the passed args use the passed parser. + Args: args (List[str]): List of arguments to parse. parser (argparse.ArgumentParser): Command line parser object. strict (bool): - If true, the command line arguments are strictly parsed. + If ``true``, the command line arguments are strictly parsed. Returns: Namespace: Namespace object created from parser arguments. diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index 056977d975..bb41b81929 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -31,8 +31,9 @@ class dendrite(torch.nn.Module): """ - The Dendrite class, inheriting from PyTorch's Module class, represents the abstracted - implementation of a network client module. In the brain analogy, dendrites receive signals + The Dendrite class, inheriting from PyTorch's Module class, represents the abstracted implementation of a network client module. + + In the brain analogy, dendrites receive signals from other neurons (in this case, network servers or axons), and the Dendrite class here is designed to send requests to those endpoint to recieve inputs. @@ -40,15 +41,14 @@ class dendrite(torch.nn.Module): HTTP requests to the network servers. It also provides functionalities such as logging network requests and processing server responses. - Attributes: + Args: keypair: The wallet or keypair used for signing messages. external_ip (str): The external IP address of the local system. synapse_history (list): A list of Synapse objects representing the historical responses. Methods: __str__(): Returns a string representation of the Dendrite object. - __repr__(): Returns a string representation of the Dendrite object, acting as a fallback - for __str__(). + __repr__(): Returns a string representation of the Dendrite object, acting as a fallback for __str__(). query(self, *args, **kwargs) -> Union[bittensor.Synapse, List[bittensor.Synapse]]: Makes synchronous requests to one or multiple target Axons and returns responses. 
@@ -59,8 +59,7 @@ class dendrite(torch.nn.Module): Asynchronously sends a request to a specified Axon and processes the response. call_stream(self, target_axon, synapse=bittensor.Synapse(), timeout=12.0, deserialize=True) -> AsyncGenerator[bittensor.Synapse, None]: - Sends a request to a specified Axon and yields an AsyncGenerator that contains streaming - response chunks before finally yielding the filled Synapse as the final element. + Sends a request to a specified Axon and yields an AsyncGenerator that contains streaming response chunks before finally yielding the filled Synapse as the final element. preprocess_synapse_for_request(self, target_axon_info, synapse, timeout=12.0) -> bittensor.Synapse: Preprocesses the synapse for making a request, including building headers and signing. @@ -74,18 +73,21 @@ class dendrite(torch.nn.Module): aclose_session(self): Asynchronously closes the internal aiohttp client session. - NOTE: When working with async aiohttp client sessions, it is recommended to use a context manager. + NOTE: + When working with async `aiohttp `_ client sessions, it is recommended to use a context manager. - Example with a context manager: + Example with a context manager:: + >>> aysnc with dendrite(wallet = bittensor.wallet()) as d: >>> print(d) >>> d( ) # ping axon >>> d( [] ) # ping multiple >>> d( bittensor.axon(), bittensor.Synapse ) - However, you are able to safely call dendrite.query() without a context manager in a synchronous setting. + However, you are able to safely call :func:`dendrite.query()` without a context manager in a synchronous setting. - Example without a context manager: + Example without a context manager:: + >>> d = dendrite(wallet = bittensor.wallet() ) >>> print(d) >>> d( ) # ping axon @@ -101,8 +103,7 @@ def __init__( Args: wallet (Optional[Union['bittensor.wallet', 'bittensor.keypair']], optional): - The user's wallet or keypair used for signing messages. Defaults to None, - in which case a new bittensor.wallet().hotkey is generated and used. + The user's wallet or keypair used for signing messages. Defaults to ``None``, in which case a new :func:`bittensor.wallet().hotkey` is generated and used. """ # Initialize the parent class super(dendrite, self).__init__() @@ -125,21 +126,22 @@ def __init__( @property async def session(self) -> aiohttp.ClientSession: """ - An asynchronous property that provides access to the internal aiohttp client session. + An asynchronous property that provides access to the internal `aiohttp `_ client session. This property ensures the management of HTTP connections in an efficient way. It lazily - initializes the aiohttp.ClientSession on its first use. The session is then reused for subsequent + initializes the `aiohttp.ClientSession `_ on its first use. The session is then reused for subsequent HTTP requests, offering performance benefits by reusing underlying connections. This is used internally by the dendrite when querying axons, and should not be used directly unless absolutely necessary for your application. Returns: - aiohttp.ClientSession: The active aiohttp client session instance. If no session exists, a + aiohttp.ClientSession: The active `aiohttp `_ client session instance. If no session exists, a new one is created and returned. This session is used for asynchronous HTTP requests within the dendrite, adhering to the async nature of the network interactions in the Bittensor framework. - Example usage: + Example usage:: + import bittensor as bt # Import bittensor wallet = bt.wallet( ... 
) # Initialize a wallet dendrite = bt.dendrite( wallet ) # Initialize a dendrite instance with the wallet @@ -159,18 +161,18 @@ async def session(self) -> aiohttp.ClientSession: def close_session(self): """ - Closes the internal aiohttp client session synchronously. + Closes the internal `aiohttp `_ client session synchronously. This method ensures the proper closure and cleanup of the aiohttp client session, releasing any resources like open connections and internal buffers. It is crucial for preventing resource leakage and should be called when the dendrite instance is no longer in use, especially in synchronous contexts. - Note: This method utilizes asyncio's event loop to close the session asynchronously from a synchronous - context. It is advisable to use this method only when asynchronous context management is not feasible. + Note: + This method utilizes asyncio's event loop to close the session asynchronously from a synchronous context. It is advisable to use this method only when asynchronous context management is not feasible. Usage: - # When finished with dendrite in a synchronous context - dendrite_instance.close_session() + When finished with dendrite in a synchronous context + :func:`dendrite_instance.close_session()`. """ if self._session: loop = asyncio.get_event_loop() @@ -179,18 +181,19 @@ def close_session(self): async def aclose_session(self): """ - Asynchronously closes the internal aiohttp client session. + Asynchronously closes the internal `aiohttp `_ client session. - This method is the asynchronous counterpart to the `close_session` method. It should be used in + This method is the asynchronous counterpart to the :func:`close_session` method. It should be used in asynchronous contexts to ensure that the aiohttp client session is closed properly. The method releases resources associated with the session, such as open connections and internal buffers, which is essential for resource management in asynchronous applications. Usage: - # When finished with dendrite in an asynchronous context - await dendrite_instance.aclose_session() + When finished with dendrite in an asynchronous context + await :func:`dendrite_instance.aclose_session()`. - Example: + Example:: + async with dendrite_instance: # Operations using dendrite pass @@ -224,8 +227,7 @@ def _get_endpoint_url(self, target_axon, request_name): def _handle_request_errors(self, synapse, request_name, exception): """ - Handles exceptions that occur during network requests, updating the synapse with appropriate status - codes and messages. + Handles exceptions that occur during network requests, updating the synapse with appropriate status codes and messages. This method interprets different types of exceptions and sets the corresponding status code and message in the synapse object. It covers common network errors such as connection issues and timeouts. @@ -235,7 +237,8 @@ def _handle_request_errors(self, synapse, request_name, exception): request_name: The name of the request during which the exception occurred. exception: The exception object caught during the request. - Note: This method updates the synapse object in-place. + Note: + This method updates the synapse object in-place. """ if isinstance(exception, aiohttp.ClientConnectorError): synapse.dendrite.status_code = "503" @@ -259,12 +262,10 @@ def _log_outgoing_request(self, synapse): request, the name of the synapse, the axon's details, and a success indicator. 
This information is crucial for monitoring and debugging network activity within the Bittensor network. - To turn on debug messages, set the environment variable BITTENSOR_DEBUG to 1. or call the bittensor - debug method like so: - ```python - import bittensor - bittensor.debug() - ``` + To turn on debug messages, set the environment variable BITTENSOR_DEBUG to ``1``, or call the bittensor debug method like so:: + + import bittensor + bittensor.debug() Args: synapse: The synapse object representing the request being sent. @@ -277,7 +278,7 @@ def _log_incoming_response(self, synapse): """ Logs information about incoming responses for debugging and monitoring. - Similar to `_log_outgoing_request`, this method logs essential details of the incoming responses, + Similar to :func:`_log_outgoing_request`, this method logs essential details of the incoming responses, including the size of the response, synapse name, axon details, status code, and status message. This logging is vital for troubleshooting and understanding the network interactions in Bittensor. @@ -304,13 +305,11 @@ def query( Args: axons (Union[List[Union['bittensor.AxonInfo', 'bittensor.axon']], Union['bittensor.AxonInfo', 'bittensor.axon']]): The list of target Axon information. - synapse (bittensor.Synapse, optional): The Synapse object. Defaults to bittensor.Synapse(). + synapse (bittensor.Synapse, optional): The Synapse object. Defaults to :func:`bittensor.Synapse()`. timeout (float, optional): The request timeout duration in seconds. - Defaults to 12.0 seconds. + Defaults to ``12.0`` seconds. Returns: - Union[bittensor.Synapse, List[bittensor.Synapse]]: If a single target axon is provided, - returns the response from that axon. If multiple target axons are provided, - returns a list of responses from all target axons. + Union[bittensor.Synapse, List[bittensor.Synapse]]: If a single target axon is provided, returns the response from that axon. If multiple target axons are provided, returns a list of responses from all target axons. """ result = None try: @@ -349,7 +348,8 @@ async def forward( containing the response data. If multiple Axons are queried, a list of Synapse objects is returned, each containing the response from the corresponding Axon. - For example: + For example:: + >>> ... >>> wallet = bittensor.wallet() # Initialize a wallet >>> synapse = bittensor.Synapse(...) # Create a synapse object that contains query data @@ -361,7 +361,8 @@ async def forward( returns an AsyncGenerator that yields each chunk as it is received. The generator can be iterated over to process each chunk individually. - For example: + For example:: + >>> ... >>> dendrte = bittensor.dendrite(wallet = wallet) >>> async for chunk in dendrite.forward(axons, synapse, timeout, deserialize, run_async, streaming): @@ -371,11 +372,11 @@ async def forward( Args: axons (Union[List[Union['bittensor.AxonInfo', 'bittensor.axon']], Union['bittensor.AxonInfo', 'bittensor.axon']]): The target Axons to send requests to. Can be a single Axon or a list of Axons. - synapse (bittensor.Synapse, optional): The Synapse object encapsulating the data. Defaults to a new bittensor.Synapse instance. - timeout (float, optional): Maximum duration to wait for a response from an Axon in seconds. Defaults to 12.0. - deserialize (bool, optional): Determines if the received response should be deserialized. Defaults to True. - run_async (bool, optional): If True, sends requests concurrently. Otherwise, sends requests sequentially. Defaults to True. 
- streaming (bool, optional): Indicates if the response is expected to be in streaming format. Defaults to False. + synapse (bittensor.Synapse, optional): The Synapse object encapsulating the data. Defaults to a new :func:`bittensor.Synapse` instance. + timeout (float, optional): Maximum duration to wait for a response from an Axon in seconds. Defaults to ``12.0``. + deserialize (bool, optional): Determines if the received response should be deserialized. Defaults to ``True``. + run_async (bool, optional): If ``True``, sends requests concurrently. Otherwise, sends requests sequentially. Defaults to ``True``. + streaming (bool, optional): Indicates if the response is expected to be in streaming format. Defaults to ``False``. Returns: Union[AsyncGenerator, bittensor.Synapse, List[bittensor.Synapse]]: If a single Axon is targeted, returns its response. @@ -401,16 +402,15 @@ async def query_all_axons( is_stream: bool, ) -> Union[AsyncGenerator[Any], bittenst.Synapse, bittensor.StreamingSynapse]: """ - Handles the processing of requests to all targeted axons, accommodating both streaming and - non-streaming responses. + Handles the processing of requests to all targeted axons, accommodating both streaming and non-streaming responses. This function manages the concurrent or sequential dispatch of requests to a list of axons. - It utilizes the `is_stream` parameter to determine the mode of response handling (streaming - or non-streaming). For each axon, it calls 'single_axon_response' and aggregates the responses. + It utilizes the ``is_stream`` parameter to determine the mode of response handling (streaming + or non-streaming). For each axon, it calls ``single_axon_response`` and aggregates the responses. Args: is_stream (bool): Flag indicating whether the axon responses are expected to be streamed. - If True, responses are handled in streaming mode. + If ``True``, responses are handled in streaming mode. Returns: List[Union[AsyncGenerator, bittensor.Synapse, bittensor.StreamingSynapse]]: A list @@ -424,17 +424,15 @@ async def single_axon_response( AsyncGenerator[Any], bittenst.Synapse, bittensor.StreamingSynapse ]: """ - Manages the request and response process for a single axon, supporting both streaming and - non-streaming modes. + Manages the request and response process for a single axon, supporting both streaming and non-streaming modes. This function is responsible for initiating a request to a single axon. Depending on the - 'is_stream' flag, it either uses 'call_stream' for streaming responses or 'call' for + ``is_stream`` flag, it either uses ``call_stream`` for streaming responses or ``call`` for standard responses. The function handles the response processing, catering to the specifics of streaming or non-streaming data. Args: - target_axon: The target axon object to which the request is to be sent. This object - contains the necessary information like IP address and port to formulate the request. + target_axon: The target axon object to which the request is to be sent. This object contains the necessary information like IP address and port to formulate the request. Returns: Union[AsyncGenerator, bittensor.Synapse, bittensor.StreamingSynapse]: The response @@ -493,9 +491,9 @@ async def call( Args: target_axon (Union['bittensor.AxonInfo', 'bittensor.axon']): The target Axon to send the request to. - synapse (bittensor.Synapse, optional): The Synapse object encapsulating the data. Defaults to a new bittensor.Synapse instance. 
- timeout (float, optional): Maximum duration to wait for a response from the Axon in seconds. Defaults to 12.0. - deserialize (bool, optional): Determines if the received response should be deserialized. Defaults to True. + synapse (bittensor.Synapse, optional): The Synapse object encapsulating the data. Defaults to a new :func:`bittensor.Synapse` instance. + timeout (float, optional): Maximum duration to wait for a response from the Axon in seconds. Defaults to ``12.0``. + deserialize (bool, optional): Determines if the received response should be deserialized. Defaults to ``True``. Returns: bittensor.Synapse: The Synapse object, updated with the response data from the Axon. @@ -562,16 +560,16 @@ async def call_stream( """ Sends a request to a specified Axon and yields streaming responses. - Similar to `call`, but designed for scenarios where the Axon sends back data in + Similar to ``call``, but designed for scenarios where the Axon sends back data in multiple chunks or streams. The function yields each chunk as it is received. This is useful for processing large responses piece by piece without waiting for the entire data to be transmitted. Args: target_axon (Union['bittensor.AxonInfo', 'bittensor.axon']): The target Axon to send the request to. - synapse (bittensor.Synapse, optional): The Synapse object encapsulating the data. Defaults to a new bittensor.Synapse instance. - timeout (float, optional): Maximum duration to wait for a response (or a chunk of the response) from the Axon in seconds. Defaults to 12.0. - deserialize (bool, optional): Determines if each received chunk should be deserialized. Defaults to True. + synapse (bittensor.Synapse, optional): The Synapse object encapsulating the data. Defaults to a new :func:`bittensor.Synapse` instance. + timeout (float, optional): Maximum duration to wait for a response (or a chunk of the response) from the Axon in seconds. Defaults to ``12.0``. + deserialize (bool, optional): Determines if each received chunk should be deserialized. Defaults to ``True``. Yields: object: Each yielded object contains a chunk of the arbitrary response data from the Axon. @@ -651,7 +649,7 @@ def preprocess_synapse_for_request( target_axon_info (bittensor.AxonInfo): The target axon information. synapse (bittensor.Synapse): The synapse object to be preprocessed. timeout (float, optional): The request timeout duration in seconds. - Defaults to 12.0 seconds. + Defaults to ``12.0`` seconds. Returns: bittensor.Synapse: The preprocessed synapse. @@ -696,12 +694,12 @@ def process_server_response( server's state and merges headers set by the server. Args: - server_response (object): The aiohttp response object from the server. + server_response (object): The `aiohttp `_ response object from the server. json_response (dict): The parsed JSON response from the server. local_synapse (bittensor.Synapse): The local synapse object to be updated. Raises: - None, but errors in attribute setting are silently ignored. + None: But errors in attribute setting are silently ignored. """ # Check if the server responded with a successful status code if server_response.status == 200: @@ -746,16 +744,16 @@ def __str__(self) -> str: Returns a string representation of the Dendrite object. Returns: - str: The string representation of the Dendrite object in the format "dendrite()". + str: The string representation of the Dendrite object in the format :func:`dendrite()`. 
""" return "dendrite({})".format(self.keypair.ss58_address) def __repr__(self) -> str: """ - Returns a string representation of the Dendrite object, acting as a fallback for __str__(). + Returns a string representation of the Dendrite object, acting as a fallback for :func:`__str__()`. Returns: - str: The string representation of the Dendrite object in the format "dendrite()". + str: The string representation of the Dendrite object in the format :func:`dendrite()`. """ return self.__str__() @@ -763,13 +761,14 @@ async def __aenter__(self): """ Asynchronous context manager entry method. - Enables the use of the `async with` statement with the Dendrite instance. When entering the context, + Enables the use of the ``async with`` statement with the Dendrite instance. When entering the context, the current instance of the class is returned, making it accessible within the asynchronous context. Returns: Dendrite: The current instance of the Dendrite class. - Usage: + Usage:: + async with Dendrite() as dendrite: await dendrite.some_async_method() """ @@ -779,20 +778,21 @@ async def __aexit__(self, exc_type, exc_value, traceback): """ Asynchronous context manager exit method. - Ensures proper cleanup when exiting the `async with` context. This method will close the aiohttp client session + Ensures proper cleanup when exiting the ``async with`` context. This method will close the `aiohttp `_ client session asynchronously, releasing any tied resources. Args: exc_type (Type[BaseException], optional): The type of exception that was raised. exc_value (BaseException, optional): The instance of exception that was raised. - traceback (TracebackType, optional): A traceback object encapsulating the call stack at the point - where the exception was raised. + traceback (TracebackType, optional): A traceback object encapsulating the call stack at the point where the exception was raised. - Usage: + Usage:: + async with bt.dendrite( wallet ) as dendrite: await dendrite.some_async_method() - Note: This automatically closes the session by calling __aexit__ after the context closes. + Note: + This automatically closes the session by calling :func:`__aexit__` after the context closes. """ await self.aclose_session() @@ -803,10 +803,11 @@ def __del__(self): This method is invoked when the Dendrite instance is about to be destroyed. The destructor ensures that the aiohttp client session is closed before the instance is fully destroyed, releasing any remaining resources. - Note: Relying on the destructor for cleanup can be unpredictable. It's recommended to explicitly close sessions - using the provided methods or the `async with` context manager. + Note: + Relying on the destructor for cleanup can be unpredictable. It is recommended to explicitly close sessions using the provided methods or the ``async with`` context manager. - Usage: + Usage:: + dendrite = Dendrite() # ... some operations ... del dendrite # This will implicitly invoke the __del__ method and close the session. diff --git a/bittensor/keyfile.py b/bittensor/keyfile.py index 935d47116a..0bcadaa86b 100644 --- a/bittensor/keyfile.py +++ b/bittensor/keyfile.py @@ -44,6 +44,7 @@ def serialized_keypair_to_keyfile_data(keypair: "bittensor.Keypair") -> bytes: """Serializes keypair object into keyfile data. + Args: keypair (bittensor.Keypair): The keypair object to be serialized. 
Returns: @@ -69,6 +70,7 @@ def serialized_keypair_to_keyfile_data(keypair: "bittensor.Keypair") -> bytes: def deserialize_keypair_from_keyfile_data(keyfile_data: bytes) -> "bittensor.Keypair": """Deserializes Keypair object from passed keyfile data. + Args: keyfile_data (bytes): The keyfile data as bytes to be loaded. Returns: @@ -116,10 +118,11 @@ def deserialize_keypair_from_keyfile_data(keyfile_data: bytes) -> "bittensor.Key def validate_password(password: str) -> bool: """Validates the password against a password policy. + Args: password (str): The password to verify. Returns: - valid (bool): True if the password meets validity requirements. + valid (bool): ``True`` if the password meets validity requirements. """ policy = PasswordPolicy.from_names(strength=0.20, entropybits=10, length=6) if not password: @@ -142,6 +145,7 @@ def validate_password(password: str) -> bool: def ask_password_to_encrypt() -> str: """Prompts the user to enter a password for key encryption. + Returns: password (str): The valid password entered by the user. """ @@ -154,18 +158,20 @@ def ask_password_to_encrypt() -> str: def keyfile_data_is_encrypted_nacl(keyfile_data: bytes) -> bool: """Returns true if the keyfile data is NaCl encrypted. + Args: keyfile_data ( bytes, required ): - Bytes to validate + Bytes to validate. Returns: is_nacl (bool): - True if data is ansible encrypted. + ``True`` if data is ansible encrypted. """ return keyfile_data[: len("$NACL")] == b"$NACL" def keyfile_data_is_encrypted_ansible(keyfile_data: bytes) -> bool: """Returns true if the keyfile data is ansible encrypted. + Args: keyfile_data (bytes): The bytes to validate. Returns: @@ -179,17 +185,18 @@ def keyfile_data_is_encrypted_legacy(keyfile_data: bytes) -> bool: Args: keyfile_data (bytes): The bytes to validate. Returns: - is_legacy (bool): True if the data is legacy encrypted. + is_legacy (bool): ``True`` if the data is legacy encrypted. """ return keyfile_data[:6] == b"gAAAAA" def keyfile_data_is_encrypted(keyfile_data: bytes) -> bool: - """Returns true if the keyfile data is encrypted. + """Returns ``true`` if the keyfile data is encrypted. + Args: keyfile_data (bytes): The bytes to validate. Returns: - is_encrypted (bool): True if the data is encrypted. + is_encrypted (bool): ``True`` if the data is encrypted. """ return ( keyfile_data_is_encrypted_nacl(keyfile_data) @@ -199,13 +206,14 @@ def keyfile_data_is_encrypted(keyfile_data: bytes) -> bool: def keyfile_data_encryption_method(keyfile_data: bytes) -> bool: - """Returns true if the keyfile data is encrypted. + """Returns ``true`` if the keyfile data is encrypted. + Args: keyfile_data ( bytes, required ): Bytes to validate Returns: encryption_method (bool): - True if data is encrypted. + ``True`` if data is encrypted. """ if keyfile_data_is_encrypted_nacl(keyfile_data): @@ -228,9 +236,10 @@ def legacy_encrypt_keyfile_data(keyfile_data: bytes, password: str = None) -> by def encrypt_keyfile_data(keyfile_data: bytes, password: str = None) -> bytes: """Encrypts the passed keyfile data using ansible vault. + Args: keyfile_data (bytes): The bytes to encrypt. - password (str, optional): The password used to encrypt the data. If None, asks for user input. + password (str, optional): The password used to encrypt the data. If ``None``, asks for user input. Returns: encrypted_data (bytes): The encrypted data. 
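A minimal sketch of the serialize -> encrypt -> decrypt -> deserialize round trip these helpers provide; the keypair construction (``create_from_uri``) and the password literal are illustrative assumptions, not part of this patch::

    import bittensor
    from bittensor.keyfile import (
        serialized_keypair_to_keyfile_data,
        deserialize_keypair_from_keyfile_data,
        encrypt_keyfile_data,
        decrypt_keyfile_data,
    )

    keypair = bittensor.Keypair.create_from_uri("//Alice")  # assumed demo keypair

    data = serialized_keypair_to_keyfile_data(keypair)             # plaintext keyfile bytes
    encrypted = encrypt_keyfile_data(data, password="s3cret!pass")
    decrypted = decrypt_keyfile_data(encrypted, password="s3cret!pass")
    restored = deserialize_keypair_from_keyfile_data(decrypted)

    assert restored.ss58_address == keypair.ss58_address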
""" @@ -251,10 +260,11 @@ def encrypt_keyfile_data(keyfile_data: bytes, password: str = None) -> bytes: def get_coldkey_password_from_environment(coldkey_name: str) -> Optional[str]: """Retrieves the cold key password from the environment variables. + Args: coldkey_name (str): The name of the cold key. Returns: - password (str): The password retrieved from the environment variables, or None if not found. + password (str): The password retrieved from the environment variables, or ``None`` if not found. """ for env_var in os.environ: if env_var.upper().startswith("BT_COLD_PW_") and env_var.upper().endswith( @@ -269,9 +279,10 @@ def decrypt_keyfile_data( keyfile_data: bytes, password: str = None, coldkey_name: Optional[str] = None ) -> bytes: """Decrypts the passed keyfile data using ansible vault. + Args: keyfile_data (bytes): The bytes to decrypt. - password (str, optional): The password used to decrypt the data. If None, asks for user input. + password (str, optional): The password used to decrypt the data. If ``None``, asks for user input. coldkey_name (str, optional): The name of the cold key. If provided, retrieves the password from environment variables. Returns: decrypted_data (bytes): The decrypted data. @@ -362,6 +373,7 @@ def __repr__(self): @property def keypair(self) -> "bittensor.Keypair": """Returns the keypair from path, decrypts data if the file is encrypted. + Returns: keypair (bittensor.Keypair): The keypair stored under the path. Raises: @@ -372,6 +384,7 @@ def keypair(self) -> "bittensor.Keypair": @property def data(self) -> bytes: """Returns the keyfile data under path. + Returns: keyfile_data (bytes): The keyfile data stored under the path. Raises: @@ -382,6 +395,7 @@ def data(self) -> bytes: @property def keyfile_data(self) -> bytes: """Returns the keyfile data under path. + Returns: keyfile_data (bytes): The keyfile data stored under the path. Raises: @@ -397,11 +411,12 @@ def set_keypair( password: str = None, ): """Writes the keypair to the file and optionally encrypts data. + Args: keypair (bittensor.Keypair): The keypair to store under the path. - encrypt (bool, optional): If True, encrypts the file under the path. Default is True. - overwrite (bool, optional): If True, forces overwrite of the current file. Default is False. - password (str, optional): The password used to encrypt the file. If None, asks for user input. + encrypt (bool, optional): If ``True``, encrypts the file under the path. Default is ``True``. + overwrite (bool, optional): If ``True``, forces overwrite of the current file. Default is ``False``. + password (str, optional): The password used to encrypt the file. If ``None``, asks for user input. Raises: KeyFileError: Raised if the file does not exist, is not readable, writable, or if the password is incorrect. """ @@ -413,8 +428,9 @@ def set_keypair( def get_keypair(self, password: str = None) -> "bittensor.Keypair": """Returns the keypair from the path, decrypts data if the file is encrypted. + Args: - password (str, optional): The password used to decrypt the file. If None, asks for user input. + password (str, optional): The password used to decrypt the file. If ``None``, asks for user input. Returns: keypair (bittensor.Keypair): The keypair stored under the path. Raises: @@ -436,18 +452,20 @@ def make_dirs(self): os.makedirs(directory) def exists_on_device(self) -> bool: - """Returns True if the file exists on the device. + """Returns ``True`` if the file exists on the device. + Returns: - on_device (bool): True if the file is on the device. 
+ on_device (bool): ``True`` if the file is on the device. """ if not os.path.isfile(self.path): return False return True def is_readable(self) -> bool: - """Returns True if the file under path is readable. + """Returns ``True`` if the file under path is readable. + Returns: - readable (bool): True if the file is readable. + readable (bool): ``True`` if the file is readable. """ if not self.exists_on_device(): return False @@ -456,18 +474,20 @@ def is_readable(self) -> bool: return True def is_writable(self) -> bool: - """Returns True if the file under path is writable. + """Returns ``True`` if the file under path is writable. + Returns: - writable (bool): True if the file is writable. + writable (bool): ``True`` if the file is writable. """ if os.access(self.path, os.W_OK): return True return False def is_encrypted(self) -> bool: - """Returns True if the file under path is encrypted. + """Returns ``True`` if the file under path is encrypted. + Returns: - encrypted (bool): True if the file is encrypted. + encrypted (bool): ``True`` if the file is encrypted. """ if not self.exists_on_device(): return False @@ -476,9 +496,10 @@ def is_encrypted(self) -> bool: return keyfile_data_is_encrypted(self._read_keyfile_data_from_file()) def _may_overwrite(self) -> bool: - """Asks the user if it's okay to overwrite the file. + """Asks the user if it is okay to overwrite the file. + Returns: - may_overwrite (bool): True if the user allows overwriting the file. + may_overwrite (bool): ``True`` if the user allows overwriting the file. """ choice = input("File {} already exists. Overwrite? (y/N) ".format(self.path)) return choice == "y" @@ -487,6 +508,7 @@ def check_and_update_encryption( self, print_result: bool = True, no_prompt: bool = False ): """Check the version of keyfile and update if needed. + Args: print_result (bool): Print the checking result or not. @@ -497,7 +519,7 @@ def check_and_update_encryption( Raised if the file does not exists, is not readable, writable. Returns: result (bool): - return True if the keyfile is the most updated with nacl, else False. + Return ``True`` if the keyfile is the most updated with nacl, else ``False``. """ if not self.exists_on_device(): if print_result: @@ -587,8 +609,9 @@ def check_and_update_encryption( def encrypt(self, password: str = None): """Encrypts the file under the path. + Args: - password (str, optional): The password for encryption. If None, asks for user input. + password (str, optional): The password for encryption. If ``None``, asks for user input. Raises: KeyFileError: Raised if the file does not exist, is not readable, or writable. """ @@ -613,8 +636,9 @@ def encrypt(self, password: str = None): def decrypt(self, password: str = None): """Decrypts the file under the path. + Args: - password (str, optional): The password for decryption. If None, asks for user input. + password (str, optional): The password for decryption. If ``None``, asks for user input. Raises: KeyFileError: Raised if the file does not exist, is not readable, writable, corrupted, or if the password is incorrect. """ @@ -641,6 +665,7 @@ def decrypt(self, password: str = None): def _read_keyfile_data_from_file(self) -> bytes: """Reads the keyfile data from the file. + Returns: keyfile_data (bytes): The keyfile data stored under the path. Raises: @@ -660,9 +685,10 @@ def _read_keyfile_data_from_file(self) -> bytes: def _write_keyfile_data_to_file(self, keyfile_data: bytes, overwrite: bool = False): """Writes the keyfile data to the file. 
+ Args: keyfile_data (bytes): The byte data to store under the path. - overwrite (bool, optional): If True, overwrites the data without asking for permission from the user. Default is False. + overwrite (bool, optional): If ``True``, overwrites the data without asking for permission from the user. Default is ``False``. Raises: KeyFileError: Raised if the file is not writable or the user responds No to the overwrite prompt. """ @@ -681,6 +707,7 @@ def _write_keyfile_data_to_file(self, keyfile_data: bytes, overwrite: bool = Fal class Mockkeyfile: """ The Mockkeyfile is a mock object representing a keyfile that does not exist on the device. + It is designed for use in testing scenarios and simulations where actual filesystem operations are not required. The keypair stored in the Mockkeyfile is treated as non-encrypted and the data is stored as a serialized string. """ @@ -698,8 +725,7 @@ def __init__(self, path: str): def __str__(self): """ - Returns a string representation of the Mockkeyfile. The representation will indicate if the - keyfile is empty, encrypted, or decrypted. + Returns a string representation of the Mockkeyfile. The representation will indicate if the keyfile is empty, encrypted, or decrypted. Returns: str: The string representation of the Mockkeyfile. @@ -708,7 +734,7 @@ def __str__(self): def __repr__(self): """ - Returns a string representation of the Mockkeyfile, same as __str__(). + Returns a string representation of the Mockkeyfile, same as :func:`__str__()`. Returns: str: The string representation of the Mockkeyfile. @@ -737,23 +763,23 @@ def data(self): def set_keypair(self, keypair, encrypt=True, overwrite=False, password=None): """ - Sets the mock keypair in the keyfile. The `encrypt` and `overwrite` parameters are ignored. + Sets the mock keypair in the keyfile. The ``encrypt`` and ``overwrite`` parameters are ignored. Args: keypair (bittensor.Keypair): The mock keypair to be set. - encrypt (bool, optional): Ignored in this context. Defaults to True. - overwrite (bool, optional): Ignored in this context. Defaults to False. - password (str, optional): Ignored in this context. Defaults to None. + encrypt (bool, optional): Ignored in this context. Defaults to ``True``. + overwrite (bool, optional): Ignored in this context. Defaults to ``False``. + password (str, optional): Ignored in this context. Defaults to ``None``. """ self._mock_keypair = keypair self._mock_data = None # You may need to serialize the keypair here def get_keypair(self, password=None): """ - Returns the mock keypair stored in the keyfile. The `password` parameter is ignored. + Returns the mock keypair stored in the keyfile. The ``password`` parameter is ignored. Args: - password (str, optional): Ignored in this context. Defaults to None. + password (str, optional): Ignored in this context. Defaults to ``None``. Returns: bittensor.Keypair: The mock keypair stored in the keyfile. @@ -762,47 +788,43 @@ def get_keypair(self, password=None): def make_dirs(self): """ - Creates the directories for the mock keyfile. Does nothing in this class, - since no actual filesystem operations are needed. + Creates the directories for the mock keyfile. Does nothing in this class, since no actual filesystem operations are needed. """ pass def exists_on_device(self): """ - Returns True indicating that the mock keyfile exists on the device (although - it is not created on the actual file system). 
+ Returns ``True`` indicating that the mock keyfile exists on the device (although it is not created on the actual file system). Returns: - bool: Always returns True for Mockkeyfile. + bool: Always returns ``True`` for Mockkeyfile. """ return True def is_readable(self): """ - Returns True indicating that the mock keyfile is readable (although it is not - read from the actual file system). + Returns ``True`` indicating that the mock keyfile is readable (although it is not read from the actual file system). Returns: - bool: Always returns True for Mockkeyfile. + bool: Always returns ``True`` for Mockkeyfile. """ return True def is_writable(self): """ - Returns True indicating that the mock keyfile is writable (although it is not - written to the actual file system). + Returns ``True`` indicating that the mock keyfile is writable (although it is not written to the actual file system). Returns: - bool: Always returns True for Mockkeyfile. + bool: Always returns ``True`` for Mockkeyfile. """ return True def is_encrypted(self): """ - Returns False indicating that the mock keyfile is not encrypted. + Returns ``False`` indicating that the mock keyfile is not encrypted. Returns: - bool: Always returns False for Mockkeyfile. + bool: Always returns ``False`` for Mockkeyfile. """ return False @@ -811,7 +833,7 @@ def encrypt(self, password=None): Raises a ValueError since encryption is not supported for the mock keyfile. Args: - password (str, optional): Ignored in this context. Defaults to None. + password (str, optional): Ignored in this context. Defaults to ``None``. Raises: ValueError: Always raises this exception for Mockkeyfile. @@ -823,7 +845,7 @@ def decrypt(self, password=None): Returns without doing anything since the mock keyfile is not encrypted. Args: - password (str, optional): Ignored in this context. Defaults to None. + password (str, optional): Ignored in this context. Defaults to ``None``. """ pass From 514a64cb9c2e21eb54bd98504433baf34af859a4 Mon Sep 17 00:00:00 2001 From: Raj Karamchedu Date: Thu, 25 Jan 2024 15:54:09 +0530 Subject: [PATCH 02/23] Seventh docstrings formatting PR --- bittensor/stream.py | 15 +- bittensor/subtensor.py | 383 ++++++++++++++++++++--------------------- 2 files changed, 195 insertions(+), 203 deletions(-) diff --git a/bittensor/stream.py b/bittensor/stream.py index 28374e42a0..d29c8940be 100644 --- a/bittensor/stream.py +++ b/bittensor/stream.py @@ -10,8 +10,8 @@ class BTStreamingResponseModel(BaseModel): """ - BTStreamingResponseModel is a Pydantic model that encapsulates the token streamer callable for Pydantic validation. - It is used within the StreamingSynapse class to create a BTStreamingResponse object, which is responsible for handling + :func:`BTStreamingResponseModel` is a Pydantic model that encapsulates the token streamer callable for Pydantic validation. + It is used within the :func:`StreamingSynapse` class to create a :func:`BTStreamingResponse` object, which is responsible for handling the streaming of tokens. The token streamer is a callable that takes a send function and returns an awaitable. It is responsible for generating @@ -31,7 +31,7 @@ class BTStreamingResponseModel(BaseModel): class StreamingSynapse(bittensor.Synapse, ABC): """ - The StreamingSynapse class is designed to be subclassed for handling streaming responses in the Bittensor network. + The :func:`StreamingSynapse` class is designed to be subclassed for handling streaming responses in the Bittensor network. 
It provides abstract methods that must be implemented by the subclass to deserialize, process streaming responses, and extract JSON data. It also includes a method to create a streaming response object. """ @@ -41,12 +41,12 @@ class Config: class BTStreamingResponse(_StreamingResponse): """ - BTStreamingResponse is a specialized subclass of the Starlette StreamingResponse designed to handle the streaming + :func:`BTStreamingResponse` is a specialized subclass of the Starlette StreamingResponse designed to handle the streaming of tokens within the Bittensor network. It is used internally by the StreamingSynapse class to manage the response streaming process, including sending headers and calling the token streamer provided by the subclass. This class is not intended to be directly instantiated or modified by developers subclassing StreamingSynapse. - Instead, it is used by the create_streaming_response method to create a response object based on the token streamer + Instead, it is used by the :func:`create_streaming_response` method to create a response object based on the token streamer provided by the subclass. """ @@ -55,8 +55,7 @@ def __init__(self, model: BTStreamingResponseModel, **kwargs): Initializes the BTStreamingResponse with the given token streamer model. Args: - model: A BTStreamingResponseModel instance containing the token streamer callable, which is responsible - for generating the content of the response. + model: A BTStreamingResponseModel instance containing the token streamer callable, which is responsible for generating the content of the response. **kwargs: Additional keyword arguments passed to the parent StreamingResponse class. """ super().__init__(content=iter(()), **kwargs) @@ -89,7 +88,7 @@ async def __call__(self, scope: Scope, receive: Receive, send: Send): application. This method is part of the ASGI interface and is called by the ASGI server to handle the request and send the - response. It delegates to the stream_response method to perform the actual streaming process. + response. It delegates to the :func:`stream_response` method to perform the actual streaming process. Args: scope: The scope of the request, containing information about the client, server, and request itself. diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 2d3c3c464c..137d416ce9 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -93,9 +93,9 @@ class ParamWithTypes(TypedDict): class subtensor: """ - The Subtensor class in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, - facilitating a range of operations essential for the decentralized machine learning network. This class - enables neurons (network participants) to engage in activities such as registering on the network, managing + The Subtensor class in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, facilitating a range of operations essential for the decentralized machine learning network. + + This class enables neurons (network participants) to engage in activities such as registering on the network, managing staked weights, setting inter-neuronal weights, and participating in consensus mechanisms. The Bittensor network operates on a digital ledger where each neuron holds stakes (S) and learns a set @@ -103,23 +103,22 @@ class subtensor: the ranking and incentive mechanisms within the network. Higher-ranked neurons, as determined by their contributions and trust within the network, receive more incentives. 
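The stake/weight interaction sketched in the paragraph above can be pictured, very roughly, as a stake-weighted aggregation of inbound weights. The snippet below is a toy illustration only and is not the on-chain consensus calculation::

    import torch

    S = torch.tensor([0.6, 0.3, 0.1])           # toy stake held by three neurons
    W = torch.tensor([[0.0, 0.7, 0.3],          # toy weights: W[i, j] is the weight
                      [0.5, 0.0, 0.5],          # neuron i assigns to neuron j
                      [0.9, 0.1, 0.0]])

    ranks = torch.matmul(S, W)   # stake-weighted inbound weight per neuron
    ranks = ranks / ranks.sum()  # normalized; higher rank -> larger share of incentive (schematically)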
- The Subtensor class connects to various Bittensor networks like the main 'finney' network or local test + The Subtensor class connects to various Bittensor networks like the main ``finney`` network or local test networks, providing a gateway to the blockchain layer of Bittensor. It leverages a staked weighted trust system and consensus to ensure fair and distributed incentive mechanisms, where incentives (I) are - primarily allocated to neurons that are trusted by the majority of the network​``【oaicite:1】``​. + primarily allocated to neurons that are trusted by the majority of the network. Additionally, Bittensor introduces a speculation-based reward mechanism in the form of bonds (B), allowing neurons to accumulate bonds in other neurons, speculating on their future value. This mechanism aligns with market-based speculation, incentivizing neurons to make judicious decisions in their inter-neuronal investments. - Attributes: - network (str): The name of the Bittensor network (e.g., 'finney', 'test', 'archive', 'local') the instance - is connected to, determining the blockchain interaction context. - chain_endpoint (str): The blockchain node endpoint URL, enabling direct communication - with the Bittensor blockchain for transaction processing and data retrieval. + Args: + network (str): The name of the Bittensor network (e.g., 'finney', 'test', 'archive', 'local') the instance is connected to, determining the blockchain interaction context. + chain_endpoint (str): The blockchain node endpoint URL, enabling direct communication with the Bittensor blockchain for transaction processing and data retrieval. - Example Usage: + Example Usage:: + # Connect to the main Bittensor network (Finney). finney_subtensor = subtensor(network='finney') @@ -144,7 +143,7 @@ class subtensor: By facilitating these operations, the Subtensor class is instrumental in maintaining the decentralized intelligence and dynamic learning environment of the Bittensor network, as envisioned in its foundational - principles and mechanisms described in the NeurIPS paper. + principles and mechanisms described in the `NeurIPS paper `_. paper. """ @staticmethod @@ -205,16 +204,13 @@ def add_args(cls, parser: argparse.ArgumentParser, prefix: str = None): @staticmethod def determine_chain_endpoint_and_network(network: str): """Determines the chain endpoint and network from the passed network or chain_endpoint. + Args: - network (str): The network flag. The likely choices are: - -- finney (main network) - -- archive (archive network +300 blocks) - -- local (local running network) - -- test (test network) + network (str): The network flag. The choices are: ``-- finney`` (main network), ``-- archive`` (archive network +300 blocks), ``-- local`` (local running network), ``-- test`` (test network). chain_endpoint (str): The chain endpoint flag. If set, overrides the network argument. Returns: - network (str): The network flag. The likely choices are: - chain_endpoint (str): The chain endpoint flag. If set, overrides the network argument. + network (str): The network flag. + chain_endpoint (str): The chain endpoint flag. If set, overrides the ``network`` argument. """ if network == None: return None, None @@ -314,21 +310,18 @@ def __init__( """ Initializes a Subtensor interface for interacting with the Bittensor blockchain. - NOTE: Currently subtensor defaults to the finney network. This will change in a future release. + NOTE: + Currently subtensor defaults to the ``finney`` network. This will change in a future release. 
We strongly encourage users to run their own local subtensor node whenever possible. This increases decentralization and resilience of the network. In a future release, local subtensor will become the - default and the fallback to finney removed. Please plan ahead for this change. We will provide detailed + default and the fallback to ``finney`` removed. Please plan ahead for this change. We will provide detailed instructions on how to run a local subtensor node in the documentation in a subsequent release. Args: - network (str, optional): The network name to connect to (e.g., 'finney', 'local'). This can also be - the chain endpoint (e.g. wss://entrypoint-finney.opentensor.ai:443) and will - be correctly parsed into the network and chain endpoint. If not specified, - defaults to the main Bittensor network. - config (bittensor.config, optional): Configuration object for the subtensor. - If not provided, a default configuration is used. - _mock (bool, optional): If set to True, uses a mocked connection for testing purposes. + network (str, optional): The network name to connect to (e.g., ``finney``, ``local``). This can also be the chain endpoint (e.g., ``wss://entrypoint-finney.opentensor.ai:443``) and will be correctly parsed into the network and chain endpoint. If not specified, defaults to the main Bittensor network. + config (bittensor.config, optional): Configuration object for the subtensor. If not provided, a default configuration is used. + _mock (bool, optional): If set to ``True``, uses a mocked connection for testing purposes. This initialization sets up the connection to the specified Bittensor network, allowing for various blockchain operations such as neuron registration, stake management, and setting weights. @@ -453,11 +446,11 @@ def nominate( Args: wallet (bittensor.wallet): The wallet containing the hotkey to be nominated. - wait_for_finalization (bool, optional): If True, waits until the transaction is finalized on the blockchain. - wait_for_inclusion (bool, optional): If True, waits until the transaction is included in a block. + wait_for_finalization (bool, optional): If ``True``, waits until the transaction is finalized on the blockchain. + wait_for_inclusion (bool, optional): If ``True``, waits until the transaction is included in a block. Returns: - bool: True if the nomination process is successful, False otherwise. + bool: ``True`` if the nomination process is successful, ``False`` otherwise. This function is a key part of the decentralized governance mechanism of Bittensor, allowing for the dynamic selection and participation of validators in the network's consensus process. @@ -485,11 +478,11 @@ def delegate( Args: wallet (bittensor.wallet): The wallet containing the hotkey to be nominated. - wait_for_finalization (bool, optional): If True, waits until the transaction is finalized on the blockchain. - wait_for_inclusion (bool, optional): If True, waits until the transaction is included in a block. + wait_for_finalization (bool, optional): If ``True``, waits until the transaction is finalized on the blockchain. + wait_for_inclusion (bool, optional): If ``True``, waits until the transaction is included in a block. Returns: - bool: True if the nomination process is successful, False otherwise. + bool: ``True`` if the nomination process is successful, False otherwise. This function is a key part of the decentralized governance mechanism of Bittensor, allowing for the dynamic selection and participation of validators in the network's consensus process. 
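For orientation, a minimal sketch of how the nomination call documented above might be invoked (the wallet is assumed to already exist on disk and to be funded, the network name follows the example usage shown earlier, and error handling is omitted)::

    import bittensor

    # Connect to the main network and load the default wallet (illustrative placeholders).
    sub = bittensor.subtensor(network="finney")
    wallet = bittensor.wallet()

    # Nominate the wallet's hotkey as a delegate and wait until the extrinsic is finalized.
    success = sub.nominate(wallet=wallet, wait_for_finalization=True)
    print("nomination finalized:", success)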
@@ -519,14 +512,14 @@ def undelegate(
         Args:
             wallet (bittensor.wallet): The wallet used for the undelegation process.
-            delegate_ss58 (Optional[str]): The SS58 address of the delegate neuron.
+            delegate_ss58 (Optional[str]): The ``SS58`` address of the delegate neuron.
             amount (Union[Balance, float]): The amount of TAO to undelegate.
             wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block.
             wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain.
-            prompt (bool, optional): If True, prompts for user confirmation before proceeding.
+            prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding.

         Returns:
-            bool: True if the undelegation is successful, False otherwise.
+            bool: ``True`` if the undelegation is successful, False otherwise.

         This function reflects the dynamic and speculative nature of the Bittensor network, allowing neurons
         to adjust their stakes and investments based on changing perceptions and performances within the network.
@@ -568,10 +561,10 @@ def set_weights(
             version_key (int, optional): Version key for compatibility with the network.
             wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block.
             wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain.
-            prompt (bool, optional): If True, prompts for user confirmation before proceeding.
+            prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding.

         Returns:
-            bool: True if the setting of weights is successful, False otherwise.
+            bool: ``True`` if the setting of weights is successful, False otherwise.

         This function is crucial in shaping the network's collective intelligence, where each neuron's learning and
         contribution are influenced by the weights it sets towards others.
@@ -685,7 +678,7 @@ def register(
             Other arguments: Various optional parameters to customize the registration process.

         Returns:
-            bool: True if the registration is successful, False otherwise.
+            bool: ``True`` if the registration is successful, False otherwise.

         This function facilitates the entry of new neurons into the network, supporting the decentralized
         growth and scalability of the Bittensor ecosystem.
@@ -750,13 +743,13 @@ def run_faucet(
             Other arguments: Various optional parameters to customize the faucet transaction process.

         Returns:
-            bool: True if the faucet transaction is successful, False otherwise.
+            bool: ``True`` if the faucet transaction is successful, False otherwise.

         This function is part of Bittensor's onboarding process, ensuring that new neurons have
         the necessary resources to begin their journey in the decentralized AI network.

-        Note: This is for testnet ONLY and is disabled currently. You must build your own
-        staging subtensor chain with the `--features pow-faucet` argument to enable this.
+        Note:
+            This is for testnet ONLY and is disabled currently. You must build your own staging subtensor chain with the ``--features pow-faucet`` argument to enable this.
         """
         return run_faucet_extrinsic(
             subtensor=self,
@@ -791,10 +784,10 @@ def burned_register(
             netuid (int): The unique identifier of the subnet.
             wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block.
             wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain.
-            prompt (bool, optional): If True, prompts for user confirmation before proceeding.
+ prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the registration is successful, False otherwise. + bool: ``True`` if the registration is successful, False otherwise. This function offers an alternative registration path, aligning with the network's principles of token circulation and value conservation. @@ -817,15 +810,16 @@ def _do_pow_register( wait_for_finalization: bool = True, ) -> Tuple[bool, Optional[str]]: """Sends a (POW) register extrinsic to the chain. + Args: - netuid (int): the subnet to register on. - wallet (bittensor.wallet): the wallet to register. - pow_result (POWSolution): the pow result to register. - wait_for_inclusion (bool): if true, waits for the extrinsic to be included in a block. - wait_for_finalization (bool): if true, waits for the extrinsic to be finalized. + netuid (int): The subnet to register on. + wallet (bittensor.wallet): The wallet to register. + pow_result (POWSolution): The PoW result to register. + wait_for_inclusion (bool): If ``true``, waits for the extrinsic to be included in a block. + wait_for_finalization (bool): If ``true``, waits for the extrinsic to be finalized. Returns: - success (bool): True if the extrinsic was included in a block. - error (Optional[str]): None on success or not waiting for inclusion/finalization, otherwise the error message. + success (bool): ``True`` if the extrinsic was included in a block. + error (Optional[str]): ``None`` on success or not waiting for inclusion/finalization, otherwise the error message. """ @retry(delay=2, tries=3, backoff=2, max_delay=4) @@ -974,10 +968,10 @@ def transfer( amount (Union[Balance, float]): The amount of TAO to be transferred. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the transfer is successful, False otherwise. + bool: ``True`` if the transfer is successful, False otherwise. This function is essential for the fluid movement of tokens in the network, supporting various economic activities such as staking, delegation, and reward distribution. @@ -1002,9 +996,8 @@ def get_transfer_fee( Args: wallet (bittensor.wallet): The wallet from which the transfer is initiated. - dest (str): The SS58 address of the destination account. - value (Union[Balance, float, int]): The amount of tokens to be transferred, specified as a Balance object, - or in Tao (float) or Rao (int) units. + dest (str): The ``SS58`` address of the destination account. + value (Union[Balance, float, int]): The amount of tokens to be transferred, specified as a Balance object, or in Tao (float) or Rao (int) units. Returns: Balance: The estimated transaction fee for the transfer, represented as a Balance object. @@ -1049,17 +1042,17 @@ def _do_transfer( wait_for_finalization: bool = False, ) -> Tuple[bool, Optional[str], Optional[str]]: """Sends a transfer extrinsic to the chain. + Args: - wallet (:obj:`bittensor.wallet`): Wallet object. - dest (:obj:`str`): Destination public key address. - transfer_balance (:obj:`Balance`): Amount to transfer. - wait_for_inclusion (:obj:`bool`): If true, waits for inclusion. - wait_for_finalization (:obj:`bool`): If true, waits for finalization. 
+ wallet (:func:`bittensor.wallet`): Wallet object. + dest (str): Destination public key address. + transfer_balance (:func:`Balance`): Amount to transfer. + wait_for_inclusion (bool): If ``true``, waits for inclusion. + wait_for_finalization (bool): If ``true``, waits for finalization. Returns: - success (:obj:`bool`): True if transfer was successful. - block_hash (:obj:`str`): Block hash of the transfer. - (On success and if wait_for_ finalization/inclusion is True) - error (:obj:`str`): Error message if transfer failed. + success (bool): ``True`` if transfer was successful. + block_hash (str): Block hash of the transfer. On success and if wait_for_ finalization/inclusion is ``True``. + error (str): Error message if transfer failed. """ @retry(delay=2, tries=3, backoff=2, max_delay=4) @@ -1099,11 +1092,10 @@ def get_existential_deposit(self, block: Optional[int] = None) -> Optional[Balan balances below this threshold can be reaped to conserve network resources. Args: - block (Optional[int], optional): Block number at which to query the deposit amount. If None, - the current block is used. + block (Optional[int], optional): Block number at which to query the deposit amount. If ``None``, the current block is used. Returns: - Optional[Balance]: The existential deposit amount, or None if the query fails. + Optional[Balance]: The existential deposit amount, or ``None`` if the query fails. The existential deposit is a fundamental economic parameter in the Bittensor network, ensuring efficient use of storage and preventing the proliferation of dust accounts. @@ -1136,10 +1128,10 @@ def register_subnetwork( wallet (bittensor.wallet): The wallet to be used for registration. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the subnetwork registration is successful, False otherwise. + bool: ``True`` if the subnetwork registration is successful, False otherwise. This function allows for the expansion and diversification of the Bittensor network, supporting its decentralized and adaptable architecture. @@ -1174,10 +1166,10 @@ def set_hyperparameter( value: The new value for the hyperparameter. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the hyperparameter setting is successful, False otherwise. + bool: ``True`` if the hyperparameter setting is successful, False otherwise. This function plays a critical role in the dynamic governance and adaptability of the Bittensor network, allowing for fine-tuning of network operations and characteristics. @@ -1223,10 +1215,10 @@ def serve( Other arguments: Placeholder parameters for future extensions. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. 
+ prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the serve registration is successful, False otherwise. + bool: ``True`` if the serve registration is successful, False otherwise. This function is essential for establishing the neuron's presence in the network, enabling it to participate in the decentralized machine learning processes of Bittensor. @@ -1262,10 +1254,10 @@ def serve_axon( axon (bittensor.Axon): The Axon instance to be registered for serving. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the Axon serve registration is successful, False otherwise. + bool: ``True`` if the Axon serve registration is successful, False otherwise. By registering an Axon, the neuron becomes an active part of the network's distributed computing infrastructure, contributing to the collective intelligence of Bittensor. @@ -1352,13 +1344,13 @@ def _do_serve_prometheus( """ Sends a serve prometheus extrinsic to the chain. Args: - wallet (:obj:`bittensor.wallet`): Wallet object. - call_params (:obj:`PrometheusServeCallParams`): Prometheus serve call parameters. - wait_for_inclusion (:obj:`bool`): If true, waits for inclusion. - wait_for_finalization (:obj:`bool`): If true, waits for finalization. + wallet (:func:`bittensor.wallet`): Wallet object. + call_params (:func:`PrometheusServeCallParams`): Prometheus serve call parameters. + wait_for_inclusion (bool): If ``true``, waits for inclusion. + wait_for_finalization (bool): If ``true``, waits for finalization. Returns: - success (:obj:`bool`): True if serve prometheus was successful. - error (:obj:`Optional[str]`): Error message if serve prometheus failed, None otherwise. + success (bool): ``True`` if serve prometheus was successful. + error (:func:`Optional[str]`): Error message if serve prometheus failed, ``None`` otherwise. """ @retry(delay=2, tries=3, backoff=2, max_delay=4) @@ -1400,15 +1392,15 @@ def _do_associate_ips( Sends an associate IPs extrinsic to the chain. Args: - wallet (:obj:`bittensor.wallet`): Wallet object. - ip_info_list (:obj:`List[IPInfo]`): List of IPInfo objects. - netuid (:obj:`int`): Netuid to associate IPs to. - wait_for_inclusion (:obj:`bool`): If true, waits for inclusion. - wait_for_finalization (:obj:`bool`): If true, waits for finalization. + wallet (:func:`bittensor.wallet`): Wallet object. + ip_info_list (:func:`List[IPInfo]`): List of IPInfo objects. + netuid (int): Netuid to associate IPs to. + wait_for_inclusion (bool): If ``true``, waits for inclusion. + wait_for_finalization (bool): If ``true``, waits for finalization. Returns: - success (:obj:`bool`): True if associate IPs was successful. - error (:obj:`Optional[str]`): Error message if associate IPs failed, None otherwise. + success (bool): ``True`` if associate IPs was successful. + error (:func:`Optional[str]`): Error message if associate IPs failed, None otherwise. """ @retry(delay=2, tries=3, backoff=2, max_delay=4) @@ -1454,20 +1446,20 @@ def add_stake( prompt: bool = False, ) -> bool: """ - Adds the specified amount of stake to a neuron identified by the hotkey SS58 address. 
Staking + Adds the specified amount of stake to a neuron identified by the hotkey ``SS58`` address. Staking is a fundamental process in the Bittensor network that enables neurons to participate actively and earn incentives. Args: wallet (bittensor.wallet): The wallet to be used for staking. - hotkey_ss58 (Optional[str]): The SS58 address of the hotkey associated with the neuron. + hotkey_ss58 (Optional[str]): The ``SS58`` address of the hotkey associated with the neuron. amount (Union[Balance, float]): The amount of TAO to stake. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the staking is successful, False otherwise. + bool: ``True`` if the staking is successful, False otherwise. This function enables neurons to increase their stake in the network, enhancing their influence and potential rewards in line with Bittensor's consensus and reward mechanisms. @@ -1497,14 +1489,14 @@ def add_stake_multiple( Args: wallet (bittensor.wallet): The wallet used for staking. - hotkey_ss58s (List[str]): List of SS58 addresses of hotkeys to stake to. + hotkey_ss58s (List[str]): List of ``SS58`` addresses of hotkeys to stake to. amounts (List[Union[Balance, float]], optional): Corresponding amounts of TAO to stake for each hotkey. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the staking is successful for all specified neurons, False otherwise. + bool: ``True`` if the staking is successful for all specified neurons, False otherwise. This function is essential for managing stakes across multiple neurons, reflecting the dynamic and collaborative nature of the Bittensor network. @@ -1528,14 +1520,15 @@ def _do_stake( wait_for_finalization: bool = False, ) -> bool: """Sends a stake extrinsic to the chain. + Args: - wallet (:obj:`bittensor.wallet`): Wallet object that can sign the extrinsic. - hotkey_ss58 (:obj:`str`): Hotkey ss58 address to stake to. - amount (:obj:`Balance`): Amount to stake. - wait_for_inclusion (:obj:`bool`): If true, waits for inclusion before returning. - wait_for_finalization (:obj:`bool`): If true, waits for finalization before returning. + wallet (:func:`bittensor.wallet`): Wallet object that can sign the extrinsic. + hotkey_ss58 (str): Hotkey ``ss58`` address to stake to. + amount (:func:`Balance`): Amount to stake. + wait_for_inclusion (bool): If ``true``, waits for inclusion before returning. + wait_for_finalization (bool): If ``true``, waits for finalization before returning. Returns: - success (:obj:`bool`): True if the extrinsic was successful. + success (bool): ``True`` if the extrinsic was successful. Raises: StakeError: If the extrinsic failed. """ @@ -1586,15 +1579,14 @@ def unstake_multiple( Args: wallet (bittensor.wallet): The wallet linked to the coldkey from which the stakes are being withdrawn. - hotkey_ss58s (List[str]): A list of hotkey SS58 addresses to unstake from. 
- amounts (List[Union[Balance, float]], optional): The amounts of TAO to unstake from each hotkey. - If not provided, unstakes all available stakes. + hotkey_ss58s (List[str]): A list of hotkey ``SS58`` addresses to unstake from. + amounts (List[Union[Balance, float]], optional): The amounts of TAO to unstake from each hotkey. If not provided, unstakes all available stakes. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the batch unstaking is successful, False otherwise. + bool: ``True`` if the batch unstaking is successful, False otherwise. This function allows for strategic reallocation or withdrawal of stakes, aligning with the dynamic stake management aspect of the Bittensor network. @@ -1624,14 +1616,14 @@ def unstake( Args: wallet (bittensor.wallet): The wallet associated with the neuron from which the stake is being removed. - hotkey_ss58 (Optional[str]): The SS58 address of the hotkey account to unstake from. + hotkey_ss58 (Optional[str]): The ``SS58`` address of the hotkey account to unstake from. amount (Union[Balance, float], optional): The amount of TAO to unstake. If not specified, unstakes all. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the unstaking process is successful, False otherwise. + bool: ``True`` if the unstaking process is successful, False otherwise. This function supports flexible stake management, allowing neurons to adjust their network participation and potential reward accruals. @@ -1655,14 +1647,15 @@ def _do_unstake( wait_for_finalization: bool = False, ) -> bool: """Sends an unstake extrinsic to the chain. + Args: - wallet (:obj:`bittensor.wallet`): Wallet object that can sign the extrinsic. - hotkey_ss58 (:obj:`str`): Hotkey ss58 address to unstake from. - amount (:obj:`Balance`): Amount to unstake. - wait_for_inclusion (:obj:`bool`): If true, waits for inclusion before returning. - wait_for_finalization (:obj:`bool`): If true, waits for finalization before returning. + wallet (:func:`bittensor.wallet`): Wallet object that can sign the extrinsic. + hotkey_ss58 (str): Hotkey ``ss58`` address to unstake from. + amount (:func:`Balance`): Amount to unstake. + wait_for_inclusion (bool): If ``true``, waits for inclusion before returning. + wait_for_finalization (bool): If ``true``, waits for finalization before returning. Returns: - success (:obj:`bool`): True if the extrinsic was successful. + success (bool): ``True`` if the extrinsic was successful. Raises: StakeError: If the extrinsic failed. """ @@ -1712,14 +1705,14 @@ def register_senate( Args: wallet (bittensor.wallet): The wallet associated with the neuron from which the stake is being removed. - hotkey_ss58 (Optional[str]): The SS58 address of the hotkey account to unstake from. + hotkey_ss58 (Optional[str]): The ``SS58`` address of the hotkey account to unstake from. 
amount (Union[Balance, float], optional): The amount of TAO to unstake. If not specified, unstakes all. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the unstaking process is successful, False otherwise. + bool: ``True`` if the unstaking process is successful, False otherwise. This function supports flexible stake management, allowing neurons to adjust their network participation and potential reward accruals. @@ -1741,14 +1734,14 @@ def leave_senate( Args: wallet (bittensor.wallet): The wallet associated with the neuron from which the stake is being removed. - hotkey_ss58 (Optional[str]): The SS58 address of the hotkey account to unstake from. + hotkey_ss58 (Optional[str]): The ``SS58`` address of the hotkey account to unstake from. amount (Union[Balance, float], optional): The amount of TAO to unstake. If not specified, unstakes all. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the unstaking process is successful, False otherwise. + bool: ``True`` if the unstaking process is successful, False otherwise. This function supports flexible stake management, allowing neurons to adjust their network participation and potential reward accruals. @@ -1773,14 +1766,14 @@ def vote_senate( Args: wallet (bittensor.wallet): The wallet associated with the neuron from which the stake is being removed. - hotkey_ss58 (Optional[str]): The SS58 address of the hotkey account to unstake from. + hotkey_ss58 (Optional[str]): The ``SS58`` address of the hotkey account to unstake from. amount (Union[Balance, float], optional): The amount of TAO to unstake. If not specified, unstakes all. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the unstaking process is successful, False otherwise. + bool: ``True`` if the unstaking process is successful, False otherwise. This function supports flexible stake management, allowing neurons to adjust their network participation and potential reward accruals. @@ -1803,11 +1796,11 @@ def is_senate_member(self, hotkey_ss58: str, block: Optional[int] = None) -> boo approving various network operations and proposals. Args: - hotkey_ss58 (str): The SS58 address of the neuron's hotkey. + hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. block (Optional[int], optional): The blockchain block number at which to check senate membership. Returns: - bool: True if the neuron is a senate member at the given block, False otherwise. + bool: ``True`` if the neuron is a senate member at the given block, False otherwise. 
This function is crucial for understanding the governance dynamics of the Bittensor network and for identifying the neurons that hold decision-making power within the network. @@ -1829,7 +1822,7 @@ def get_vote_data( block (Optional[int], optional): The blockchain block number to query the voting data. Returns: - Optional[ProposalVoteData]: An object containing the proposal's voting data, or None if not found. + Optional[ProposalVoteData]: An object containing the proposal's voting data, or ``None`` if not found. This function is important for tracking and understanding the decision-making processes within the Bittensor network, particularly how proposals are received and acted upon by the governing body. @@ -1850,7 +1843,7 @@ def get_senate_members(self, block: Optional[int] = None) -> Optional[List[str]] block (Optional[int], optional): The blockchain block number at which to retrieve the senate members. Returns: - Optional[List[str]]: A list of SS58 addresses of current senate members, or None if not available. + Optional[List[str]]: A list of ``SS58`` addresses of current senate members, or ``None`` if not available. Understanding the composition of the senate is key to grasping the governance structure and decision-making authority within the Bittensor network. @@ -1871,7 +1864,7 @@ def get_proposal_call_data( block (Optional[int], optional): The blockchain block number at which to query the proposal call data. Returns: - Optional[bittensor.ProposalCallData]: An object containing the proposal's call data, or None if not found. + Optional[bittensor.ProposalCallData]: An object containing the proposal's call data, or ``None`` if not found. This function is crucial for analyzing the types of proposals made within the network and the specific changes or actions they intend to implement or address. @@ -1891,7 +1884,7 @@ def get_proposal_hashes(self, block: Optional[int] = None) -> Optional[List[str] block (Optional[int], optional): The blockchain block number to query the proposal hashes. Returns: - Optional[List[str]]: A list of proposal hashes, or None if not available. + Optional[List[str]]: A list of proposal hashes, or ``None`` if not available. This function enables tracking and reviewing the proposals made in the network, offering insights into the active governance and decision-making processes. @@ -1917,7 +1910,7 @@ def get_proposals( Returns: Optional[Dict[str, Tuple[bittensor.ProposalCallData, bittensor.ProposalVoteData]]]: - A dictionary mapping proposal hashes to their corresponding call and vote data, or None if not available. + A dictionary mapping proposal hashes to their corresponding call and vote data, or ``None`` if not available. This function is integral for analyzing the governance activity on the Bittensor network, providing a holistic view of the proposals and their impact or potential changes within the network. @@ -1952,10 +1945,10 @@ def root_register( wallet (bittensor.wallet): The wallet associated with the neuron to be registered on the root network. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the registration on the root network is successful, False otherwise. 
+ bool: ``True`` if the registration on the root network is successful, False otherwise. This function enables neurons to engage in the most critical and influential aspects of the network's governance, signifying a high level of commitment and responsibility in the Bittensor ecosystem. @@ -2027,10 +2020,10 @@ def root_set_weights( version_key (int, optional): Version key for compatibility with the network. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. - prompt (bool, optional): If True, prompts for user confirmation before proceeding. + prompt (bool, optional): If ``True``, prompts for user confirmation before proceeding. Returns: - bool: True if the setting of root-level weights is successful, False otherwise. + bool: ``True`` if the setting of root-level weights is successful, False otherwise. This function plays a pivotal role in shaping the root network's collective intelligence and decision-making processes, reflecting the principles of decentralized governance and collaborative learning in Bittensor. @@ -2062,14 +2055,15 @@ def query_identity( detailed identity information about a specific neuron, which is a crucial aspect of the network's decentralized identity and governance system. - NOTE: See the bittensor cli documentation for supported identity parameters. + NOTE: + See the `Bittensor CLI documentation `_ for supported identity parameters. Args: - key (str): The key used to query the neuron's identity, typically the neuron's SS58 address. + key (str): The key used to query the neuron's identity, typically the neuron's ``SS58`` address. block (Optional[int], optional): The blockchain block number at which to perform the query. Returns: - Optional[object]: An object containing the identity information of the neuron if found, None otherwise. + Optional[object]: An object containing the identity information of the neuron if found, ``None`` otherwise. The identity information can include various attributes such as the neuron's stake, rank, and other network-specific details, providing insights into the neuron's role and status within the Bittensor network. @@ -2104,17 +2098,18 @@ def update_identity( Updates the identity of a neuron on the Bittensor blockchain. This function allows neurons to modify their identity attributes, reflecting changes in their roles, stakes, or other network-specific parameters. - NOTE: See the bittensor cli documentation for supported identity parameters. + NOTE: + See the `Bittensor CLI documentation `_ for supported identity parameters. Args: wallet (bittensor.wallet): The wallet associated with the neuron whose identity is being updated. - identified (str, optional): The identified SS58 address of the neuron. Defaults to the wallet's coldkey address. + identified (str, optional): The identified ``SS58`` address of the neuron. Defaults to the wallet's coldkey address. params (dict, optional): A dictionary of parameters to update in the neuron's identity. wait_for_inclusion (bool, optional): Waits for the transaction to be included in a block. wait_for_finalization (bool, optional): Waits for the transaction to be finalized on the blockchain. Returns: - bool: True if the identity update is successful, False otherwise. + bool: ``True`` if the identity update is successful, False otherwise. 
This function plays a vital role in maintaining the accuracy and currency of neuron identities in the Bittensor network, ensuring that the network's governance and consensus mechanisms operate effectively. @@ -2189,7 +2184,7 @@ def query_subtensor( params (Optional[List[object]], optional): A list of parameters to pass to the query function. Returns: - Optional[object]: An object containing the requested data if found, None otherwise. + Optional[object]: An object containing the requested data if found, ``None`` otherwise. This query function is essential for accessing detailed information about the network and its neurons, providing valuable insights into the state and dynamics of the Bittensor ecosystem. @@ -2227,7 +2222,7 @@ def query_map_subtensor( params (Optional[List[object]], optional): A list of parameters to pass to the query function. Returns: - QueryMapResult: An object containing the map-like data structure, or None if not found. + QueryMapResult: An object containing the map-like data structure, or ``None`` if not found. This function is particularly useful for analyzing and understanding complex network structures and relationships within the Bittensor ecosystem, such as inter-neuronal connections and stake distributions. @@ -2261,7 +2256,7 @@ def query_constant( block (Optional[int], optional): The blockchain block number at which to query the constant. Returns: - Optional[object]: The value of the constant if found, None otherwise. + Optional[object]: The value of the constant if found, ``None`` otherwise. Constants queried through this function can include critical network parameters such as inflation rates, consensus rules, or validation thresholds, providing a deeper understanding of the Bittensor network's @@ -2302,7 +2297,7 @@ def query_module( params (Optional[List[object]], optional): A list of parameters to pass to the query function. Returns: - Optional[object]: An object containing the requested data if found, None otherwise. + Optional[object]: An object containing the requested data if found, ``None`` otherwise. This versatile query function is key to accessing a wide range of data and insights from different parts of the Bittensor blockchain, enhancing the understanding and analysis of the network's state and dynamics. @@ -2342,7 +2337,7 @@ def query_map( params (Optional[List[object]], optional): Parameters to be passed to the query. Returns: - Optional[object]: A data structure representing the map storage if found, None otherwise. + Optional[object]: A data structure representing the map storage if found, ``None`` otherwise. This function is particularly useful for retrieving detailed and structured data from various blockchain modules, offering insights into the network's state and the relationships between its different components. @@ -2378,7 +2373,7 @@ def state_call( block (Optional[int], optional): The blockchain block number at which to perform the state call. Returns: - Optional[object]: The result of the state call if successful, None otherwise. + Optional[object]: The result of the state call if successful, ``None`` otherwise. The state call function provides a more direct and flexible way of querying blockchain data, useful for specific use cases where standard queries are insufficient. @@ -2414,7 +2409,7 @@ def query_runtime_api( block (Optional[int], optional): The blockchain block number at which to perform the query. Returns: - Optional[bytes]: The Scale Bytes encoded result from the runtime API call, or None if the call fails. 
+            Optional[bytes]: The Scale Bytes encoded result from the runtime API call, or ``None`` if the call fails.

         This function enables access to the deeper layers of the Bittensor blockchain, allowing for detailed
         and specific interactions with the network's runtime environment.
@@ -2476,25 +2471,25 @@ def _encode_params(

     def rho(self, netuid: int, block: Optional[int] = None) -> Optional[int]:
         """
-        Retrieves the 'Rho' hyperparameter for a specified subnet within the Bittensor network.
-        'Rho' represents the global inflation rate, which directly influences the network's
+        Retrieves the 'Rho' hyperparameter for a specified subnet within the Bittensor network. 'Rho' represents the global inflation rate, which directly influences the network's
         token emission rate and economic model.

-        Note: This is currently fixed such that the Bittensor blockchain emmits 7200 Tao per day.
+        Note:
+            This is currently fixed such that the Bittensor blockchain emits 7200 Tao per day.

         Args:
             netuid (int): The unique identifier of the subnet.
             block (Optional[int], optional): The blockchain block number at which to query the parameter.

         Returns:
-            Optional[int]: The value of the 'Rho' hyperparameter if the subnet exists, None otherwise.
+            Optional[int]: The value of the 'Rho' hyperparameter if the subnet exists, ``None`` otherwise.

         Mathematical Context:
             Rho (p) is calculated based on the network's target inflation and actual neuron staking.
             It adjusts the emission rate of the TAO token to balance the network's economy and dynamics.
             The formula for Rho is defined as: p = (Staking_Target / Staking_Actual) * Inflation_Target.
             Here, Staking_Target and Staking_Actual represent the desired and actual total stakes in the network,
-            while Inflation_Target is the predefined inflation rate goal​``【oaicite:0】``​.
+            while Inflation_Target is the predefined inflation rate goal.

         'Rho' is essential for understanding the network's economic dynamics, affecting the reward distribution
         and incentive structures across the network's neurons.
@@ -2521,7 +2516,7 @@ def kappa(self, netuid: int, block: Optional[int] = None) -> Optional[float]:
             It is derived from the softmax function applied to the inter-neuronal weights set by each neuron.
             The formula for Kappa is: κ_i = exp(w_i) / Σ(exp(w_j)), where w_i represents the weight set by neuron i,
             and the denominator is the sum of exponential weights set by all neurons.
-            This mechanism ensures a normalized and probabilistic distribution of ranks based on relative weights​``【oaicite:0】``​.
+            This mechanism ensures a normalized and probabilistic distribution of ranks based on relative weights.

         Understanding 'Kappa' is crucial for analyzing stake dynamics and the consensus mechanism within the network,
         as it plays a significant role in neuron ranking and incentive allocation processes.
@@ -2543,7 +2538,7 @@ def difficulty(self, netuid: int, block: Optional[int] = None) -> Optional[int]:
             block (Optional[int], optional): The blockchain block number for the query.

         Returns:
-            Optional[int]: The value of the 'Difficulty' hyperparameter if the subnet exists, None otherwise.
+            Optional[int]: The value of the 'Difficulty' hyperparameter if the subnet exists, ``None`` otherwise.

         The 'Difficulty' parameter directly impacts the network's security and integrity by setting the
         computational effort required for validating transactions and participating in the network's consensus mechanism.
@@ -2585,7 +2580,7 @@ def immunity_period(
             block (Optional[int], optional): The blockchain block number for the query.
Returns: - Optional[int]: The value of the 'ImmunityPeriod' hyperparameter if the subnet exists, None otherwise. + Optional[int]: The value of the 'ImmunityPeriod' hyperparameter if the subnet exists, ``None`` otherwise. The 'ImmunityPeriod' is a critical aspect of the network's governance system, ensuring that new participants have a grace period to establish themselves and contribute to the network without facing @@ -2868,7 +2863,7 @@ def serving_rate_limit( block (Optional[int], optional): The blockchain block number at which to perform the query. Returns: - Optional[int]: The serving rate limit of the subnet if it exists, None otherwise. + Optional[int]: The serving rate limit of the subnet if it exists, ``None`` otherwise. The serving rate limit is a crucial parameter for maintaining network efficiency and preventing overuse of resources by individual neurons. It helps ensure a balanced distribution of service @@ -2910,7 +2905,7 @@ def subnet_exists(self, netuid: int, block: Optional[int] = None) -> bool: block (Optional[int], optional): The blockchain block number at which to check the subnet's existence. Returns: - bool: True if the subnet exists, False otherwise. + bool: ``True`` if the subnet exists, False otherwise. This function is critical for verifying the presence of specific subnets in the network, enabling a deeper understanding of the network's structure and composition. @@ -3080,7 +3075,7 @@ def get_subnet_info( block (Optional[int], optional): The blockchain block number for the query. Returns: - Optional[SubnetInfo]: Detailed information about the subnet, or None if not found. + Optional[SubnetInfo]: Detailed information about the subnet, or ``None`` if not found. This function is essential for neurons and stakeholders interested in the specifics of a particular subnet, including its governance, performance, and role within the broader network. @@ -3118,7 +3113,7 @@ def get_subnet_hyperparameters( block (Optional[int], optional): The blockchain block number for the query. Returns: - Optional[SubnetHyperparameters]: The subnet's hyperparameters, or None if not available. + Optional[SubnetHyperparameters]: The subnet's hyperparameters, or ``None`` if not available. Understanding the hyperparameters is crucial for comprehending how subnets are configured and managed, and how they interact with the network's consensus and incentive mechanisms. @@ -3152,7 +3147,7 @@ def get_subnet_owner( block (Optional[int], optional): The blockchain block number for the query. Returns: - Optional[str]: The SS58 address of the subnet's owner, or None if not available. + Optional[str]: The SS58 address of the subnet's owner, or ``None`` if not available. Knowing the subnet owner provides insights into the governance and operational control of the subnet, which can be important for decision-making and collaboration within the network. @@ -3172,7 +3167,7 @@ def is_hotkey_delegate(self, hotkey_ss58: str, block: Optional[int] = None) -> b block (Optional[int], optional): The blockchain block number for the query. Returns: - bool: True if the hotkey is a delegate, False otherwise. + bool: ``True`` if the hotkey is a delegate, ``False`` otherwise. Being a delegate is a significant status within the Bittensor network, indicating a neuron's involvement in consensus and governance processes. @@ -3189,7 +3184,7 @@ def get_delegate_take( represents the percentage of rewards that the delegate claims from its nominators' stakes. Args: - hotkey_ss58 (str): The SS58 address of the neuron's hotkey. 
+ hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. block (Optional[int], optional): The blockchain block number for the query. Returns: @@ -3210,7 +3205,7 @@ def get_nominators_for_hotkey( Nominators are neurons that stake their tokens on a delegate to support its operations. Args: - hotkey_ss58 (str): The SS58 address of the neuron's hotkey. + hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. block (Optional[int], optional): The blockchain block number for the query. Returns: @@ -3233,11 +3228,11 @@ def get_delegate_by_hotkey( a comprehensive view of the delegate's status, including its stakes, nominators, and reward distribution. Args: - hotkey_ss58 (str): The SS58 address of the delegate's hotkey. + hotkey_ss58 (str): The ``SS58`` address of the delegate's hotkey. block (Optional[int], optional): The blockchain block number for the query. Returns: - Optional[DelegateInfo]: Detailed information about the delegate neuron, None if not found. + Optional[DelegateInfo]: Detailed information about the delegate neuron, ``None`` if not found. This function is essential for understanding the roles and influence of delegate neurons within the Bittensor network's consensus and governance structures. @@ -3307,7 +3302,7 @@ def get_delegated( identifies the delegates that a specific account has staked tokens on. Args: - coldkey_ss58 (str): The SS58 address of the account's coldkey. + coldkey_ss58 (str): The ``SS58`` address of the account's coldkey. block (Optional[int], optional): The blockchain block number for the query. Returns: @@ -3350,7 +3345,7 @@ def get_stake_info_for_coldkey( about the stakes held by an account, including the staked amounts and associated delegates. Args: - coldkey_ss58 (str): The SS58 address of the account's coldkey. + coldkey_ss58 (str): The ``SS58`` address of the account's coldkey. block (Optional[int], optional): The blockchain block number for the query. Returns: @@ -3386,7 +3381,7 @@ def get_stake_info_for_coldkeys( accounts, providing a collective view of their stakes and delegations. Args: - coldkey_ss58_list (List[str]): A list of SS58 addresses of the accounts' coldkeys. + coldkey_ss58_list (List[str]): A list of ``SS58`` addresses of the accounts' coldkeys. block (Optional[int], optional): The blockchain block number for the query. Returns: @@ -3427,11 +3422,11 @@ def is_hotkey_registered_any( Checks if a neuron's hotkey is registered on any subnet within the Bittensor network. Args: - hotkey_ss58 (str): The SS58 address of the neuron's hotkey. + hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. block (Optional[int], optional): The blockchain block number at which to perform the check. Returns: - bool: True if the hotkey is registered on any subnet, False otherwise. + bool: ``True`` if the hotkey is registered on any subnet, False otherwise. This function is essential for determining the network-wide presence and participation of a neuron. """ @@ -3444,12 +3439,12 @@ def is_hotkey_registered_on_subnet( Checks if a neuron's hotkey is registered on a specific subnet within the Bittensor network. Args: - hotkey_ss58 (str): The SS58 address of the neuron's hotkey. + hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. netuid (int): The unique identifier of the subnet. block (Optional[int], optional): The blockchain block number at which to perform the check. Returns: - bool: True if the hotkey is registered on the specified subnet, False otherwise. 
+ bool: ``True`` if the hotkey is registered on the specified subnet, False otherwise. This function helps in assessing the participation of a neuron in a particular subnet, indicating its specific area of operation or influence within the network. @@ -3470,13 +3465,11 @@ def is_hotkey_registered( Args: hotkey_ss58 (str): The SS58 address of the neuron's hotkey. - netuid (Optional[int], optional): The unique identifier of the subnet to check the registration. - If None, the registration is checked across all subnets. + netuid (Optional[int], optional): The unique identifier of the subnet to check the registration. If ``None``, the registration is checked across all subnets. block (Optional[int], optional): The blockchain block number at which to perform the query. Returns: - bool: True if the hotkey is registered in the specified context (either any subnet or a specific subnet), - False otherwise. + bool: ``True`` if the hotkey is registered in the specified context (either any subnet or a specific subnet), ``False`` otherwise. This function is important for verifying the active status of neurons in the Bittensor network. It aids in understanding whether a neuron is eligible to participate in network processes such as consensus, @@ -3494,12 +3487,12 @@ def get_uid_for_hotkey_on_subnet( Retrieves the unique identifier (UID) for a neuron's hotkey on a specific subnet. Args: - hotkey_ss58 (str): The SS58 address of the neuron's hotkey. + hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. netuid (int): The unique identifier of the subnet. block (Optional[int], optional): The blockchain block number for the query. Returns: - Optional[int]: The UID of the neuron if it is registered on the subnet, None otherwise. + Optional[int]: The UID of the neuron if it is registered on the subnet, ``None`` otherwise. The UID is a critical identifier within the network, linking the neuron's hotkey to its operational and governance activities on a particular subnet. @@ -3515,7 +3508,7 @@ def get_all_uids_for_hotkey( linked to a specific hotkey. Args: - hotkey_ss58 (str): The SS58 address of the neuron's hotkey. + hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. block (Optional[int], optional): The blockchain block number at which to perform the query. Returns: @@ -3538,7 +3531,7 @@ def get_netuids_for_hotkey( the hotkey is active. Args: - hotkey_ss58 (str): The SS58 address of the neuron's hotkey. + hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. block (Optional[int], optional): The blockchain block number at which to perform the query. Returns: @@ -3556,12 +3549,12 @@ def get_neuron_for_pubkey_and_subnet( the Bittensor network. Args: - hotkey_ss58 (str): The SS58 address of the neuron's hotkey. + hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. netuid (int): The unique identifier of the subnet. block (Optional[int], optional): The blockchain block number at which to perform the query. Returns: - Optional[NeuronInfo]: Detailed information about the neuron if found, None otherwise. + Optional[NeuronInfo]: Detailed information about the neuron if found, ``None`` otherwise. This function is crucial for accessing specific neuron data and understanding its status, stake, and other attributes within a particular subnet of the Bittensor ecosystem. 
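For orientation, a minimal sketch combining the registration-status helpers documented above (the subnet id and hotkey address are illustrative placeholders, and an installed ``bittensor`` package plus network access are assumed)::

    import bittensor

    sub = bittensor.subtensor(network="finney")
    hotkey = "5F..."  # placeholder SS58 address, not a real key
    netuid = 1        # placeholder subnet id

    # Only look up the UID if the hotkey is actually registered on the subnet.
    if sub.is_hotkey_registered_on_subnet(hotkey_ss58=hotkey, netuid=netuid):
        uid = sub.get_uid_for_hotkey_on_subnet(hotkey_ss58=hotkey, netuid=netuid)
        print(f"hotkey registered on subnet {netuid} with uid {uid}")
    else:
        print("hotkey is not registered on this subnet")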
@@ -3576,12 +3569,12 @@ def get_all_neurons_for_pubkey( self, hotkey_ss58: str, block: Optional[int] = None ) -> List[NeuronInfo]: """ - Retrieves information about all neuron instances associated with a given public key (hotkey SS58 + Retrieves information about all neuron instances associated with a given public key (hotkey ``SS58`` address) across different subnets of the Bittensor network. This function aggregates neuron data from various subnets to provide a comprehensive view of a neuron's presence and status within the network. Args: - hotkey_ss58 (str): The SS58 address of the neuron's hotkey. + hotkey_ss58 (str): The ``SS58`` address of the neuron's hotkey. block (Optional[int], optional): The blockchain block number for the query. Returns: @@ -3608,7 +3601,7 @@ def neuron_has_validator_permit( block (Optional[int], optional): The blockchain block number for the query. Returns: - Optional[bool]: True if the neuron has a validator permit, False otherwise. + Optional[bool]: ``True`` if the neuron has a validator permit, False otherwise. This function is essential for understanding a neuron's role and capabilities within a specific subnet, particularly regarding its involvement in network validation and governance. @@ -3629,7 +3622,7 @@ def neuron_for_wallet( block (Optional[int], optional): The blockchain block number at which to perform the query. Returns: - Optional[NeuronInfo]: Detailed information about the neuron if found, None otherwise. + Optional[NeuronInfo]: Detailed information about the neuron if found, ``None`` otherwise. This function is important for wallet owners to understand and manage their neuron's presence and activities within a particular subnet of the Bittensor network. @@ -3652,7 +3645,7 @@ def neuron_for_uid( block (Optional[int], optional): The blockchain block number for the query. Returns: - Optional[NeuronInfo]: Detailed information about the neuron if found, None otherwise. + Optional[NeuronInfo]: Detailed information about the neuron if found, ``None`` otherwise. This function is crucial for analyzing individual neurons' contributions and status within a specific subnet, offering insights into their roles in the network's consensus and validation mechanisms. @@ -3724,7 +3717,7 @@ def neuron_for_uid_lite( block (Optional[int], optional): The blockchain block number for the query. Returns: - Optional[NeuronInfoLite]: A simplified version of neuron information if found, None otherwise. + Optional[NeuronInfoLite]: A simplified version of neuron information if found, ``None`` otherwise. This function is useful for quick and efficient analyses of neuron status and activities within a subnet without the need for comprehensive data retrieval. @@ -3800,7 +3793,7 @@ def metagraph( Args: netuid (int): The network UID of the subnet to query. lite (bool, default=True): If true, returns a metagraph using a lightweight sync (no weights, no bonds). - block (Optional[int]): Block number for synchronization, or None for the latest block. + block (Optional[int]): Block number for synchronization, or ``None`` for the latest block. Returns: bittensor.Metagraph: The metagraph representing the subnet's structure and neuron relationships. @@ -3911,7 +3904,7 @@ def associated_validator_ip_info( block (Optional[int]): The blockchain block number for the query. Returns: - Optional[List[IPInfo]]: A list of IPInfo objects for validator nodes in the subnet, or None if no validators are associated. 
+ Optional[List[IPInfo]]: A list of IPInfo objects for validator nodes in the subnet, or ``None`` if no validators are associated. Validator IP information is key for establishing secure and reliable connections within the network, facilitating consensus and validation processes critical for the network's integrity and performance. @@ -4079,7 +4072,7 @@ def get_balance(self, address: str, block: int = None) -> Balance: the blockchain to determine the amount of Tao held by a given account. Args: - address (str): The Substrate address in ss58 format. + address (str): The Substrate address in ``ss58`` format. block (int, optional): The blockchain block number at which to perform the query. Returns: @@ -4138,7 +4131,7 @@ def get_balances(self, block: int = None) -> Dict[str, Balance]: block (int, optional): The blockchain block number at which to perform the query. Returns: - Dict[str, Balance]: A dictionary mapping each account's ss58 address to its balance. + Dict[str, Balance]: A dictionary mapping each account's ``ss58`` address to its balance. This function is valuable for analyzing the overall economic landscape of the Bittensor network, including the distribution of financial resources and the financial status of network participants. From 90e17981bdf4950158652be553512fd4dabe11af Mon Sep 17 00:00:00 2001 From: Raj Karamchedu Date: Fri, 26 Jan 2024 15:38:37 +0530 Subject: [PATCH 03/23] Eighth (final) docstrings formatting PR --- bittensor/synapse.py | 138 ++++++++++++++++++++++------------------ bittensor/tensor.py | 4 +- bittensor/threadpool.py | 12 ++-- 3 files changed, 84 insertions(+), 70 deletions(-) diff --git a/bittensor/synapse.py b/bittensor/synapse.py index f5f3a75b8a..d6d9551ca0 100644 --- a/bittensor/synapse.py +++ b/bittensor/synapse.py @@ -64,16 +64,15 @@ def get_size(obj, seen=None) -> int: def cast_int(raw: str) -> int: """ - Converts a string to an integer, if the string is not None. + Converts a string to an integer, if the string is not ``None``. - This function attempts to convert a string to an integer. If the string is None, - it simply returns None. + This function attempts to convert a string to an integer. If the string is ``None``, it simply returns ``None``. Args: raw (str): The string to convert. Returns: - int or None: The converted integer, or None if the input was None. + int or None: The converted integer, or ``None`` if the input was ``None``. """ return int(raw) if raw != None else raw @@ -81,16 +80,15 @@ def cast_int(raw: str) -> int: def cast_float(raw: str) -> float: """ - Converts a string to a float, if the string is not None. + Converts a string to a float, if the string is not ``None``. - This function attempts to convert a string to a float. If the string is None, - it simply returns None. + This function attempts to convert a string to a float. If the string is ``None``, it simply returns ``None``. Args: raw (str): The string to convert. Returns: - float or None: The converted float, or None if the input was None. + float or None: The converted float, or ``None`` if the input was ``None``. """ return float(raw) if raw != None else raw @@ -98,9 +96,10 @@ def cast_float(raw: str) -> float: class TerminalInfo(pydantic.BaseModel): """ - TerminalInfo is a crucial class in the Bittensor framework, encapsulating detailed information about - a network synapse (node) involved in a communication process. This class serves as a metadata carrier, - providing essential details about the state and configuration of a terminal during network interactions. 
+ TerminalInfo encapsulates detailed information about a network synapse (node) involved in a communication process. + + This class serves as a metadata carrier, + providing essential details about the state and configuration of a terminal during network interactions. This is a crucial class in the Bittensor framework. The TerminalInfo class contains information such as HTTP status codes and messages, processing times, IP addresses, ports, Bittensor version numbers, and unique identifiers. These details are vital for @@ -110,7 +109,7 @@ class TerminalInfo(pydantic.BaseModel): designed to be used natively within Synapses, so that you will not need to call this directly, but rather is used as a helper class for Synapses. - Attributes: + Args: status_code (int): HTTP status code indicating the result of a network request. Essential for identifying the outcome of network interactions. status_message (str): Descriptive message associated with the status code, providing additional context about the request's result. process_time (float): Time taken by the terminal to process the call, important for performance monitoring and optimization. @@ -122,7 +121,8 @@ class TerminalInfo(pydantic.BaseModel): hotkey (str): Encoded hotkey string of the terminal wallet, important for transaction and identity verification in the network. signature (str): Digital signature verifying the tuple of nonce, axon_hotkey, dendrite_hotkey, and uuid, critical for ensuring data authenticity and security. - Usage: + Usage:: + # Creating a TerminalInfo instance terminal_info = TerminalInfo( status_code=200, @@ -254,6 +254,7 @@ class Config: class Synapse(pydantic.BaseModel): """ Represents a Synapse in the Bittensor network, serving as a communication schema between neurons (nodes). + Synapses ensure the format and correctness of transmission tensors according to the Bittensor protocol. Each Synapse type is tailored for a specific machine learning (ML) task, following unique compression and communication processes. This helps maintain sanitized, correct, and useful information flow across the network. @@ -263,7 +264,7 @@ class Synapse(pydantic.BaseModel): computation, ensuring secure and efficient data exchange in the network. The class includes Pydantic validators and root validators to enforce data integrity and format. Additionally, - properties like 'is_success', 'is_failure', 'is_timeout', etc., provide convenient status checks based on + properties like ``is_success``, ``is_failure``, ``is_timeout``, etc., provide convenient status checks based on dendrite responses. Think of Bittensor Synapses as glorified pydantic wrappers that have been designed to be used in a distributed @@ -272,26 +273,26 @@ class Synapse(pydantic.BaseModel): Key Features: - 1. HTTP Route Name (`name` attribute): + 1. HTTP Route Name (``name`` attribute): Enables the identification and proper routing of requests within the network. Essential for users defining custom routes for specific machine learning tasks. - 2. Query Timeout (`timeout` attribute): + 2. Query Timeout (``timeout`` attribute): Determines the maximum duration allowed for a query, ensuring timely responses and network efficiency. Crucial for users to manage network latency and response times, particularly in time-sensitive applications. - 3. Request Sizes (`total_size`, `header_size` attributes): + 3. 
Request Sizes (``total_size``, ``header_size`` attributes): Keeps track of the size of request bodies and headers, ensuring efficient data transmission without overloading the network. Important for users to monitor and optimize the data payload, especially in bandwidth-constrained environments. - 4. Terminal Information (`dendrite`, `axon` attributes): + 4. Terminal Information (``dendrite``, ``axon`` attributes): Stores information about the dendrite (receiving end) and axon (sending end), facilitating communication between nodes. Users can access detailed information about the communication endpoints, aiding in debugging and network analysis. - 5. Body Hash Computation (`computed_body_hash`, `required_hash_fields`): + 5. Body Hash Computation (``computed_body_hash``, ``required_hash_fields``): Ensures data integrity and security by computing hashes of transmitted data. Provides users with a mechanism to verify data integrity and detect any tampering during transmission. @@ -299,12 +300,13 @@ class Synapse(pydantic.BaseModel): Facilitates the conversion of Synapse objects to and from a format suitable for network transmission. Essential for users who need to customize data formats for specific machine learning models or tasks. - 7. Status Check Properties (`is_success`, `is_failure`, `is_timeout`, etc.): + 7. Status Check Properties (``is_success``, ``is_failure``, ``is_timeout``, etc.): Provides quick and easy methods to check the status of a request, improving error handling and response management. Users can efficiently handle different outcomes of network requests, enhancing the robustness of their applications. - Example usage: + Example usage:: + # Creating a Synapse instance with default values synapse = Synapse() @@ -334,8 +336,8 @@ class Synapse(pydantic.BaseModel): print(synapse.axon.status_code) synapse.axon.status_code = 408 # Timeout - Attributes: - name (str): HTTP route name, set on axon.attach. + Args: + name (str): HTTP route name, set on :func:`axon.attach`. timeout (float): Total query length, set by the dendrite terminal. total_size (int): Total size of request body in bytes. header_size (int): Size of request header in bytes. @@ -346,7 +348,7 @@ class Synapse(pydantic.BaseModel): Methods: deserialize: Custom deserialization logic for subclasses. - __setattr__: Override method to make 'required_hash_fields' read-only. + __setattr__: Override method to make ``required_hash_fields`` read-only. get_total_size: Calculates and returns the total size of the object. to_headers: Constructs a dictionary of headers from instance properties. body_hash: Computes a SHA3-256 hash of the serialized body. @@ -374,7 +376,8 @@ def deserialize(self) -> "Synapse": In its default form, this method simply returns the instance of the Synapse itself without any modifications. Subclasses of Synapse can override this method to add specific deserialization behaviors, such as converting serialized data back into complex object types or performing additional data integrity checks. - Example: + Example:: + class CustomSynapse(Synapse): additional_data: str @@ -490,9 +493,9 @@ def set_name_type(cls, values) -> dict: def __setattr__(self, name: str, value: Any): """ - Override the __setattr__ method to make the `required_hash_fields` property read-only. + Override the :func:`__setattr__` method to make the ``required_hash_fields`` property read-only. 
- This is a security mechanism such that the `required_hash_fields` property cannot be + This is a security mechanism such that the ``required_hash_fields`` property cannot be overridden by the user or malicious code. """ if name == "body_hash": @@ -506,7 +509,7 @@ def get_total_size(self) -> int: Get the total size of the current object. This method first calculates the size of the current object, then assigns it - to the instance variable self.total_size and finally returns this value. + to the instance variable :func:`self.total_size` and finally returns this value. Returns: int: The total size of the current object. @@ -519,11 +522,11 @@ def is_success(self) -> bool: """ Checks if the dendrite's status code indicates success. - This method returns True if the status code of the dendrite is 200, + This method returns ``True`` if the status code of the dendrite is ``200``, which typically represents a successful HTTP request. Returns: - bool: True if dendrite's status code is 200, False otherwise. + bool: ``True`` if dendrite's status code is ``200``, ``False`` otherwise. """ return self.dendrite.status_code == 200 @@ -532,11 +535,11 @@ def is_failure(self) -> bool: """ Checks if the dendrite's status code indicates failure. - This method returns True if the status code of the dendrite is not 200, + This method returns ``True`` if the status code of the dendrite is not ``200``, which would mean the HTTP request was not successful. Returns: - bool: True if dendrite's status code is not 200, False otherwise. + bool: ``True`` if dendrite's status code is not ``200``, ``False`` otherwise. """ return self.dendrite.status_code != 200 @@ -545,11 +548,11 @@ def is_timeout(self) -> bool: """ Checks if the dendrite's status code indicates a timeout. - This method returns True if the status code of the dendrite is 408, + This method returns ``True`` if the status code of the dendrite is ``408``, which is the HTTP status code for a request timeout. Returns: - bool: True if dendrite's status code is 408, False otherwise. + bool: ``True`` if dendrite's status code is ``408``, ``False`` otherwise. """ return self.dendrite.status_code == 408 @@ -558,11 +561,11 @@ def is_blacklist(self) -> bool: """ Checks if the dendrite's status code indicates a blacklisted request. - This method returns True if the status code of the dendrite is 403, + This method returns ``True`` if the status code of the dendrite is ``403``, which is the HTTP status code for a forbidden request. Returns: - bool: True if dendrite's status code is 403, False otherwise. + bool: ``True`` if dendrite's status code is ``403``, ``False`` otherwise. """ return self.dendrite.status_code == 403 @@ -571,31 +574,31 @@ def failed_verification(self) -> bool: """ Checks if the dendrite's status code indicates failed verification. - This method returns True if the status code of the dendrite is 401, + This method returns ``True`` if the status code of the dendrite is ``401``, which is the HTTP status code for unauthorized access. Returns: - bool: True if dendrite's status code is 401, False otherwise. + bool: ``True`` if dendrite's status code is ``401``, ``False`` otherwise. """ return self.dendrite.status_code == 401 def to_headers(self) -> dict: """ - Converts the state of a Synapse instance into a dictionary of HTTP headers. This method is essential for + Converts the state of a Synapse instance into a dictionary of HTTP headers. 
+ + This method is essential for packaging Synapse data for network transmission in the Bittensor framework, ensuring that each key aspect of the Synapse is represented in a format suitable for HTTP communication. Process: - 1. Basic Information: It starts by including the 'name' and 'timeout' of the Synapse, which are fundamental - for identifying the query and managing its lifespan on the network. - 2. Complex Objects: The method serializes the 'axon' and 'dendrite' objects, if present, into strings. This - serialization is crucial for preserving the state and structure of these objects over the network. - 3. Encoding: Non-optional complex objects are serialized and encoded in base64, making them safe for HTTP - transport. - 4. Size Metrics: The method calculates and adds the size of headers and the total object size, providing - valuable information for network bandwidth management. - - Example Usage: + + 1. Basic Information: It starts by including the ``name`` and ``timeout`` of the Synapse, which are fundamental for identifying the query and managing its lifespan on the network. + 2. Complex Objects: The method serializes the ``axon`` and ``dendrite`` objects, if present, into strings. This serialization is crucial for preserving the state and structure of these objects over the network. + 3. Encoding: Non-optional complex objects are serialized and encoded in base64, making them safe for HTTP transport. + 4. Size Metrics: The method calculates and adds the size of headers and the total object size, providing valuable information for network bandwidth management. + + Example Usage:: + synapse = Synapse(name="ExampleSynapse", timeout=30) headers = synapse.to_headers() # headers now contains a dictionary representing the Synapse instance @@ -662,16 +665,20 @@ def to_headers(self) -> dict: @property def body_hash(self) -> str: """ - Computes a SHA3-256 hash of the serialized body of the Synapse instance. This hash is used to + Computes a SHA3-256 hash of the serialized body of the Synapse instance. + + This hash is used to ensure the data integrity and security of the Synapse instance when it's transmitted across the - network. It's a crucial feature for verifying that the data received is the same as the data sent. + network. It is a crucial feature for verifying that the data received is the same as the data sent. Process: - 1. Iterates over each required field as specified in `required_fields_hash`. + + 1. Iterates over each required field as specified in ``required_fields_hash``. 2. Concatenates the string representation of these fields. 3. Applies SHA3-256 hashing to the concatenated string to produce a unique fingerprint of the data. - Example: + Example:: + synapse = Synapse(name="ExampleRoute", timeout=10) hash_value = synapse.body_hash # hash_value is the SHA3-256 hash of the serialized body of the Synapse instance @@ -696,16 +703,19 @@ def body_hash(self) -> str: @classmethod def parse_headers_to_inputs(cls, headers: dict) -> dict: """ - Interprets and transforms a given dictionary of headers into a structured dictionary, facilitating - the reconstruction of Synapse objects. This method is essential for parsing network-transmitted + Interprets and transforms a given dictionary of headers into a structured dictionary, facilitating the reconstruction of Synapse objects. + + This method is essential for parsing network-transmitted data back into a Synapse instance, ensuring data consistency and integrity. Process: - 1. 
Separates headers into categories based on prefixes ('axon', 'dendrite', etc.). - 2. Decodes and deserializes 'input_obj' headers into their original objects. + + 1. Separates headers into categories based on prefixes (``axon``, ``dendrite``, etc.). + 2. Decodes and deserializes ``input_obj`` headers into their original objects. 3. Assigns simple fields directly from the headers to the input dictionary. - Example: + Example:: + received_headers = { 'bt_header_axon_address': '127.0.0.1', 'bt_header_dendrite_port': '8080', @@ -714,8 +724,8 @@ def parse_headers_to_inputs(cls, headers: dict) -> dict: inputs = Synapse.parse_headers_to_inputs(received_headers) # inputs now contains a structured representation of Synapse properties based on the headers - Note: This is handled automatically when calling Synapse.from_headers(headers) and does not - need to be called directly. + Note: + This is handled automatically when calling :func:`Synapse.from_headers(headers)` and does not need to be called directly. Args: headers (dict): The headers dictionary to parse. @@ -785,12 +795,14 @@ def parse_headers_to_inputs(cls, headers: dict) -> dict: @classmethod def from_headers(cls, headers: dict) -> "Synapse": """ - Constructs a new Synapse instance from a given headers dictionary, enabling the re-creation of the - Synapse's state as it was prior to network transmission. This method is a key part of the + Constructs a new Synapse instance from a given headers dictionary, enabling the re-creation of the Synapse's state as it was prior to network transmission. + + This method is a key part of the deserialization process in the Bittensor network, allowing nodes to accurately reconstruct Synapse objects from received data. - Example: + Example:: + received_headers = { 'bt_header_axon_address': '127.0.0.1', 'bt_header_dendrite_port': '8080', diff --git a/bittensor/tensor.py b/bittensor/tensor.py index 50b39df4b2..7a1573329c 100644 --- a/bittensor/tensor.py +++ b/bittensor/tensor.py @@ -40,7 +40,7 @@ def cast_dtype(raw: Union[None, torch.dtype, str]) -> str: """ - Casts the raw value to a string representing the torch data type. + Casts the raw value to a string representing the `torch data type `_. Args: raw (Union[None, torch.dtype, str]): The raw value to cast. @@ -110,7 +110,7 @@ class Tensor(pydantic.BaseModel): """ Represents a Tensor object. - Attributes: + Args: buffer (Optional[str]): Tensor buffer data. dtype (str): Tensor data type. shape (List[int]): Tensor shape. diff --git a/bittensor/threadpool.py b/bittensor/threadpool.py index 7cc1b6a9a3..d4de054bde 100644 --- a/bittensor/threadpool.py +++ b/bittensor/threadpool.py @@ -1,7 +1,7 @@ # Copyright 2009 Brian Quinlan. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. -"""Implements ThreadPoolExecutor.""" +"""Implements `ThreadPoolExecutor `_.""" __author__ = "Brian Quinlan (brian@sweetapp.com)" @@ -111,7 +111,7 @@ def _worker(executor_reference, work_queue, initializer, initargs): class BrokenThreadPool(_base.BrokenExecutor): """ - Raised when a worker thread in a ThreadPoolExecutor failed initializing. + Raised when a worker thread in a `ThreadPoolExecutor `_ failed initializing. """ @@ -129,7 +129,8 @@ def __init__( initializer=None, initargs=(), ): - """Initializes a new ThreadPoolExecutor instance. + """Initializes a new `ThreadPoolExecutor `_ instance. + Args: max_workers: The maximum number of threads that can be used to execute the given calls. 
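The ``Synapse`` header helpers documented above (``to_headers`` and ``from_headers``) are easiest to see as a round trip. A minimal sketch, assuming a plain ``Synapse`` with placeholder field values::

    import bittensor as bt

    synapse = bt.Synapse(name="ExampleRoute", timeout=12.0)

    # Serialize the instance into HTTP-safe headers (name, timeout, sizes, bt_header_* entries).
    headers = synapse.to_headers()

    # Rebuild an equivalent Synapse on the receiving side from those same headers.
    restored = bt.Synapse.from_headers(headers)
    print(restored.name, restored.timeout)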
@@ -193,8 +194,9 @@ def add_args(cls, parser: argparse.ArgumentParser, prefix: str = None): @classmethod def config(cls) -> "bittensor.config": - """Get config from the argument parser - Return: bittensor.config object + """Get config from the argument parser. + + Return: :func:`bittensor.config` object. """ parser = argparse.ArgumentParser() PriorityThreadPoolExecutor.add_args(parser) From 7c53ff272d4a0abdec0da42e8161f94411fcb677 Mon Sep 17 00:00:00 2001 From: ifrit98 Date: Sat, 27 Jan 2024 07:03:45 +0000 Subject: [PATCH 04/23] burn -> recycle for public facing code --- bittensor/btlogging.py | 6 ++-- bittensor/chain_data.py | 2 +- bittensor/commands/network.py | 2 +- bittensor/commands/register.py | 4 +-- bittensor/config.py | 2 +- bittensor/dendrite.py | 34 +++++++++--------- bittensor/extrinsics/registration.py | 4 +-- bittensor/keyfile.py | 54 ++++++++++++++-------------- bittensor/subtensor.py | 43 +++++++++++----------- bittensor/synapse.py | 44 +++++++++++------------ bittensor/threadpool.py | 4 +-- 11 files changed, 98 insertions(+), 101 deletions(-) diff --git a/bittensor/btlogging.py b/bittensor/btlogging.py index d17a0dd00a..aef1964f99 100644 --- a/bittensor/btlogging.py +++ b/bittensor/btlogging.py @@ -59,7 +59,7 @@ def __new__( logging_dir: str = None, ): r"""Instantiate Bittensor logging system backend. - + Args: config (bittensor.config, optional): bittensor.logging.config() @@ -131,8 +131,8 @@ def __new__( @classmethod def config(cls): """Get config from the argument parser. - - Return: + + Return: bittensor.config object """ parser = argparse.ArgumentParser() diff --git a/bittensor/chain_data.py b/bittensor/chain_data.py index be2486c8b7..057a606718 100644 --- a/bittensor/chain_data.py +++ b/bittensor/chain_data.py @@ -766,7 +766,7 @@ def delegated_list_from_vec_u8( cls, vec_u8: List[int] ) -> List[Tuple["DelegateInfo", Balance]]: r"""Returns a list of Tuples of DelegateInfo objects, and Balance, from a ``vec_u8``. - + This is the list of delegates that the user has delegated to, and the amount of stake delegated. """ decoded = from_scale_encoding(vec_u8, ChainDataType.DelegatedInfo, is_vec=True) diff --git a/bittensor/commands/network.py b/bittensor/commands/network.py index 59e8c179c7..22d0989f72 100644 --- a/bittensor/commands/network.py +++ b/bittensor/commands/network.py @@ -271,7 +271,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): table.add_column("[overline white]MAX_N", style="white", justify="center") table.add_column("[overline white]EMISSION", style="white", justify="center") table.add_column("[overline white]TEMPO", style="white", justify="center") - table.add_column("[overline white]BURN", style="white", justify="center") + table.add_column("[overline white]RECYCLE", style="white", justify="center") table.add_column("[overline white]POW", style="white", justify="center") table.add_column("[overline white]SUDO", style="white") for row in rows: diff --git a/bittensor/commands/register.py b/bittensor/commands/register.py index 07225ca2f8..72fdb6853a 100644 --- a/bittensor/commands/register.py +++ b/bittensor/commands/register.py @@ -38,7 +38,7 @@ class RegisterCommand: The registration cost is determined by the current recycle amount for the specified subnet. If the balance is insufficient or the subnet does not exist, the command will exit with an appropriate error message. 
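The recycle-cost check described above can be sketched with the calls this patch renames (illustrative only; ``netuid=1`` and the default wallet are placeholder choices)::

    import bittensor as bt

    subtensor = bt.subtensor(network="finney")
    wallet = bt.wallet()                              # assumes coldkey/hotkey files already exist on disk

    current_recycle = subtensor.recycle(netuid=1)     # registration (recycle) cost; None if the subnet does not exist
    balance = subtensor.get_balance(address=wallet.coldkeypub.ss58_address)

    if current_recycle is not None and balance >= current_recycle:
        print(f"Registering would recycle {current_recycle} out of {balance}")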
- If the preconditions are met, and the user confirms the transaction (if ``no_prompt`` is not set), the command proceeds to register the neuron by burning the required amount of TAO. + If the preconditions are met, and the user confirms the transaction (if ``no_prompt`` is not set), the command proceeds to register the neuron by recycling the required amount of TAO. The command structure includes: @@ -87,7 +87,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): sys.exit(1) # Check current recycle amount - current_recycle = subtensor.burn(netuid=cli.config.netuid) + current_recycle = subtensor.recycle(netuid=cli.config.netuid) balance = subtensor.get_balance(address=wallet.coldkeypub.ss58_address) # Check balance is sufficient diff --git a/bittensor/config.py b/bittensor/config.py index 3962615c25..1aacaca2b6 100644 --- a/bittensor/config.py +++ b/bittensor/config.py @@ -247,7 +247,7 @@ def __parse_args__( args: List[str], parser: argparse.ArgumentParser = None, strict: bool = False ) -> argparse.Namespace: """Parses the passed args use the passed parser. - + Args: args (List[str]): List of arguments to parse. diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index bb41b81929..b96e819ce9 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -31,8 +31,8 @@ class dendrite(torch.nn.Module): """ - The Dendrite class, inheriting from PyTorch's Module class, represents the abstracted implementation of a network client module. - + The Dendrite class, inheriting from PyTorch's Module class, represents the abstracted implementation of a network client module. + In the brain analogy, dendrites receive signals from other neurons (in this case, network servers or axons), and the Dendrite class here is designed to send requests to those endpoint to recieve inputs. @@ -73,11 +73,11 @@ class dendrite(torch.nn.Module): aclose_session(self): Asynchronously closes the internal aiohttp client session. - NOTE: + NOTE: When working with async `aiohttp `_ client sessions, it is recommended to use a context manager. Example with a context manager:: - + >>> aysnc with dendrite(wallet = bittensor.wallet()) as d: >>> print(d) >>> d( ) # ping axon @@ -87,7 +87,7 @@ class dendrite(torch.nn.Module): However, you are able to safely call :func:`dendrite.query()` without a context manager in a synchronous setting. Example without a context manager:: - + >>> d = dendrite(wallet = bittensor.wallet() ) >>> print(d) >>> d( ) # ping axon @@ -141,7 +141,7 @@ async def session(self) -> aiohttp.ClientSession: the dendrite, adhering to the async nature of the network interactions in the Bittensor framework. Example usage:: - + import bittensor as bt # Import bittensor wallet = bt.wallet( ... ) # Initialize a wallet dendrite = bt.dendrite( wallet ) # Initialize a dendrite instance with the wallet @@ -167,7 +167,7 @@ def close_session(self): resources like open connections and internal buffers. It is crucial for preventing resource leakage and should be called when the dendrite instance is no longer in use, especially in synchronous contexts. - Note: + Note: This method utilizes asyncio's event loop to close the session asynchronously from a synchronous context. It is advisable to use this method only when asynchronous context management is not feasible. Usage: @@ -193,7 +193,7 @@ async def aclose_session(self): await :func:`dendrite_instance.aclose_session()`. 
Example:: - + async with dendrite_instance: # Operations using dendrite pass @@ -237,7 +237,7 @@ def _handle_request_errors(self, synapse, request_name, exception): request_name: The name of the request during which the exception occurred. exception: The exception object caught during the request. - Note: + Note: This method updates the synapse object in-place. """ if isinstance(exception, aiohttp.ClientConnectorError): @@ -263,7 +263,7 @@ def _log_outgoing_request(self, synapse): is crucial for monitoring and debugging network activity within the Bittensor network. To turn on debug messages, set the environment variable BITTENSOR_DEBUG to ``1``, or call the bittensor debug method like so:: - + import bittensor bittensor.debug() @@ -349,7 +349,7 @@ async def forward( returned, each containing the response from the corresponding Axon. For example:: - + >>> ... >>> wallet = bittensor.wallet() # Initialize a wallet >>> synapse = bittensor.Synapse(...) # Create a synapse object that contains query data @@ -362,7 +362,7 @@ async def forward( iterated over to process each chunk individually. For example:: - + >>> ... >>> dendrte = bittensor.dendrite(wallet = wallet) >>> async for chunk in dendrite.forward(axons, synapse, timeout, deserialize, run_async, streaming): @@ -768,7 +768,7 @@ async def __aenter__(self): Dendrite: The current instance of the Dendrite class. Usage:: - + async with Dendrite() as dendrite: await dendrite.some_async_method() """ @@ -787,11 +787,11 @@ async def __aexit__(self, exc_type, exc_value, traceback): traceback (TracebackType, optional): A traceback object encapsulating the call stack at the point where the exception was raised. Usage:: - + async with bt.dendrite( wallet ) as dendrite: await dendrite.some_async_method() - Note: + Note: This automatically closes the session by calling :func:`__aexit__` after the context closes. """ await self.aclose_session() @@ -803,11 +803,11 @@ def __del__(self): This method is invoked when the Dendrite instance is about to be destroyed. The destructor ensures that the aiohttp client session is closed before the instance is fully destroyed, releasing any remaining resources. - Note: + Note: Relying on the destructor for cleanup can be unpredictable. It is recommended to explicitly close sessions using the provided methods or the ``async with`` context manager. Usage:: - + dendrite = Dendrite() # ... some operations ... del dendrite # This will implicitly invoke the __del__ method and close the session. diff --git a/bittensor/extrinsics/registration.py b/bittensor/extrinsics/registration.py index e75217b217..75cc464a13 100644 --- a/bittensor/extrinsics/registration.py +++ b/bittensor/extrinsics/registration.py @@ -257,7 +257,7 @@ def burned_register_extrinsic( old_balance = subtensor.get_balance(wallet.coldkeypub.ss58_address) - burn_amount = subtensor.burn(netuid=netuid) + recycle_amount = subtensor.recycle(netuid=netuid) if not neuron.is_null: bittensor.__console__.print( ":white_heavy_check_mark: [green]Already Registered[/green]:\n" @@ -272,7 +272,7 @@ def burned_register_extrinsic( if prompt: # Prompt user for confirmation. 
- if not Confirm.ask(f"Recycle {burn_amount} to register on subnet:{netuid}?"): + if not Confirm.ask(f"Recycle {recycle_amount} to register on subnet:{netuid}?"): return False with bittensor.__console__.status(":satellite: Recycling TAO for Registration..."): diff --git a/bittensor/keyfile.py b/bittensor/keyfile.py index 0bcadaa86b..c901851b57 100644 --- a/bittensor/keyfile.py +++ b/bittensor/keyfile.py @@ -44,7 +44,7 @@ def serialized_keypair_to_keyfile_data(keypair: "bittensor.Keypair") -> bytes: """Serializes keypair object into keyfile data. - + Args: keypair (bittensor.Keypair): The keypair object to be serialized. Returns: @@ -70,7 +70,7 @@ def serialized_keypair_to_keyfile_data(keypair: "bittensor.Keypair") -> bytes: def deserialize_keypair_from_keyfile_data(keyfile_data: bytes) -> "bittensor.Keypair": """Deserializes Keypair object from passed keyfile data. - + Args: keyfile_data (bytes): The keyfile data as bytes to be loaded. Returns: @@ -118,7 +118,7 @@ def deserialize_keypair_from_keyfile_data(keyfile_data: bytes) -> "bittensor.Key def validate_password(password: str) -> bool: """Validates the password against a password policy. - + Args: password (str): The password to verify. Returns: @@ -145,7 +145,7 @@ def validate_password(password: str) -> bool: def ask_password_to_encrypt() -> str: """Prompts the user to enter a password for key encryption. - + Returns: password (str): The valid password entered by the user. """ @@ -158,7 +158,7 @@ def ask_password_to_encrypt() -> str: def keyfile_data_is_encrypted_nacl(keyfile_data: bytes) -> bool: """Returns true if the keyfile data is NaCl encrypted. - + Args: keyfile_data ( bytes, required ): Bytes to validate. @@ -171,7 +171,7 @@ def keyfile_data_is_encrypted_nacl(keyfile_data: bytes) -> bool: def keyfile_data_is_encrypted_ansible(keyfile_data: bytes) -> bool: """Returns true if the keyfile data is ansible encrypted. - + Args: keyfile_data (bytes): The bytes to validate. Returns: @@ -192,7 +192,7 @@ def keyfile_data_is_encrypted_legacy(keyfile_data: bytes) -> bool: def keyfile_data_is_encrypted(keyfile_data: bytes) -> bool: """Returns ``true`` if the keyfile data is encrypted. - + Args: keyfile_data (bytes): The bytes to validate. Returns: @@ -207,7 +207,7 @@ def keyfile_data_is_encrypted(keyfile_data: bytes) -> bool: def keyfile_data_encryption_method(keyfile_data: bytes) -> bool: """Returns ``true`` if the keyfile data is encrypted. - + Args: keyfile_data ( bytes, required ): Bytes to validate @@ -236,7 +236,7 @@ def legacy_encrypt_keyfile_data(keyfile_data: bytes, password: str = None) -> by def encrypt_keyfile_data(keyfile_data: bytes, password: str = None) -> bytes: """Encrypts the passed keyfile data using ansible vault. - + Args: keyfile_data (bytes): The bytes to encrypt. password (str, optional): The password used to encrypt the data. If ``None``, asks for user input. @@ -260,7 +260,7 @@ def encrypt_keyfile_data(keyfile_data: bytes, password: str = None) -> bytes: def get_coldkey_password_from_environment(coldkey_name: str) -> Optional[str]: """Retrieves the cold key password from the environment variables. - + Args: coldkey_name (str): The name of the cold key. Returns: @@ -279,7 +279,7 @@ def decrypt_keyfile_data( keyfile_data: bytes, password: str = None, coldkey_name: Optional[str] = None ) -> bytes: """Decrypts the passed keyfile data using ansible vault. - + Args: keyfile_data (bytes): The bytes to decrypt. password (str, optional): The password used to decrypt the data. If ``None``, asks for user input. 
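Taken together, the keyfile helpers shown above support a straightforward encrypt/decrypt round trip. A rough sketch, where the payload bytes and password are placeholders for real serialized keypair data::

    from bittensor.keyfile import (
        decrypt_keyfile_data,
        encrypt_keyfile_data,
        keyfile_data_is_encrypted,
    )

    keyfile_data = b'{"accountId": "0x1234"}'   # placeholder payload, normally produced by serialized_keypair_to_keyfile_data
    password = "correct horse battery staple"

    encrypted = encrypt_keyfile_data(keyfile_data, password=password)
    assert keyfile_data_is_encrypted(encrypted)

    decrypted = decrypt_keyfile_data(encrypted, password=password)
    assert decrypted == keyfile_data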
@@ -373,7 +373,7 @@ def __repr__(self): @property def keypair(self) -> "bittensor.Keypair": """Returns the keypair from path, decrypts data if the file is encrypted. - + Returns: keypair (bittensor.Keypair): The keypair stored under the path. Raises: @@ -384,7 +384,7 @@ def keypair(self) -> "bittensor.Keypair": @property def data(self) -> bytes: """Returns the keyfile data under path. - + Returns: keyfile_data (bytes): The keyfile data stored under the path. Raises: @@ -395,7 +395,7 @@ def data(self) -> bytes: @property def keyfile_data(self) -> bytes: """Returns the keyfile data under path. - + Returns: keyfile_data (bytes): The keyfile data stored under the path. Raises: @@ -411,7 +411,7 @@ def set_keypair( password: str = None, ): """Writes the keypair to the file and optionally encrypts data. - + Args: keypair (bittensor.Keypair): The keypair to store under the path. encrypt (bool, optional): If ``True``, encrypts the file under the path. Default is ``True``. @@ -428,7 +428,7 @@ def set_keypair( def get_keypair(self, password: str = None) -> "bittensor.Keypair": """Returns the keypair from the path, decrypts data if the file is encrypted. - + Args: password (str, optional): The password used to decrypt the file. If ``None``, asks for user input. Returns: @@ -453,7 +453,7 @@ def make_dirs(self): def exists_on_device(self) -> bool: """Returns ``True`` if the file exists on the device. - + Returns: on_device (bool): ``True`` if the file is on the device. """ @@ -463,7 +463,7 @@ def exists_on_device(self) -> bool: def is_readable(self) -> bool: """Returns ``True`` if the file under path is readable. - + Returns: readable (bool): ``True`` if the file is readable. """ @@ -475,7 +475,7 @@ def is_readable(self) -> bool: def is_writable(self) -> bool: """Returns ``True`` if the file under path is writable. - + Returns: writable (bool): ``True`` if the file is writable. """ @@ -485,7 +485,7 @@ def is_writable(self) -> bool: def is_encrypted(self) -> bool: """Returns ``True`` if the file under path is encrypted. - + Returns: encrypted (bool): ``True`` if the file is encrypted. """ @@ -497,7 +497,7 @@ def is_encrypted(self) -> bool: def _may_overwrite(self) -> bool: """Asks the user if it is okay to overwrite the file. - + Returns: may_overwrite (bool): ``True`` if the user allows overwriting the file. """ @@ -508,7 +508,7 @@ def check_and_update_encryption( self, print_result: bool = True, no_prompt: bool = False ): """Check the version of keyfile and update if needed. - + Args: print_result (bool): Print the checking result or not. @@ -609,7 +609,7 @@ def check_and_update_encryption( def encrypt(self, password: str = None): """Encrypts the file under the path. - + Args: password (str, optional): The password for encryption. If ``None``, asks for user input. Raises: @@ -636,7 +636,7 @@ def encrypt(self, password: str = None): def decrypt(self, password: str = None): """Decrypts the file under the path. - + Args: password (str, optional): The password for decryption. If ``None``, asks for user input. Raises: @@ -665,7 +665,7 @@ def decrypt(self, password: str = None): def _read_keyfile_data_from_file(self) -> bytes: """Reads the keyfile data from the file. - + Returns: keyfile_data (bytes): The keyfile data stored under the path. Raises: @@ -685,7 +685,7 @@ def _read_keyfile_data_from_file(self) -> bytes: def _write_keyfile_data_to_file(self, keyfile_data: bytes, overwrite: bool = False): """Writes the keyfile data to the file. - + Args: keyfile_data (bytes): The byte data to store under the path. 
overwrite (bool, optional): If ``True``, overwrites the data without asking for permission from the user. Default is ``False``. @@ -707,7 +707,7 @@ def _write_keyfile_data_to_file(self, keyfile_data: bytes, overwrite: bool = Fal class Mockkeyfile: """ The Mockkeyfile is a mock object representing a keyfile that does not exist on the device. - + It is designed for use in testing scenarios and simulations where actual filesystem operations are not required. The keypair stored in the Mockkeyfile is treated as non-encrypted and the data is stored as a serialized string. """ diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 137d416ce9..a19fb622ff 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -93,8 +93,8 @@ class ParamWithTypes(TypedDict): class subtensor: """ - The Subtensor class in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, facilitating a range of operations essential for the decentralized machine learning network. - + The Subtensor class in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, facilitating a range of operations essential for the decentralized machine learning network. + This class enables neurons (network participants) to engage in activities such as registering on the network, managing staked weights, setting inter-neuronal weights, and participating in consensus mechanisms. @@ -118,7 +118,7 @@ class subtensor: chain_endpoint (str): The blockchain node endpoint URL, enabling direct communication with the Bittensor blockchain for transaction processing and data retrieval. Example Usage:: - + # Connect to the main Bittensor network (Finney). finney_subtensor = subtensor(network='finney') @@ -204,12 +204,12 @@ def add_args(cls, parser: argparse.ArgumentParser, prefix: str = None): @staticmethod def determine_chain_endpoint_and_network(network: str): """Determines the chain endpoint and network from the passed network or chain_endpoint. - + Args: network (str): The network flag. The choices are: ``-- finney`` (main network), ``-- archive`` (archive network +300 blocks), ``-- local`` (local running network), ``-- test`` (test network). chain_endpoint (str): The chain endpoint flag. If set, overrides the network argument. Returns: - network (str): The network flag. + network (str): The network flag. chain_endpoint (str): The chain endpoint flag. If set, overrides the ``network`` argument. """ if network == None: @@ -310,7 +310,7 @@ def __init__( """ Initializes a Subtensor interface for interacting with the Bittensor blockchain. - NOTE: + NOTE: Currently subtensor defaults to the ``finney`` network. This will change in a future release. We strongly encourage users to run their own local subtensor node whenever possible. This increases @@ -748,7 +748,7 @@ def run_faucet( This function is part of Bittensor's onboarding process, ensuring that new neurons have the necessary resources to begin their journey in the decentralized AI network. - Note: + Note: This is for testnet ONLY and is disabled currently. You must build your own staging subtensor chain with the ``--features pow-faucet`` argument to enable this. """ return run_faucet_extrinsic( @@ -776,8 +776,8 @@ def burned_register( prompt: bool = False, ) -> bool: """ - Registers a neuron on the Bittensor network by burning TAO. This method of registration - involves recycling TAO tokens, contributing to the network's deflationary mechanism. + Registers a neuron on the Bittensor network by recycling TAO. 
This method of registration + involves recycling TAO tokens, allowing them to be re-mined by performing work on the network. Args: wallet (bittensor.wallet): The wallet associated with the neuron to be registered. @@ -788,9 +788,6 @@ def burned_register( Returns: bool: ``True`` if the registration is successful, False otherwise. - - This function offers an alternative registration path, aligning with the network's principles - of token circulation and value conservation. """ return burned_register_extrinsic( subtensor=self, @@ -810,7 +807,7 @@ def _do_pow_register( wait_for_finalization: bool = True, ) -> Tuple[bool, Optional[str]]: """Sends a (POW) register extrinsic to the chain. - + Args: netuid (int): The subnet to register on. wallet (bittensor.wallet): The wallet to register. @@ -1042,7 +1039,7 @@ def _do_transfer( wait_for_finalization: bool = False, ) -> Tuple[bool, Optional[str], Optional[str]]: """Sends a transfer extrinsic to the chain. - + Args: wallet (:func:`bittensor.wallet`): Wallet object. dest (str): Destination public key address. @@ -1520,7 +1517,7 @@ def _do_stake( wait_for_finalization: bool = False, ) -> bool: """Sends a stake extrinsic to the chain. - + Args: wallet (:func:`bittensor.wallet`): Wallet object that can sign the extrinsic. hotkey_ss58 (str): Hotkey ``ss58`` address to stake to. @@ -1647,7 +1644,7 @@ def _do_unstake( wait_for_finalization: bool = False, ) -> bool: """Sends an unstake extrinsic to the chain. - + Args: wallet (:func:`bittensor.wallet`): Wallet object that can sign the extrinsic. hotkey_ss58 (str): Hotkey ``ss58`` address to unstake from. @@ -2055,7 +2052,7 @@ def query_identity( detailed identity information about a specific neuron, which is a crucial aspect of the network's decentralized identity and governance system. - NOTE: + NOTE: See the `Bittensor CLI documentation `_ for supported identity parameters. Args: @@ -2098,7 +2095,7 @@ def update_identity( Updates the identity of a neuron on the Bittensor blockchain. This function allows neurons to modify their identity attributes, reflecting changes in their roles, stakes, or other network-specific parameters. - NOTE: + NOTE: See the `Bittensor CLI documentation `_ for supported identity parameters. Args: @@ -2474,7 +2471,7 @@ def rho(self, netuid: int, block: Optional[int] = None) -> Optional[int]: Retrieves the 'Rho' hyperparameter for a specified subnet within the Bittensor network. 'Rho' represents the global inflation rate, which directly influences the network's token emission rate and economic model. - Note: + Note: This is currently fixed such that the Bittensor blockchain emmits 7200 Tao per day. Args: @@ -2547,10 +2544,10 @@ def difficulty(self, netuid: int, block: Optional[int] = None) -> Optional[int]: return None return self.query_subtensor("Difficulty", block, [netuid]).value - def burn(self, netuid: int, block: Optional[int] = None) -> Optional[Balance]: + def recycle(self, netuid: int, block: Optional[int] = None) -> Optional[Balance]: """ Retrieves the 'Burn' hyperparameter for a specified subnet. The 'Burn' parameter represents the - amount of Tao that is effectively removed from circulation within the Bittensor network. + amount of Tao that is effectively recycled within the Bittensor network. Args: netuid (int): The unique identifier of the subnet. @@ -2559,8 +2556,8 @@ def burn(self, netuid: int, block: Optional[int] = None) -> Optional[Balance]: Returns: Optional[Balance]: The value of the 'Burn' hyperparameter if the subnet exists, None otherwise. 
- Understanding the 'Burn' rate is essential for analyzing the network's economic model, particularly - how it manages inflation and the overall supply of its native token Tao. + Understanding the 'Burn' rate is essential for analyzing the network registration usage, particularly + how it is correlated with user activity and the overall cost of participation in a given subnet. """ if not self.subnet_exists(netuid, block): return None diff --git a/bittensor/synapse.py b/bittensor/synapse.py index d6d9551ca0..338e661744 100644 --- a/bittensor/synapse.py +++ b/bittensor/synapse.py @@ -96,8 +96,8 @@ def cast_float(raw: str) -> float: class TerminalInfo(pydantic.BaseModel): """ - TerminalInfo encapsulates detailed information about a network synapse (node) involved in a communication process. - + TerminalInfo encapsulates detailed information about a network synapse (node) involved in a communication process. + This class serves as a metadata carrier, providing essential details about the state and configuration of a terminal during network interactions. This is a crucial class in the Bittensor framework. @@ -122,7 +122,7 @@ class TerminalInfo(pydantic.BaseModel): signature (str): Digital signature verifying the tuple of nonce, axon_hotkey, dendrite_hotkey, and uuid, critical for ensuring data authenticity and security. Usage:: - + # Creating a TerminalInfo instance terminal_info = TerminalInfo( status_code=200, @@ -254,7 +254,7 @@ class Config: class Synapse(pydantic.BaseModel): """ Represents a Synapse in the Bittensor network, serving as a communication schema between neurons (nodes). - + Synapses ensure the format and correctness of transmission tensors according to the Bittensor protocol. Each Synapse type is tailored for a specific machine learning (ML) task, following unique compression and communication processes. This helps maintain sanitized, correct, and useful information flow across the network. @@ -306,7 +306,7 @@ class Synapse(pydantic.BaseModel): the robustness of their applications. Example usage:: - + # Creating a Synapse instance with default values synapse = Synapse() @@ -377,7 +377,7 @@ def deserialize(self) -> "Synapse": In its default form, this method simply returns the instance of the Synapse itself without any modifications. Subclasses of Synapse can override this method to add specific deserialization behaviors, such as converting serialized data back into complex object types or performing additional data integrity checks. Example:: - + class CustomSynapse(Synapse): additional_data: str @@ -584,21 +584,21 @@ def failed_verification(self) -> bool: def to_headers(self) -> dict: """ - Converts the state of a Synapse instance into a dictionary of HTTP headers. - + Converts the state of a Synapse instance into a dictionary of HTTP headers. + This method is essential for packaging Synapse data for network transmission in the Bittensor framework, ensuring that each key aspect of the Synapse is represented in a format suitable for HTTP communication. Process: - + 1. Basic Information: It starts by including the ``name`` and ``timeout`` of the Synapse, which are fundamental for identifying the query and managing its lifespan on the network. 2. Complex Objects: The method serializes the ``axon`` and ``dendrite`` objects, if present, into strings. This serialization is crucial for preserving the state and structure of these objects over the network. 3. Encoding: Non-optional complex objects are serialized and encoded in base64, making them safe for HTTP transport. 4. 
Size Metrics: The method calculates and adds the size of headers and the total object size, providing valuable information for network bandwidth management. Example Usage:: - + synapse = Synapse(name="ExampleSynapse", timeout=30) headers = synapse.to_headers() # headers now contains a dictionary representing the Synapse instance @@ -665,20 +665,20 @@ def to_headers(self) -> dict: @property def body_hash(self) -> str: """ - Computes a SHA3-256 hash of the serialized body of the Synapse instance. - + Computes a SHA3-256 hash of the serialized body of the Synapse instance. + This hash is used to ensure the data integrity and security of the Synapse instance when it's transmitted across the network. It is a crucial feature for verifying that the data received is the same as the data sent. Process: - + 1. Iterates over each required field as specified in ``required_fields_hash``. 2. Concatenates the string representation of these fields. 3. Applies SHA3-256 hashing to the concatenated string to produce a unique fingerprint of the data. Example:: - + synapse = Synapse(name="ExampleRoute", timeout=10) hash_value = synapse.body_hash # hash_value is the SHA3-256 hash of the serialized body of the Synapse instance @@ -703,19 +703,19 @@ def body_hash(self) -> str: @classmethod def parse_headers_to_inputs(cls, headers: dict) -> dict: """ - Interprets and transforms a given dictionary of headers into a structured dictionary, facilitating the reconstruction of Synapse objects. - + Interprets and transforms a given dictionary of headers into a structured dictionary, facilitating the reconstruction of Synapse objects. + This method is essential for parsing network-transmitted data back into a Synapse instance, ensuring data consistency and integrity. Process: - + 1. Separates headers into categories based on prefixes (``axon``, ``dendrite``, etc.). 2. Decodes and deserializes ``input_obj`` headers into their original objects. 3. Assigns simple fields directly from the headers to the input dictionary. Example:: - + received_headers = { 'bt_header_axon_address': '127.0.0.1', 'bt_header_dendrite_port': '8080', @@ -724,7 +724,7 @@ def parse_headers_to_inputs(cls, headers: dict) -> dict: inputs = Synapse.parse_headers_to_inputs(received_headers) # inputs now contains a structured representation of Synapse properties based on the headers - Note: + Note: This is handled automatically when calling :func:`Synapse.from_headers(headers)` and does not need to be called directly. Args: @@ -795,14 +795,14 @@ def parse_headers_to_inputs(cls, headers: dict) -> dict: @classmethod def from_headers(cls, headers: dict) -> "Synapse": """ - Constructs a new Synapse instance from a given headers dictionary, enabling the re-creation of the Synapse's state as it was prior to network transmission. - + Constructs a new Synapse instance from a given headers dictionary, enabling the re-creation of the Synapse's state as it was prior to network transmission. + This method is a key part of the deserialization process in the Bittensor network, allowing nodes to accurately reconstruct Synapse objects from received data. Example:: - + received_headers = { 'bt_header_axon_address': '127.0.0.1', 'bt_header_dendrite_port': '8080', diff --git a/bittensor/threadpool.py b/bittensor/threadpool.py index d4de054bde..2211331453 100644 --- a/bittensor/threadpool.py +++ b/bittensor/threadpool.py @@ -130,7 +130,7 @@ def __init__( initargs=(), ): """Initializes a new `ThreadPoolExecutor `_ instance. 
- + Args: max_workers: The maximum number of threads that can be used to execute the given calls. @@ -195,7 +195,7 @@ def add_args(cls, parser: argparse.ArgumentParser, prefix: str = None): @classmethod def config(cls) -> "bittensor.config": """Get config from the argument parser. - + Return: :func:`bittensor.config` object. """ parser = argparse.ArgumentParser() From 0f0cd1b70782e7abb2b035f4e5dd9fdce92b569a Mon Sep 17 00:00:00 2001 From: ifrit98 Date: Wed, 31 Jan 2024 16:58:41 +0000 Subject: [PATCH 05/23] missed burn cmd in mock subtensor --- bittensor/mock/subtensor_mock.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bittensor/mock/subtensor_mock.py b/bittensor/mock/subtensor_mock.py index 57b4e5011b..227fb1f78d 100644 --- a/bittensor/mock/subtensor_mock.py +++ b/bittensor/mock/subtensor_mock.py @@ -1127,7 +1127,7 @@ def _do_burned_register( raise Exception("Subnet does not exist") bal = self.get_balance(wallet.coldkeypub.ss58_address) - burn = self.burn(netuid=netuid) + burn = self.recycle(netuid=netuid) existential_deposit = self.get_existential_deposit() if bal < burn + existential_deposit: From 88579a2a2eb8dee2cb3cb54da58f70f3eb0c1302 Mon Sep 17 00:00:00 2001 From: ifrit98 Date: Wed, 31 Jan 2024 20:22:36 +0000 Subject: [PATCH 06/23] blac formatting --- bittensor/__init__.py | 4 +- bittensor/axon.py | 9 ++- bittensor/btlogging.py | 7 ++- bittensor/chain_data.py | 18 ++++-- bittensor/commands/delegates.py | 14 ++--- bittensor/commands/inspect.py | 12 ++-- bittensor/commands/metagraph.py | 8 ++- bittensor/commands/overview.py | 12 ++-- bittensor/commands/root.py | 8 ++- bittensor/commands/senate.py | 20 ++++--- bittensor/commands/stake.py | 6 +- bittensor/commands/wallets.py | 6 +- bittensor/config.py | 3 +- bittensor/dendrite.py | 38 ++++++------ bittensor/errors.py | 22 +++++++ bittensor/extrinsics/log_utilities.py | 26 ++++++--- bittensor/keyfile.py | 72 ++++++++++++----------- bittensor/mock/subtensor_mock.py | 20 ++++--- bittensor/subtensor.py | 84 ++++++++++++++------------- bittensor/synapse.py | 44 +++++++------- bittensor/threadpool.py | 4 +- bittensor/utils/__init__.py | 6 +- bittensor/utils/networking.py | 1 + bittensor/utils/stats.py | 1 + bittensor/utils/weight_utils.py | 1 + bittensor/wallet.py | 19 +++--- tests/unit_tests/test_wallet.py | 16 +++-- 27 files changed, 272 insertions(+), 209 deletions(-) diff --git a/bittensor/__init__.py b/bittensor/__init__.py index f27a85ebd4..8a562f5ce9 100644 --- a/bittensor/__init__.py +++ b/bittensor/__init__.py @@ -80,7 +80,9 @@ def debug(on: bool = True): __pipaddress__ = "https://pypi.org/pypi/bittensor/json" # Raw github url for delegates registry file -__delegates_details_url__: str = "https://raw.githubusercontent.com/opentensor/bittensor-delegates/main/public/delegates.json" +__delegates_details_url__: str = ( + "https://raw.githubusercontent.com/opentensor/bittensor-delegates/main/public/delegates.json" +) # Substrate ss58_format __ss58_format__ = 42 diff --git a/bittensor/axon.py b/bittensor/axon.py index 59f45ba4f0..46d331d653 100644 --- a/bittensor/axon.py +++ b/bittensor/axon.py @@ -1,5 +1,6 @@ """ Create and initialize Axon, which services the forward and backward requests from other neurons. """ + # The MIT License (MIT) # Copyright © 2021 Yuma Rao # Copyright © 2022 Opentensor Foundation @@ -1207,9 +1208,11 @@ async def verify(self, synapse: bittensor.Synapse): # We attempt to run the verification function using the synapse instance # created from the request. 
If this function runs without throwing an exception, # it means that the verification was successful. - await verify_fn(synapse) if inspect.iscoroutinefunction( - verify_fn - ) else verify_fn(synapse) + ( + await verify_fn(synapse) + if inspect.iscoroutinefunction(verify_fn) + else verify_fn(synapse) + ) except Exception as e: # If there was an exception during the verification process, we log that # there was a verification exception. diff --git a/bittensor/btlogging.py b/bittensor/btlogging.py index d17a0dd00a..25ead64d76 100644 --- a/bittensor/btlogging.py +++ b/bittensor/btlogging.py @@ -1,5 +1,6 @@ """ Standardized logging for Bittensor. """ + # The MIT License (MIT) # Copyright © 2021 Yuma Rao @@ -59,7 +60,7 @@ def __new__( logging_dir: str = None, ): r"""Instantiate Bittensor logging system backend. - + Args: config (bittensor.config, optional): bittensor.logging.config() @@ -131,8 +132,8 @@ def __new__( @classmethod def config(cls): """Get config from the argument parser. - - Return: + + Return: bittensor.config object """ parser = argparse.ArgumentParser() diff --git a/bittensor/chain_data.py b/bittensor/chain_data.py index be2486c8b7..f48971fcbb 100644 --- a/bittensor/chain_data.py +++ b/bittensor/chain_data.py @@ -341,6 +341,7 @@ class NeuronInfo: r""" Dataclass for neuron metadata. """ + hotkey: str coldkey: str uid: int @@ -516,6 +517,7 @@ class NeuronInfoLite: r""" Dataclass for neuron metadata, but without the weights and bonds. """ + hotkey: str coldkey: str uid: int @@ -673,6 +675,7 @@ class PrometheusInfo: r""" Dataclass for prometheus info. """ + block: int version: int ip: str @@ -694,6 +697,7 @@ class DelegateInfo: r""" Dataclass for delegate info. """ + hotkey_ss58: str # Hotkey of delegate total_stake: Balance # Total stake of the delegate nominators: List[ @@ -766,7 +770,7 @@ def delegated_list_from_vec_u8( cls, vec_u8: List[int] ) -> List[Tuple["DelegateInfo", Balance]]: r"""Returns a list of Tuples of DelegateInfo objects, and Balance, from a ``vec_u8``. - + This is the list of delegates that the user has delegated to, and the amount of stake delegated. """ decoded = from_scale_encoding(vec_u8, ChainDataType.DelegatedInfo, is_vec=True) @@ -787,6 +791,7 @@ class StakeInfo: r""" Dataclass for stake info. """ + hotkey_ss58: str # Hotkey address coldkey_ss58: str # Coldkey address stake: Balance # Stake for the hotkey-coldkey pair @@ -821,10 +826,10 @@ def list_of_tuple_from_vec_u8( cls, vec_u8: List[int] ) -> Dict[str, List["StakeInfo"]]: r"""Returns a list of StakeInfo objects from a ``vec_u8``.""" - decoded: Optional[ - List[Tuple(str, List[object])] - ] = from_scale_encoding_using_type_string( - input=vec_u8, type_string="Vec<(AccountId, Vec)>" + decoded: Optional[List[Tuple(str, List[object])]] = ( + from_scale_encoding_using_type_string( + input=vec_u8, type_string="Vec<(AccountId, Vec)>" + ) ) if decoded is None: @@ -857,6 +862,7 @@ class SubnetInfo: r""" Dataclass for subnet info. """ + netuid: int rho: int kappa: int @@ -948,6 +954,7 @@ class SubnetHyperparameters: r""" Dataclass for subnet hyperparameters. """ + rho: int kappa: int immunity_period: int @@ -1039,6 +1046,7 @@ class IPInfo: r""" Dataclass for associated IP Info. 
""" + ip: str ip_type: int protocol: int diff --git a/bittensor/commands/delegates.py b/bittensor/commands/delegates.py index e96a43963b..774a26e72a 100644 --- a/bittensor/commands/delegates.py +++ b/bittensor/commands/delegates.py @@ -101,9 +101,9 @@ def show_delegates( for prev_delegate in prev_delegates: prev_delegates_dict[prev_delegate.hotkey_ss58] = prev_delegate - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) if registered_delegate_info is None: bittensor.__console__.print( ":warning:[yellow]Could not get delegate info from chain.[/yellow]" @@ -793,9 +793,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): delegates.sort(key=lambda delegate: delegate[0].total_stake, reverse=True) total_delegated += sum(my_delegates.values()) - registered_delegate_info: Optional[ - DelegatesDetails - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[DelegatesDetails] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) if registered_delegate_info is None: bittensor.__console__.print( ":warning:[yellow]Could not get delegate info from chain.[/yellow]" @@ -845,7 +845,7 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): ), # f'{delegate.take * 100:.1f}%',s f"{ delegate[0].total_daily_return.tao * ( 1000 / ( 0.001 + delegate[0].total_stake.tao ) )!s:6.6}", - str(delegate_description) + str(delegate_description), # f'{delegate_profile.description:140.140}', ) diff --git a/bittensor/commands/inspect.py b/bittensor/commands/inspect.py index bdf2a00e26..f2b3caeaad 100644 --- a/bittensor/commands/inspect.py +++ b/bittensor/commands/inspect.py @@ -141,9 +141,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): ) bittensor.logging.debug(f"Netuids to check: {netuids}") - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) if registered_delegate_info is None: bittensor.__console__.print( ":warning:[yellow]Could not get delegate info from chain.[/yellow]" @@ -184,9 +184,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): "[overline white]Emission", footer_style="overline white", style="green" ) for wallet in tqdm(wallets): - delegates: List[ - Tuple(bittensor.DelegateInfo, bittensor.Balance) - ] = subtensor.get_delegated(coldkey_ss58=wallet.coldkeypub.ss58_address) + delegates: List[Tuple(bittensor.DelegateInfo, bittensor.Balance)] = ( + subtensor.get_delegated(coldkey_ss58=wallet.coldkeypub.ss58_address) + ) if not wallet.coldkeypub_file.exists_on_device(): continue cold_balance = subtensor.get_balance(wallet.coldkeypub.ss58_address) diff --git a/bittensor/commands/metagraph.py b/bittensor/commands/metagraph.py index d837e19028..73383e5ba1 100644 --- a/bittensor/commands/metagraph.py +++ b/bittensor/commands/metagraph.py @@ -124,9 +124,11 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): "*" if metagraph.validator_permit[uid] else "", str((metagraph.block.item() - metagraph.last_update[uid].item())), str(metagraph.active[uid].item()), - ep.ip + ":" + str(ep.port) - if ep.is_serving - else 
"[yellow]none[/yellow]", + ( + ep.ip + ":" + str(ep.port) + if ep.is_serving + else "[yellow]none[/yellow]" + ), ep.hotkey[:10], ep.coldkey[:10], ] diff --git a/bittensor/commands/overview.py b/bittensor/commands/overview.py index 068c11f004..8e94b1f2e1 100644 --- a/bittensor/commands/overview.py +++ b/bittensor/commands/overview.py @@ -372,11 +372,13 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): "{:.5f}".format(validator_trust), "*" if validator_permit else "", str(last_update), - bittensor.utils.networking.int_to_ip(nn.axon_info.ip) - + ":" - + str(nn.axon_info.port) - if nn.axon_info.port != 0 - else "[yellow]none[/yellow]", + ( + bittensor.utils.networking.int_to_ip(nn.axon_info.ip) + + ":" + + str(nn.axon_info.port) + if nn.axon_info.port != 0 + else "[yellow]none[/yellow]" + ), nn.hotkey, ] diff --git a/bittensor/commands/root.py b/bittensor/commands/root.py index 49355d23f2..0ac387bb13 100644 --- a/bittensor/commands/root.py +++ b/bittensor/commands/root.py @@ -192,9 +192,11 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): for neuron_data in root_neurons: table.add_row( str(neuron_data.uid), - delegate_info[neuron_data.hotkey].name - if neuron_data.hotkey in delegate_info - else "", + ( + delegate_info[neuron_data.hotkey].name + if neuron_data.hotkey in delegate_info + else "" + ), neuron_data.hotkey, "{:.5f}".format( float(subtensor.get_total_stake_for_hotkey(neuron_data.hotkey)) diff --git a/bittensor/commands/senate.py b/bittensor/commands/senate.py index adb704a582..20c02f8a98 100644 --- a/bittensor/commands/senate.py +++ b/bittensor/commands/senate.py @@ -93,9 +93,11 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): for ss58_address in senate_members: table.add_row( - delegate_info[ss58_address].name - if ss58_address in delegate_info - else "", + ( + delegate_info[ss58_address].name + if ss58_address in delegate_info + else "" + ), ss58_address, ) @@ -210,9 +212,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): senate_members = subtensor.get_senate_members() proposals = subtensor.get_proposals() - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) table = Table(show_footer=False) table.title = ( @@ -341,9 +343,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): console.print(":cross_mark: [red]Failed[/red]: Proposal not found.") return - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) table = Table(show_footer=False) table.title = "[white]Votes for Proposal {}".format(proposal_hash) diff --git a/bittensor/commands/stake.py b/bittensor/commands/stake.py index cbb210f34b..f93d4e24ab 100644 --- a/bittensor/commands/stake.py +++ b/bittensor/commands/stake.py @@ -392,9 +392,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): wallets = _get_coldkey_wallets_for_path(cli.config.wallet.path) else: wallets = [bittensor.wallet(config=cli.config)] - registered_delegate_info: Optional[ - Dict[str, DelegatesDetails] - ] = get_delegates_details(url=bittensor.__delegates_details_url__) + registered_delegate_info: Optional[Dict[str, 
DelegatesDetails]] = ( + get_delegates_details(url=bittensor.__delegates_details_url__) + ) def get_stake_accounts( wallet, subtensor diff --git a/bittensor/commands/wallets.py b/bittensor/commands/wallets.py index ea19155b6e..0c05cf75c4 100644 --- a/bittensor/commands/wallets.py +++ b/bittensor/commands/wallets.py @@ -199,9 +199,9 @@ def check_config(config: "bittensor.config"): else: config.ss58_address = prompt_answer if not bittensor.utils.is_valid_bittensor_address_or_public_key( - address=config.ss58_address - if config.ss58_address - else config.public_key_hex + address=( + config.ss58_address if config.ss58_address else config.public_key_hex + ) ): sys.exit(1) diff --git a/bittensor/config.py b/bittensor/config.py index 3962615c25..52ea745e91 100644 --- a/bittensor/config.py +++ b/bittensor/config.py @@ -1,6 +1,7 @@ """ Implementation of the config class, which manages the configuration of different Bittensor modules. """ + # The MIT License (MIT) # Copyright © 2021 Yuma Rao # Copyright © 2022 Opentensor Foundation @@ -247,7 +248,7 @@ def __parse_args__( args: List[str], parser: argparse.ArgumentParser = None, strict: bool = False ) -> argparse.Namespace: """Parses the passed args use the passed parser. - + Args: args (List[str]): List of arguments to parse. diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index bb41b81929..160d281c71 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -31,8 +31,8 @@ class dendrite(torch.nn.Module): """ - The Dendrite class, inheriting from PyTorch's Module class, represents the abstracted implementation of a network client module. - + The Dendrite class, inheriting from PyTorch's Module class, represents the abstracted implementation of a network client module. + In the brain analogy, dendrites receive signals from other neurons (in this case, network servers or axons), and the Dendrite class here is designed to send requests to those endpoint to recieve inputs. @@ -73,11 +73,11 @@ class dendrite(torch.nn.Module): aclose_session(self): Asynchronously closes the internal aiohttp client session. - NOTE: + NOTE: When working with async `aiohttp `_ client sessions, it is recommended to use a context manager. Example with a context manager:: - + >>> aysnc with dendrite(wallet = bittensor.wallet()) as d: >>> print(d) >>> d( ) # ping axon @@ -87,7 +87,7 @@ class dendrite(torch.nn.Module): However, you are able to safely call :func:`dendrite.query()` without a context manager in a synchronous setting. Example without a context manager:: - + >>> d = dendrite(wallet = bittensor.wallet() ) >>> print(d) >>> d( ) # ping axon @@ -141,7 +141,7 @@ async def session(self) -> aiohttp.ClientSession: the dendrite, adhering to the async nature of the network interactions in the Bittensor framework. Example usage:: - + import bittensor as bt # Import bittensor wallet = bt.wallet( ... ) # Initialize a wallet dendrite = bt.dendrite( wallet ) # Initialize a dendrite instance with the wallet @@ -167,7 +167,7 @@ def close_session(self): resources like open connections and internal buffers. It is crucial for preventing resource leakage and should be called when the dendrite instance is no longer in use, especially in synchronous contexts. - Note: + Note: This method utilizes asyncio's event loop to close the session asynchronously from a synchronous context. It is advisable to use this method only when asynchronous context management is not feasible. 
Usage: @@ -193,7 +193,7 @@ async def aclose_session(self): await :func:`dendrite_instance.aclose_session()`. Example:: - + async with dendrite_instance: # Operations using dendrite pass @@ -237,7 +237,7 @@ def _handle_request_errors(self, synapse, request_name, exception): request_name: The name of the request during which the exception occurred. exception: The exception object caught during the request. - Note: + Note: This method updates the synapse object in-place. """ if isinstance(exception, aiohttp.ClientConnectorError): @@ -263,7 +263,7 @@ def _log_outgoing_request(self, synapse): is crucial for monitoring and debugging network activity within the Bittensor network. To turn on debug messages, set the environment variable BITTENSOR_DEBUG to ``1``, or call the bittensor debug method like so:: - + import bittensor bittensor.debug() @@ -289,9 +289,7 @@ def _log_incoming_response(self, synapse): f"dendrite | <-- | {synapse.get_total_size()} B | {synapse.name} | {synapse.axon.hotkey} | {synapse.axon.ip}:{str(synapse.axon.port)} | {synapse.dendrite.status_code} | {synapse.dendrite.status_message}" ) - def query( - self, *args, **kwargs - ) -> Union[ + def query(self, *args, **kwargs) -> Union[ bittensor.Synapse, List[bittensor.Synapse], bittensor.StreamingSynapse, @@ -349,7 +347,7 @@ async def forward( returned, each containing the response from the corresponding Axon. For example:: - + >>> ... >>> wallet = bittensor.wallet() # Initialize a wallet >>> synapse = bittensor.Synapse(...) # Create a synapse object that contains query data @@ -362,7 +360,7 @@ async def forward( iterated over to process each chunk individually. For example:: - + >>> ... >>> dendrte = bittensor.dendrite(wallet = wallet) >>> async for chunk in dendrite.forward(axons, synapse, timeout, deserialize, run_async, streaming): @@ -768,7 +766,7 @@ async def __aenter__(self): Dendrite: The current instance of the Dendrite class. Usage:: - + async with Dendrite() as dendrite: await dendrite.some_async_method() """ @@ -787,11 +785,11 @@ async def __aexit__(self, exc_type, exc_value, traceback): traceback (TracebackType, optional): A traceback object encapsulating the call stack at the point where the exception was raised. Usage:: - + async with bt.dendrite( wallet ) as dendrite: await dendrite.some_async_method() - Note: + Note: This automatically closes the session by calling :func:`__aexit__` after the context closes. """ await self.aclose_session() @@ -803,11 +801,11 @@ def __del__(self): This method is invoked when the Dendrite instance is about to be destroyed. The destructor ensures that the aiohttp client session is closed before the instance is fully destroyed, releasing any remaining resources. - Note: + Note: Relying on the destructor for cleanup can be unpredictable. It is recommended to explicitly close sessions using the provided methods or the ``async with`` context manager. Usage:: - + dendrite = Dendrite() # ... some operations ... del dendrite # This will implicitly invoke the __del__ method and close the session. 
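The dendrite hunks above repeatedly stress explicit session cleanup over relying on ``__del__``. As a minimal usage sketch only (the wallet setup, the ``axons``/``synapse`` placeholders, and the ``timeout`` keyword follow the docstring examples in this diff and are not verified against the full signatures)::

    import bittensor as bt

    async def ping(axons, synapse):
        # Preferred: the async context manager closes the aiohttp session on exit.
        async with bt.dendrite(wallet=bt.wallet()) as dendrite:
            return await dendrite.forward(axons, synapse, timeout=12)

    # Without a context manager, close the session explicitly instead of
    # leaving cleanup to the destructor.
    dendrite = bt.dendrite(wallet=bt.wallet())
    responses = dendrite.query(axons, synapse)  # synchronous convenience wrapper
    dendrite.close_session()

Either form avoids the unpredictable destructor-based cleanup that the ``__del__`` docstring warns about.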
diff --git a/bittensor/errors.py b/bittensor/errors.py index 8b8440bb8d..5cd56cb31e 100644 --- a/bittensor/errors.py +++ b/bittensor/errors.py @@ -18,61 +18,73 @@ class ChainError(BaseException): r"""Base error for any chain related errors.""" + pass class ChainConnectionError(ChainError): r"""Error for any chain connection related errors.""" + pass class ChainTransactionError(ChainError): r"""Error for any chain transaction related errors.""" + pass class ChainQueryError(ChainError): r"""Error for any chain query related errors.""" + pass class StakeError(ChainTransactionError): r"""Error raised when a stake transaction fails.""" + pass class UnstakeError(ChainTransactionError): r"""Error raised when an unstake transaction fails.""" + pass class IdentityError(ChainTransactionError): r"""Error raised when an identity transaction fails.""" + pass class NominationError(ChainTransactionError): r"""Error raised when a nomination transaction fails.""" + pass class TransferError(ChainTransactionError): r"""Error raised when a transfer transaction fails.""" + pass class RegistrationError(ChainTransactionError): r"""Error raised when a neuron registration transaction fails.""" + pass class NotRegisteredError(ChainTransactionError): r"""Error raised when a neuron is not registered, and the transaction requires it to be.""" + pass class NotDelegateError(StakeError): r"""Error raised when a hotkey you are trying to stake to is not a delegate.""" + pass @@ -84,49 +96,59 @@ class KeyFileError(Exception): class MetadataError(ChainTransactionError): r"""Error raised when metadata commitment transaction fails.""" + pass class InvalidRequestNameError(Exception): r"""This exception is raised when the request name is invalid. Ususally indicates a broken URL.""" + pass class UnknownSynapseError(Exception): r"""This exception is raised when the request name is not found in the Axon's forward_fns dictionary.""" + pass class SynapseParsingError(Exception): r"""This exception is raised when the request headers are unable to be parsed into the synapse type.""" + pass class NotVerifiedException(Exception): r"""This exception is raised when the request is not verified.""" + pass class BlacklistedException(Exception): r"""This exception is raised when the request is blacklisted.""" + pass class PriorityException(Exception): r"""This exception is raised when the request priority is not met.""" + pass class PostProcessException(Exception): r"""This exception is raised when the response headers cannot be updated.""" + pass class RunException(Exception): r"""This exception is raised when the requested function cannot be executed. Indicates a server error.""" + pass class InternalServerError(Exception): r"""This exception is raised when the requested function fails on the server. 
Indicates a server error.""" + pass diff --git a/bittensor/extrinsics/log_utilities.py b/bittensor/extrinsics/log_utilities.py index bcd6f09890..448dfa69a9 100644 --- a/bittensor/extrinsics/log_utilities.py +++ b/bittensor/extrinsics/log_utilities.py @@ -347,9 +347,11 @@ def print_synergy_table(self, stats: Dict, syn_loss_diff: Dict, sort_col: str): ( "[bright_cyan]{:.2f}[/bright_cyan]" if t == s - else "[magenta]{:.3f}[/magenta]" - if syn_loss_diff[s[0]][t[0]] > 0 - else "[dim]{:.0f}[/dim]" + else ( + "[magenta]{:.3f}[/magenta]" + if syn_loss_diff[s[0]][t[0]] > 0 + else "[dim]{:.0f}[/dim]" + ) ) .format(syn_loss_diff[s[0]][t[0]]) .replace("0.", ".") @@ -409,12 +411,18 @@ def print_stats_table( ] # available columns intersecting with stats_keys rows = [ [ - ("", 0) - if key not in stat - else ( - ("* " if key == "uid" and mark_uids and uid in mark_uids else "") - + txt.format(stat[key]), - stat[key], + ( + ("", 0) + if key not in stat + else ( + ( + "* " + if key == "uid" and mark_uids and uid in mark_uids + else "" + ) + + txt.format(stat[key]), + stat[key], + ) ) for _, key, txt, _ in columns ] diff --git a/bittensor/keyfile.py b/bittensor/keyfile.py index 0bcadaa86b..722687bfe1 100644 --- a/bittensor/keyfile.py +++ b/bittensor/keyfile.py @@ -44,7 +44,7 @@ def serialized_keypair_to_keyfile_data(keypair: "bittensor.Keypair") -> bytes: """Serializes keypair object into keyfile data. - + Args: keypair (bittensor.Keypair): The keypair object to be serialized. Returns: @@ -54,14 +54,16 @@ def serialized_keypair_to_keyfile_data(keypair: "bittensor.Keypair") -> bytes: "accountId": "0x" + keypair.public_key.hex() if keypair.public_key else None, "publicKey": "0x" + keypair.public_key.hex() if keypair.public_key else None, "secretPhrase": keypair.mnemonic if keypair.mnemonic else None, - "secretSeed": "0x" - + ( - keypair.seed_hex - if isinstance(keypair.seed_hex, str) - else keypair.seed_hex.hex() - ) - if keypair.seed_hex - else None, + "secretSeed": ( + "0x" + + ( + keypair.seed_hex + if isinstance(keypair.seed_hex, str) + else keypair.seed_hex.hex() + ) + if keypair.seed_hex + else None + ), "ss58Address": keypair.ss58_address if keypair.ss58_address else None, } data = json.dumps(json_data).encode() @@ -70,7 +72,7 @@ def serialized_keypair_to_keyfile_data(keypair: "bittensor.Keypair") -> bytes: def deserialize_keypair_from_keyfile_data(keyfile_data: bytes) -> "bittensor.Keypair": """Deserializes Keypair object from passed keyfile data. - + Args: keyfile_data (bytes): The keyfile data as bytes to be loaded. Returns: @@ -118,7 +120,7 @@ def deserialize_keypair_from_keyfile_data(keyfile_data: bytes) -> "bittensor.Key def validate_password(password: str) -> bool: """Validates the password against a password policy. - + Args: password (str): The password to verify. Returns: @@ -145,7 +147,7 @@ def validate_password(password: str) -> bool: def ask_password_to_encrypt() -> str: """Prompts the user to enter a password for key encryption. - + Returns: password (str): The valid password entered by the user. """ @@ -158,7 +160,7 @@ def ask_password_to_encrypt() -> str: def keyfile_data_is_encrypted_nacl(keyfile_data: bytes) -> bool: """Returns true if the keyfile data is NaCl encrypted. - + Args: keyfile_data ( bytes, required ): Bytes to validate. @@ -171,7 +173,7 @@ def keyfile_data_is_encrypted_nacl(keyfile_data: bytes) -> bool: def keyfile_data_is_encrypted_ansible(keyfile_data: bytes) -> bool: """Returns true if the keyfile data is ansible encrypted. 
- + Args: keyfile_data (bytes): The bytes to validate. Returns: @@ -192,7 +194,7 @@ def keyfile_data_is_encrypted_legacy(keyfile_data: bytes) -> bool: def keyfile_data_is_encrypted(keyfile_data: bytes) -> bool: """Returns ``true`` if the keyfile data is encrypted. - + Args: keyfile_data (bytes): The bytes to validate. Returns: @@ -207,7 +209,7 @@ def keyfile_data_is_encrypted(keyfile_data: bytes) -> bool: def keyfile_data_encryption_method(keyfile_data: bytes) -> bool: """Returns ``true`` if the keyfile data is encrypted. - + Args: keyfile_data ( bytes, required ): Bytes to validate @@ -236,7 +238,7 @@ def legacy_encrypt_keyfile_data(keyfile_data: bytes, password: str = None) -> by def encrypt_keyfile_data(keyfile_data: bytes, password: str = None) -> bytes: """Encrypts the passed keyfile data using ansible vault. - + Args: keyfile_data (bytes): The bytes to encrypt. password (str, optional): The password used to encrypt the data. If ``None``, asks for user input. @@ -260,7 +262,7 @@ def encrypt_keyfile_data(keyfile_data: bytes, password: str = None) -> bytes: def get_coldkey_password_from_environment(coldkey_name: str) -> Optional[str]: """Retrieves the cold key password from the environment variables. - + Args: coldkey_name (str): The name of the cold key. Returns: @@ -279,7 +281,7 @@ def decrypt_keyfile_data( keyfile_data: bytes, password: str = None, coldkey_name: Optional[str] = None ) -> bytes: """Decrypts the passed keyfile data using ansible vault. - + Args: keyfile_data (bytes): The bytes to decrypt. password (str, optional): The password used to decrypt the data. If ``None``, asks for user input. @@ -373,7 +375,7 @@ def __repr__(self): @property def keypair(self) -> "bittensor.Keypair": """Returns the keypair from path, decrypts data if the file is encrypted. - + Returns: keypair (bittensor.Keypair): The keypair stored under the path. Raises: @@ -384,7 +386,7 @@ def keypair(self) -> "bittensor.Keypair": @property def data(self) -> bytes: """Returns the keyfile data under path. - + Returns: keyfile_data (bytes): The keyfile data stored under the path. Raises: @@ -395,7 +397,7 @@ def data(self) -> bytes: @property def keyfile_data(self) -> bytes: """Returns the keyfile data under path. - + Returns: keyfile_data (bytes): The keyfile data stored under the path. Raises: @@ -411,7 +413,7 @@ def set_keypair( password: str = None, ): """Writes the keypair to the file and optionally encrypts data. - + Args: keypair (bittensor.Keypair): The keypair to store under the path. encrypt (bool, optional): If ``True``, encrypts the file under the path. Default is ``True``. @@ -428,7 +430,7 @@ def set_keypair( def get_keypair(self, password: str = None) -> "bittensor.Keypair": """Returns the keypair from the path, decrypts data if the file is encrypted. - + Args: password (str, optional): The password used to decrypt the file. If ``None``, asks for user input. Returns: @@ -453,7 +455,7 @@ def make_dirs(self): def exists_on_device(self) -> bool: """Returns ``True`` if the file exists on the device. - + Returns: on_device (bool): ``True`` if the file is on the device. """ @@ -463,7 +465,7 @@ def exists_on_device(self) -> bool: def is_readable(self) -> bool: """Returns ``True`` if the file under path is readable. - + Returns: readable (bool): ``True`` if the file is readable. """ @@ -475,7 +477,7 @@ def is_readable(self) -> bool: def is_writable(self) -> bool: """Returns ``True`` if the file under path is writable. - + Returns: writable (bool): ``True`` if the file is writable. 
""" @@ -485,7 +487,7 @@ def is_writable(self) -> bool: def is_encrypted(self) -> bool: """Returns ``True`` if the file under path is encrypted. - + Returns: encrypted (bool): ``True`` if the file is encrypted. """ @@ -497,7 +499,7 @@ def is_encrypted(self) -> bool: def _may_overwrite(self) -> bool: """Asks the user if it is okay to overwrite the file. - + Returns: may_overwrite (bool): ``True`` if the user allows overwriting the file. """ @@ -508,7 +510,7 @@ def check_and_update_encryption( self, print_result: bool = True, no_prompt: bool = False ): """Check the version of keyfile and update if needed. - + Args: print_result (bool): Print the checking result or not. @@ -609,7 +611,7 @@ def check_and_update_encryption( def encrypt(self, password: str = None): """Encrypts the file under the path. - + Args: password (str, optional): The password for encryption. If ``None``, asks for user input. Raises: @@ -636,7 +638,7 @@ def encrypt(self, password: str = None): def decrypt(self, password: str = None): """Decrypts the file under the path. - + Args: password (str, optional): The password for decryption. If ``None``, asks for user input. Raises: @@ -665,7 +667,7 @@ def decrypt(self, password: str = None): def _read_keyfile_data_from_file(self) -> bytes: """Reads the keyfile data from the file. - + Returns: keyfile_data (bytes): The keyfile data stored under the path. Raises: @@ -685,7 +687,7 @@ def _read_keyfile_data_from_file(self) -> bytes: def _write_keyfile_data_to_file(self, keyfile_data: bytes, overwrite: bool = False): """Writes the keyfile data to the file. - + Args: keyfile_data (bytes): The byte data to store under the path. overwrite (bool, optional): If ``True``, overwrites the data without asking for permission from the user. Default is ``False``. @@ -707,7 +709,7 @@ def _write_keyfile_data_to_file(self, keyfile_data: bytes, overwrite: bool = Fal class Mockkeyfile: """ The Mockkeyfile is a mock object representing a keyfile that does not exist on the device. - + It is designed for use in testing scenarios and simulations where actual filesystem operations are not required. The keypair stored in the Mockkeyfile is treated as non-encrypted and the data is stored as a serialized string. """ diff --git a/bittensor/mock/subtensor_mock.py b/bittensor/mock/subtensor_mock.py index 57b4e5011b..a3775226bc 100644 --- a/bittensor/mock/subtensor_mock.py +++ b/bittensor/mock/subtensor_mock.py @@ -145,15 +145,21 @@ def __init__( ] = None, ): _records = [ - (MockSubtensorValue(value=record[0]), MockSubtensorValue(value=record[1])) - # Make sure record is a tuple of MockSubtensorValue (dict with value attr) - if not ( - isinstance(record, tuple) - and all( - isinstance(item, dict) and hasattr(item, "value") for item in record + ( + ( + MockSubtensorValue(value=record[0]), + MockSubtensorValue(value=record[1]), ) + # Make sure record is a tuple of MockSubtensorValue (dict with value attr) + if not ( + isinstance(record, tuple) + and all( + isinstance(item, dict) and hasattr(item, "value") + for item in record + ) + ) + else record ) - else record for record in records ] diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 137d416ce9..1bd766c387 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -93,8 +93,8 @@ class ParamWithTypes(TypedDict): class subtensor: """ - The Subtensor class in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, facilitating a range of operations essential for the decentralized machine learning network. 
- + The Subtensor class in Bittensor serves as a crucial interface for interacting with the Bittensor blockchain, facilitating a range of operations essential for the decentralized machine learning network. + This class enables neurons (network participants) to engage in activities such as registering on the network, managing staked weights, setting inter-neuronal weights, and participating in consensus mechanisms. @@ -118,7 +118,7 @@ class subtensor: chain_endpoint (str): The blockchain node endpoint URL, enabling direct communication with the Bittensor blockchain for transaction processing and data retrieval. Example Usage:: - + # Connect to the main Bittensor network (Finney). finney_subtensor = subtensor(network='finney') @@ -204,12 +204,12 @@ def add_args(cls, parser: argparse.ArgumentParser, prefix: str = None): @staticmethod def determine_chain_endpoint_and_network(network: str): """Determines the chain endpoint and network from the passed network or chain_endpoint. - + Args: network (str): The network flag. The choices are: ``-- finney`` (main network), ``-- archive`` (archive network +300 blocks), ``-- local`` (local running network), ``-- test`` (test network). chain_endpoint (str): The chain endpoint flag. If set, overrides the network argument. Returns: - network (str): The network flag. + network (str): The network flag. chain_endpoint (str): The chain endpoint flag. If set, overrides the ``network`` argument. """ if network == None: @@ -310,7 +310,7 @@ def __init__( """ Initializes a Subtensor interface for interacting with the Bittensor blockchain. - NOTE: + NOTE: Currently subtensor defaults to the ``finney`` network. This will change in a future release. We strongly encourage users to run their own local subtensor node whenever possible. This increases @@ -748,7 +748,7 @@ def run_faucet( This function is part of Bittensor's onboarding process, ensuring that new neurons have the necessary resources to begin their journey in the decentralized AI network. - Note: + Note: This is for testnet ONLY and is disabled currently. You must build your own staging subtensor chain with the ``--features pow-faucet`` argument to enable this. """ return run_faucet_extrinsic( @@ -810,7 +810,7 @@ def _do_pow_register( wait_for_finalization: bool = True, ) -> Tuple[bool, Optional[str]]: """Sends a (POW) register extrinsic to the chain. - + Args: netuid (int): The subnet to register on. wallet (bittensor.wallet): The wallet to register. @@ -1042,7 +1042,7 @@ def _do_transfer( wait_for_finalization: bool = False, ) -> Tuple[bool, Optional[str], Optional[str]]: """Sends a transfer extrinsic to the chain. - + Args: wallet (:func:`bittensor.wallet`): Wallet object. dest (str): Destination public key address. @@ -1520,7 +1520,7 @@ def _do_stake( wait_for_finalization: bool = False, ) -> bool: """Sends a stake extrinsic to the chain. - + Args: wallet (:func:`bittensor.wallet`): Wallet object that can sign the extrinsic. hotkey_ss58 (str): Hotkey ``ss58`` address to stake to. @@ -1647,7 +1647,7 @@ def _do_unstake( wait_for_finalization: bool = False, ) -> bool: """Sends an unstake extrinsic to the chain. - + Args: wallet (:func:`bittensor.wallet`): Wallet object that can sign the extrinsic. hotkey_ss58 (str): Hotkey ``ss58`` address to unstake from. @@ -2055,7 +2055,7 @@ def query_identity( detailed identity information about a specific neuron, which is a crucial aspect of the network's decentralized identity and governance system. 
- NOTE: + NOTE: See the `Bittensor CLI documentation `_ for supported identity parameters. Args: @@ -2076,9 +2076,9 @@ def make_substrate_call_with_retry(): module="Registry", storage_function="IdentityOf", params=[key], - block_hash=None - if block == None - else substrate.get_block_hash(block), + block_hash=( + None if block == None else substrate.get_block_hash(block) + ), ) identity_info = make_substrate_call_with_retry() @@ -2098,7 +2098,7 @@ def update_identity( Updates the identity of a neuron on the Bittensor blockchain. This function allows neurons to modify their identity attributes, reflecting changes in their roles, stakes, or other network-specific parameters. - NOTE: + NOTE: See the `Bittensor CLI documentation `_ for supported identity parameters. Args: @@ -2197,9 +2197,9 @@ def make_substrate_call_with_retry(): module="SubtensorModule", storage_function=name, params=params, - block_hash=None - if block == None - else substrate.get_block_hash(block), + block_hash=( + None if block == None else substrate.get_block_hash(block) + ), ) return make_substrate_call_with_retry() @@ -2235,9 +2235,9 @@ def make_substrate_call_with_retry(): module="SubtensorModule", storage_function=name, params=params, - block_hash=None - if block == None - else substrate.get_block_hash(block), + block_hash=( + None if block == None else substrate.get_block_hash(block) + ), ) return make_substrate_call_with_retry() @@ -2269,9 +2269,9 @@ def make_substrate_call_with_retry(): return substrate.get_constant( module_name=module_name, constant_name=constant_name, - block_hash=None - if block == None - else substrate.get_block_hash(block), + block_hash=( + None if block == None else substrate.get_block_hash(block) + ), ) return make_substrate_call_with_retry() @@ -2310,9 +2310,9 @@ def make_substrate_call_with_retry(): module=module, storage_function=name, params=params, - block_hash=None - if block == None - else substrate.get_block_hash(block), + block_hash=( + None if block == None else substrate.get_block_hash(block) + ), ) return make_substrate_call_with_retry() @@ -2350,9 +2350,9 @@ def make_substrate_call_with_retry(): module=module, storage_function=name, params=params, - block_hash=None - if block == None - else substrate.get_block_hash(block), + block_hash=( + None if block == None else substrate.get_block_hash(block) + ), ) return make_substrate_call_with_retry() @@ -2420,9 +2420,11 @@ def query_runtime_api( json_result = self.state_call( method=f"{runtime_api}_{method}", - data="0x" - if params is None - else self._encode_params(call_definition=call_definition, params=params), + data=( + "0x" + if params is None + else self._encode_params(call_definition=call_definition, params=params) + ), block=block, ) @@ -2474,7 +2476,7 @@ def rho(self, netuid: int, block: Optional[int] = None) -> Optional[int]: Retrieves the 'Rho' hyperparameter for a specified subnet within the Bittensor network. 'Rho' represents the global inflation rate, which directly influences the network's token emission rate and economic model. - Note: + Note: This is currently fixed such that the Bittensor blockchain emmits 7200 Tao per day. 
Args: @@ -4090,9 +4092,9 @@ def make_substrate_call_with_retry(): module="System", storage_function="Account", params=[address], - block_hash=None - if block == None - else substrate.get_block_hash(block), + block_hash=( + None if block == None else substrate.get_block_hash(block) + ), ) result = make_substrate_call_with_retry() @@ -4143,9 +4145,9 @@ def make_substrate_call_with_retry(): return substrate.query_map( module="System", storage_function="Account", - block_hash=None - if block == None - else substrate.get_block_hash(block), + block_hash=( + None if block == None else substrate.get_block_hash(block) + ), ) result = make_substrate_call_with_retry() diff --git a/bittensor/synapse.py b/bittensor/synapse.py index d6d9551ca0..338e661744 100644 --- a/bittensor/synapse.py +++ b/bittensor/synapse.py @@ -96,8 +96,8 @@ def cast_float(raw: str) -> float: class TerminalInfo(pydantic.BaseModel): """ - TerminalInfo encapsulates detailed information about a network synapse (node) involved in a communication process. - + TerminalInfo encapsulates detailed information about a network synapse (node) involved in a communication process. + This class serves as a metadata carrier, providing essential details about the state and configuration of a terminal during network interactions. This is a crucial class in the Bittensor framework. @@ -122,7 +122,7 @@ class TerminalInfo(pydantic.BaseModel): signature (str): Digital signature verifying the tuple of nonce, axon_hotkey, dendrite_hotkey, and uuid, critical for ensuring data authenticity and security. Usage:: - + # Creating a TerminalInfo instance terminal_info = TerminalInfo( status_code=200, @@ -254,7 +254,7 @@ class Config: class Synapse(pydantic.BaseModel): """ Represents a Synapse in the Bittensor network, serving as a communication schema between neurons (nodes). - + Synapses ensure the format and correctness of transmission tensors according to the Bittensor protocol. Each Synapse type is tailored for a specific machine learning (ML) task, following unique compression and communication processes. This helps maintain sanitized, correct, and useful information flow across the network. @@ -306,7 +306,7 @@ class Synapse(pydantic.BaseModel): the robustness of their applications. Example usage:: - + # Creating a Synapse instance with default values synapse = Synapse() @@ -377,7 +377,7 @@ def deserialize(self) -> "Synapse": In its default form, this method simply returns the instance of the Synapse itself without any modifications. Subclasses of Synapse can override this method to add specific deserialization behaviors, such as converting serialized data back into complex object types or performing additional data integrity checks. Example:: - + class CustomSynapse(Synapse): additional_data: str @@ -584,21 +584,21 @@ def failed_verification(self) -> bool: def to_headers(self) -> dict: """ - Converts the state of a Synapse instance into a dictionary of HTTP headers. - + Converts the state of a Synapse instance into a dictionary of HTTP headers. + This method is essential for packaging Synapse data for network transmission in the Bittensor framework, ensuring that each key aspect of the Synapse is represented in a format suitable for HTTP communication. Process: - + 1. Basic Information: It starts by including the ``name`` and ``timeout`` of the Synapse, which are fundamental for identifying the query and managing its lifespan on the network. 2. Complex Objects: The method serializes the ``axon`` and ``dendrite`` objects, if present, into strings. 
This serialization is crucial for preserving the state and structure of these objects over the network. 3. Encoding: Non-optional complex objects are serialized and encoded in base64, making them safe for HTTP transport. 4. Size Metrics: The method calculates and adds the size of headers and the total object size, providing valuable information for network bandwidth management. Example Usage:: - + synapse = Synapse(name="ExampleSynapse", timeout=30) headers = synapse.to_headers() # headers now contains a dictionary representing the Synapse instance @@ -665,20 +665,20 @@ def to_headers(self) -> dict: @property def body_hash(self) -> str: """ - Computes a SHA3-256 hash of the serialized body of the Synapse instance. - + Computes a SHA3-256 hash of the serialized body of the Synapse instance. + This hash is used to ensure the data integrity and security of the Synapse instance when it's transmitted across the network. It is a crucial feature for verifying that the data received is the same as the data sent. Process: - + 1. Iterates over each required field as specified in ``required_fields_hash``. 2. Concatenates the string representation of these fields. 3. Applies SHA3-256 hashing to the concatenated string to produce a unique fingerprint of the data. Example:: - + synapse = Synapse(name="ExampleRoute", timeout=10) hash_value = synapse.body_hash # hash_value is the SHA3-256 hash of the serialized body of the Synapse instance @@ -703,19 +703,19 @@ def body_hash(self) -> str: @classmethod def parse_headers_to_inputs(cls, headers: dict) -> dict: """ - Interprets and transforms a given dictionary of headers into a structured dictionary, facilitating the reconstruction of Synapse objects. - + Interprets and transforms a given dictionary of headers into a structured dictionary, facilitating the reconstruction of Synapse objects. + This method is essential for parsing network-transmitted data back into a Synapse instance, ensuring data consistency and integrity. Process: - + 1. Separates headers into categories based on prefixes (``axon``, ``dendrite``, etc.). 2. Decodes and deserializes ``input_obj`` headers into their original objects. 3. Assigns simple fields directly from the headers to the input dictionary. Example:: - + received_headers = { 'bt_header_axon_address': '127.0.0.1', 'bt_header_dendrite_port': '8080', @@ -724,7 +724,7 @@ def parse_headers_to_inputs(cls, headers: dict) -> dict: inputs = Synapse.parse_headers_to_inputs(received_headers) # inputs now contains a structured representation of Synapse properties based on the headers - Note: + Note: This is handled automatically when calling :func:`Synapse.from_headers(headers)` and does not need to be called directly. Args: @@ -795,14 +795,14 @@ def parse_headers_to_inputs(cls, headers: dict) -> dict: @classmethod def from_headers(cls, headers: dict) -> "Synapse": """ - Constructs a new Synapse instance from a given headers dictionary, enabling the re-creation of the Synapse's state as it was prior to network transmission. - + Constructs a new Synapse instance from a given headers dictionary, enabling the re-creation of the Synapse's state as it was prior to network transmission. + This method is a key part of the deserialization process in the Bittensor network, allowing nodes to accurately reconstruct Synapse objects from received data. 
Example:: - + received_headers = { 'bt_header_axon_address': '127.0.0.1', 'bt_header_dendrite_port': '8080', diff --git a/bittensor/threadpool.py b/bittensor/threadpool.py index d4de054bde..2211331453 100644 --- a/bittensor/threadpool.py +++ b/bittensor/threadpool.py @@ -130,7 +130,7 @@ def __init__( initargs=(), ): """Initializes a new `ThreadPoolExecutor `_ instance. - + Args: max_workers: The maximum number of threads that can be used to execute the given calls. @@ -195,7 +195,7 @@ def add_args(cls, parser: argparse.ArgumentParser, prefix: str = None): @classmethod def config(cls) -> "bittensor.config": """Get config from the argument parser. - + Return: :func:`bittensor.config` object. """ parser = argparse.ArgumentParser() diff --git a/bittensor/utils/__init__.py b/bittensor/utils/__init__.py index 74485fc630..a79ed2cf38 100644 --- a/bittensor/utils/__init__.py +++ b/bittensor/utils/__init__.py @@ -162,9 +162,9 @@ def get_explorer_url_for_network( explorer_urls: Optional[Dict[str, str]] = {} # Will be None if the network is not known. i.e. not in network_map - explorer_root_urls: Optional[ - Dict[str, str] - ] = get_explorer_root_url_by_network_from_map(network, network_map) + explorer_root_urls: Optional[Dict[str, str]] = ( + get_explorer_root_url_by_network_from_map(network, network_map) + ) if explorer_root_urls != {}: # We are on a known network. diff --git a/bittensor/utils/networking.py b/bittensor/utils/networking.py index d8e1b79e9f..f8bc105ba4 100644 --- a/bittensor/utils/networking.py +++ b/bittensor/utils/networking.py @@ -1,5 +1,6 @@ """ Utils for handling local network with ip and ports. """ + # The MIT License (MIT) # Copyright © 2021-2022 Yuma Rao # Copyright © 2022-2023 Opentensor Foundation diff --git a/bittensor/utils/stats.py b/bittensor/utils/stats.py index 117eb43e4b..fd52dade20 100644 --- a/bittensor/utils/stats.py +++ b/bittensor/utils/stats.py @@ -1,5 +1,6 @@ """ A exponential moving average that updates values based on time since last update. """ + # The MIT License (MIT) # Copyright © 2021 Yuma Rao diff --git a/bittensor/utils/weight_utils.py b/bittensor/utils/weight_utils.py index 102f145606..0c5c944a0e 100644 --- a/bittensor/utils/weight_utils.py +++ b/bittensor/utils/weight_utils.py @@ -1,5 +1,6 @@ """ Conversion for weight between chain representation and torch tensor """ + # The MIT License (MIT) # Copyright © 2021 Yuma Rao diff --git a/bittensor/wallet.py b/bittensor/wallet.py index 29a6129ca2..40b36aa21b 100644 --- a/bittensor/wallet.py +++ b/bittensor/wallet.py @@ -1,5 +1,6 @@ """ Implementation of the wallet class, which manages balances with staking and transfer. Also manages hotkey and coldkey. """ + # The MIT License (MIT) # Copyright © 2021 Yuma Rao @@ -668,8 +669,7 @@ def regenerate_coldkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... @overload def regenerate_coldkey( @@ -678,8 +678,7 @@ def regenerate_coldkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... @overload def regenerate_coldkey( @@ -688,8 +687,7 @@ def regenerate_coldkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... def regenerate_coldkey( self, @@ -776,8 +774,7 @@ def regenerate_hotkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... 
@overload def regenerate_hotkey( @@ -786,8 +783,7 @@ def regenerate_hotkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... @overload def regenerate_hotkey( @@ -796,8 +792,7 @@ def regenerate_hotkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": - ... + ) -> "wallet": ... def regenerate_hotkey( self, diff --git a/tests/unit_tests/test_wallet.py b/tests/unit_tests/test_wallet.py index 1c27d427bf..119e3a3115 100644 --- a/tests/unit_tests/test_wallet.py +++ b/tests/unit_tests/test_wallet.py @@ -338,9 +338,11 @@ def test_regen_coldkey_from_hex_seed_str(self): mock_set_coldkey.assert_called_once() keypair: bittensor.Keypair = mock_set_coldkey.call_args_list[0][0][0] self.assertRegex( - keypair.seed_hex - if isinstance(keypair.seed_hex, str) - else keypair.seed_hex.hex(), + ( + keypair.seed_hex + if isinstance(keypair.seed_hex, str) + else keypair.seed_hex.hex() + ), rf"(0x|){seed_str[2:]}", ) self.assertEqual( @@ -368,9 +370,11 @@ def test_regen_hotkey_from_hex_seed_str(self): mock_set_hotkey.assert_called_once() keypair: bittensor.Keypair = mock_set_hotkey.call_args_list[0][0][0] self.assertRegex( - keypair.seed_hex - if isinstance(keypair.seed_hex, str) - else keypair.seed_hex.hex(), + ( + keypair.seed_hex + if isinstance(keypair.seed_hex, str) + else keypair.seed_hex.hex() + ), rf"(0x|){seed_str[2:]}", ) self.assertEqual( From 1bc3e0e043b288e906ce187fa8f0dc0844620445 Mon Sep 17 00:00:00 2001 From: ifrit98 Date: Wed, 31 Jan 2024 22:06:11 +0000 Subject: [PATCH 07/23] black formatting with correct version --- bittensor/__init__.py | 4 +--- bittensor/chain_data.py | 8 ++++---- bittensor/commands/delegates.py | 12 ++++++------ bittensor/commands/inspect.py | 12 ++++++------ bittensor/commands/senate.py | 12 ++++++------ bittensor/commands/stake.py | 6 +++--- bittensor/dendrite.py | 4 +++- bittensor/utils/__init__.py | 6 +++--- bittensor/wallet.py | 18 ++++++++++++------ 9 files changed, 44 insertions(+), 38 deletions(-) diff --git a/bittensor/__init__.py b/bittensor/__init__.py index 8a562f5ce9..f27a85ebd4 100644 --- a/bittensor/__init__.py +++ b/bittensor/__init__.py @@ -80,9 +80,7 @@ def debug(on: bool = True): __pipaddress__ = "https://pypi.org/pypi/bittensor/json" # Raw github url for delegates registry file -__delegates_details_url__: str = ( - "https://raw.githubusercontent.com/opentensor/bittensor-delegates/main/public/delegates.json" -) +__delegates_details_url__: str = "https://raw.githubusercontent.com/opentensor/bittensor-delegates/main/public/delegates.json" # Substrate ss58_format __ss58_format__ = 42 diff --git a/bittensor/chain_data.py b/bittensor/chain_data.py index f48971fcbb..e06a33d727 100644 --- a/bittensor/chain_data.py +++ b/bittensor/chain_data.py @@ -826,10 +826,10 @@ def list_of_tuple_from_vec_u8( cls, vec_u8: List[int] ) -> Dict[str, List["StakeInfo"]]: r"""Returns a list of StakeInfo objects from a ``vec_u8``.""" - decoded: Optional[List[Tuple(str, List[object])]] = ( - from_scale_encoding_using_type_string( - input=vec_u8, type_string="Vec<(AccountId, Vec)>" - ) + decoded: Optional[ + List[Tuple(str, List[object])] + ] = from_scale_encoding_using_type_string( + input=vec_u8, type_string="Vec<(AccountId, Vec)>" ) if decoded is None: diff --git a/bittensor/commands/delegates.py b/bittensor/commands/delegates.py index 774a26e72a..9c3bb5ed97 100644 --- a/bittensor/commands/delegates.py +++ b/bittensor/commands/delegates.py @@ -101,9 +101,9 @@ def 
show_delegates( for prev_delegate in prev_delegates: prev_delegates_dict[prev_delegate.hotkey_ss58] = prev_delegate - registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( - get_delegates_details(url=bittensor.__delegates_details_url__) - ) + registered_delegate_info: Optional[ + Dict[str, DelegatesDetails] + ] = get_delegates_details(url=bittensor.__delegates_details_url__) if registered_delegate_info is None: bittensor.__console__.print( ":warning:[yellow]Could not get delegate info from chain.[/yellow]" @@ -793,9 +793,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): delegates.sort(key=lambda delegate: delegate[0].total_stake, reverse=True) total_delegated += sum(my_delegates.values()) - registered_delegate_info: Optional[DelegatesDetails] = ( - get_delegates_details(url=bittensor.__delegates_details_url__) - ) + registered_delegate_info: Optional[ + DelegatesDetails + ] = get_delegates_details(url=bittensor.__delegates_details_url__) if registered_delegate_info is None: bittensor.__console__.print( ":warning:[yellow]Could not get delegate info from chain.[/yellow]" diff --git a/bittensor/commands/inspect.py b/bittensor/commands/inspect.py index f2b3caeaad..bdf2a00e26 100644 --- a/bittensor/commands/inspect.py +++ b/bittensor/commands/inspect.py @@ -141,9 +141,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): ) bittensor.logging.debug(f"Netuids to check: {netuids}") - registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( - get_delegates_details(url=bittensor.__delegates_details_url__) - ) + registered_delegate_info: Optional[ + Dict[str, DelegatesDetails] + ] = get_delegates_details(url=bittensor.__delegates_details_url__) if registered_delegate_info is None: bittensor.__console__.print( ":warning:[yellow]Could not get delegate info from chain.[/yellow]" @@ -184,9 +184,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): "[overline white]Emission", footer_style="overline white", style="green" ) for wallet in tqdm(wallets): - delegates: List[Tuple(bittensor.DelegateInfo, bittensor.Balance)] = ( - subtensor.get_delegated(coldkey_ss58=wallet.coldkeypub.ss58_address) - ) + delegates: List[ + Tuple(bittensor.DelegateInfo, bittensor.Balance) + ] = subtensor.get_delegated(coldkey_ss58=wallet.coldkeypub.ss58_address) if not wallet.coldkeypub_file.exists_on_device(): continue cold_balance = subtensor.get_balance(wallet.coldkeypub.ss58_address) diff --git a/bittensor/commands/senate.py b/bittensor/commands/senate.py index 20c02f8a98..931da96d42 100644 --- a/bittensor/commands/senate.py +++ b/bittensor/commands/senate.py @@ -212,9 +212,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): senate_members = subtensor.get_senate_members() proposals = subtensor.get_proposals() - registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( - get_delegates_details(url=bittensor.__delegates_details_url__) - ) + registered_delegate_info: Optional[ + Dict[str, DelegatesDetails] + ] = get_delegates_details(url=bittensor.__delegates_details_url__) table = Table(show_footer=False) table.title = ( @@ -343,9 +343,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): console.print(":cross_mark: [red]Failed[/red]: Proposal not found.") return - registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( - get_delegates_details(url=bittensor.__delegates_details_url__) - ) + registered_delegate_info: Optional[ + Dict[str, DelegatesDetails] + ] = 
get_delegates_details(url=bittensor.__delegates_details_url__) table = Table(show_footer=False) table.title = "[white]Votes for Proposal {}".format(proposal_hash) diff --git a/bittensor/commands/stake.py b/bittensor/commands/stake.py index f93d4e24ab..cbb210f34b 100644 --- a/bittensor/commands/stake.py +++ b/bittensor/commands/stake.py @@ -392,9 +392,9 @@ def _run(cli: "bittensor.cli", subtensor: "bittensor.subtensor"): wallets = _get_coldkey_wallets_for_path(cli.config.wallet.path) else: wallets = [bittensor.wallet(config=cli.config)] - registered_delegate_info: Optional[Dict[str, DelegatesDetails]] = ( - get_delegates_details(url=bittensor.__delegates_details_url__) - ) + registered_delegate_info: Optional[ + Dict[str, DelegatesDetails] + ] = get_delegates_details(url=bittensor.__delegates_details_url__) def get_stake_accounts( wallet, subtensor diff --git a/bittensor/dendrite.py b/bittensor/dendrite.py index 160d281c71..b96e819ce9 100644 --- a/bittensor/dendrite.py +++ b/bittensor/dendrite.py @@ -289,7 +289,9 @@ def _log_incoming_response(self, synapse): f"dendrite | <-- | {synapse.get_total_size()} B | {synapse.name} | {synapse.axon.hotkey} | {synapse.axon.ip}:{str(synapse.axon.port)} | {synapse.dendrite.status_code} | {synapse.dendrite.status_message}" ) - def query(self, *args, **kwargs) -> Union[ + def query( + self, *args, **kwargs + ) -> Union[ bittensor.Synapse, List[bittensor.Synapse], bittensor.StreamingSynapse, diff --git a/bittensor/utils/__init__.py b/bittensor/utils/__init__.py index a79ed2cf38..74485fc630 100644 --- a/bittensor/utils/__init__.py +++ b/bittensor/utils/__init__.py @@ -162,9 +162,9 @@ def get_explorer_url_for_network( explorer_urls: Optional[Dict[str, str]] = {} # Will be None if the network is not known. i.e. not in network_map - explorer_root_urls: Optional[Dict[str, str]] = ( - get_explorer_root_url_by_network_from_map(network, network_map) - ) + explorer_root_urls: Optional[ + Dict[str, str] + ] = get_explorer_root_url_by_network_from_map(network, network_map) if explorer_root_urls != {}: # We are on a known network. diff --git a/bittensor/wallet.py b/bittensor/wallet.py index 40b36aa21b..54837182aa 100644 --- a/bittensor/wallet.py +++ b/bittensor/wallet.py @@ -669,7 +669,8 @@ def regenerate_coldkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": ... + ) -> "wallet": + ... @overload def regenerate_coldkey( @@ -678,7 +679,8 @@ def regenerate_coldkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": ... + ) -> "wallet": + ... @overload def regenerate_coldkey( @@ -687,7 +689,8 @@ def regenerate_coldkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": ... + ) -> "wallet": + ... def regenerate_coldkey( self, @@ -774,7 +777,8 @@ def regenerate_hotkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": ... + ) -> "wallet": + ... @overload def regenerate_hotkey( @@ -783,7 +787,8 @@ def regenerate_hotkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": ... + ) -> "wallet": + ... @overload def regenerate_hotkey( @@ -792,7 +797,8 @@ def regenerate_hotkey( use_password: bool = True, overwrite: bool = False, suppress: bool = False, - ) -> "wallet": ... + ) -> "wallet": + ... 
def regenerate_hotkey( self, From ceea1087af36bf91f6fedb2a593927c1ef5dc12d Mon Sep 17 00:00:00 2001 From: ifrit98 Date: Thu, 1 Feb 2024 02:25:35 +0000 Subject: [PATCH 08/23] add correct black version to circleci --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 34f1050f6d..f865826f3f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -28,7 +28,7 @@ jobs: python -m venv env/ . env/bin/activate python -m pip install --upgrade pip - pip install black + pip install black==23.7.0 - save_cache: name: Save cached black venv From f20bfebacac0212a4a701eb6238880c158f96587 Mon Sep 17 00:00:00 2001 From: Gus Date: Mon, 29 Jan 2024 15:40:36 -0500 Subject: [PATCH 09/23] chore(pytest): Migrate unittest test cases to pytest --- tests/unit_tests/test_keyfile.py | 855 ++++++++++++++++--------------- 1 file changed, 443 insertions(+), 412 deletions(-) diff --git a/tests/unit_tests/test_keyfile.py b/tests/unit_tests/test_keyfile.py index 49373bfa3c..e54e8802ba 100644 --- a/tests/unit_tests/test_keyfile.py +++ b/tests/unit_tests/test_keyfile.py @@ -29,450 +29,481 @@ from bip39 import bip39_validate -class KeyPairTestCase(unittest.TestCase): - """ - Test case for the KeyPair class. - """ - - def test_generate_mnemonic(self): - """ - Test the generation of a mnemonic and its validation. - """ - mnemonic = Keypair.generate_mnemonic() - self.assertTrue(bip39_validate(mnemonic)) - - def test_invalid_mnemonic(self): - """ - Test the validation of an invalid mnemonic. - """ - mnemonic = "This is an invalid mnemonic" - self.assertFalse(bip39_validate(mnemonic)) - - def test_create_sr25519_keypair(self): - """ - Test the creation of a sr25519 keypair from a mnemonic and verify the SS58 address. - """ - mnemonic = "old leopard transfer rib spatial phone calm indicate online fire caution review" - keypair = Keypair.create_from_mnemonic(mnemonic, ss58_format=0) - self.assertEqual( - keypair.ss58_address, "16ADqpMa4yzfmWs3nuTSMhfZ2ckeGtvqhPWCNqECEGDcGgU2" - ) +def test_generate_mnemonic(): + """ + Test the generation of a mnemonic and its validation. + """ + mnemonic = Keypair.generate_mnemonic() + assert bip39_validate(mnemonic) == True - def test_only_provide_ss58_address(self): - """ - Test the creation of a keypair with only the SS58 address provided. - """ - keypair = Keypair( - ss58_address="16ADqpMa4yzfmWs3nuTSMhfZ2ckeGtvqhPWCNqECEGDcGgU2" - ) - self.assertEqual( - "0x" + keypair.public_key.hex(), - "0xe4359ad3e2716c539a1d663ebd0a51bdc5c98a12e663bb4c4402db47828c9446", - ) - def test_only_provide_public_key(self): - """ - Test the creation of a keypair with only the public key provided. - """ - keypair = Keypair( - public_key="0xe4359ad3e2716c539a1d663ebd0a51bdc5c98a12e663bb4c4402db47828c9446", - ss58_format=0, - ) - self.assertEqual( - keypair.ss58_address, "16ADqpMa4yzfmWs3nuTSMhfZ2ckeGtvqhPWCNqECEGDcGgU2" - ) +def test_invalid_mnemonic(): + """ + Test the validation of an invalid mnemonic. + """ + mnemonic = "This is an invalid mnemonic" + assert bip39_validate(mnemonic) == False + + +def test_create_sr25519_keypair(): + """ + Test the creation of a sr25519 keypair from a mnemonic and verify the SS58 address. 
+ """ + mnemonic = "old leopard transfer rib spatial phone calm indicate online fire caution review" + keypair = Keypair.create_from_mnemonic(mnemonic, ss58_format=0) + assert keypair.ss58_address == "16ADqpMa4yzfmWs3nuTSMhfZ2ckeGtvqhPWCNqECEGDcGgU2" + + +def test_only_provide_ss58_address(): + """ + Test the creation of a keypair with only the SS58 address provided. + """ + keypair = Keypair(ss58_address="16ADqpMa4yzfmWs3nuTSMhfZ2ckeGtvqhPWCNqECEGDcGgU2") - def test_provide_no_ss58_address_and_public_key(self): - """ - Test the creation of a keypair without providing SS58 address and public key. - """ - self.assertRaises(ValueError, Keypair) - - def test_incorrect_private_key_length_sr25519(self): - """ - Test the creation of a keypair with an incorrect private key length for sr25519. - """ - self.assertRaises( - ValueError, - Keypair, + assert ( + f"0x{keypair.public_key.hex()}" + == "0xe4359ad3e2716c539a1d663ebd0a51bdc5c98a12e663bb4c4402db47828c9446" + ) + + +def test_only_provide_public_key(): + """ + Test the creation of a keypair with only the public key provided. + """ + keypair = Keypair( + public_key="0xe4359ad3e2716c539a1d663ebd0a51bdc5c98a12e663bb4c4402db47828c9446", + ss58_format=0, + ) + + assert keypair.ss58_address == "16ADqpMa4yzfmWs3nuTSMhfZ2ckeGtvqhPWCNqECEGDcGgU2" + + +def test_provide_no_ss58_address_and_public_key(): + """ + Test the creation of a keypair without providing SS58 address and public key. + """ + with pytest.raises(ValueError): + Keypair() + + +def test_incorrect_private_key_length_sr25519(): + """ + Test the creation of a keypair with an incorrect private key length for sr25519. + """ + with pytest.raises(ValueError): + Keypair( private_key="0x23", ss58_address="16ADqpMa4yzfmWs3nuTSMhfZ2ckeGtvqhPWCNqECEGDcGgU2", ) - def test_incorrect_public_key(self): - """ - Test the creation of a keypair with an incorrect public key. - """ - self.assertRaises(ValueError, Keypair, public_key="0x23") - - def test_sign_and_verify(self): - """ - Test the signing and verification of a message using a keypair. - """ - mnemonic = Keypair.generate_mnemonic() - keypair = Keypair.create_from_mnemonic(mnemonic) - signature = keypair.sign("Test1231223123123") - self.assertTrue(keypair.verify("Test1231223123123", signature)) - - def test_sign_and_verify_hex_data(self): - """ - Test the signing and verification of hex data using a keypair. - """ - mnemonic = Keypair.generate_mnemonic() - keypair = Keypair.create_from_mnemonic(mnemonic) - signature = keypair.sign("0x1234") - self.assertTrue(keypair.verify("0x1234", signature)) - - def test_sign_and_verify_scale_bytes(self): - """ - Test the signing and verification of ScaleBytes data using a keypair. - """ - mnemonic = Keypair.generate_mnemonic() - keypair = Keypair.create_from_mnemonic(mnemonic) - data = ScaleBytes("0x1234") - signature = keypair.sign(data) - self.assertTrue(keypair.verify(data, signature)) - - def test_sign_missing_private_key(self): - """ - Test signing a message with a keypair that is missing the private key. - """ - keypair = Keypair( - ss58_address="5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY" - ) - self.assertRaises(ConfigurationError, keypair.sign, "0x1234") - def test_sign_unsupported_crypto_type(self): - """ - Test signing a message with an unsupported crypto type. 
- """ - keypair = Keypair.create_from_private_key( - ss58_address="16ADqpMa4yzfmWs3nuTSMhfZ2ckeGtvqhPWCNqECEGDcGgU2", - private_key="0x1f1995bdf3a17b60626a26cfe6f564b337d46056b7a1281b64c649d592ccda0a9cffd34d9fb01cae1fba61aeed184c817442a2186d5172416729a4b54dd4b84e", - crypto_type=3, - ) - self.assertRaises(ConfigurationError, keypair.sign, "0x1234") +def test_incorrect_public_key(): + """ + Test the creation of a keypair with an incorrect public key. + """ + with pytest.raises(ValueError): + Keypair(public_key="0x23") - def test_verify_unsupported_crypto_type(self): - """ - Test verifying a signature with an unsupported crypto type. - """ - keypair = Keypair.create_from_private_key( - ss58_address="16ADqpMa4yzfmWs3nuTSMhfZ2ckeGtvqhPWCNqECEGDcGgU2", - private_key="0x1f1995bdf3a17b60626a26cfe6f564b337d46056b7a1281b64c649d592ccda0a9cffd34d9fb01cae1fba61aeed184c817442a2186d5172416729a4b54dd4b84e", - crypto_type=3, - ) - self.assertRaises(ConfigurationError, keypair.verify, "0x1234", "0x1234") - - def test_sign_and_verify_incorrect_signature(self): - """ - Test verifying an incorrect signature for a signed message. - """ - mnemonic = Keypair.generate_mnemonic() - keypair = Keypair.create_from_mnemonic(mnemonic) - signature = "0x4c291bfb0bb9c1274e86d4b666d13b2ac99a0bacc04a4846fb8ea50bda114677f83c1f164af58fc184451e5140cc8160c4de626163b11451d3bbb208a1889f8a" - self.assertFalse(keypair.verify("Test1231223123123", signature)) - - def test_sign_and_verify_invalid_signature(self): - """ - Test verifying an invalid signature format for a signed message. - """ - mnemonic = Keypair.generate_mnemonic() - keypair = Keypair.create_from_mnemonic(mnemonic) - signature = "Test" - self.assertRaises(TypeError, keypair.verify, "Test1231223123123", signature) - - def test_sign_and_verify_invalid_message(self): - """ - Test verifying a signature against an incorrect message. - """ - mnemonic = Keypair.generate_mnemonic() - keypair = Keypair.create_from_mnemonic(mnemonic) - signature = keypair.sign("Test1231223123123") - self.assertFalse(keypair.verify("OtherMessage", signature)) - - def test_create_ed25519_keypair(self): - """ - Test the creation of an ed25519 keypair from a mnemonic and verify the SS58 address. - """ - mnemonic = "old leopard transfer rib spatial phone calm indicate online fire caution review" - keypair = Keypair.create_from_mnemonic( - mnemonic, ss58_format=0, crypto_type=KeypairType.ED25519 - ) - self.assertEqual( - keypair.ss58_address, "16dYRUXznyhvWHS1ktUENGfNAEjCawyDzHRtN9AdFnJRc38h" - ) - def test_sign_and_verify_ed25519(self): - """ - Test the signing and verification of a message using an ed25519 keypair. - """ - mnemonic = Keypair.generate_mnemonic() - keypair = Keypair.create_from_mnemonic( - mnemonic, crypto_type=KeypairType.ED25519 - ) - signature = keypair.sign("Test1231223123123") - self.assertTrue(keypair.verify("Test1231223123123", signature)) - - def test_sign_and_verify_invalid_signature_ed25519(self): - """ - Test verifying an incorrect signature for a message signed with an ed25519 keypair. - """ - mnemonic = Keypair.generate_mnemonic() - keypair = Keypair.create_from_mnemonic( - mnemonic, crypto_type=KeypairType.ED25519 - ) - signature = "0x4c291bfb0bb9c1274e86d4b666d13b2ac99a0bacc04a4846fb8ea50bda114677f83c1f164af58fc184451e5140cc8160c4de626163b11451d3bbb208a1889f8a" - self.assertFalse(keypair.verify("Test1231223123123", signature)) - - def test_unsupport_crypto_type(self): - """ - Test creating a keypair with an unsupported crypto type. 
- """ - self.assertRaises( - ValueError, - Keypair.create_from_seed, +def test_sign_and_verify(): + """ + Test the signing and verification of a message using a keypair. + """ + mnemonic = Keypair.generate_mnemonic() + keypair = Keypair.create_from_mnemonic(mnemonic) + signature = keypair.sign("Test1231223123123") + assert keypair.verify("Test1231223123123", signature) == True + + +def test_sign_and_verify_hex_data(): + """ + Test the signing and verification of hex data using a keypair. + """ + mnemonic = Keypair.generate_mnemonic() + keypair = Keypair.create_from_mnemonic(mnemonic) + signature = keypair.sign("0x1234") + assert keypair.verify("0x1234", signature) == True + + +def test_sign_and_verify_scale_bytes(): + """ + Test the signing and verification of ScaleBytes data using a keypair. + """ + mnemonic = Keypair.generate_mnemonic() + keypair = Keypair.create_from_mnemonic(mnemonic) + data = ScaleBytes("0x1234") + signature = keypair.sign(data) + assert keypair.verify(data, signature) == True + + +def test_sign_missing_private_key(): + """ + Test signing a message with a keypair that is missing the private key. + """ + keypair = Keypair(ss58_address="5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY") + with pytest.raises(ConfigurationError): + keypair.sign("0x1234") + + +def test_sign_unsupported_crypto_type(): + """ + Test signing a message with an unsupported crypto type. + """ + keypair = Keypair.create_from_private_key( + ss58_address="16ADqpMa4yzfmWs3nuTSMhfZ2ckeGtvqhPWCNqECEGDcGgU2", + private_key="0x1f1995bdf3a17b60626a26cfe6f564b337d46056b7a1281b64c649d592ccda0a9cffd34d9fb01cae1fba61aeed184c817442a2186d5172416729a4b54dd4b84e", + crypto_type=3, + ) + with pytest.raises(ConfigurationError): + keypair.sign("0x1234") + + +def test_verify_unsupported_crypto_type(): + """ + Test verifying a signature with an unsupported crypto type. + """ + keypair = Keypair.create_from_private_key( + ss58_address="16ADqpMa4yzfmWs3nuTSMhfZ2ckeGtvqhPWCNqECEGDcGgU2", + private_key="0x1f1995bdf3a17b60626a26cfe6f564b337d46056b7a1281b64c649d592ccda0a9cffd34d9fb01cae1fba61aeed184c817442a2186d5172416729a4b54dd4b84e", + crypto_type=3, + ) + with pytest.raises(ConfigurationError): + keypair.verify("0x1234", "0x1234") + + +def test_sign_and_verify_incorrect_signature(): + """ + Test verifying an incorrect signature for a signed message. + """ + mnemonic = Keypair.generate_mnemonic() + keypair = Keypair.create_from_mnemonic(mnemonic) + signature = "0x4c291bfb0bb9c1274e86d4b666d13b2ac99a0bacc04a4846fb8ea50bda114677f83c1f164af58fc184451e5140cc8160c4de626163b11451d3bbb208a1889f8a" + assert keypair.verify("Test1231223123123", signature) == False + + +def test_sign_and_verify_invalid_signature(): + """ + Test verifying an invalid signature format for a signed message. + """ + mnemonic = Keypair.generate_mnemonic() + keypair = Keypair.create_from_mnemonic(mnemonic) + signature = "Test" + with pytest.raises(TypeError): + keypair.verify("Test1231223123123", signature) + + +def test_sign_and_verify_invalid_message(): + """ + Test verifying a signature against an incorrect message. + """ + mnemonic = Keypair.generate_mnemonic() + keypair = Keypair.create_from_mnemonic(mnemonic) + signature = keypair.sign("Test1231223123123") + assert keypair.verify("OtherMessage", signature) == False + + +def test_create_ed25519_keypair(): + """ + Test the creation of an ed25519 keypair from a mnemonic and verify the SS58 address. 
+ """ + mnemonic = "old leopard transfer rib spatial phone calm indicate online fire caution review" + keypair = Keypair.create_from_mnemonic( + mnemonic, ss58_format=0, crypto_type=KeypairType.ED25519 + ) + assert keypair.ss58_address == "16dYRUXznyhvWHS1ktUENGfNAEjCawyDzHRtN9AdFnJRc38h" + + +def test_sign_and_verify_ed25519(): + """ + Test the signing and verification of a message using an ed25519 keypair. + """ + mnemonic = Keypair.generate_mnemonic() + keypair = Keypair.create_from_mnemonic(mnemonic, crypto_type=KeypairType.ED25519) + signature = keypair.sign("Test1231223123123") + assert keypair.verify("Test1231223123123", signature) == True + + +def test_sign_and_verify_invalid_signature_ed25519(): + """ + Test verifying an incorrect signature for a message signed with an ed25519 keypair. + """ + mnemonic = Keypair.generate_mnemonic() + keypair = Keypair.create_from_mnemonic(mnemonic, crypto_type=KeypairType.ED25519) + signature = "0x4c291bfb0bb9c1274e86d4b666d13b2ac99a0bacc04a4846fb8ea50bda114677f83c1f164af58fc184451e5140cc8160c4de626163b11451d3bbb208a1889f8a" + assert keypair.verify("Test1231223123123", signature) == False + + +def test_unsupport_crypto_type(): + """ + Test creating a keypair with an unsupported crypto type. + """ + with pytest.raises(ValueError): + Keypair.create_from_seed( seed_hex="0xda3cf5b1e9144931?a0f0db65664aab662673b099415a7f8121b7245fb0be4143", crypto_type=2, ) - def test_create_keypair_from_private_key(self): - """ - Test creating a keypair from a private key and verify the public key. - """ - keypair = Keypair.create_from_private_key( - ss58_address="16ADqpMa4yzfmWs3nuTSMhfZ2ckeGtvqhPWCNqECEGDcGgU2", - private_key="0x1f1995bdf3a17b60626a26cfe6f564b337d46056b7a1281b64c649d592ccda0a9cffd34d9fb01cae1fba61aeed184c817442a2186d5172416729a4b54dd4b84e", - ) - self.assertEqual( - "0x" + keypair.public_key.hex(), - "0xe4359ad3e2716c539a1d663ebd0a51bdc5c98a12e663bb4c4402db47828c9446", - ) - - def test_hdkd_hard_path(self): - """ - Test hierarchical deterministic key derivation with a hard derivation path. - """ - mnemonic = "old leopard transfer rib spatial phone calm indicate online fire caution review" - derivation_address = "5FEiH8iuDUw271xbqWTWuB6WrDjv5dnCeDX1CyHubAniXDNN" - derivation_path = "//Alice" - derived_keypair = Keypair.create_from_uri(mnemonic + derivation_path) - self.assertEqual(derivation_address, derived_keypair.ss58_address) - - def test_hdkd_soft_path(self): - """ - Test hierarchical deterministic key derivation with a soft derivation path. - """ - mnemonic = "old leopard transfer rib spatial phone calm indicate online fire caution review" - derivation_address = "5GNXbA46ma5dg19GXdiKi5JH3mnkZ8Yea3bBtZAvj7t99P9i" - derivation_path = "/Alice" - derived_keypair = Keypair.create_from_uri(mnemonic + derivation_path) - self.assertEqual(derivation_address, derived_keypair.ss58_address) - - def test_hdkd_default_to_dev_mnemonic(self): - """ - Test hierarchical deterministic key derivation with a default development mnemonic. - """ - derivation_address = "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY" - derivation_path = "//Alice" - derived_keypair = Keypair.create_from_uri(derivation_path) - self.assertEqual(derivation_address, derived_keypair.ss58_address) - - def test_hdkd_nested_hard_soft_path(self): - """ - Test hierarchical deterministic key derivation with a nested hard and soft derivation path. 
- """ - derivation_address = "5CJGwWiKXSE16WJaxBdPZhWqUYkotgenLUALv7ZvqQ4TXeqf" - derivation_path = "//Bob/test" - derived_keypair = Keypair.create_from_uri(derivation_path) - self.assertEqual(derivation_address, derived_keypair.ss58_address) - - def test_hdkd_nested_soft_hard_path(self): - """ - Test hierarchical deterministic key derivation with a nested soft and hard derivation path. - """ - derivation_address = "5Cwc8tShrshDJUp1P1M21dKUTcYQpV9GcfSa4hUBNmMdV3Cx" - derivation_path = "/Bob//test" - derived_keypair = Keypair.create_from_uri(derivation_path) - self.assertEqual(derivation_address, derived_keypair.ss58_address) - - def test_hdkd_path_gt_32_bytes(self): - """ - Test hierarchical deterministic key derivation with a derivation path longer than 32 bytes. - """ - derivation_address = "5GR5pfZeNs1uQiSWVxZaQiZou3wdZiX894eqgvfNfHbEh7W2" - derivation_path = "//PathNameLongerThan32BytesWhichShouldBeHashed" - derived_keypair = Keypair.create_from_uri(derivation_path) - self.assertEqual(derivation_address, derived_keypair.ss58_address) - - def test_hdkd_unsupported_password(self): - """ - Test hierarchical deterministic key derivation with an unsupported password. - """ - self.assertRaises( - NotImplementedError, Keypair.create_from_uri, DEV_PHRASE + "///test" - ) +def test_create_keypair_from_private_key(): + """ + Test creating a keypair from a private key and verify the public key. + """ + keypair = Keypair.create_from_private_key( + ss58_address="16ADqpMa4yzfmWs3nuTSMhfZ2ckeGtvqhPWCNqECEGDcGgU2", + private_key="0x1f1995bdf3a17b60626a26cfe6f564b337d46056b7a1281b64c649d592ccda0a9cffd34d9fb01cae1fba61aeed184c817442a2186d5172416729a4b54dd4b84e", + ) + assert ( + f"0x{keypair.public_key.hex()}" + == "0xe4359ad3e2716c539a1d663ebd0a51bdc5c98a12e663bb4c4402db47828c9446" + ) -class TestKeyFiles(unittest.TestCase): - def setUp(self) -> None: - self.root_path = f"/tmp/pytest{time.time()}" - os.makedirs(self.root_path, exist_ok=True) - self.create_keyfile() +def test_hdkd_hard_path(): + """ + Test hierarchical deterministic key derivation with a hard derivation path. + """ + mnemonic = "old leopard transfer rib spatial phone calm indicate online fire caution review" + derivation_address = "5FEiH8iuDUw271xbqWTWuB6WrDjv5dnCeDX1CyHubAniXDNN" + derivation_path = "//Alice" + derived_keypair = Keypair.create_from_uri(mnemonic + derivation_path) + assert derivation_address == derived_keypair.ss58_address - def tearDown(self) -> None: - shutil.rmtree(self.root_path) - def create_keyfile(self): - keyfile = bittensor.keyfile(path=os.path.join(self.root_path, "keyfile")) +def test_hdkd_soft_path(): + """ + Test hierarchical deterministic key derivation with a soft derivation path. + """ + derivation_address = "5GNXbA46ma5dg19GXdiKi5JH3mnkZ8Yea3bBtZAvj7t99P9i" + mnemonic = "old leopard transfer rib spatial phone calm indicate online fire caution review" + derived_keypair = Keypair.create_from_uri(f"{mnemonic}/Alice") + assert derivation_address == derived_keypair.ss58_address - mnemonic = bittensor.Keypair.generate_mnemonic(12) - alice = bittensor.Keypair.create_from_mnemonic(mnemonic) - keyfile.set_keypair( - alice, encrypt=True, overwrite=True, password="thisisafakepassword" - ) - bob = bittensor.Keypair.create_from_uri("/Bob") - keyfile.set_keypair( - bob, encrypt=True, overwrite=True, password="thisisafakepassword" - ) +def test_hdkd_default_to_dev_mnemonic(): + """ + Test hierarchical deterministic key derivation with a default development mnemonic. 
+ """ + derivation_address = "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY" + derivation_path = "//Alice" + derived_keypair = Keypair.create_from_uri(derivation_path) + assert derivation_address == derived_keypair.ss58_address - return keyfile - def test_create(self): - keyfile = bittensor.keyfile(path=os.path.join(self.root_path, "keyfile")) +def test_hdkd_nested_hard_soft_path(): + """ + Test hierarchical deterministic key derivation with a nested hard and soft derivation path. + """ + derivation_address = "5CJGwWiKXSE16WJaxBdPZhWqUYkotgenLUALv7ZvqQ4TXeqf" + derivation_path = "//Bob/test" + derived_keypair = Keypair.create_from_uri(derivation_path) + assert derivation_address == derived_keypair.ss58_address - mnemonic = bittensor.Keypair.generate_mnemonic(12) - alice = bittensor.Keypair.create_from_mnemonic(mnemonic) - keyfile.set_keypair( - alice, encrypt=True, overwrite=True, password="thisisafakepassword" - ) - assert keyfile.is_readable() - assert keyfile.is_writable() - assert keyfile.is_encrypted() - keyfile.decrypt(password="thisisafakepassword") - assert not keyfile.is_encrypted() - keyfile.encrypt(password="thisisafakepassword") - assert keyfile.is_encrypted() - str(keyfile) - keyfile.decrypt(password="thisisafakepassword") - assert not keyfile.is_encrypted() - str(keyfile) - - assert ( - keyfile.get_keypair(password="thisisafakepassword").ss58_address - == alice.ss58_address - ) - assert ( - keyfile.get_keypair(password="thisisafakepassword").private_key - == alice.private_key - ) - assert ( - keyfile.get_keypair(password="thisisafakepassword").public_key - == alice.public_key - ) - bob = bittensor.Keypair.create_from_uri("/Bob") - keyfile.set_keypair( - bob, encrypt=True, overwrite=True, password="thisisafakepassword" - ) - assert ( - keyfile.get_keypair(password="thisisafakepassword").ss58_address - == bob.ss58_address - ) - assert ( - keyfile.get_keypair(password="thisisafakepassword").public_key - == bob.public_key - ) +def test_hdkd_nested_soft_hard_path(): + """ + Test hierarchical deterministic key derivation with a nested soft and hard derivation path. 
+ """ + derivation_address = "5Cwc8tShrshDJUp1P1M21dKUTcYQpV9GcfSa4hUBNmMdV3Cx" + derivation_path = "/Bob//test" + derived_keypair = Keypair.create_from_uri(derivation_path) + assert derivation_address == derived_keypair.ss58_address - repr(keyfile) - def test_legacy_coldkey(self): - legacy_filename = os.path.join(self.root_path, "coldlegacy_keyfile") - keyfile = bittensor.keyfile(path=legacy_filename) - keyfile.make_dirs() - keyfile_data = ( - b"0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512f" - ) - with open(legacy_filename, "wb") as keyfile_obj: - keyfile_obj.write(keyfile_data) - assert keyfile.keyfile_data == keyfile_data - keyfile.encrypt(password="this is the fake password") - keyfile.decrypt(password="this is the fake password") - keypair_bytes = b'{"accountId": "0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512f", "publicKey": "0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512f", "secretPhrase": null, "secretSeed": null, "ss58Address": "5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zxm"}' - assert keyfile.keyfile_data == keypair_bytes - assert ( - keyfile.get_keypair().ss58_address - == "5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zxm" - ) - assert ( - "0x" + keyfile.get_keypair().public_key.hex() - == "0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512f" - ) +def test_hdkd_path_gt_32_bytes(): + """ + Test hierarchical deterministic key derivation with a derivation path longer than 32 bytes. + """ + derivation_address = "5GR5pfZeNs1uQiSWVxZaQiZou3wdZiX894eqgvfNfHbEh7W2" + derivation_path = "//PathNameLongerThan32BytesWhichShouldBeHashed" + derived_keypair = Keypair.create_from_uri(derivation_path) + assert derivation_address == derived_keypair.ss58_address - def test_validate_password(self): - from bittensor.keyfile import validate_password - assert validate_password(None) == False - assert validate_password("passw0rd") == False - assert validate_password("123456789") == False - with mock.patch("getpass.getpass", return_value="biTTensor"): - assert validate_password("biTTensor") == True - with mock.patch("getpass.getpass", return_value="biTTenso"): - assert validate_password("biTTensor") == False +def test_hdkd_unsupported_password(): + """ + Test hierarchical deterministic key derivation with an unsupported password. + """ - def test_decrypt_keyfile_data_legacy(self): - import base64 + with pytest.raises(NotImplementedError): + Keypair.create_from_uri(f"{DEV_PHRASE}///test") - from cryptography.fernet import Fernet - from cryptography.hazmat.backends import default_backend - from cryptography.hazmat.primitives import hashes - from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC - from bittensor.keyfile import decrypt_keyfile_data +def create_keyfile(root_path): + """ + Creates a keyfile object with two keypairs: alice and bob. - __SALT = b"Iguesscyborgslikemyselfhaveatendencytobeparanoidaboutourorigins" + Args: + root_path (str): The root path for the keyfile. - def __generate_key(password): - kdf = PBKDF2HMAC( - algorithm=hashes.SHA256(), - salt=__SALT, - length=32, - iterations=10000000, - backend=default_backend(), - ) - key = base64.urlsafe_b64encode(kdf.derive(password.encode())) - return key - - pw = "fakepasssword238947239" - data = b"encrypt me!" 
- key = __generate_key(pw) - cipher_suite = Fernet(key) - encrypted_data = cipher_suite.encrypt(data) - - decrypted_data = decrypt_keyfile_data(encrypted_data, pw) - assert decrypted_data == data - - def test_user_interface(self): - from bittensor.keyfile import ask_password_to_encrypt - - with mock.patch( - "getpass.getpass", - side_effect=["pass", "password", "asdury3294y", "asdury3294y"], - ): - assert ask_password_to_encrypt() == "asdury3294y" - - def test_overwriting(self): - keyfile = bittensor.keyfile(path=os.path.join(self.root_path, "keyfile")) - alice = bittensor.Keypair.create_from_uri("/Alice") - keyfile.set_keypair( - alice, encrypt=True, overwrite=True, password="thisisafakepassword" + Returns: + bittensor.keyfile: The created keyfile object. + """ + keyfile = bittensor.keyfile(path=os.path.join(root_path, "keyfile")) + + mnemonic = bittensor.Keypair.generate_mnemonic(12) + alice = bittensor.Keypair.create_from_mnemonic(mnemonic) + keyfile.set_keypair( + alice, encrypt=True, overwrite=True, password="thisisafakepassword" + ) + + bob = bittensor.Keypair.create_from_uri("/Bob") + keyfile.set_keypair( + bob, encrypt=True, overwrite=True, password="thisisafakepassword" + ) + + return keyfile + + +@pytest.fixture(scope="session") +def keyfile_setup_teardown(): + root_path = f"/tmp/pytest{time.time()}" + os.makedirs(root_path, exist_ok=True) + + create_keyfile(root_path) + + yield root_path + + shutil.rmtree(root_path) + + +def test_create(keyfile_setup_teardown): + root_path = keyfile_setup_teardown + keyfile = bittensor.keyfile(path=os.path.join(root_path, "keyfile")) + + mnemonic = bittensor.Keypair.generate_mnemonic(12) + alice = bittensor.Keypair.create_from_mnemonic(mnemonic) + keyfile.set_keypair( + alice, encrypt=True, overwrite=True, password="thisisafakepassword" + ) + assert keyfile.is_readable() + assert keyfile.is_writable() + assert keyfile.is_encrypted() + keyfile.decrypt(password="thisisafakepassword") + assert not keyfile.is_encrypted() + keyfile.encrypt(password="thisisafakepassword") + assert keyfile.is_encrypted() + str(keyfile) + keyfile.decrypt(password="thisisafakepassword") + assert not keyfile.is_encrypted() + str(keyfile) + + assert ( + keyfile.get_keypair(password="thisisafakepassword").ss58_address + == alice.ss58_address + ) + assert ( + keyfile.get_keypair(password="thisisafakepassword").private_key + == alice.private_key + ) + assert ( + keyfile.get_keypair(password="thisisafakepassword").public_key + == alice.public_key + ) + + bob = bittensor.Keypair.create_from_uri("/Bob") + keyfile.set_keypair( + bob, encrypt=True, overwrite=True, password="thisisafakepassword" + ) + assert ( + keyfile.get_keypair(password="thisisafakepassword").ss58_address + == bob.ss58_address + ) + assert ( + keyfile.get_keypair(password="thisisafakepassword").public_key == bob.public_key + ) + + repr(keyfile) + + +def test_legacy_coldkey(keyfile_setup_teardown): + root_path = keyfile_setup_teardown + legacy_filename = os.path.join(root_path, "coldlegacy_keyfile") + keyfile = bittensor.keyfile(path=legacy_filename) + keyfile.make_dirs() + keyfile_data = b"0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512f" + with open(legacy_filename, "wb") as keyfile_obj: + keyfile_obj.write(keyfile_data) + assert keyfile.keyfile_data == keyfile_data + keyfile.encrypt(password="this is the fake password") + keyfile.decrypt(password="this is the fake password") + keypair_bytes = b'{"accountId": "0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512f", "publicKey": 
"0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512f", "secretPhrase": null, "secretSeed": null, "ss58Address": "5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zxm"}' + assert keyfile.keyfile_data == keypair_bytes + assert ( + keyfile.get_keypair().ss58_address + == "5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zxm" + ) + assert ( + f"0x{keyfile.get_keypair().public_key.hex()}" + == "0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512f" + ) + + +def test_validate_password(): + from bittensor.keyfile import validate_password + + assert validate_password(None) == False + assert validate_password("passw0rd") == False + assert validate_password("123456789") == False + with mock.patch("getpass.getpass", return_value="biTTensor"): + assert validate_password("biTTensor") == True + with mock.patch("getpass.getpass", return_value="biTTenso"): + assert validate_password("biTTensor") == False + + +def test_decrypt_keyfile_data_legacy(): + import base64 + + from cryptography.fernet import Fernet + from cryptography.hazmat.backends import default_backend + from cryptography.hazmat.primitives import hashes + from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC + + from bittensor.keyfile import decrypt_keyfile_data + + __SALT = b"Iguesscyborgslikemyselfhaveatendencytobeparanoidaboutourorigins" + + def __generate_key(password): + kdf = PBKDF2HMAC( + algorithm=hashes.SHA256(), + salt=__SALT, + length=32, + iterations=10000000, + backend=default_backend(), ) - bob = bittensor.Keypair.create_from_uri("/Bob") - - with pytest.raises(bittensor.KeyFileError) as pytest_wrapped_e: - with mock.patch("builtins.input", return_value="n"): - keyfile.set_keypair( - bob, encrypt=True, overwrite=False, password="thisisafakepassword" - ) + key = base64.urlsafe_b64encode(kdf.derive(password.encode())) + return key + + pw = "fakepasssword238947239" + data = b"encrypt me!" 
+ key = __generate_key(pw) + cipher_suite = Fernet(key) + encrypted_data = cipher_suite.encrypt(data) + + decrypted_data = decrypt_keyfile_data(encrypted_data, pw) + assert decrypted_data == data + + +def test_user_interface(): + from bittensor.keyfile import ask_password_to_encrypt + + with mock.patch( + "getpass.getpass", + side_effect=["pass", "password", "asdury3294y", "asdury3294y"], + ): + assert ask_password_to_encrypt() == "asdury3294y" + + +def test_overwriting(keyfile_setup_teardown): + root_path = keyfile_setup_teardown + keyfile = bittensor.keyfile(path=os.path.join(root_path, "keyfile")) + alice = bittensor.Keypair.create_from_uri("/Alice") + keyfile.set_keypair( + alice, encrypt=True, overwrite=True, password="thisisafakepassword" + ) + bob = bittensor.Keypair.create_from_uri("/Bob") + + with pytest.raises(bittensor.KeyFileError) as pytest_wrapped_e: + with mock.patch("builtins.input", return_value="n"): + keyfile.set_keypair( + bob, encrypt=True, overwrite=False, password="thisisafakepassword" + ) From 0058a4565a728b8e3b963d1d3c801106b0941470 Mon Sep 17 00:00:00 2001 From: Gus Date: Tue, 30 Jan 2024 11:34:03 -0500 Subject: [PATCH 10/23] Expands test coverage for keyfile & refactor subtensor tests to pytest fmt --- tests/unit_tests/test_keyfile.py | 91 ++++++++- tests/unit_tests/test_subtensor.py | 286 ++++++++++++++--------------- 2 files changed, 226 insertions(+), 151 deletions(-) diff --git a/tests/unit_tests/test_keyfile.py b/tests/unit_tests/test_keyfile.py index e54e8802ba..3e43667f0a 100644 --- a/tests/unit_tests/test_keyfile.py +++ b/tests/unit_tests/test_keyfile.py @@ -16,10 +16,10 @@ # DEALINGS IN THE SOFTWARE. import os +import json import time import pytest import shutil -import unittest import bittensor import unittest.mock as mock from scalecodec import ScaleBytes @@ -367,6 +367,9 @@ def keyfile_setup_teardown(): def test_create(keyfile_setup_teardown): + """ + Test case for creating a keyfile and performing various operations on it. + """ root_path = keyfile_setup_teardown keyfile = bittensor.keyfile(path=os.path.join(root_path, "keyfile")) @@ -416,6 +419,9 @@ def test_create(keyfile_setup_teardown): def test_legacy_coldkey(keyfile_setup_teardown): + """ + Test case for legacy cold keyfile. + """ root_path = keyfile_setup_teardown legacy_filename = os.path.join(root_path, "coldlegacy_keyfile") keyfile = bittensor.keyfile(path=legacy_filename) @@ -439,6 +445,12 @@ def test_legacy_coldkey(keyfile_setup_teardown): def test_validate_password(): + """ + Test case for the validate_password function. + + This function tests the behavior of the validate_password function from the bittensor.keyfile module. + It checks various scenarios to ensure that the function correctly validates passwords. + """ from bittensor.keyfile import validate_password assert validate_password(None) == False @@ -451,6 +463,16 @@ def test_validate_password(): def test_decrypt_keyfile_data_legacy(): + """ + Test case for decrypting legacy keyfile data. + + This test case verifies that the `decrypt_keyfile_data` function correctly decrypts + encrypted data using a legacy encryption scheme. + + The test generates a key using a password and encrypts a sample data. Then, it decrypts + the encrypted data using the same password and asserts that the decrypted data matches + the original data. + """ import base64 from cryptography.fernet import Fernet @@ -484,6 +506,13 @@ def __generate_key(password): def test_user_interface(): + """ + Test the user interface for asking password to encrypt. 
+ + This test case uses the `ask_password_to_encrypt` function from the `bittensor.keyfile` module. + It mocks the `getpass.getpass` function to simulate user input of passwords. + The expected result is that the `ask_password_to_encrypt` function returns the correct password. + """ from bittensor.keyfile import ask_password_to_encrypt with mock.patch( @@ -494,6 +523,9 @@ def test_user_interface(): def test_overwriting(keyfile_setup_teardown): + """ + Test case for overwriting a keypair in the keyfile. + """ root_path = keyfile_setup_teardown keyfile = bittensor.keyfile(path=os.path.join(root_path, "keyfile")) alice = bittensor.Keypair.create_from_uri("/Alice") @@ -507,3 +539,60 @@ def test_overwriting(keyfile_setup_teardown): keyfile.set_keypair( bob, encrypt=True, overwrite=False, password="thisisafakepassword" ) + + +def test_serialized_keypair_to_keyfile_data(keyfile_setup_teardown): + """ + Test case for serializing a keypair to keyfile data. + + This test case verifies that the `serialized_keypair_to_keyfile_data` function correctly + serializes a keypair to keyfile data. It then deserializes the keyfile data and asserts + that the deserialized keypair matches the original keypair. + """ + from bittensor.keyfile import serialized_keypair_to_keyfile_data + + root_path = keyfile_setup_teardown + keyfile = bittensor.keyfile(path=os.path.join(root_path, "keyfile")) + + mnemonic = bittensor.Keypair.generate_mnemonic(12) + keypair = bittensor.Keypair.create_from_mnemonic(mnemonic) + + keyfile.set_keypair( + keypair, encrypt=True, overwrite=True, password="thisisafakepassword" + ) + keypair_data = serialized_keypair_to_keyfile_data(keypair) + decoded_keypair_data = json.loads(keypair_data.decode()) + + assert decoded_keypair_data["secretPhrase"] == keypair.mnemonic + assert decoded_keypair_data["ss58Address"] == keypair.ss58_address + assert decoded_keypair_data["publicKey"] == f"0x{keypair.public_key.hex()}" + assert decoded_keypair_data["accountId"] == f"0x{keypair.public_key.hex()}" + + +def test_deserialize_keypair_from_keyfile_data(keyfile_setup_teardown): + """ + Test case for deserializing a keypair from keyfile data. + + This test case verifies that the `deserialize_keypair_from_keyfile_data` function correctly + deserializes keyfile data to a keypair. It first serializes a keypair to keyfile data and + then deserializes the keyfile data to a keypair. It then asserts that the deserialized keypair + matches the original keypair. 
+ """ + from bittensor.keyfile import serialized_keypair_to_keyfile_data + from bittensor.keyfile import deserialize_keypair_from_keyfile_data + + root_path = keyfile_setup_teardown + keyfile = bittensor.keyfile(path=os.path.join(root_path, "keyfile")) + + mnemonic = bittensor.Keypair.generate_mnemonic(12) + keypair = bittensor.Keypair.create_from_mnemonic(mnemonic) + + keyfile.set_keypair( + keypair, encrypt=True, overwrite=True, password="thisisafakepassword" + ) + keypair_data = serialized_keypair_to_keyfile_data(keypair) + deserialized_keypair = deserialize_keypair_from_keyfile_data(keypair_data) + + assert deserialized_keypair.ss58_address == keypair.ss58_address + assert deserialized_keypair.public_key == keypair.public_key + assert deserialized_keypair.private_key == keypair.private_key diff --git a/tests/unit_tests/test_subtensor.py b/tests/unit_tests/test_subtensor.py index cddedc9752..7638a8cb2b 100644 --- a/tests/unit_tests/test_subtensor.py +++ b/tests/unit_tests/test_subtensor.py @@ -20,107 +20,101 @@ import pytest import bittensor -import unittest -class TestSubtensorWithExternalAxon(unittest.TestCase): - """ - Test the subtensor with external axon in the config - """ - - def test_serve_axon_with_external_ip_set(self): - internal_ip: str = "this is an internal ip" - external_ip: str = "this is an external ip" - - mock_serve_axon = MagicMock(return_value=True) - - mock_subtensor = MagicMock(spec=bittensor.subtensor, serve_axon=mock_serve_axon) - - mock_add_insecure_port = mock.MagicMock(return_value=None) - mock_wallet = MagicMock( - spec=bittensor.wallet, - coldkey=MagicMock(), - coldkeypub=MagicMock( - # mock ss58 address - ss58_address="5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zxm" - ), - hotkey=MagicMock( - ss58_address="5CtstubuSoVLJGCXkiWRNKrrGg2DVBZ9qMs2qYTLsZR4q1Wg" - ), - ) - - mock_config = bittensor.axon.config() - mock_axon_with_external_ip_set = bittensor.axon( - wallet=mock_wallet, - ip=internal_ip, - external_ip=external_ip, - config=mock_config, - ) - +def test_serve_axon_with_external_ip_set(): + internal_ip: str = "this is an internal ip" + external_ip: str = "this is an external ip" + + mock_serve_axon = MagicMock(return_value=True) + + mock_subtensor = MagicMock(spec=bittensor.subtensor, serve_axon=mock_serve_axon) + + mock_add_insecure_port = mock.MagicMock(return_value=None) + mock_wallet = MagicMock( + spec=bittensor.wallet, + coldkey=MagicMock(), + coldkeypub=MagicMock( + # mock ss58 address + ss58_address="5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zxm" + ), + hotkey=MagicMock( + ss58_address="5CtstubuSoVLJGCXkiWRNKrrGg2DVBZ9qMs2qYTLsZR4q1Wg" + ), + ) + + mock_config = bittensor.axon.config() + mock_axon_with_external_ip_set = bittensor.axon( + wallet=mock_wallet, + ip=internal_ip, + external_ip=external_ip, + config=mock_config, + ) + + mock_subtensor.serve_axon( + netuid=-1, + axon=mock_axon_with_external_ip_set, + ) + + mock_serve_axon.assert_called_once() + + # verify that the axon is served to the network with the external ip + _, kwargs = mock_serve_axon.call_args + axon_info = kwargs["axon"].info() + assert axon_info.ip == external_ip + + +def test_serve_axon_with_external_port_set(): + external_ip: str = "this is an external ip" + + internal_port: int = 1234 + external_port: int = 5678 + + mock_serve = MagicMock(return_value=True) + + mock_serve_axon = MagicMock(return_value=True) + + mock_subtensor = MagicMock( + spec=bittensor.subtensor, + serve=mock_serve, + serve_axon=mock_serve_axon, + ) + + mock_wallet = MagicMock( + 
spec=bittensor.wallet, + coldkey=MagicMock(), + coldkeypub=MagicMock( + # mock ss58 address + ss58_address="5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zxm" + ), + hotkey=MagicMock( + ss58_address="5CtstubuSoVLJGCXkiWRNKrrGg2DVBZ9qMs2qYTLsZR4q1Wg" + ), + ) + + mock_config = bittensor.axon.config() + + mock_axon_with_external_port_set = bittensor.axon( + wallet=mock_wallet, + port=internal_port, + external_port=external_port, + config=mock_config, + ) + + with mock.patch( + "bittensor.utils.networking.get_external_ip", return_value=external_ip + ): + # mock the get_external_ip function to return the external ip mock_subtensor.serve_axon( netuid=-1, - axon=mock_axon_with_external_ip_set, - ) - - mock_serve_axon.assert_called_once() - - # verify that the axon is served to the network with the external ip - _, kwargs = mock_serve_axon.call_args - axon_info = kwargs["axon"].info() - self.assertEqual(axon_info.ip, external_ip) - - def test_serve_axon_with_external_port_set(self): - external_ip: str = "this is an external ip" - - internal_port: int = 1234 - external_port: int = 5678 - - mock_serve = MagicMock(return_value=True) - - mock_serve_axon = MagicMock(return_value=True) - - mock_subtensor = MagicMock( - spec=bittensor.subtensor, - serve=mock_serve, - serve_axon=mock_serve_axon, + axon=mock_axon_with_external_port_set, ) - mock_wallet = MagicMock( - spec=bittensor.wallet, - coldkey=MagicMock(), - coldkeypub=MagicMock( - # mock ss58 address - ss58_address="5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zxm" - ), - hotkey=MagicMock( - ss58_address="5CtstubuSoVLJGCXkiWRNKrrGg2DVBZ9qMs2qYTLsZR4q1Wg" - ), - ) - - mock_add_insecure_port = mock.MagicMock(return_value=None) - mock_config = bittensor.axon.config() - - mock_axon_with_external_port_set = bittensor.axon( - wallet=mock_wallet, - port=internal_port, - external_port=external_port, - config=mock_config, - ) - - with mock.patch( - "bittensor.utils.networking.get_external_ip", return_value=external_ip - ): - # mock the get_external_ip function to return the external ip - mock_subtensor.serve_axon( - netuid=-1, - axon=mock_axon_with_external_port_set, - ) - - mock_serve_axon.assert_called_once() - # verify that the axon is served to the network with the external port - _, kwargs = mock_serve_axon.call_args - axon_info = kwargs["axon"].info() - self.assertEqual(axon_info.port, external_port) + mock_serve_axon.assert_called_once() + # verify that the axon is served to the network with the external port + _, kwargs = mock_serve_axon.call_args + axon_info = kwargs["axon"].info() + assert axon_info.port == external_port class ExitEarly(Exception): @@ -129,61 +123,53 @@ class ExitEarly(Exception): pass -class TestStakeMultiple(unittest.TestCase): - """ - Test the stake_multiple function - """ - - def test_stake_multiple(self): - mock_amount: bittensor.Balance = bittensor.Balance.from_tao(1.0) - - mock_wallet = MagicMock( - spec=bittensor.wallet, - coldkey=MagicMock(), - coldkeypub=MagicMock( - # mock ss58 address - ss58_address="5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zxm" - ), - hotkey=MagicMock( - ss58_address="5CtstubuSoVLJGCXkiWRNKrrGg2DVBZ9qMs2qYTLsZR4q1Wg" - ), - ) - - mock_hotkey_ss58s = ["5CtstubuSoVLJGCXkiWRNKrrGg2DVBZ9qMs2qYTLsZR4q1Wg"] - - mock_amounts = [mock_amount] # more than 1000 RAO - - mock_neuron = MagicMock( - is_null=False, - ) - - mock_do_stake = MagicMock(side_effect=ExitEarly) - - mock_subtensor = MagicMock( - spec=bittensor.subtensor, - network="mock_net", - get_balance=MagicMock( - 
return_value=bittensor.Balance.from_tao(mock_amount.tao + 20.0) - ), # enough balance to stake - get_neuron_for_pubkey_and_subnet=MagicMock(return_value=mock_neuron), - _do_stake=mock_do_stake, +def test_stake_multiple(): + mock_amount: bittensor.Balance = bittensor.Balance.from_tao(1.0) + + mock_wallet = MagicMock( + spec=bittensor.wallet, + coldkey=MagicMock(), + coldkeypub=MagicMock( + # mock ss58 address + ss58_address="5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zxm" + ), + hotkey=MagicMock( + ss58_address="5CtstubuSoVLJGCXkiWRNKrrGg2DVBZ9qMs2qYTLsZR4q1Wg" + ), + ) + + mock_hotkey_ss58s = ["5CtstubuSoVLJGCXkiWRNKrrGg2DVBZ9qMs2qYTLsZR4q1Wg"] + + mock_amounts = [mock_amount] # more than 1000 RAO + + mock_neuron = MagicMock( + is_null=False, + ) + + mock_do_stake = MagicMock(side_effect=ExitEarly) + + mock_subtensor = MagicMock( + spec=bittensor.subtensor, + network="mock_net", + get_balance=MagicMock( + return_value=bittensor.Balance.from_tao(mock_amount.tao + 20.0) + ), # enough balance to stake + get_neuron_for_pubkey_and_subnet=MagicMock(return_value=mock_neuron), + _do_stake=mock_do_stake, + ) + + with pytest.raises(ExitEarly): + bittensor.subtensor.add_stake_multiple( + mock_subtensor, + wallet=mock_wallet, + hotkey_ss58s=mock_hotkey_ss58s, + amounts=mock_amounts, ) - with pytest.raises(ExitEarly): - bittensor.subtensor.add_stake_multiple( - mock_subtensor, - wallet=mock_wallet, - hotkey_ss58s=mock_hotkey_ss58s, - amounts=mock_amounts, - ) - - mock_do_stake.assert_called_once() - # args, kwargs - _, kwargs = mock_do_stake.call_args - self.assertAlmostEqual( - kwargs["ammount"], mock_amount.rao, delta=1.0 * 1e9 - ) # delta of 1.0 TAO - + mock_do_stake.assert_called_once() + # args, kwargs + _, kwargs = mock_do_stake.call_args -if __name__ == "__main__": - unittest.main() + assert kwargs["ammount"] == pytest.approx( + mock_amount.rao, rel=1e9 + ) # delta of 1.0 TAO From 19353db2dbef1be7e6b2fdd9846b0fefb6e67fee Mon Sep 17 00:00:00 2001 From: Gus Date: Tue, 30 Jan 2024 12:48:48 -0500 Subject: [PATCH 11/23] Rm deprecated utils/stats.py & refactor test_utils to pytest fmt --- bittensor/utils/stats.py | 84 --- tests/unit_tests/utils/test_utils.py | 1003 +++++++------------------- 2 files changed, 268 insertions(+), 819 deletions(-) delete mode 100644 bittensor/utils/stats.py diff --git a/bittensor/utils/stats.py b/bittensor/utils/stats.py deleted file mode 100644 index fd52dade20..0000000000 --- a/bittensor/utils/stats.py +++ /dev/null @@ -1,84 +0,0 @@ -""" A exponential moving average that updates values based on time since last update. -""" - -# The MIT License (MIT) -# Copyright © 2021 Yuma Rao - -# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated -# documentation files (the “Software”), to deal in the Software without restriction, including without limitation -# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, -# and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -# The above copyright notice and this permission notice shall be included in all copies or substantial portions of -# the Software. - -# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO -# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL -# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -# DEALINGS IN THE SOFTWARE. - -import time - - -class timed_rolling_avg: - """A exponential moving average that updates values based on time since last update.""" - - def __init__(self, initial_value, alpha): - self.value = initial_value - self.alpha = alpha - self.last_update = time.time() - - def update(self, new_value): - """Update self.value (the moving average) with the new_value""" - now = time.time() - time_delta = now - self.last_update - self.last_update = now - new_value = new_value / time_delta - self.value = (1 - self.alpha) * self.value + self.alpha * new_value - - -class AmountPerSecondRollingAverage: - """A exponential moving average that counts quantity per second.""" - - def __init__(self, initial_value=0, alpha=0.1): - self.value = initial_value - self.alpha = alpha - self.last_update = None - - def event(self, amount): - """Update self.value (the moving average) with the new_value""" - if self.last_update == None: - self.last_update = time.time() - else: - now = time.time() - time_delta = now - self.last_update - self.last_update = now - new_value = amount / time_delta - self.value = (1 - self.alpha) * self.value + self.alpha * new_value - - def get(self) -> float: - return float(self.value) - - -class EventsPerSecondRollingAverage: - """A exponential moving average that counts the number of events per second.""" - - def __init__(self, initial_value, alpha): - self.value = initial_value - self.alpha = alpha - self.last_update = None - - def event(self): - """Update self.value (the moving average) with the new_value""" - if self.last_update == None: - self.last_update = time.time() - else: - now = time.time() - time_delta = now - self.last_update - self.last_update = now - new_value = 1 / time_delta - self.value = (1 - self.alpha) * self.value + self.alpha * new_value - - def get(self) -> float: - return float(self.value) diff --git a/tests/unit_tests/utils/test_utils.py b/tests/unit_tests/utils/test_utils.py index 356f1f2fcd..0875d921e0 100644 --- a/tests/unit_tests/utils/test_utils.py +++ b/tests/unit_tests/utils/test_utils.py @@ -1,4 +1,5 @@ # The MIT License (MIT) +# Copyright © 2021 Yuma Rao # Copyright © 2022 Opentensor Foundation # Copyright © 2023 Opentensor Technologies Inc @@ -16,759 +17,291 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. 
-import hashlib -import math -import multiprocessing -import os -import random -import subprocess -import sys -import time -import unittest -from sys import platform -from types import SimpleNamespace -from typing import Dict, Union -from unittest.mock import MagicMock, patch - -import pytest -from _pytest.fixtures import fixture - -from ddt import data, ddt, unpack - import torch -from loguru import logger -from substrateinterface.base import Keypair - -import bittensor -from bittensor.mock import MockSubtensor -from bittensor.utils.registration import _CUDASolver, _SolverBase - -from tests.helpers import _get_mock_wallet as _generate_wallet, _get_mock_keypair - - -def construct_config(): - parser = bittensor.cli.__create_parser__() - defaults = bittensor.config(parser=parser, args=[]) - - return defaults +import bittensor.utils.weight_utils as weight_utils +import pytest -def test_unbiased_topk(): - input_tensor = torch.FloatTensor( - [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0] +def test_convert_weight_and_uids(): + uids = torch.tensor(list(range(10))) + weights = torch.rand(10) + weight_utils.convert_weights_and_uids_for_emit(uids, weights) + + # min weight < 0 + weights[5] = -1 + with pytest.raises(ValueError) as pytest_wrapped_e: + weight_utils.convert_weights_and_uids_for_emit(uids, weights) + + # min uid < 0 + weights[5] = 0 + uids[3] = -1 + with pytest.raises(ValueError) as pytest_wrapped_e: + weight_utils.convert_weights_and_uids_for_emit(uids, weights) + + # len(uids) != len(weights) + uids[3] = 3 + with pytest.raises(ValueError) as pytest_wrapped_e: + weight_utils.convert_weights_and_uids_for_emit(uids, weights[1:]) + + # sum(weights) == 0 + weights = torch.zeros(10) + weight_utils.convert_weights_and_uids_for_emit(uids, weights) + + # test for overflow and underflow + for _ in range(5): + uids = torch.tensor(list(range(10))) + weights = torch.rand(10) + weight_utils.convert_weights_and_uids_for_emit(uids, weights) + + +def test_normalize_with_max_weight(): + weights = torch.rand(1000) + wn = weight_utils.normalize_max_weight(weights, limit=0.01) + assert wn.max() <= 0.01 + + weights = torch.zeros(1000) + wn = weight_utils.normalize_max_weight(weights, limit=0.01) + assert wn.max() <= 0.01 + + weights = torch.rand(1000) + wn = weight_utils.normalize_max_weight(weights, limit=0.02) + assert wn.max() <= 0.02 + + weights = torch.zeros(1000) + wn = weight_utils.normalize_max_weight(weights, limit=0.02) + assert wn.max() <= 0.02 + + weights = torch.rand(1000) + wn = weight_utils.normalize_max_weight(weights, limit=0.03) + assert wn.max() <= 0.03 + + weights = torch.zeros(1000) + wn = weight_utils.normalize_max_weight(weights, limit=0.03) + assert wn.max() <= 0.03 + + # Check for Limit + limit = 0.001 + weights = torch.rand(2000) + w = weights / weights.sum() + wn = weight_utils.normalize_max_weight(weights, limit=limit) + assert (w.max() >= limit and (limit - wn.max()).abs() < 0.001) or ( + w.max() < limit and wn.max() < limit ) - topk = bittensor.utils.unbiased_topk(input_tensor, 2) - assert torch.all(torch.eq(topk[0], torch.Tensor([10.0, 9.0]))) - assert torch.all(torch.eq(topk[1], torch.Tensor([9, 8]))) - - -class TestRegistrationHelpers(unittest.TestCase): - def test_create_seal_hash(self): - block_and_hotkey_hash = ( - "0xba7ea4eb0b16dee271dbef5911838c3f359fcf598c74da65a54b919b68b67279" - ) - block_and_hotkey_hash_bytes = bytes.fromhex(block_and_hotkey_hash[2:]) - nonce = 10 - seal_hash = bittensor.utils.registration._create_seal_hash( - block_and_hotkey_hash_bytes, 
nonce - ) - self.assertEqual( - seal_hash, - b'\xc5\x01B6"\xa8\xa5FDPK\xe49\xad\xdat\xbb:\x87d\x13/\x86\xc6:I8\x9b\x88\xf0\xc20', - ) - - def test_seal_meets_difficulty(self): - block_hash = ( - "0xba7ea4eb0b16dee271dbef5911838c3f359fcf598c74da65a54b919b68b67279" - ) - nonce = 10 - limit = int(math.pow(2, 256)) - 1 - nonce_bytes = nonce.to_bytes(8, "little") - block_bytes = block_hash.encode("utf-8")[2:] - pre_seal = nonce_bytes + block_bytes - seal = hashlib.sha256(bytearray(pre_seal)).digest() - - difficulty = 1 - meets = bittensor.utils.registration._seal_meets_difficulty( - seal, difficulty, limit - ) - assert meets == True - - difficulty = 10 - meets = bittensor.utils.registration._seal_meets_difficulty( - seal, difficulty, limit - ) - assert meets == False - - def test_solve_for_difficulty_fast(self): - block_hash = ( - "0xba7ea4eb0b16dee271dbef5911838c3f359fcf598c74da65a54b919b68b67279" - ) - subtensor = MagicMock() - subtensor.get_current_block = MagicMock(return_value=1) - subtensor.difficulty = MagicMock(return_value=1) - subtensor.substrate = MagicMock() - subtensor.get_block_hash = MagicMock(return_value=block_hash) - subtensor.is_hotkey_registered = MagicMock(return_value=False) - wallet = MagicMock( - hotkey=Keypair.create_from_mnemonic(Keypair.generate_mnemonic()), - ) - num_proc: int = 1 - limit = int(math.pow(2, 256)) - 1 - - solution = bittensor.utils.registration._solve_for_difficulty_fast( - subtensor, wallet, netuid=-1, num_processes=num_proc - ) - seal = solution.seal - - assert bittensor.utils.registration._seal_meets_difficulty(seal, 1, limit) - - subtensor.difficulty = MagicMock(return_value=10) - solution = bittensor.utils.registration._solve_for_difficulty_fast( - subtensor, wallet, netuid=-2, num_processes=num_proc - ) - seal = solution.seal - assert bittensor.utils.registration._seal_meets_difficulty(seal, 10, limit) - - def test_solve_for_difficulty_fast_registered_already(self): - # tests if the registration stops after the first block of nonces - for _ in range(10): - workblocks_before_is_registered = random.randint(1, 4) - # return False each work block but return True after a random number of blocks - is_registered_return_values = ( - [False for _ in range(workblocks_before_is_registered)] - + [True] - + [False, False] - ) - - block_hash = ( - "0xba7ea4eb0b16dee271dbef5911838c3f359fcf598c74da65a54b919b68b67279" - ) - subtensor = MagicMock() - subtensor.get_current_block = MagicMock(return_value=1) - subtensor.difficulty = MagicMock( - return_value=int(1e20) - ) # set high to make solving take a long time - subtensor.substrate = MagicMock() - subtensor.get_block_hash = MagicMock(return_value=block_hash) - subtensor.is_hotkey_registered = MagicMock( - side_effect=is_registered_return_values - ) - wallet = MagicMock( - hotkey=Keypair.create_from_mnemonic(Keypair.generate_mnemonic()), - ) - - # all arugments should return None to indicate an early return - solution = bittensor.utils.registration._solve_for_difficulty_fast( - subtensor, wallet, netuid=-2, num_processes=1, update_interval=1000 - ) - - assert solution is None - # called every time until True - assert ( - subtensor.is_hotkey_registered.call_count - == workblocks_before_is_registered + 1 - ) - - def test_solve_for_difficulty_fast_missing_hash(self): - block_hash = ( - "0xba7ea4eb0b16dee271dbef5911838c3f359fcf598c74da65a54b919b68b67279" - ) - subtensor = MagicMock() - subtensor.get_current_block = MagicMock(return_value=1) - subtensor.difficulty = MagicMock(return_value=1) - subtensor.substrate = 
MagicMock() - subtensor.get_block_hash = MagicMock( - side_effect=[None, None] + [block_hash] * 20 - ) - subtensor.is_hotkey_registered = MagicMock(return_value=False) - wallet = MagicMock( - hotkey=Keypair.create_from_mnemonic(Keypair.generate_mnemonic()), - ) - num_proc: int = 1 - limit = int(math.pow(2, 256)) - 1 - - solution = bittensor.utils.registration._solve_for_difficulty_fast( - subtensor, wallet, netuid=-1, num_processes=num_proc - ) - seal = solution.seal - assert bittensor.utils.registration._seal_meets_difficulty(seal, 1, limit) - subtensor.difficulty = MagicMock(return_value=10) - solution = bittensor.utils.registration._solve_for_difficulty_fast( - subtensor, wallet, netuid=-2, num_processes=num_proc - ) - seal = solution.seal - assert bittensor.utils.registration._seal_meets_difficulty(seal, 10, limit) - - def test_registration_diff_pack_unpack_under_32_bits(self): - fake_diff = pow(2, 31) # this is under 32 bits - - mock_diff = multiprocessing.Array("Q", [0, 0], lock=True) # [high, low] - bittensor.utils.registration._registration_diff_pack(fake_diff, mock_diff) - assert ( - bittensor.utils.registration._registration_diff_unpack(mock_diff) - == fake_diff - ) - - def test_registration_diff_pack_unpack_over_32_bits(self): - mock_diff = multiprocessing.Array("Q", [0, 0], lock=True) # [high, low] - fake_diff = pow(2, 32) * pow( - 2, 4 - ) # this should be too large if the bit shift is wrong (32 + 4 bits) - bittensor.utils.registration._registration_diff_pack(fake_diff, mock_diff) - assert ( - bittensor.utils.registration._registration_diff_unpack(mock_diff) - == fake_diff - ) - - def test_hash_block_with_hotkey(self): - block_hash = ( - "0xc444e4205857add79a0427401aa2518d11e85f32377eff9a946d180a54697459" - ) - block_hash_bytes = bytes.fromhex(block_hash[2:]) - - hotkey_pubkey_hex = ( - "0xba3189e99e75b6097cd94a5ecc771016b83c8432d35d14a03ab731b07112f559" - ) - hotkey_bytes = bytes.fromhex(hotkey_pubkey_hex[2:]) - - expected_hash_hex = ( - "0x7869b61229641b33a355dc34d4ef48f8d82166635237f9f10bcb215b8cb48161" - ) - expected_hash = bytes.fromhex(expected_hash_hex[2:]) - - result_hash = bittensor.utils.registration._hash_block_with_hotkey( - block_hash_bytes, hotkey_bytes - ) - self.assertEqual(result_hash, expected_hash) - - def test_update_curr_block(self): - curr_block, curr_block_num, curr_diff = _SolverBase.create_shared_memory() - - block_number: int = 1 - block_bytes = bytes.fromhex( - "9dda24e4199df410e18a43044b3069078f796922b0247b8749aecb577b09bd59" - ) - diff: int = 1 - hotkey_bytes = bytes.fromhex("0" * 64) - lock: Union[multiprocessing.Lock, MagicMock] = MagicMock() - - bittensor.utils.registration._update_curr_block( - curr_diff, - curr_block, - curr_block_num, - block_number, - block_bytes, - diff, - hotkey_bytes, - lock, - ) - - self.assertEqual(curr_block_num.value, block_number) - self.assertEqual(curr_diff[0], diff >> 32) - self.assertEqual(curr_diff[1], diff & 0xFFFFFFFF) - - hash_of_block_and_hotkey = bittensor.utils.registration._hash_block_with_hotkey( - block_bytes, hotkey_bytes - ) - self.assertEqual( - curr_block[:], [int(byte_) for byte_ in hash_of_block_and_hotkey] - ) - - def test_solve_for_nonce_block(self): - nonce_start = 0 - nonce_end = 10_000 - block_and_hotkey_hash_bytes = bytes.fromhex( - "9dda24e4199df410e18a43044b3069078f796922b0247b8749aecb577b09bd59" - ) - - limit = limit = int(math.pow(2, 256)) - 1 - block_number = 1 - - difficulty = 1 - result = bittensor.utils.registration._solve_for_nonce_block( - nonce_start, - nonce_end, - 
block_and_hotkey_hash_bytes, - difficulty, - limit, - block_number, - ) - - self.assertIsNotNone(result) - self.assertEqual(result.block_number, block_number) - self.assertEqual(result.difficulty, difficulty) - - # Make sure seal meets difficulty - self.assertTrue( - bittensor.utils.registration._seal_meets_difficulty( - result.seal, difficulty, limit - ) - ) - - # Test with a higher difficulty - difficulty = 10 - result = bittensor.utils.registration._solve_for_nonce_block( - nonce_start, - nonce_end, - block_and_hotkey_hash_bytes, - difficulty, - limit, - block_number, - ) - - self.assertIsNotNone(result) - self.assertEqual(result.block_number, block_number) - self.assertEqual(result.difficulty, difficulty) - - # Make sure seal meets difficulty - self.assertTrue( - bittensor.utils.registration._seal_meets_difficulty( - result.seal, difficulty, limit - ) - ) - - -class TestSS58Utils(unittest.TestCase): - def test_is_valid_ss58_address(self): - keypair = bittensor.Keypair.create_from_mnemonic( - bittensor.Keypair.generate_mnemonic(words=12), - ss58_format=bittensor.__ss58_format__, - ) - good_address = keypair.ss58_address - bad_address = good_address[:-1] + "a" - assert bittensor.utils.is_valid_ss58_address(good_address) - assert not bittensor.utils.is_valid_ss58_address(bad_address) - - def test_is_valid_ss58_address_legacy(self): - keypair = bittensor.Keypair.create_from_mnemonic( - bittensor.Keypair.generate_mnemonic(words=12), - ss58_format=42, # should be fine for legacy ss58 - ) - good_address = keypair.ss58_address - bad_address = good_address[:-1] + "a" - assert bittensor.utils.is_valid_ss58_address(good_address) - assert not bittensor.utils.is_valid_ss58_address(bad_address) - - def test_is_valid_ed25519_pubkey(self): - keypair = bittensor.Keypair.create_from_mnemonic( - bittensor.Keypair.generate_mnemonic(words=12), - ss58_format=bittensor.__ss58_format__, - ) - good_pubkey = keypair.public_key.hex() - bad_pubkey = good_pubkey[:-1] # needs to be 64 chars - assert bittensor.utils.is_valid_ed25519_pubkey(good_pubkey) - assert not bittensor.utils.is_valid_ed25519_pubkey(bad_pubkey) - - # Test with bytes - good_pubkey = keypair.public_key - bad_pubkey = good_pubkey[:-1] # needs to be 32 bytes - assert bittensor.utils.is_valid_ed25519_pubkey(good_pubkey) - assert not bittensor.utils.is_valid_ed25519_pubkey(bad_pubkey) - - -class TestUpdateCurrentBlockDuringRegistration(unittest.TestCase): - def test_check_for_newest_block_and_update_same_block(self): - # if the block is the same, the function should return the same block number - subtensor = MagicMock() - current_block_num: int = 1 - subtensor.get_current_block = MagicMock(return_value=current_block_num) - mock_hotkey_bytes = bytes.fromhex("0" * 63 + "1") - - self.assertEqual( - bittensor.utils.registration._check_for_newest_block_and_update( - subtensor, - -1, # netuid - current_block_num, # current block number is the same as the new block number - mock_hotkey_bytes, # mock hotkey bytes - MagicMock(), - MagicMock(), - MagicMock(), - MagicMock(), - MagicMock(), - MagicMock(), - MagicMock(), - ), - current_block_num, - ) - - def test_check_for_newest_block_and_update_new_block(self): - # if the block is new, the function should return the new block_number - mock_block_hash = ( - "0xba7ea4eb0b16dee271dbef5911838c3f359fcf598c74da65a54b919b68b67279" - ) - mock_hotkey_bytes = bytes.fromhex("0" * 63 + "1") - - current_block_num: int = 1 - current_diff: int = 0 - - mock_substrate = MagicMock() - subtensor = MagicMock( - 
get_block_hash=MagicMock(return_value=mock_block_hash), - substrate=mock_substrate, - difficulty=MagicMock(return_value=current_diff + 1), # new diff - ) - subtensor.get_current_block = MagicMock( - return_value=current_block_num + 1 - ) # new block - - mock_update_curr_block = MagicMock() - - mock_solvers = [ - MagicMock(newBlockEvent=MagicMock(set=MagicMock())), - MagicMock(newBlockEvent=MagicMock(set=MagicMock())), - ] - - mock_curr_stats = MagicMock( - block_number=current_block_num, - block_hash=b"", - difficulty=0, - ) - - self.assertEqual( - bittensor.utils.registration._check_for_newest_block_and_update( - subtensor, - -2, # netuid - MagicMock(), - mock_hotkey_bytes, - MagicMock(), - MagicMock(), - MagicMock(), - mock_update_curr_block, - MagicMock(), - mock_solvers, - mock_curr_stats, - ), - current_block_num + 1, - ) - - # check that the update_curr_block function was called - mock_update_curr_block.assert_called_once() - - # check that the solvers got the event - for solver in mock_solvers: - solver.newBlockEvent.set.assert_called_once() - - # check the stats were updated - self.assertEqual(mock_curr_stats.block_number, current_block_num + 1) - self.assertEqual(mock_curr_stats.block_hash, mock_block_hash) - self.assertEqual(mock_curr_stats.difficulty, current_diff + 1) - - -class TestGetBlockWithRetry(unittest.TestCase): - class MockException(Exception): - pass - - def test_get_block_with_retry_network_error_exit(self): - mock_subtensor = MagicMock( - get_current_block=MagicMock(return_value=1), - difficulty=MagicMock(return_value=1), - get_block_hash=MagicMock(side_effect=self.MockException("network error")), - ) - with pytest.raises(self.MockException): - # this should raise an exception because the network error is retried only 3 times - bittensor.utils.registration._get_block_with_retry(mock_subtensor, -1) - - def test_get_block_with_retry_network_error_no_error(self): - mock_subtensor = MagicMock( - get_current_block=MagicMock(return_value=1), - difficulty=MagicMock(return_value=1), - substrate=MagicMock( - get_block_hash=MagicMock( - return_value=b"ba7ea4eb0b16dee271dbef5911838c3f359fcf598c74da65a54b919b68b67279" - ) - ), - ) - - # this should not raise an exception because there is no error - bittensor.utils.registration._get_block_with_retry(mock_subtensor, -1) - - def test_get_block_with_retry_network_error_none_twice(self): - # Should retry twice then succeed on the third try - tries = 0 - - def block_none_twice(block_hash: bytes): - nonlocal tries - if tries == 1: - return block_hash - else: - tries += 1 - return None - - mock_subtensor = MagicMock( - get_current_block=MagicMock(return_value=1), - difficulty=MagicMock(return_value=1), - substrate=MagicMock( - get_block_hash=MagicMock( - side_effect=block_none_twice( - b"ba7ea4eb0b16dee271dbef5911838c3f359fcf598c74da65a54b919b68b67279" - ) - ) - ), - ) - - # this should not raise an exception because there is no error on the third try - bittensor.utils.registration._get_block_with_retry(mock_subtensor, -1) - - -class TestPOWNotStale(unittest.TestCase): - def test_pow_not_stale_same_block_number(self): - mock_subtensor = MagicMock( - get_current_block=MagicMock(return_value=1), - ) - mock_solution = bittensor.utils.registration.POWSolution( - block_number=1, # 3 less than current block number - nonce=1, - difficulty=1, - seal=b"", - ) - - assert not mock_solution.is_stale(mock_subtensor) - - def test_pow_not_stale_diff_block_number(self): - mock_subtensor = MagicMock( - get_current_block=MagicMock(return_value=2), - ) - 
mock_solution = bittensor.utils.registration.POWSolution( - block_number=1, # 1 less than current block number - nonce=1, - difficulty=1, - seal=b"", - ) - - assert not mock_solution.is_stale(mock_subtensor) - mock_subtensor = MagicMock( - get_current_block=MagicMock(return_value=3), - ) - mock_solution = bittensor.utils.registration.POWSolution( - block_number=1, # 2 less than current block number - nonce=1, - difficulty=1, - seal=b"", - ) - - assert not mock_solution.is_stale(mock_subtensor) - - mock_subtensor = MagicMock( - get_current_block=MagicMock(return_value=4), - ) - mock_solution = bittensor.utils.registration.POWSolution( - block_number=1, # 3 less than current block number - nonce=1, - difficulty=1, - seal=b"", - ) - - assert not mock_solution.is_stale(mock_subtensor) - - def test_pow_not_stale_diff_block_number_too_old(self): - mock_subtensor = MagicMock( - get_current_block=MagicMock(return_value=5), - ) - mock_solution = bittensor.utils.registration.POWSolution( - block_number=1, # 4 less than current block number - nonce=1, - difficulty=1, - seal=b"", - ) - - assert mock_solution.is_stale(mock_subtensor) - - -@patch("torch.cuda.is_available", return_value=True) -class TestPOWCalled(unittest.TestCase): - def setUp(self) -> None: - # Setup mock subnet - self._subtensor = MockSubtensor() - self._subtensor.reset() - self._subtensor.create_subnet(netuid=99) + # Check for Zeros + limit = 0.01 + weights = torch.zeros(2000) + wn = weight_utils.normalize_max_weight(weights, limit=limit) + assert wn.max() == 1 / 2000 + + # Check for Ordering after normalization + weights = torch.rand(100) + wn = weight_utils.normalize_max_weight(weights, limit=1) + assert torch.equal(wn, weights / weights.sum()) + + # Check for eplison changes + eplison = 0.01 + weights, _ = torch.sort(torch.rand(100)) + x = weights / weights.sum() + limit = x[-10] + change = eplison * limit + y = weight_utils.normalize_max_weight(x, limit=limit - change) + z = weight_utils.normalize_max_weight(x, limit=limit + change) + assert (y - z).abs().sum() < eplison + + +@pytest.mark.parametrize( + "test_id, n, uids, weights, expected", + [ + ("happy-path-1", 3, [0, 1, 2], [15, 5, 80], torch.tensor([0.15, 0.05, 0.8])), + ("happy-path-2", 4, [1, 3], [50, 50], torch.tensor([0.0, 0.5, 0.0, 0.5])), + ], +) +def test_convert_weight_uids_and_vals_to_tensor_happy_path( + test_id, n, uids, weights, expected +): + # Act + result = weight_utils.convert_weight_uids_and_vals_to_tensor(n, uids, weights) + + # Assert + assert torch.allclose(result, expected), f"Failed {test_id}" + + +@pytest.mark.parametrize( + "test_id, n, uids, weights, expected", + [ + ("edge_case_empty", 5, [], [], torch.zeros(5)), + ("edge_case_single", 1, [0], [100], torch.tensor([1.0])), + ("edge_case_all_zeros", 4, [0, 1, 2, 3], [0, 0, 0, 0], torch.zeros(4)), + ], +) +def test_convert_weight_uids_and_vals_to_tensor_edge_cases( + test_id, n, uids, weights, expected +): + # Act + result = weight_utils.convert_weight_uids_and_vals_to_tensor(n, uids, weights) + + # Assert + assert torch.allclose(result, expected), f"Failed {test_id}" + + +@pytest.mark.parametrize( + "test_id, n, uids, weights, exception", + [ + ("error-case-mismatched-lengths", 3, [0, 1, 3, 4, 5], [10, 20, 30], IndexError), + ("error-case-negative-n", -1, [0, 1], [10, 20], RuntimeError), + ("error-case-invalid-uids", 3, [0, 3], [10, 20], IndexError), + ], +) +def test_convert_weight_uids_and_vals_to_tensor_error_cases( + test_id, n, uids, weights, exception +): + # Act / Assert + with 
pytest.raises(exception): + weight_utils.convert_weight_uids_and_vals_to_tensor(n, uids, weights) + + +@pytest.mark.parametrize( + "test_id, n, uids, weights, subnets, expected", + [ + ( + "happy-path-1", + 3, + [0, 1, 2], + [15, 5, 80], + [0, 1, 2], + torch.tensor([0.15, 0.05, 0.8]), + ), + ( + "happy-path-2", + 3, + [0, 2], + [300, 300], + [0, 1, 2], + torch.tensor([0.5, 0.0, 0.5]), + ), + ], +) +def test_convert_root_weight_uids_and_vals_to_tensor_happy_paths( + test_id, n, uids, weights, subnets, expected +): + # Act + result = weight_utils.convert_root_weight_uids_and_vals_to_tensor( + n, uids, weights, subnets + ) - def test_pow_called_for_cuda(self, mock_cuda_available): - class MockException(Exception): - pass + # Assert + assert torch.allclose(result, expected, atol=1e-4), f"Failed {test_id}" - mock_pow_register_call = MagicMock(side_effect=MockException) - mock_subtensor = MockSubtensor() - mock_subtensor.reset() - mock_subtensor.create_subnet(netuid=99) - mock_subtensor.get_neuron_for_pubkey_and_subnet = MagicMock(is_null=True) - mock_subtensor._do_pow_register = mock_pow_register_call +@pytest.mark.parametrize( + "test_id, n, uids, weights, subnets, expected", + [ + ( + "edge-1", + 1, + [0], + [0], + [0], + torch.tensor([0.0]), + ), # Single neuron with zero weight + ( + "edge-2", + 2, + [0, 1], + [0, 0], + [0, 1], + torch.tensor([0.0, 0.0]), + ), # All zero weights + ], +) +def test_convert_root_weight_uids_and_vals_to_tensor_edge_cases( + test_id, n, uids, weights, subnets, expected +): + # Act + result = weight_utils.convert_root_weight_uids_and_vals_to_tensor( + n, uids, weights, subnets + ) - mock_wallet = SimpleNamespace( - hotkey=bittensor.Keypair.create_from_seed( - "0x" + "0" * 64, ss58_format=bittensor.__ss58_format__ - ), - coldkeypub=SimpleNamespace(ss58_address=""), - ) + # Assert + assert torch.allclose(result, expected, atol=1e-4), f"Failed {test_id}" - mock_pow_is_stale = MagicMock(return_value=False) - mock_result = MagicMock( - spec=bittensor.utils.registration.POWSolution, - block_number=1, - nonce=random.randint(0, pow(2, 32)), - difficulty=1, - seal=b"\x00" * 64, - is_stale=mock_pow_is_stale, +@pytest.mark.parametrize( + "test_id, n, uids, weights, subnets, exception", + [ + ("error-1", 3, [1, 3], [100, 200], [1, 2], Exception), # uid not in subnets + ("error-2", 3, [1, 2, 3], [100, 200], [1], Exception), # More uids than subnets + ], +) +def test_convert_root_weight_uids_and_vals_to_tensor_error_cases( + test_id, n, uids, weights, subnets, exception +): + # Act and Assert + with pytest.raises(exception): + weight_utils.convert_root_weight_uids_and_vals_to_tensor( + n, uids, weights, subnets ) + print(f"Failed {test_id}") - with patch( - "bittensor.extrinsics.registration.create_pow", return_value=mock_result - ) as mock_create_pow: - # Should exit early - with pytest.raises(MockException): - mock_subtensor.register(mock_wallet, netuid=99, cuda=True, prompt=False) - - mock_pow_is_stale.assert_called_once() - mock_create_pow.assert_called_once() - mock_cuda_available.assert_called_once() - - call0 = mock_pow_is_stale.call_args - _, kwargs = call0 - assert kwargs["subtensor"] == mock_subtensor - - mock_pow_register_call.assert_called_once() - _, kwargs = mock_pow_register_call.call_args - kwargs["pow_result"].nonce == mock_result.nonce - - -class TestCUDASolverRun(unittest.TestCase): - def test_multi_cuda_run_updates_nonce_start(self): - class MockException(Exception): - pass - - tpb: int = 512 - update_interval: int = 70_000 - nonce_limit: int = 
int(math.pow(2, 64)) - 1 - - mock_solver_self = MagicMock( - spec=_CUDASolver, - tpb=tpb, - dev_id=0, - update_interval=update_interval, - stopEvent=MagicMock(is_set=MagicMock(return_value=False)), - newBlockEvent=MagicMock(is_set=MagicMock(return_value=False)), - finished_queue=MagicMock(put=MagicMock()), - limit=10000, - proc_num=0, - ) - with patch( - "bittensor.utils.registration._solve_for_nonce_block_cuda", - side_effect=[ - None, - MockException, - ], # first call returns mocked no solution, second call raises exception - ) as mock_solve_for_nonce_block_cuda: - # Should exit early - with pytest.raises(MockException): - _CUDASolver.run(mock_solver_self) - mock_solve_for_nonce_block_cuda.assert_called() - calls = mock_solve_for_nonce_block_cuda.call_args_list - self.assertEqual( - len(calls), - 2, - f"solve_for_nonce_block_cuda was called {len(calls)}. Expected 2", - ) # called only twice - - # args, kwargs - args_call_0, _ = calls[0] - initial_nonce_start: int = args_call_0[0] # fist arg should be nonce_start - self.assertIsInstance(initial_nonce_start, int) - - args_call_1, _ = calls[1] - nonce_start_after_iteration: int = args_call_1[ - 0 - ] # first arg should be nonce_start - self.assertIsInstance(nonce_start_after_iteration, int) - - # verify nonce_start is updated after each iteration - self.assertNotEqual( - nonce_start_after_iteration, - initial_nonce_start, - "nonce_start was not updated after iteration", - ) - ## Should incerase by the number of nonces tried == tpb * update_interval - self.assertEqual( - nonce_start_after_iteration, - (initial_nonce_start + update_interval * tpb) % nonce_limit, - "nonce_start was not updated by the correct amount", - ) - - -@ddt -class TestExplorerURL(unittest.TestCase): - @data( - ( - "local", - { - "opentensor": "https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Fentrypoint-finney.opentensor.ai%3A443#/explorer", - "taostats": "https://x.taostats.io", - }, - ), +@pytest.mark.parametrize( + "test_id, n, uids, bonds, expected_output", + [ ( - "endpoint", - { - "opentensor": "https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Fentrypoint-finney.opentensor.ai%3A443#/explorer", - "taostats": "https://x.taostats.io", - }, + "happy-path-1", + 5, + [1, 3, 4], + [10, 20, 30], + torch.tensor([0, 10, 0, 20, 30], dtype=torch.int64), ), ( - "finney", - { - "opentensor": "https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Fentrypoint-finney.opentensor.ai%3A443#/explorer", - "taostats": "https://x.taostats.io", - }, + "happy-path-2", + 3, + [0, 1, 2], + [7, 8, 9], + torch.tensor([7, 8, 9], dtype=torch.int64), ), - ("bad", {}), - ("", {}), - ("unknown", {}), - ) - @unpack - def test_get_explorer_root_url_by_network_from_map( - self, network: str, expected: dict - ) -> None: - self.assertEqual( - bittensor.utils.get_explorer_root_url_by_network_from_map( - network, bittensor.__network_explorer_map__ - ), - expected, - ) + ("happy-path-3", 4, [2], [15], torch.tensor([0, 0, 15, 0], dtype=torch.int64)), + ], +) +def test_happy_path(test_id, n, uids, bonds, expected_output): + # Act + result = weight_utils.convert_bond_uids_and_vals_to_tensor(n, uids, bonds) - @data( - ( - "local", - "0x123", - { - "opentensor": "https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Fentrypoint-finney.opentensor.ai%3A443#/explorer/query/0x123", - "taostats": "https://x.taostats.io/extrinsic/0x123", - }, - ), - ( - "endpoint", - "0x456", - { - "opentensor": "https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Fentrypoint-finney.opentensor.ai%3A443#/explorer/query/0x456", - "taostats": 
"https://x.taostats.io/extrinsic/0x456", - }, - ), - ("bad", "0x789", {}), - ("", "0xabc", {}), - ("unknown", "0xdef", {}), - ) - @unpack - def test_get_explorer_url_for_network_by_network_and_block_hash( - self, network: str, block_hash: str, expected: dict - ) -> None: - self.assertEqual( - bittensor.utils.get_explorer_url_for_network( - network, block_hash, bittensor.__network_explorer_map__ - ), - expected, - ) + # Assert + assert torch.equal(result, expected_output), f"Failed {test_id}" -if __name__ == "__main__": - unittest.main() +@pytest.mark.parametrize( + "test_id, n, uids, bonds, expected_output", + [ + ("edge-1", 1, [0], [0], torch.tensor([0], dtype=torch.int64)), # Single element + ( + "edge-2", + 10, + [], + [], + torch.zeros(10, dtype=torch.int64), + ), # Empty uids and bonds + ], +) +def test_edge_cases(test_id, n, uids, bonds, expected_output): + # Act + result = weight_utils.convert_bond_uids_and_vals_to_tensor(n, uids, bonds) + + # Assert + assert torch.equal(result, expected_output), f"Failed {test_id}" + + +@pytest.mark.parametrize( + "test_id, n, uids, bonds, exception", + [ + ("error-1", 5, [1, 3, 6], [10, 20, 30], IndexError), # uid out of bounds + ("error-2", -1, [0], [10], RuntimeError), # Negative number of neurons + ], +) +def test_error_cases(test_id, n, uids, bonds, exception): + # Act / Assert + with pytest.raises(exception): + weight_utils.convert_bond_uids_and_vals_to_tensor(n, uids, bonds) From a03e32235a3ecf56c6953cb99bf8fc52d65c12ba Mon Sep 17 00:00:00 2001 From: Gus Date: Tue, 30 Jan 2024 13:43:33 -0500 Subject: [PATCH 12/23] Refactor test_balance to pytest fmt --- tests/unit_tests/utils/test_balance.py | 948 +++++++++++++------------ 1 file changed, 475 insertions(+), 473 deletions(-) diff --git a/tests/unit_tests/utils/test_balance.py b/tests/unit_tests/utils/test_balance.py index 3b2ca26b70..129af42f01 100644 --- a/tests/unit_tests/utils/test_balance.py +++ b/tests/unit_tests/utils/test_balance.py @@ -1,5 +1,3 @@ -import unittest - import pytest from hypothesis import given from hypothesis import strategies as st @@ -8,8 +6,6 @@ from bittensor import Balance from tests.helpers import CLOSE_IN_VALUE -from tests.helpers import CLOSE_IN_VALUE - """ Test the Balance class """ @@ -30,478 +26,484 @@ def remove_zero_filter(x): return int(x * pow(10, 9)) != 0 -class TestBalance(unittest.TestCase): - @given(balance=valid_tao_numbers_strategy) - def test_balance_init(self, balance: Union[int, float]): - """ - Test the initialization of the Balance object. - """ - balance_ = Balance(balance) - if isinstance(balance, int): - assert balance_.rao == balance - elif isinstance(balance, float): - assert balance_.tao == CLOSE_IN_VALUE(balance, 0.00001) - - @given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) - def test_balance_add(self, balance: Union[int, float], balance2: Union[int, float]): - """ - Test the addition of two Balance objects. 
- """ - balance_ = Balance(balance) - balance2_ = Balance(balance2) - rao_: int - rao2_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - if isinstance(balance2, int): - rao2_ = balance2 - elif isinstance(balance2, float): - rao2_ = int(balance2 * pow(10, 9)) - - sum_ = balance_ + balance2_ - assert isinstance(sum_, Balance) - assert CLOSE_IN_VALUE(sum_.rao, 5) == rao_ + rao2_ - - @given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) - def test_balance_add_other_not_balance( - self, balance: Union[int, float], balance2: Union[int, float] - ): - """ - Test the addition of a Balance object and a non-Balance object. - """ - balance_ = Balance(balance) - balance2_ = balance2 - rao_: int - rao2_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - # convert balance2 to rao. Assume balance2 was rao - rao2_ = int(balance2) - - sum_ = balance_ + balance2_ - assert isinstance(sum_, Balance) - assert CLOSE_IN_VALUE(sum_.rao, 5) == rao_ + rao2_ - - @given(balance=valid_tao_numbers_strategy) - def test_balance_eq_other_not_balance(self, balance: Union[int, float]): - """ - Test the equality of a Balance object and a non-Balance object. - """ - balance_ = Balance(balance) - rao2_: int - # convert balance2 to rao. This assumes balance2 is a rao value - rao2_ = int(balance_.rao) - - self.assertEqual( - CLOSE_IN_VALUE(rao2_, 5), - balance_, - msg=f"Balance {balance_} is not equal to {rao2_}", - ) - - @given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) - def test_balance_radd_other_not_balance( - self, balance: Union[int, float], balance2: Union[int, float] - ): - """ - Test the right addition (radd) of a Balance object and a non-Balance object. - """ - balance_ = Balance(balance) - balance2_ = balance2 - rao_: int - rao2_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - # assume balance2 is a rao value - rao2_ = int(balance2) - - sum_ = balance2_ + balance_ # This is an radd - assert isinstance(sum_, Balance) - assert CLOSE_IN_VALUE(sum_.rao, 5) == rao2_ + rao_ - - @given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) - def test_balance_sub(self, balance: Union[int, float], balance2: Union[int, float]): - """ - Test the subtraction of two Balance objects. - """ - balance_ = Balance(balance) - balance2_ = Balance(balance2) - rao_: int - rao2_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - if isinstance(balance2, int): - rao2_ = balance2 - elif isinstance(balance2, float): - rao2_ = int(balance2 * pow(10, 9)) - - diff_ = balance_ - balance2_ - assert isinstance(diff_, Balance) - assert CLOSE_IN_VALUE(diff_.rao, 5) == rao_ - rao2_ - - @given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) - def test_balance_sub_other_not_balance( - self, balance: Union[int, float], balance2: Union[int, float] - ): - """ - Test the subtraction of a Balance object and a non-Balance object. 
- """ - balance_ = Balance(balance) - balance2_ = balance2 - rao_: int - rao2_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - # assume balance2 is a rao value - rao2_ = int(balance2) - - diff_ = balance_ - balance2_ - assert isinstance(diff_, Balance) - assert CLOSE_IN_VALUE(diff_.rao, 5) == rao_ - rao2_ - - @given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) - def test_balance_rsub_other_not_balance( - self, balance: Union[int, float], balance2: Union[int, float] - ): - """ - Test the right subtraction (rsub) of a Balance object and a non-Balance object. - """ - balance_ = Balance(balance) - balance2_ = balance2 - rao_: int - rao2_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - # assume balance2 is a rao value - rao2_ = int(balance2) - - diff_ = balance2_ - balance_ # This is an rsub - assert isinstance(diff_, Balance) - assert CLOSE_IN_VALUE(diff_.rao, 5) == rao2_ - rao_ - - @given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) - def test_balance_mul(self, balance: Union[int, float], balance2: Union[int, float]): - """ - Test the multiplication of two Balance objects. - """ - balance_ = Balance(balance) - balance2_ = Balance(balance2) - rao_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - if isinstance(balance2, int): - rao2_ = balance2 - elif isinstance(balance2, float): - rao2_ = int(balance2 * pow(10, 9)) - - prod_ = balance_ * balance2_ - assert isinstance(prod_, Balance) - self.assertAlmostEqual( - prod_.rao, - rao_ * rao2_, - 9, - msg="{} * {} == {} != {} * {} == {}".format( - balance_, balance2_, prod_.rao, rao_, balance2, rao_ * balance2 - ), - ) - - @given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) - def test_balance_mul_other_not_balance( - self, balance: Union[int, float], balance2: Union[int, float] - ): - """ - Test the multiplication of a Balance object and a non-Balance object. - """ - balance_ = Balance(balance) - balance2_ = balance2 - rao_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - - prod_ = balance_ * balance2_ - assert isinstance(prod_, Balance) - self.assertAlmostEqual(prod_.rao, int(rao_ * balance2), delta=20) - - @given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) - def test_balance_rmul_other_not_balance( - self, balance: Union[int, float], balance2: Union[int, float] - ): - """ - Test the right multiplication (rmul) of a Balance object and a non-Balance object. - """ - balance_ = Balance(balance) - balance2_ = balance2 - rao_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - - prod_ = balance2_ * balance_ # This is an rmul - assert isinstance(prod_, Balance) - self.assertAlmostEqual( - prod_.rao, - int(balance2 * rao_), - delta=20, - msg=f"{balance2_} * {balance_} = {prod_} != {balance2} * {rao_} == {balance2 * rao_}", - ) - - @given( - balance=valid_tao_numbers_strategy, - balance2=valid_tao_numbers_strategy.filter(remove_zero_filter), - ) # Avoid zero division - def test_balance_truediv( - self, balance: Union[int, float], balance2: Union[int, float] - ): - """ - Test the true division (/) of two Balance objects. 
- """ - balance_ = Balance(balance) - balance2_ = Balance(balance2) - rao_: int - rao2_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - if isinstance(balance2, int): - rao2_ = balance2 - elif isinstance(balance2, float): - rao2_ = int(balance2 * pow(10, 9)) - - quot_ = balance_ / balance2_ - assert isinstance(quot_, Balance) - self.assertAlmostEqual( - quot_.rao, - int(rao_ / rao2_), - delta=2, - msg=f"{balance_} / {balance2_} = {quot_} != {rao_} / {rao2_} == {int(rao_ / rao2_)}", - ) - - @given( - balance=valid_tao_numbers_strategy, - balance2=valid_tao_numbers_strategy.filter(remove_zero_filter), - ) - def test_balance_truediv_other_not_balance( - self, balance: Union[int, float], balance2: Union[int, float] - ): - """ - Test the true division (/) of a Balance object and a non-Balance object. - """ - balance_ = Balance(balance) - balance2_ = balance2 - rao_: int - rao2_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - # assume balance2 is a rao value +@given(balance=valid_tao_numbers_strategy) +def test_balance_init(balance: Union[int, float]): + """ + Test the initialization of the Balance object. + """ + balance_ = Balance(balance) + if isinstance(balance, int): + assert balance_.rao == balance + elif isinstance(balance, float): + assert balance_.tao == CLOSE_IN_VALUE(balance, 0.00001) + + +@given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) +def test_balance_add(balance: Union[int, float], balance2: Union[int, float]): + """ + Test the addition of two Balance objects. + """ + balance_ = Balance(balance) + balance2_ = Balance(balance2) + rao_: int + rao2_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + if isinstance(balance2, int): rao2_ = balance2 - - quot_ = balance_ / balance2_ - self.assertAlmostEqual( - quot_.rao, - int(rao_ / rao2_), - delta=10, - msg="{} / {} = {} != {}".format( - balance_, balance2_, quot_.rao, int(rao_ / rao2_) - ), - ) - - @given( - balance=valid_tao_numbers_strategy.filter(remove_zero_filter), - balance2=valid_tao_numbers_strategy, - ) # This is a filter to avoid division by zero - def test_balance_rtruediv_other_not_balance( - self, balance: Union[int, float], balance2: Union[int, float] - ): - """ - Test the right true division (rtruediv) of a Balance object and a non-Balance object. - """ - balance_ = Balance(balance) - balance2_ = balance2 - rao_: int - rao2_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - # assume balance2 is a rao value + elif isinstance(balance2, float): + rao2_ = int(balance2 * pow(10, 9)) + + sum_ = balance_ + balance2_ + assert isinstance(sum_, Balance) + assert CLOSE_IN_VALUE(sum_.rao, 5) == rao_ + rao2_ + + +@given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) +def test_balance_add_other_not_balance( + balance: Union[int, float], balance2: Union[int, float] +): + """ + Test the addition of a Balance object and a non-Balance object. + """ + balance_ = Balance(balance) + balance2_ = balance2 + rao_: int + rao2_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + # convert balance2 to rao. 
Assume balance2 was rao + rao2_ = int(balance2) + + sum_ = balance_ + balance2_ + assert isinstance(sum_, Balance) + assert CLOSE_IN_VALUE(sum_.rao, 5) == rao_ + rao2_ + + +@given(balance=valid_tao_numbers_strategy) +def test_balance_eq_other_not_balance(balance: Union[int, float]): + """ + Test the equality of a Balance object and a non-Balance object. + """ + balance_ = Balance(balance) + rao2_: int + # convert balance2 to rao. This assumes balance2 is a rao value + rao2_ = int(balance_.rao) + + assert CLOSE_IN_VALUE(rao2_, 5) == balance_ + + +@given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) +def test_balance_radd_other_not_balance( + balance: Union[int, float], balance2: Union[int, float] +): + """ + Test the right addition (radd) of a Balance object and a non-Balance object. + """ + balance_ = Balance(balance) + balance2_ = balance2 + rao_: int + rao2_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + # assume balance2 is a rao value + rao2_ = int(balance2) + + sum_ = balance2_ + balance_ # This is an radd + assert isinstance(sum_, Balance) + assert CLOSE_IN_VALUE(sum_.rao, 5) == rao2_ + rao_ + + +@given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) +def test_balance_sub(balance: Union[int, float], balance2: Union[int, float]): + """ + Test the subtraction of two Balance objects. + """ + balance_ = Balance(balance) + balance2_ = Balance(balance2) + rao_: int + rao2_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + if isinstance(balance2, int): rao2_ = balance2 - - quot_ = balance2_ / balance_ # This is an rtruediv - assert isinstance(quot_, Balance) - self.assertAlmostEqual( - quot_.rao, - int(rao2_ / rao_), - delta=5, - msg="{} / {} = {}".format(balance2_, balance_, quot_), - ) - - @given( - balance=valid_tao_numbers_strategy, - balance2=valid_tao_numbers_strategy.filter(remove_zero_filter), - ) # Avoid zero division - def test_balance_floordiv( - self, balance: Union[int, float], balance2: Union[int, float] - ): - """ - Test the floor division (//) of two Balance objects. - """ - balance_ = Balance(balance) - balance2_ = Balance(balance2) - rao_: int - rao2_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - if isinstance(balance2, int): - rao2_ = balance2 - elif isinstance(balance2, float): - rao2_ = int(balance2 * pow(10, 9)) - - quot_ = balance_ // balance2_ - assert isinstance(quot_, Balance) - assert CLOSE_IN_VALUE(quot_.rao, 5) == rao_ // rao2_ - - @given( - balance=valid_tao_numbers_strategy, - balance2=valid_tao_numbers_strategy.filter(remove_zero_filter), - ) - def test_balance_floordiv_other_not_balance( - self, balance: Union[int, float], balance2: Union[int, float] - ): - """ - Test the floor division (//) of a Balance object and a non-Balance object. 
- """ - balance_ = Balance(balance) - balance2_ = balance2 - rao_: int - rao2_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - # assume balance2 is a rao value + elif isinstance(balance2, float): + rao2_ = int(balance2 * pow(10, 9)) + + diff_ = balance_ - balance2_ + assert isinstance(diff_, Balance) + assert CLOSE_IN_VALUE(diff_.rao, 5) == rao_ - rao2_ + + +@given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) +def test_balance_sub_other_not_balance( + balance: Union[int, float], balance2: Union[int, float] +): + """ + Test the subtraction of a Balance object and a non-Balance object. + """ + balance_ = Balance(balance) + balance2_ = balance2 + rao_: int + rao2_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + # assume balance2 is a rao value + rao2_ = int(balance2) + + diff_ = balance_ - balance2_ + assert isinstance(diff_, Balance) + assert CLOSE_IN_VALUE(diff_.rao, 5) == rao_ - rao2_ + + +@given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) +def test_balance_rsub_other_not_balance( + balance: Union[int, float], balance2: Union[int, float] +): + """ + Test the right subtraction (rsub) of a Balance object and a non-Balance object. + """ + balance_ = Balance(balance) + balance2_ = balance2 + rao_: int + rao2_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + # assume balance2 is a rao value + rao2_ = int(balance2) + + diff_ = balance2_ - balance_ # This is an rsub + assert isinstance(diff_, Balance) + assert CLOSE_IN_VALUE(diff_.rao, 5) == rao2_ - rao_ + + +@given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) +def test_balance_mul(balance: Union[int, float], balance2: Union[int, float]): + """ + Test the multiplication of two Balance objects. + """ + balance_ = Balance(balance) + balance2_ = Balance(balance2) + rao_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + if isinstance(balance2, int): + rao2_ = balance2 + elif isinstance(balance2, float): + rao2_ = int(balance2 * pow(10, 9)) + + prod_ = balance_ * balance2_ + assert isinstance(prod_, Balance) + + assert prod_.rao == pytest.approx( + rao_ * rao2_, 9 + ), f"{balance_} * {balance2_} == {prod_.rao} != {rao_} * {balance2} == {rao_ * balance2}" + + +@given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) +def test_balance_mul_other_not_balance( + balance: Union[int, float], balance2: Union[int, float] +): + """ + Test the multiplication of a Balance object and a non-Balance object. + """ + balance_ = Balance(balance) + balance2_ = balance2 + rao_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + + prod_ = balance_ * balance2_ + assert isinstance(prod_, Balance) + + assert ( + abs(prod_.rao - int(rao_ * balance2)) <= 20 + ), f"{prod_.rao} != {int(rao_ * balance2)}" + assert prod_.rao == pytest.approx(int(rao_ * balance2)) + + +@given(balance=valid_tao_numbers_strategy, balance2=valid_tao_numbers_strategy) +def test_balance_rmul_other_not_balance( + balance: Union[int, float], balance2: Union[int, float] +): + """ + Test the right multiplication (rmul) of a Balance object and a non-Balance object. 
+ """ + balance_ = Balance(balance) + balance2_ = balance2 + rao_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + + prod_ = balance2_ * balance_ # This is an rmul + assert isinstance(prod_, Balance) + + assert ( + abs(prod_.rao - int(balance2 * rao_)) <= 20 + ), f"{prod_.rao} != {int(balance2 * rao_)}" + assert prod_.rao == pytest.approx(int(balance2 * rao_)) + + +@given( + balance=valid_tao_numbers_strategy, + balance2=valid_tao_numbers_strategy.filter(remove_zero_filter), +) # Avoid zero division +def test_balance_truediv(balance: Union[int, float], balance2: Union[int, float]): + """ + Test the true division (/) of two Balance objects. + """ + balance_ = Balance(balance) + balance2_ = Balance(balance2) + rao_: int + rao2_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + if isinstance(balance2, int): rao2_ = balance2 + elif isinstance(balance2, float): + rao2_ = int(balance2 * pow(10, 9)) - quot_ = balance_ // balance2_ - assert isinstance(quot_, Balance) - self.assertAlmostEqual( - quot_.rao, - rao_ // rao2_, - delta=5, - msg="{} // {} = {} != {}".format( - balance_, balance2_, quot_.rao, rao_ // rao2_ - ), - ) - - @given( - balance=valid_tao_numbers_strategy.filter(remove_zero_filter), - balance2=valid_tao_numbers_strategy, - ) # This is a filter to avoid division by zero - def test_balance_rfloordiv_other_not_balance( - self, balance: Union[int, float], balance2: Union[int, float] - ): - """ - Test the right floor division (rfloordiv) of a Balance object and a non-Balance object. - """ - balance_ = Balance(balance) - balance2_ = balance2 - rao_: int - rao2_: int - if isinstance(balance, int): - rao_ = balance - elif isinstance(balance, float): - rao_ = int(balance * pow(10, 9)) - # assume balance2 is a rao value + quot_ = balance_ / balance2_ + assert isinstance(quot_, Balance) + assert ( + abs(quot_.rao - int(rao_ / rao2_)) <= 2 + ), f"{quot_.rao} != {int(rao_ / rao2_)}" + assert quot_.rao == pytest.approx(int(rao_ / rao2_)) + + +@given( + balance=valid_tao_numbers_strategy, + balance2=valid_tao_numbers_strategy.filter(remove_zero_filter), +) +def test_balance_truediv_other_not_balance( + balance: Union[int, float], balance2: Union[int, float] +): + """ + Test the true division (/) of a Balance object and a non-Balance object. + """ + balance_ = Balance(balance) + balance2_ = balance2 + rao_: int + rao2_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + # assume balance2 is a rao value + rao2_ = balance2 + + quot_ = balance_ / balance2_ + assert quot_.rao == pytest.approx(int(rao_ / rao2_)) + assert ( + abs(quot_.rao - int(rao_ / rao2_)) <= 10 + ), f"{quot_.rao} != {int(rao_ / rao2_)}" + + +@given( + balance=valid_tao_numbers_strategy.filter(remove_zero_filter), + balance2=valid_tao_numbers_strategy, +) # This is a filter to avoid division by zero +def test_balance_rtruediv_other_not_balance( + balance: Union[int, float], balance2: Union[int, float] +): + """ + Test the right true division (rtruediv) of a Balance object and a non-Balance object. 
+ """ + balance_ = Balance(balance) + balance2_ = balance2 + rao_: int + rao2_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + # assume balance2 is a rao value + rao2_ = balance2 + + quot_ = balance2_ / balance_ # This is an rtruediv + assert isinstance(quot_, Balance) + expected_value = int(rao2_ / rao_) + assert ( + abs(quot_.rao - expected_value) <= 5 + ), f"{balance2_} / {balance_} = {quot_.rao} != {expected_value}" + assert quot_.rao == pytest.approx(expected_value) + + +@given( + balance=valid_tao_numbers_strategy, + balance2=valid_tao_numbers_strategy.filter(remove_zero_filter), +) # Avoid zero division +def test_balance_floordiv(balance: Union[int, float], balance2: Union[int, float]): + """ + Test the floor division (//) of two Balance objects. + """ + balance_ = Balance(balance) + balance2_ = Balance(balance2) + rao_: int + rao2_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + if isinstance(balance2, int): rao2_ = balance2 + elif isinstance(balance2, float): + rao2_ = int(balance2 * pow(10, 9)) - quot_ = balance2_ // balance_ # This is an rfloordiv - assert isinstance(quot_, Balance) - self.assertAlmostEqual(quot_.rao, rao2_ // rao_, delta=5) - - @given(balance=valid_tao_numbers_strategy) - def test_balance_not_eq_none(self, balance: Union[int, float]): - """ - Test the inequality (!=) of a Balance object and None. - """ - balance_ = Balance(balance) - assert not balance_ == None - - @given(balance=valid_tao_numbers_strategy) - def test_balance_neq_none(self, balance: Union[int, float]): - """ - Test the inequality (!=) of a Balance object and None. - """ - balance_ = Balance(balance) - assert balance_ != None - - def test_balance_init_from_invalid_value(self): - """ - Test the initialization of a Balance object with an invalid value. - """ - with pytest.raises(TypeError): - Balance("invalid not a number") - - @given(balance=valid_tao_numbers_strategy) - def test_balance_add_invalid_type(self, balance: Union[int, float]): - """ - Test the addition of a Balance object with an invalid type. - """ - balance_ = Balance(balance) - with pytest.raises(NotImplementedError): - _ = balance_ + "" - - @given(balance=valid_tao_numbers_strategy) - def test_balance_sub_invalid_type(self, balance: Union[int, float]): - """ - Test the subtraction of a Balance object with an invalid type. - """ - balance_ = Balance(balance) - with pytest.raises(NotImplementedError): - _ = balance_ - "" - - @given(balance=valid_tao_numbers_strategy) - def test_balance_div_invalid_type(self, balance: Union[int, float]): - """ - Test the division of a Balance object with an invalid type. - """ - balance_ = Balance(balance) - with pytest.raises(NotImplementedError): - _ = balance_ / "" - - @given(balance=valid_tao_numbers_strategy) - def test_balance_mul_invalid_type(self, balance: Union[int, float]): - """ - Test the multiplication of a Balance object with an invalid type. - """ - balance_ = Balance(balance) - with pytest.raises(NotImplementedError): - _ = balance_ * "" - - @given(balance=valid_tao_numbers_strategy) - def test_balance_eq_invalid_type(self, balance: Union[int, float]): - """ - Test the equality of a Balance object with an invalid type. 
- """ - balance_ = Balance(balance) - with pytest.raises(NotImplementedError): - balance_ == "" + quot_ = balance_ // balance2_ + assert isinstance(quot_, Balance) + assert CLOSE_IN_VALUE(quot_.rao, 5) == rao_ // rao2_ + + +@given( + balance=valid_tao_numbers_strategy, + balance2=valid_tao_numbers_strategy.filter(remove_zero_filter), +) +def test_balance_floordiv_other_not_balance( + balance: Union[int, float], balance2: Union[int, float] +): + """ + Test the floor division (//) of a Balance object and a non-Balance object. + """ + balance_ = Balance(balance) + balance2_ = balance2 + rao_: int + rao2_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + # assume balance2 is a rao value + rao2_ = balance2 + + quot_ = balance_ // balance2_ + assert isinstance(quot_, Balance) + expected_value = rao_ // rao2_ + assert ( + abs(quot_.rao - expected_value) <= 5 + ), f"{balance_} // {balance2_} = {quot_.rao} != {expected_value}" + assert quot_.rao == pytest.approx(rao_ // rao2_) + + +@given( + balance=valid_tao_numbers_strategy.filter(remove_zero_filter), + balance2=valid_tao_numbers_strategy, +) # This is a filter to avoid division by zero +def test_balance_rfloordiv_other_not_balance( + balance: Union[int, float], balance2: Union[int, float] +): + """ + Test the right floor division (rfloordiv) of a Balance object and a non-Balance object. + """ + balance_ = Balance(balance) + balance2_ = balance2 + rao_: int + rao2_: int + if isinstance(balance, int): + rao_ = balance + elif isinstance(balance, float): + rao_ = int(balance * pow(10, 9)) + # assume balance2 is a rao value + rao2_ = balance2 + + quot_ = balance2_ // balance_ # This is an rfloordiv + assert isinstance(quot_, Balance) + expected_value = rao2_ // rao_ + assert quot_.rao == pytest.approx(rao2_ // rao_) + assert abs(quot_.rao - expected_value) <= 5 + + +@given(balance=valid_tao_numbers_strategy) +def test_balance_not_eq_none(balance: Union[int, float]): + """ + Test the inequality (!=) of a Balance object and None. + """ + balance_ = Balance(balance) + assert not balance_ == None + + +@given(balance=valid_tao_numbers_strategy) +def test_balance_neq_none(balance: Union[int, float]): + """ + Test the inequality (!=) of a Balance object and None. + """ + balance_ = Balance(balance) + assert balance_ != None + + +def test_balance_init_from_invalid_value(): + """ + Test the initialization of a Balance object with an invalid value. + """ + with pytest.raises(TypeError): + Balance("invalid not a number") + + +@given(balance=valid_tao_numbers_strategy) +def test_balance_add_invalid_type(balance: Union[int, float]): + """ + Test the addition of a Balance object with an invalid type. + """ + balance_ = Balance(balance) + with pytest.raises(NotImplementedError): + _ = balance_ + "" + + +@given(balance=valid_tao_numbers_strategy) +def test_balance_sub_invalid_type(balance: Union[int, float]): + """ + Test the subtraction of a Balance object with an invalid type. + """ + balance_ = Balance(balance) + with pytest.raises(NotImplementedError): + _ = balance_ - "" + + +@given(balance=valid_tao_numbers_strategy) +def test_balance_div_invalid_type(balance: Union[int, float]): + """ + Test the division of a Balance object with an invalid type. 
+ """ + balance_ = Balance(balance) + with pytest.raises(NotImplementedError): + _ = balance_ / "" + + +@given(balance=valid_tao_numbers_strategy) +def test_balance_mul_invalid_type(balance: Union[int, float]): + """ + Test the multiplication of a Balance object with an invalid type. + """ + balance_ = Balance(balance) + with pytest.raises(NotImplementedError): + _ = balance_ * "" + + +@given(balance=valid_tao_numbers_strategy) +def test_balance_eq_invalid_type(balance: Union[int, float]): + """ + Test the equality of a Balance object with an invalid type. + """ + balance_ = Balance(balance) + with pytest.raises(NotImplementedError): + balance_ == "" From 536a7d5e0d1c74423670c73dfe04fbb7f8896da5 Mon Sep 17 00:00:00 2001 From: Gus Date: Tue, 30 Jan 2024 14:12:49 -0500 Subject: [PATCH 13/23] Refactor test_metagraph to pytest fmt --- tests/unit_tests/test_metagraph.py | 181 +++++++++++++++-------------- 1 file changed, 92 insertions(+), 89 deletions(-) diff --git a/tests/unit_tests/test_metagraph.py b/tests/unit_tests/test_metagraph.py index 167a0a375d..6a3caed1b2 100644 --- a/tests/unit_tests/test_metagraph.py +++ b/tests/unit_tests/test_metagraph.py @@ -15,104 +15,107 @@ # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. -import unittest from unittest.mock import Mock - -import bittensor +import pytest import torch +import bittensor -class TestMetagraph(unittest.TestCase): - def setUp(self): - # Mock the subtensor and neurons - self.subtensor = Mock() - self.neurons = [ - Mock( - uid=i, - trust=i + 0.5, - consensus=i + 0.1, - incentive=i + 0.2, - dividends=i + 0.3, - rank=i + 0.4, - emission=i + 0.5, - active=i, - last_update=i, - validator_permit=True if i % 2 == 0 else False, - validator_trust=i + 0.6, - total_stake=Mock(tao=i + 0.7), - stake=i + 0.8, - axon_info="axon_info_{}".format(i), - weights=[(j, j + 0.1) for j in range(5)], # Add some mock weights - bonds=[(j, j + 0.2) for j in range(5)], # Add some mock bonds - ) - for i in range(10) - ] - - def test_set_metagraph_attributes(self): - metagraph = bittensor.metagraph(1, sync=False) - metagraph.neurons = self.neurons - metagraph._set_metagraph_attributes(block=5, subtensor=self.subtensor) - - # Check the attributes are set as expected - self.assertEqual(metagraph.n.item(), len(self.neurons)) - self.assertEqual(metagraph.block.item(), 5) - self.assertTrue( - torch.equal( - metagraph.uids, - torch.tensor( - [neuron.uid for neuron in self.neurons], dtype=torch.int64 - ), - ) - ) - self.assertTrue( - torch.equal( - metagraph.trust, - torch.tensor( - [neuron.trust for neuron in self.neurons], dtype=torch.float32 - ), - ) - ) - self.assertTrue( - torch.equal( - metagraph.consensus, - torch.tensor( - [neuron.consensus for neuron in self.neurons], dtype=torch.float32 - ), - ) +@pytest.fixture +def mock_environment(): + # Create a Mock for subtensor + subtensor = Mock() + + # Create a list of Mock Neurons + neurons = [ + Mock( + uid=i, + trust=i + 0.5, + consensus=i + 0.1, + incentive=i + 0.2, + dividends=i + 0.3, + rank=i + 0.4, + emission=i + 0.5, + active=i, + last_update=i, + validator_permit=i % 2 == 0, + validator_trust=i + 0.6, + total_stake=Mock(tao=i + 0.7), + stake=i + 0.8, + axon_info=f"axon_info_{i}", + weights=[(j, j + 0.1) for j in range(5)], + bonds=[(j, j + 0.2) for j in range(5)], ) - # Similarly for other attributes... 
+ for i in range(10) + ] + + return subtensor, neurons - # Test the axons - self.assertEqual(metagraph.axons, [n.axon_info for n in self.neurons]) - def test_process_weights_or_bonds(self): - metagraph = bittensor.metagraph(1, sync=False) - metagraph.neurons = self.neurons +def test_set_metagraph_attributes(mock_environment): + subtensor, neurons = mock_environment + metagraph = bittensor.metagraph(1, sync=False) + metagraph.neurons = neurons + metagraph._set_metagraph_attributes(block=5, subtensor=subtensor) - # Test weights processing - weights = metagraph._process_weights_or_bonds( - data=[neuron.weights for neuron in self.neurons], attribute="weights" + # Check the attributes are set as expected + assert metagraph.n.item() == len(neurons) + assert metagraph.block.item() == 5 + assert ( + torch.equal( + metagraph.uids, + torch.tensor([neuron.uid for neuron in neurons], dtype=torch.int64), ) - self.assertEqual( - weights.shape[0], len(self.neurons) - ) # Number of rows should be equal to number of neurons - self.assertEqual( - weights.shape[1], len(self.neurons) - ) # Number of columns should be equal to number of neurons - # TODO: Add more checks to ensure the weights have been processed correctly - - # Test bonds processing - bonds = metagraph._process_weights_or_bonds( - data=[neuron.bonds for neuron in self.neurons], attribute="bonds" + == True + ) + + assert ( + torch.equal( + metagraph.trust, + torch.tensor([neuron.trust for neuron in neurons], dtype=torch.float32), + ) + == True + ) + + assert ( + torch.equal( + metagraph.consensus, + torch.tensor([neuron.consensus for neuron in neurons], dtype=torch.float32), ) - self.assertEqual( - bonds.shape[0], len(self.neurons) - ) # Number of rows should be equal to number of neurons - self.assertEqual( - bonds.shape[1], len(self.neurons) - ) # Number of columns should be equal to number of neurons - # TODO: Add more checks to ensure the bonds have been processed correctly + == True + ) + # Similarly for other attributes... 
+ + # Test the axons + assert metagraph.axons == [n.axon_info for n in neurons] + + +def test_process_weights_or_bonds(mock_environment): + _, neurons = mock_environment + metagraph = bittensor.metagraph(1, sync=False) + metagraph.neurons = neurons + + # Test weights processing + weights = metagraph._process_weights_or_bonds( + data=[neuron.weights for neuron in neurons], attribute="weights" + ) + assert weights.shape[0] == len( + neurons + ) # Number of rows should be equal to number of neurons + assert weights.shape[1] == len( + neurons + ) # Number of columns should be equal to number of neurons + # TODO: Add more checks to ensure the weights have been processed correctly + # Test bonds processing + bonds = metagraph._process_weights_or_bonds( + data=[neuron.bonds for neuron in neurons], attribute="bonds" + ) + assert bonds.shape[0] == len( + neurons + ) # Number of rows should be equal to number of neurons + assert bonds.shape[1] == len( + neurons + ) # Number of columns should be equal to number of neurons -if __name__ == "__main__": - unittest.main() + # TODO: Add more checks to ensure the bonds have been processed correctly From 0adc5c998a4e95fe51e2055e1bbaba8302245ca3 Mon Sep 17 00:00:00 2001 From: Gus Date: Tue, 30 Jan 2024 15:09:38 -0500 Subject: [PATCH 14/23] Refactor test_wallet to pytest fmt --- tests/unit_tests/test_wallet.py | 667 ++++++++++++++++---------------- 1 file changed, 337 insertions(+), 330 deletions(-) diff --git a/tests/unit_tests/test_wallet.py b/tests/unit_tests/test_wallet.py index 119e3a3115..d64a7fb9d8 100644 --- a/tests/unit_tests/test_wallet.py +++ b/tests/unit_tests/test_wallet.py @@ -19,212 +19,227 @@ import time import pytest import random -import getpass -import unittest +import re import bittensor from rich.prompt import Confirm from ansible_vault import Vault -from unittest.mock import patch, MagicMock - - -class TestWalletUpdate(unittest.TestCase): - def setUp(self): - self.default_updated_password = "nacl_password" - self.default_legacy_password = "ansible_password" - self.empty_wallet = bittensor.wallet(name=f"mock-empty-{str(time.time())}") - self.legacy_wallet = self.create_legacy_wallet() - self.wallet = self.create_wallet() - - def legacy_encrypt_keyfile_data(keyfile_data: bytes, password: str = None) -> bytes: - console = bittensor.__console__ - with console.status(":locked_with_key: Encrypting key..."): - vault = Vault(password) - return vault.vault.encrypt(keyfile_data) - - def create_wallet(self): - # create an nacl wallet - wallet = bittensor.wallet(name=f"mock-{str(time.time())}") - with patch.object( - bittensor, - "ask_password_to_encrypt", - return_value=self.default_updated_password, - ): - wallet.create() - assert "NaCl" in str(wallet.coldkey_file) - - return wallet - - def create_legacy_wallet(self, legacy_password=None): - def _legacy_encrypt_keyfile_data(*args, **kwargs): - args = { - k: v - for k, v in zip( - self.legacy_encrypt_keyfile_data.__code__.co_varnames[: len(args)], - args, - ) - } - kwargs = {**args, **kwargs} - kwargs["password"] = legacy_password - return TestWalletUpdate.legacy_encrypt_keyfile_data(**kwargs) - - legacy_wallet = bittensor.wallet(name=f"mock-legacy-{str(time.time())}") - legacy_password = ( - self.default_legacy_password if legacy_password == None else legacy_password - ) +from unittest.mock import patch - # create a legacy ansible wallet - with patch.object( - bittensor, - "encrypt_keyfile_data", - new=_legacy_encrypt_keyfile_data, - # new = TestWalletUpdate.legacy_encrypt_keyfile_data, - ): - 
legacy_wallet.create() - assert "Ansible" in str(legacy_wallet.coldkey_file) - return legacy_wallet +def legacy_encrypt_keyfile_data(keyfile_data: bytes, password: str = None) -> bytes: + console = bittensor.__console__ + with console.status(":locked_with_key: Encrypting key..."): + vault = Vault(password) + return vault.vault.encrypt(keyfile_data) - def test_encrypt_and_decrypt(self): - """Test message can be encrypted and decrypted successfully with ansible/nacl.""" - json_data = { - "address": "This is the address.", - "id": "This is the id.", - "key": "This is the key.", - } - message = json.dumps(json_data).encode() - # encrypt and decrypt with nacl - encrypted_message = bittensor.encrypt_keyfile_data(message, "password") - decrypted_message = bittensor.decrypt_keyfile_data( - encrypted_message, "password" - ) - assert decrypted_message == message - assert bittensor.keyfile_data_is_encrypted(encrypted_message) - assert not bittensor.keyfile_data_is_encrypted(decrypted_message) - assert not bittensor.keyfile_data_is_encrypted_ansible(decrypted_message) - assert bittensor.keyfile_data_is_encrypted_nacl(encrypted_message) - - # encrypt and decrypt with legacy ansible - encrypted_message = TestWalletUpdate.legacy_encrypt_keyfile_data( - message, "password" - ) - decrypted_message = bittensor.decrypt_keyfile_data( - encrypted_message, "password" - ) - assert decrypted_message == message - assert bittensor.keyfile_data_is_encrypted(encrypted_message) - assert not bittensor.keyfile_data_is_encrypted(decrypted_message) - assert not bittensor.keyfile_data_is_encrypted_nacl(decrypted_message) - assert bittensor.keyfile_data_is_encrypted_ansible(encrypted_message) - - def test_check_and_update_encryption_not_updated(self): - """Test for a few cases where wallet should not be updated. - 1. When the wallet is already updated. - 2. When it is the hotkey. - 3. When the wallet is empty. - 4. When the wallet is legacy but no prompt to ask for password. - 5. When the password is wrong. - """ - # test the checking with no rewriting needs to be done. - with patch("bittensor.encrypt_keyfile_data") as encrypt: - # self.wallet is already the most updated with nacl encryption. - assert self.wallet.coldkey_file.check_and_update_encryption() - - # hotkey_file is not encrypted, thus do not need to be updated. - assert not self.wallet.hotkey_file.check_and_update_encryption() - - # empty_wallet has not been created, thus do not need to be updated. - assert not self.empty_wallet.coldkey_file.check_and_update_encryption() - - # legacy wallet cannot be updated without asking for password form prompt. - assert not self.legacy_wallet.coldkey_file.check_and_update_encryption( - no_prompt=True - ) +def create_wallet(default_updated_password): + # create an nacl wallet + wallet = bittensor.wallet(name=f"mock-{str(time.time())}") + with patch.object( + bittensor, + "ask_password_to_encrypt", + return_value=default_updated_password, + ): + wallet.create() + assert "NaCl" in str(wallet.coldkey_file) + + return wallet + - # Wrong password - legacy_wallet = self.create_legacy_wallet() - with patch("getpass.getpass", return_value="wrong_password"), patch.object( - Confirm, "ask", return_value=False - ): - assert not legacy_wallet.coldkey_file.check_and_update_encryption() - - # no renewal has been done in this test. - assert not encrypt.called - - def test_check_and_update_excryption(self, legacy_wallet=None): - """Test for the alignment of the updated VS old wallet. - 1. Same coldkey_file data. - 2. Same coldkey path. - 3. 
Same hotkey_file data. - 4. Same hotkey path. - 5. same password. - - Read the updated wallet in 2 ways. - 1. Directly as the output of check_and_update_encryption() - 2. Read from file using the same coldkey and hotkey name - """ - - def check_new_coldkey_file(keyfile): - new_keyfile_data = keyfile._read_keyfile_data_from_file() - new_decrypted_keyfile_data = bittensor.decrypt_keyfile_data( - new_keyfile_data, legacy_password +def create_legacy_wallet(default_legacy_password=None, legacy_password=None): + def _legacy_encrypt_keyfile_data(*args, **kwargs): + args = { + k: v + for k, v in zip( + legacy_encrypt_keyfile_data.__code__.co_varnames[: len(args)], + args, ) - new_path = legacy_wallet.coldkey_file.path - - assert old_coldkey_file_data != None - assert new_keyfile_data != None - assert not old_coldkey_file_data == new_keyfile_data - assert bittensor.keyfile_data_is_encrypted_ansible(old_coldkey_file_data) - assert bittensor.keyfile_data_is_encrypted_nacl(new_keyfile_data) - assert not bittensor.keyfile_data_is_encrypted_nacl(old_coldkey_file_data) - assert not bittensor.keyfile_data_is_encrypted_ansible(new_keyfile_data) - assert old_decrypted_coldkey_file_data == new_decrypted_keyfile_data - assert new_path == old_coldkey_path - - def check_new_hotkey_file(keyfile): - new_keyfile_data = keyfile._read_keyfile_data_from_file() - new_path = legacy_wallet.hotkey_file.path - - assert old_hotkey_file_data == new_keyfile_data - assert new_path == old_hotkey_path - assert not bittensor.keyfile_data_is_encrypted(new_keyfile_data) - - if legacy_wallet == None: - legacy_password = f"PASSword-{random.randint(0, 10000)}" - legacy_wallet = self.create_legacy_wallet(legacy_password=legacy_password) - - else: - legacy_password = self.default_legacy_password - - # get old cold keyfile data - old_coldkey_file_data = ( - legacy_wallet.coldkey_file._read_keyfile_data_from_file() - ) - old_decrypted_coldkey_file_data = bittensor.decrypt_keyfile_data( - old_coldkey_file_data, legacy_password + } + kwargs = {**args, **kwargs} + kwargs["password"] = legacy_password + return legacy_encrypt_keyfile_data(**kwargs) + + legacy_wallet = bittensor.wallet(name=f"mock-legacy-{str(time.time())}") + legacy_password = ( + default_legacy_password if legacy_password == None else legacy_password + ) + + # create a legacy ansible wallet + with patch.object( + bittensor, + "encrypt_keyfile_data", + new=_legacy_encrypt_keyfile_data, + # new = TestWalletUpdate.legacy_encrypt_keyfile_data, + ): + legacy_wallet.create() + assert "Ansible" in str(legacy_wallet.coldkey_file) + + return legacy_wallet + + +@pytest.fixture +def wallet_update_setup(): + # Setup the default passwords and wallets + default_updated_password = "nacl_password" + default_legacy_password = "ansible_password" + empty_wallet = bittensor.wallet(name=f"mock-empty-{str(time.time())}") + legacy_wallet = create_legacy_wallet( + default_legacy_password=default_legacy_password + ) + wallet = create_wallet(default_updated_password) + + return { + "default_updated_password": default_updated_password, + "default_legacy_password": default_legacy_password, + "empty_wallet": empty_wallet, + "legacy_wallet": legacy_wallet, + "wallet": wallet, + } + + +def test_encrypt_and_decrypt(): + """Test message can be encrypted and decrypted successfully with ansible/nacl.""" + json_data = { + "address": "This is the address.", + "id": "This is the id.", + "key": "This is the key.", + } + message = json.dumps(json_data).encode() + + # encrypt and decrypt with nacl + 
encrypted_message = bittensor.encrypt_keyfile_data(message, "password") + decrypted_message = bittensor.decrypt_keyfile_data(encrypted_message, "password") + assert decrypted_message == message + assert bittensor.keyfile_data_is_encrypted(encrypted_message) + assert not bittensor.keyfile_data_is_encrypted(decrypted_message) + assert not bittensor.keyfile_data_is_encrypted_ansible(decrypted_message) + assert bittensor.keyfile_data_is_encrypted_nacl(encrypted_message) + + # encrypt and decrypt with legacy ansible + encrypted_message = legacy_encrypt_keyfile_data(message, "password") + decrypted_message = bittensor.decrypt_keyfile_data(encrypted_message, "password") + assert decrypted_message == message + assert bittensor.keyfile_data_is_encrypted(encrypted_message) + assert not bittensor.keyfile_data_is_encrypted(decrypted_message) + assert not bittensor.keyfile_data_is_encrypted_nacl(decrypted_message) + assert bittensor.keyfile_data_is_encrypted_ansible(encrypted_message) + + +def test_check_and_update_encryption_not_updated(wallet_update_setup): + """Test for a few cases where wallet should not be updated. + 1. When the wallet is already updated. + 2. When it is the hotkey. + 3. When the wallet is empty. + 4. When the wallet is legacy but no prompt to ask for password. + 5. When the password is wrong. + """ + wallet = wallet_update_setup["wallet"] + empty_wallet = wallet_update_setup["empty_wallet"] + legacy_wallet = wallet_update_setup["legacy_wallet"] + default_legacy_password = wallet_update_setup["default_legacy_password"] + # test the checking with no rewriting needs to be done. + with patch("bittensor.encrypt_keyfile_data") as encrypt: + # self.wallet is already the most updated with nacl encryption. + assert wallet.coldkey_file.check_and_update_encryption() + + # hotkey_file is not encrypted, thus do not need to be updated. + assert not wallet.hotkey_file.check_and_update_encryption() + + # empty_wallet has not been created, thus do not need to be updated. + assert not empty_wallet.coldkey_file.check_and_update_encryption() + + # legacy wallet cannot be updated without asking for password form prompt. + assert not legacy_wallet.coldkey_file.check_and_update_encryption( + no_prompt=True ) - old_coldkey_path = legacy_wallet.coldkey_file.path - - # get old hot keyfile data - old_hotkey_file_data = legacy_wallet.hotkey_file._read_keyfile_data_from_file() - old_hotkey_path = legacy_wallet.hotkey_file.path - # update legacy_wallet from ansible to nacl - with patch("getpass.getpass", return_value=legacy_password), patch.object( - Confirm, "ask", return_value=True + # Wrong password + legacy_wallet = create_legacy_wallet( + default_legacy_password=default_legacy_password + ) + with patch("getpass.getpass", return_value="wrong_password"), patch.object( + Confirm, "ask", return_value=False ): - legacy_wallet.coldkey_file.check_and_update_encryption() - - # get new keyfile data from the same legacy wallet - check_new_coldkey_file(legacy_wallet.coldkey_file) - check_new_hotkey_file(legacy_wallet.hotkey_file) - - # get new keyfile data from wallet name - updated_legacy_wallet = bittensor.wallet( - name=legacy_wallet.name, hotkey=legacy_wallet.hotkey_str + assert not legacy_wallet.coldkey_file.check_and_update_encryption() + + # no renewal has been done in this test. + assert not encrypt.called + + +def test_check_and_update_excryption(wallet_update_setup, legacy_wallet=None): + """Test for the alignment of the updated VS old wallet. + 1. Same coldkey_file data. + 2. Same coldkey path. + 3. 
Same hotkey_file data. + 4. Same hotkey path. + 5. same password. + + Read the updated wallet in 2 ways. + 1. Directly as the output of check_and_update_encryption() + 2. Read from file using the same coldkey and hotkey name + """ + default_legacy_password = wallet_update_setup["default_legacy_password"] + + def check_new_coldkey_file(keyfile): + new_keyfile_data = keyfile._read_keyfile_data_from_file() + new_decrypted_keyfile_data = bittensor.decrypt_keyfile_data( + new_keyfile_data, legacy_password ) - check_new_coldkey_file(updated_legacy_wallet.coldkey_file) - check_new_hotkey_file(updated_legacy_wallet.hotkey_file) + new_path = legacy_wallet.coldkey_file.path + + assert old_coldkey_file_data != None + assert new_keyfile_data != None + assert not old_coldkey_file_data == new_keyfile_data + assert bittensor.keyfile_data_is_encrypted_ansible(old_coldkey_file_data) + assert bittensor.keyfile_data_is_encrypted_nacl(new_keyfile_data) + assert not bittensor.keyfile_data_is_encrypted_nacl(old_coldkey_file_data) + assert not bittensor.keyfile_data_is_encrypted_ansible(new_keyfile_data) + assert old_decrypted_coldkey_file_data == new_decrypted_keyfile_data + assert new_path == old_coldkey_path + + def check_new_hotkey_file(keyfile): + new_keyfile_data = keyfile._read_keyfile_data_from_file() + new_path = legacy_wallet.hotkey_file.path + + assert old_hotkey_file_data == new_keyfile_data + assert new_path == old_hotkey_path + assert not bittensor.keyfile_data_is_encrypted(new_keyfile_data) + + if legacy_wallet == None: + legacy_password = f"PASSword-{random.randint(0, 10000)}" + legacy_wallet = create_legacy_wallet(legacy_password=legacy_password) + + else: + legacy_password = default_legacy_password + + # get old cold keyfile data + old_coldkey_file_data = legacy_wallet.coldkey_file._read_keyfile_data_from_file() + old_decrypted_coldkey_file_data = bittensor.decrypt_keyfile_data( + old_coldkey_file_data, legacy_password + ) + old_coldkey_path = legacy_wallet.coldkey_file.path + + # get old hot keyfile data + old_hotkey_file_data = legacy_wallet.hotkey_file._read_keyfile_data_from_file() + old_hotkey_path = legacy_wallet.hotkey_file.path + + # update legacy_wallet from ansible to nacl + with patch("getpass.getpass", return_value=legacy_password), patch.object( + Confirm, "ask", return_value=True + ): + legacy_wallet.coldkey_file.check_and_update_encryption() + + # get new keyfile data from the same legacy wallet + check_new_coldkey_file(legacy_wallet.coldkey_file) + check_new_hotkey_file(legacy_wallet.hotkey_file) + + # get new keyfile data from wallet name + updated_legacy_wallet = bittensor.wallet( + name=legacy_wallet.name, hotkey=legacy_wallet.hotkey_str + ) + check_new_coldkey_file(updated_legacy_wallet.coldkey_file) + check_new_hotkey_file(updated_legacy_wallet.hotkey_file) # def test_password_retain(self): # [tick] test the same password works @@ -236,157 +251,149 @@ def check_new_hotkey_file(keyfile): # [tick] test that the hotkeys are not affected -class TestWallet(unittest.TestCase): - def setUp(self): - self.mock_wallet = bittensor.wallet( - name=f"mock-{str(time.time())}", - hotkey=f"mock-{str(time.time())}", - path="/tmp/tests_wallets/do_not_use", - ) - self.mock_wallet.create_new_coldkey( - use_password=False, overwrite=True, suppress=True - ) - self.mock_wallet.create_new_hotkey( - use_password=False, overwrite=True, suppress=True +@pytest.fixture +def mock_wallet(): + wallet = bittensor.wallet( + name=f"mock-{str(time.time())}", + hotkey=f"mock-{str(time.time())}", + 
path="/tmp/tests_wallets/do_not_use", + ) + wallet.create_new_coldkey(use_password=False, overwrite=True, suppress=True) + wallet.create_new_hotkey(use_password=False, overwrite=True, suppress=True) + + return wallet + + +def test_regen_coldkeypub_from_ss58_addr(mock_wallet): + """Test the `regenerate_coldkeypub` method of the wallet class, which regenerates the cold key pair from an SS58 address. + It checks whether the `set_coldkeypub` method is called with the expected arguments, and verifies that the generated key pair's SS58 address matches the input SS58 address. + It also tests the behavior when an invalid SS58 address is provided, raising a `ValueError` as expected. + """ + ss58_address = "5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zxm" + with patch.object(mock_wallet, "set_coldkeypub") as mock_set_coldkeypub: + mock_wallet.regenerate_coldkeypub( + ss58_address=ss58_address, overwrite=True, suppress=True ) - def test_regen_coldkeypub_from_ss58_addr(self): - """Test the `regenerate_coldkeypub` method of the wallet class, which regenerates the cold key pair from an SS58 address. - It checks whether the `set_coldkeypub` method is called with the expected arguments, and verifies that the generated key pair's SS58 address matches the input SS58 address. - It also tests the behavior when an invalid SS58 address is provided, raising a `ValueError` as expected. - """ - ss58_address = "5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zxm" - with patch.object(self.mock_wallet, "set_coldkeypub") as mock_set_coldkeypub: - self.mock_wallet.regenerate_coldkeypub( - ss58_address=ss58_address, overwrite=True, suppress=True - ) + mock_set_coldkeypub.assert_called_once() + keypair: bittensor.Keypair = mock_set_coldkeypub.call_args_list[0][0][0] + assert keypair.ss58_address == ss58_address - mock_set_coldkeypub.assert_called_once() - keypair: bittensor.Keypair = mock_set_coldkeypub.call_args_list[0][0][0] - self.assertEqual(keypair.ss58_address, ss58_address) - - ss58_address_bad = ( - "5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zx" # 1 character short + ss58_address_bad = ( + "5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zx" # 1 character short + ) + with pytest.raises(ValueError): + mock_wallet.regenerate_coldkeypub( + ss58_address=ss58_address_bad, overwrite=True, suppress=True ) - with pytest.raises(ValueError): - self.mock_wallet.regenerate_coldkeypub( - ss58_address=ss58_address_bad, overwrite=True, suppress=True - ) - def test_regen_coldkeypub_from_hex_pubkey_str(self): - """Test the `regenerate_coldkeypub` method of the wallet class, which regenerates the cold key pair from a hex public key string. - It checks whether the `set_coldkeypub` method is called with the expected arguments, and verifies that the generated key pair's public key matches the input public key. - It also tests the behavior when an invalid public key string is provided, raising a `ValueError` as expected. - """ - pubkey_str = ( - "0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512f" + +def test_regen_coldkeypub_from_hex_pubkey_str(mock_wallet): + """Test the `regenerate_coldkeypub` method of the wallet class, which regenerates the cold key pair from a hex public key string. + It checks whether the `set_coldkeypub` method is called with the expected arguments, and verifies that the generated key pair's public key matches the input public key. + It also tests the behavior when an invalid public key string is provided, raising a `ValueError` as expected. 
+ """ + pubkey_str = "0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512f" + with patch.object(mock_wallet, "set_coldkeypub") as mock_set_coldkeypub: + mock_wallet.regenerate_coldkeypub( + public_key=pubkey_str, overwrite=True, suppress=True ) - with patch.object(self.mock_wallet, "set_coldkeypub") as mock_set_coldkeypub: - self.mock_wallet.regenerate_coldkeypub( - public_key=pubkey_str, overwrite=True, suppress=True - ) - mock_set_coldkeypub.assert_called_once() - keypair: bittensor.Keypair = mock_set_coldkeypub.call_args_list[0][0][0] - self.assertEqual("0x" + keypair.public_key.hex(), pubkey_str) + mock_set_coldkeypub.assert_called_once() + keypair: bittensor.Keypair = mock_set_coldkeypub.call_args_list[0][0][0] + assert "0x" + keypair.public_key.hex() == pubkey_str + + pubkey_str_bad = "0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512" # 1 character short + with pytest.raises(ValueError): + mock_wallet.regenerate_coldkeypub( + ss58_address=pubkey_str_bad, overwrite=True, suppress=True + ) - pubkey_str_bad = "0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512" # 1 character short - with pytest.raises(ValueError): - self.mock_wallet.regenerate_coldkeypub( - ss58_address=pubkey_str_bad, overwrite=True, suppress=True - ) - def test_regen_coldkeypub_from_hex_pubkey_bytes(self): - """Test the `regenerate_coldkeypub` method of the wallet class, which regenerates the cold key pair from a hex public key byte string. - It checks whether the `set_coldkeypub` method is called with the expected arguments, and verifies that the generated key pair's public key matches the input public key. - """ - pubkey_str = ( - "0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512f" +def test_regen_coldkeypub_from_hex_pubkey_bytes(mock_wallet): + """Test the `regenerate_coldkeypub` method of the wallet class, which regenerates the cold key pair from a hex public key byte string. + It checks whether the `set_coldkeypub` method is called with the expected arguments, and verifies that the generated key pair's public key matches the input public key. + """ + pubkey_str = "0x32939b6abc4d81f02dff04d2b8d1d01cc8e71c5e4c7492e4fa6a238cdca3512f" + pubkey_bytes = bytes.fromhex(pubkey_str[2:]) # Remove 0x from beginning + with patch.object(mock_wallet, "set_coldkeypub") as mock_set_coldkeypub: + mock_wallet.regenerate_coldkeypub( + public_key=pubkey_bytes, overwrite=True, suppress=True ) - pubkey_bytes = bytes.fromhex(pubkey_str[2:]) # Remove 0x from beginning - with patch.object(self.mock_wallet, "set_coldkeypub") as mock_set_coldkeypub: - self.mock_wallet.regenerate_coldkeypub( - public_key=pubkey_bytes, overwrite=True, suppress=True - ) - mock_set_coldkeypub.assert_called_once() - keypair: bittensor.Keypair = mock_set_coldkeypub.call_args_list[0][0][0] - self.assertEqual(keypair.public_key, pubkey_bytes) - - def test_regen_coldkeypub_no_pubkey(self): - """Test the `regenerate_coldkeypub` method of the wallet class when no public key is provided. - It verifies that a `ValueError` is raised when neither a public key nor an SS58 address is provided. 
- """ - with pytest.raises(ValueError): - # Must provide either public_key or ss58_address - self.mock_wallet.regenerate_coldkeypub( - ss58_address=None, public_key=None, overwrite=True, suppress=True - ) + mock_set_coldkeypub.assert_called_once() + keypair: bittensor.Keypair = mock_set_coldkeypub.call_args_list[0][0][0] + assert keypair.public_key == pubkey_bytes - def test_regen_coldkey_from_hex_seed_str(self): - """Test the `regenerate_coldkey` method of the wallet class, which regenerates the cold key pair from a hex seed string. - It checks whether the `set_coldkey` method is called with the expected arguments, and verifies that the generated key pair's seed and SS58 address match the input seed and the expected SS58 address. - It also tests the behavior when an invalid seed string is provided, raising a `ValueError` as expected. - """ - ss58_addr = "5D5cwd8DX6ij7nouVcoxDuWtJfiR1BnzCkiBVTt7DU8ft5Ta" - seed_str = "0x659c024d5be809000d0d93fe378cfde020846150b01c49a201fc2a02041f7636" - with patch.object(self.mock_wallet, "set_coldkey") as mock_set_coldkey: - self.mock_wallet.regenerate_coldkey( - seed=seed_str, overwrite=True, suppress=True - ) - mock_set_coldkey.assert_called_once() - keypair: bittensor.Keypair = mock_set_coldkey.call_args_list[0][0][0] - self.assertRegex( - ( - keypair.seed_hex - if isinstance(keypair.seed_hex, str) - else keypair.seed_hex.hex() - ), - rf"(0x|){seed_str[2:]}", - ) - self.assertEqual( - keypair.ss58_address, ss58_addr - ) # Check that the ss58 address is correct - - seed_str_bad = "0x659c024d5be809000d0d93fe378cfde020846150b01c49a201fc2a02041f763" # 1 character short - with pytest.raises(ValueError): - self.mock_wallet.regenerate_coldkey( - seed=seed_str_bad, overwrite=True, suppress=True - ) +def test_regen_coldkeypub_no_pubkey(mock_wallet): + """Test the `regenerate_coldkeypub` method of the wallet class when no public key is provided. + It verifies that a `ValueError` is raised when neither a public key nor an SS58 address is provided. + """ + with pytest.raises(ValueError): + # Must provide either public_key or ss58_address + mock_wallet.regenerate_coldkeypub( + ss58_address=None, public_key=None, overwrite=True, suppress=True + ) - def test_regen_hotkey_from_hex_seed_str(self): - """Test the `regenerate_coldkey` method of the wallet class, which regenerates the cold key pair from a hex seed string. - It checks whether the `set_coldkey` method is called with the expected arguments, and verifies that the generated key pair's seed and SS58 address match the input seed and the expected SS58 address. - It also tests the behavior when an invalid seed string is provided, raising a `ValueError` as expected. 
- """ - ss58_addr = "5D5cwd8DX6ij7nouVcoxDuWtJfiR1BnzCkiBVTt7DU8ft5Ta" - seed_str = "0x659c024d5be809000d0d93fe378cfde020846150b01c49a201fc2a02041f7636" - with patch.object(self.mock_wallet, "set_hotkey") as mock_set_hotkey: - self.mock_wallet.regenerate_hotkey( - seed=seed_str, overwrite=True, suppress=True - ) - mock_set_hotkey.assert_called_once() - keypair: bittensor.Keypair = mock_set_hotkey.call_args_list[0][0][0] - self.assertRegex( - ( - keypair.seed_hex - if isinstance(keypair.seed_hex, str) - else keypair.seed_hex.hex() - ), - rf"(0x|){seed_str[2:]}", - ) - self.assertEqual( - keypair.ss58_address, ss58_addr - ) # Check that the ss58 address is correct - - seed_str_bad = "0x659c024d5be809000d0d93fe378cfde020846150b01c49a201fc2a02041f763" # 1 character short - with pytest.raises(ValueError): - self.mock_wallet.regenerate_hotkey( - seed=seed_str_bad, overwrite=True, suppress=True - ) +def test_regen_coldkey_from_hex_seed_str(mock_wallet): + """Test the `regenerate_coldkey` method of the wallet class, which regenerates the cold key pair from a hex seed string. + It checks whether the `set_coldkey` method is called with the expected arguments, and verifies that the generated key pair's seed and SS58 address match the input seed and the expected SS58 address. + It also tests the behavior when an invalid seed string is provided, raising a `ValueError` as expected. + """ + ss58_addr = "5D5cwd8DX6ij7nouVcoxDuWtJfiR1BnzCkiBVTt7DU8ft5Ta" + seed_str = "0x659c024d5be809000d0d93fe378cfde020846150b01c49a201fc2a02041f7636" + with patch.object(mock_wallet, "set_coldkey") as mock_set_coldkey: + mock_wallet.regenerate_coldkey(seed=seed_str, overwrite=True, suppress=True) + + mock_set_coldkey.assert_called_once() + keypair: bittensor.Keypair = mock_set_coldkey.call_args_list[0][0][0] + seed_hex = ( + keypair.seed_hex + if isinstance(keypair.seed_hex, str) + else keypair.seed_hex.hex() + ) + assert re.match( + rf"(0x|){seed_str[2:]}", seed_hex + ), "The seed_hex does not match the expected pattern" + assert ( + keypair.ss58_address == ss58_addr + ) # Check that the ss58 address is correct + + seed_str_bad = "0x659c024d5be809000d0d93fe378cfde020846150b01c49a201fc2a02041f763" # 1 character short + with pytest.raises(ValueError): + mock_wallet.regenerate_coldkey(seed=seed_str_bad, overwrite=True, suppress=True) + + +def test_regen_hotkey_from_hex_seed_str(mock_wallet): + """Test the `regenerate_coldkey` method of the wallet class, which regenerates the cold key pair from a hex seed string. + It checks whether the `set_coldkey` method is called with the expected arguments, and verifies that the generated key pair's seed and SS58 address match the input seed and the expected SS58 address. + It also tests the behavior when an invalid seed string is provided, raising a `ValueError` as expected. 
+ """ + ss58_addr = "5D5cwd8DX6ij7nouVcoxDuWtJfiR1BnzCkiBVTt7DU8ft5Ta" + seed_str = "0x659c024d5be809000d0d93fe378cfde020846150b01c49a201fc2a02041f7636" + with patch.object(mock_wallet, "set_hotkey") as mock_set_hotkey: + mock_wallet.regenerate_hotkey(seed=seed_str, overwrite=True, suppress=True) + + mock_set_hotkey.assert_called_once() + keypair: bittensor.Keypair = mock_set_hotkey.call_args_list[0][0][0] + + seed_hex = ( + keypair.seed_hex + if isinstance(keypair.seed_hex, str) + else keypair.seed_hex.hex() + ) -if __name__ == "__main__": - unittest.main() + pattern = rf"(0x|){seed_str[2:]}" + assert re.match( + pattern, seed_hex + ), f"The seed_hex '{seed_hex}' does not match the expected pattern '{pattern}'" + assert ( + keypair.ss58_address == ss58_addr + ) # Check that the ss58 address is correct + + seed_str_bad = "0x659c024d5be809000d0d93fe378cfde020846150b01c49a201fc2a02041f763" # 1 character short + with pytest.raises(ValueError): + mock_wallet.regenerate_hotkey(seed=seed_str_bad, overwrite=True, suppress=True) From ad6808abb0de6a9a9076ef52145bd8c466965fe8 Mon Sep 17 00:00:00 2001 From: Gus Date: Wed, 31 Jan 2024 09:42:01 -0500 Subject: [PATCH 15/23] Expand test coverage for axon --- tests/unit_tests/test_axon.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/tests/unit_tests/test_axon.py b/tests/unit_tests/test_axon.py index a8e57e17a3..9eb3b49710 100644 --- a/tests/unit_tests/test_axon.py +++ b/tests/unit_tests/test_axon.py @@ -104,6 +104,29 @@ def wrong_forward_fn(synapse: NonInheritedSynapse) -> Any: server.attach(wrong_forward_fn) +def test_log_and_handle_error(): + from bittensor.axon import log_and_handle_error + + synapse = SynapseMock() + + synapse = log_and_handle_error(synapse, Exception("Error"), 500, 100) + assert synapse.axon.status_code == 500 + assert synapse.axon.status_message == "Error" + assert synapse.axon.process_time is not None + + +def test_create_error_response(): + from bittensor.axon import create_error_response + + synapse = SynapseMock() + synapse.axon.status_code = 500 + synapse.axon.status_message = "Error" + + response = create_error_response(synapse) + assert response.status_code == 500 + assert response.body == b'{"message":"Error"}' + + # Mock synapse class for testing From e5dcda2660ed5c8b41e287573f6460533895fa47 Mon Sep 17 00:00:00 2001 From: Gus Date: Wed, 31 Jan 2024 13:25:40 -0500 Subject: [PATCH 16/23] Expand test coverage for weight_utils --- tests/unit_tests/utils/test_weight_utils.py | 197 ++++++++++++++++++++ 1 file changed, 197 insertions(+) diff --git a/tests/unit_tests/utils/test_weight_utils.py b/tests/unit_tests/utils/test_weight_utils.py index c1530a8d69..0875d921e0 100644 --- a/tests/unit_tests/utils/test_weight_utils.py +++ b/tests/unit_tests/utils/test_weight_utils.py @@ -108,3 +108,200 @@ def test_normalize_with_max_weight(): y = weight_utils.normalize_max_weight(x, limit=limit - change) z = weight_utils.normalize_max_weight(x, limit=limit + change) assert (y - z).abs().sum() < eplison + + +@pytest.mark.parametrize( + "test_id, n, uids, weights, expected", + [ + ("happy-path-1", 3, [0, 1, 2], [15, 5, 80], torch.tensor([0.15, 0.05, 0.8])), + ("happy-path-2", 4, [1, 3], [50, 50], torch.tensor([0.0, 0.5, 0.0, 0.5])), + ], +) +def test_convert_weight_uids_and_vals_to_tensor_happy_path( + test_id, n, uids, weights, expected +): + # Act + result = weight_utils.convert_weight_uids_and_vals_to_tensor(n, uids, weights) + + # Assert + assert torch.allclose(result, expected), f"Failed {test_id}" + + 
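# --- Editor's note (illustrative sketch, not part of the patch above) ---------
# The parametrized happy-path cases above pin down the expected behaviour of
# weight_utils.convert_weight_uids_and_vals_to_tensor: sparse (uid, weight)
# pairs are scattered into a dense length-n tensor and normalized by their sum,
# with an empty or all-zero weight list mapping to an all-zero tensor. A
# minimal reference implementation consistent with those expectations might
# look like the following; the actual implementation in
# bittensor.utils.weight_utils may differ in detail.
import torch
from typing import List


def convert_weight_uids_and_vals_to_tensor_sketch(
    n: int, uids: List[int], weights: List[int]
) -> torch.FloatTensor:
    # Scatter the sparse weights into a dense row of length n.
    row = torch.zeros(n, dtype=torch.float32)
    for uid, weight in zip(uids, weights):
        row[uid] = float(weight)
    # Normalize by the total weight; leave the row untouched when it sums to zero.
    total = row.sum()
    return row / total if total > 0 else row


# Example: uids=[1, 3] with weights=[50, 50] on a 4-neuron subnet yields
# tensor([0.0, 0.5, 0.0, 0.5]), matching the "happy-path-2" case above.
# ------------------------------------------------------------------------------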
+@pytest.mark.parametrize( + "test_id, n, uids, weights, expected", + [ + ("edge_case_empty", 5, [], [], torch.zeros(5)), + ("edge_case_single", 1, [0], [100], torch.tensor([1.0])), + ("edge_case_all_zeros", 4, [0, 1, 2, 3], [0, 0, 0, 0], torch.zeros(4)), + ], +) +def test_convert_weight_uids_and_vals_to_tensor_edge_cases( + test_id, n, uids, weights, expected +): + # Act + result = weight_utils.convert_weight_uids_and_vals_to_tensor(n, uids, weights) + + # Assert + assert torch.allclose(result, expected), f"Failed {test_id}" + + +@pytest.mark.parametrize( + "test_id, n, uids, weights, exception", + [ + ("error-case-mismatched-lengths", 3, [0, 1, 3, 4, 5], [10, 20, 30], IndexError), + ("error-case-negative-n", -1, [0, 1], [10, 20], RuntimeError), + ("error-case-invalid-uids", 3, [0, 3], [10, 20], IndexError), + ], +) +def test_convert_weight_uids_and_vals_to_tensor_error_cases( + test_id, n, uids, weights, exception +): + # Act / Assert + with pytest.raises(exception): + weight_utils.convert_weight_uids_and_vals_to_tensor(n, uids, weights) + + +@pytest.mark.parametrize( + "test_id, n, uids, weights, subnets, expected", + [ + ( + "happy-path-1", + 3, + [0, 1, 2], + [15, 5, 80], + [0, 1, 2], + torch.tensor([0.15, 0.05, 0.8]), + ), + ( + "happy-path-2", + 3, + [0, 2], + [300, 300], + [0, 1, 2], + torch.tensor([0.5, 0.0, 0.5]), + ), + ], +) +def test_convert_root_weight_uids_and_vals_to_tensor_happy_paths( + test_id, n, uids, weights, subnets, expected +): + # Act + result = weight_utils.convert_root_weight_uids_and_vals_to_tensor( + n, uids, weights, subnets + ) + + # Assert + assert torch.allclose(result, expected, atol=1e-4), f"Failed {test_id}" + + +@pytest.mark.parametrize( + "test_id, n, uids, weights, subnets, expected", + [ + ( + "edge-1", + 1, + [0], + [0], + [0], + torch.tensor([0.0]), + ), # Single neuron with zero weight + ( + "edge-2", + 2, + [0, 1], + [0, 0], + [0, 1], + torch.tensor([0.0, 0.0]), + ), # All zero weights + ], +) +def test_convert_root_weight_uids_and_vals_to_tensor_edge_cases( + test_id, n, uids, weights, subnets, expected +): + # Act + result = weight_utils.convert_root_weight_uids_and_vals_to_tensor( + n, uids, weights, subnets + ) + + # Assert + assert torch.allclose(result, expected, atol=1e-4), f"Failed {test_id}" + + +@pytest.mark.parametrize( + "test_id, n, uids, weights, subnets, exception", + [ + ("error-1", 3, [1, 3], [100, 200], [1, 2], Exception), # uid not in subnets + ("error-2", 3, [1, 2, 3], [100, 200], [1], Exception), # More uids than subnets + ], +) +def test_convert_root_weight_uids_and_vals_to_tensor_error_cases( + test_id, n, uids, weights, subnets, exception +): + # Act and Assert + with pytest.raises(exception): + weight_utils.convert_root_weight_uids_and_vals_to_tensor( + n, uids, weights, subnets + ) + print(f"Failed {test_id}") + + +@pytest.mark.parametrize( + "test_id, n, uids, bonds, expected_output", + [ + ( + "happy-path-1", + 5, + [1, 3, 4], + [10, 20, 30], + torch.tensor([0, 10, 0, 20, 30], dtype=torch.int64), + ), + ( + "happy-path-2", + 3, + [0, 1, 2], + [7, 8, 9], + torch.tensor([7, 8, 9], dtype=torch.int64), + ), + ("happy-path-3", 4, [2], [15], torch.tensor([0, 0, 15, 0], dtype=torch.int64)), + ], +) +def test_happy_path(test_id, n, uids, bonds, expected_output): + # Act + result = weight_utils.convert_bond_uids_and_vals_to_tensor(n, uids, bonds) + + # Assert + assert torch.equal(result, expected_output), f"Failed {test_id}" + + +@pytest.mark.parametrize( + "test_id, n, uids, bonds, expected_output", + [ + ("edge-1", 1, [0], 
[0], torch.tensor([0], dtype=torch.int64)), # Single element + ( + "edge-2", + 10, + [], + [], + torch.zeros(10, dtype=torch.int64), + ), # Empty uids and bonds + ], +) +def test_edge_cases(test_id, n, uids, bonds, expected_output): + # Act + result = weight_utils.convert_bond_uids_and_vals_to_tensor(n, uids, bonds) + + # Assert + assert torch.equal(result, expected_output), f"Failed {test_id}" + + +@pytest.mark.parametrize( + "test_id, n, uids, bonds, exception", + [ + ("error-1", 5, [1, 3, 6], [10, 20, 30], IndexError), # uid out of bounds + ("error-2", -1, [0], [10], RuntimeError), # Negative number of neurons + ], +) +def test_error_cases(test_id, n, uids, bonds, exception): + # Act / Assert + with pytest.raises(exception): + weight_utils.convert_bond_uids_and_vals_to_tensor(n, uids, bonds) From 6916495ffb33848a17b7b2169e1a7bc34fb05358 Mon Sep 17 00:00:00 2001 From: Gus Date: Thu, 1 Feb 2024 08:58:23 -0500 Subject: [PATCH 17/23] Pin black version in circle ci config --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f865826f3f..2cba954879 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -239,7 +239,7 @@ workflows: - check_compatibility: python_version: "3.11" name: check-compatibility-3.11 - + pr-requirements: jobs: - black: @@ -272,7 +272,7 @@ workflows: branches: only: - /^(release|hotfix)/.*/ - + release-requirements: jobs: - check-version-not-released: From b62d168a87f58ea389ed4591fc91b5fa93ef011c Mon Sep 17 00:00:00 2001 From: ifrit98 Date: Thu, 1 Feb 2024 22:13:05 +0000 Subject: [PATCH 18/23] wrap set weights in a ttl multiprocessing call so we don't hang past TTL --- bittensor/extrinsics/set_weights.py | 40 ++++++++++++++++++++++++++++- bittensor/subtensor.py | 4 +-- 2 files changed, 41 insertions(+), 3 deletions(-) diff --git a/bittensor/extrinsics/set_weights.py b/bittensor/extrinsics/set_weights.py index d27214cd36..38aaf76be2 100644 --- a/bittensor/extrinsics/set_weights.py +++ b/bittensor/extrinsics/set_weights.py @@ -22,13 +22,14 @@ from rich.prompt import Confirm from typing import Union import bittensor.utils.weight_utils as weight_utils +import multiprocessing from loguru import logger logger = logger.opt(colors=True) -def set_weights_extrinsic( +def ttl_set_weights_extrinsic( subtensor: "bittensor.subtensor", wallet: "bittensor.wallet", netuid: int, @@ -38,10 +39,45 @@ def set_weights_extrinsic( wait_for_inclusion: bool = False, wait_for_finalization: bool = False, prompt: bool = False, + ttl: int = 100, +): + args = ( + subtensor.chain_endpoint, + wallet, + netuid, + uids, + weights, + version_key, + wait_for_inclusion, + wait_for_finalization, + prompt, + ) + process = multiprocessing.Process(target=set_weights_extrinsic, args=args) + process.start() + process.join(timeout=ttl) + if process.is_alive(): + process.terminate() + process.join() + return False + return True + + +def set_weights_extrinsic( + subtensor_endpoint: str, + wallet: "bittensor.wallet", + netuid: int, + uids: Union[torch.LongTensor, list], + weights: Union[torch.FloatTensor, list], + version_key: int = 0, + wait_for_inclusion: bool = False, + wait_for_finalization: bool = False, + prompt: bool = False, ) -> bool: r"""Sets the given weights and values on chain for wallet hotkey account. Args: + subtensor_endpoint (bittensor.subtensor): + Subtensor endpoint to use. wallet (bittensor.wallet): Bittensor wallet object. 
netuid (int): @@ -62,6 +98,8 @@ def set_weights_extrinsic( success (bool): Flag is ``true`` if extrinsic was finalized or uncluded in the block. If we did not wait for finalization / inclusion, the response is ``true``. """ + subtensor = bittensor.subtensor(subtensor_endpoint) + # First convert types. if isinstance(uids, list): uids = torch.tensor(uids, dtype=torch.int64) diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 67ab08e497..599c082d02 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -65,7 +65,7 @@ swap_hotkey_extrinsic, ) from .extrinsics.transfer import transfer_extrinsic -from .extrinsics.set_weights import set_weights_extrinsic +from .extrinsics.set_weights import set_weights_extrinsic, ttl_set_weights_extrinsic from .extrinsics.prometheus import prometheus_extrinsic from .extrinsics.delegation import ( delegate_extrinsic, @@ -569,7 +569,7 @@ def set_weights( This function is crucial in shaping the network's collective intelligence, where each neuron's learning and contribution are influenced by the weights it sets towards others【81†source】. """ - return set_weights_extrinsic( + return ttl_set_weights_extrinsic( subtensor=self, wallet=wallet, netuid=netuid, From edd1ab4101ccd301b86e5c2b65e5b48fed726d0a Mon Sep 17 00:00:00 2001 From: ifrit98 Date: Thu, 1 Feb 2024 22:18:43 +0000 Subject: [PATCH 19/23] add ttl arg --- bittensor/extrinsics/set_weights.py | 4 ++++ bittensor/subtensor.py | 2 ++ 2 files changed, 6 insertions(+) diff --git a/bittensor/extrinsics/set_weights.py b/bittensor/extrinsics/set_weights.py index 38aaf76be2..c3f3602905 100644 --- a/bittensor/extrinsics/set_weights.py +++ b/bittensor/extrinsics/set_weights.py @@ -41,6 +41,7 @@ def ttl_set_weights_extrinsic( prompt: bool = False, ttl: int = 100, ): + r"""Sets the given weights and values on chain for wallet hotkey account.""" args = ( subtensor.chain_endpoint, wallet, @@ -72,6 +73,7 @@ def set_weights_extrinsic( wait_for_inclusion: bool = False, wait_for_finalization: bool = False, prompt: bool = False, + ttl: int = 100, ) -> bool: r"""Sets the given weights and values on chain for wallet hotkey account. @@ -94,6 +96,8 @@ def set_weights_extrinsic( If set, waits for the extrinsic to be finalized on the chain before returning ``true``, or returns ``false`` if the extrinsic fails to be finalized within the timeout. prompt (bool): If ``true``, the call waits for confirmation from the user before proceeding. + ttl (int): + The time to live of the process. If the process does not complete within this time, it is terminated. Returns: success (bool): Flag is ``true`` if extrinsic was finalized or uncluded in the block. If we did not wait for finalization / inclusion, the response is ``true``. diff --git a/bittensor/subtensor.py b/bittensor/subtensor.py index 599c082d02..8d74697455 100644 --- a/bittensor/subtensor.py +++ b/bittensor/subtensor.py @@ -547,6 +547,7 @@ def set_weights( wait_for_inclusion: bool = False, wait_for_finalization: bool = False, prompt: bool = False, + ttl: int = 100, ) -> bool: """ Sets the inter-neuronal weights for the specified neuron. 
This process involves specifying the @@ -579,6 +580,7 @@ def set_weights( wait_for_inclusion=wait_for_inclusion, wait_for_finalization=wait_for_finalization, prompt=prompt, + ttl=ttl, ) def _do_set_weights( From 492cf4af92c9981fa80773b22886096eb9ee284c Mon Sep 17 00:00:00 2001 From: ifrit98 Date: Fri, 2 Feb 2024 00:01:36 +0000 Subject: [PATCH 20/23] fix failing test --- tests/integration_tests/test_subtensor_integration.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/integration_tests/test_subtensor_integration.py b/tests/integration_tests/test_subtensor_integration.py index ab00e903e5..b4b6e905e5 100644 --- a/tests/integration_tests/test_subtensor_integration.py +++ b/tests/integration_tests/test_subtensor_integration.py @@ -321,6 +321,7 @@ def __init__(self): def process_events(self): return True + self.subtensor.set_weights = MagicMock(return_value=True) self.subtensor._do_set_weights = MagicMock(return_value=(True, None)) success = self.subtensor.set_weights( @@ -334,6 +335,7 @@ def process_events(self): def test_set_weights_inclusion(self): chain_weights = [0] self.subtensor._do_set_weights = MagicMock(return_value=(True, None)) + self.subtensor.set_weights = MagicMock(return_value=True) success = self.subtensor.set_weights( wallet=self.wallet, @@ -349,6 +351,7 @@ def test_set_weights_failed(self): self.subtensor._do_set_weights = MagicMock( return_value=(False, "Mock failure message") ) + self.subtensor.set_weights = MagicMock(return_value=False) fail = self.subtensor.set_weights( wallet=self.wallet, From e850873cc4973b8a1764f254d8d6b488d9cfe899 Mon Sep 17 00:00:00 2001 From: ifrit98 Date: Fri, 2 Feb 2024 00:03:58 +0000 Subject: [PATCH 21/23] remove unused ttl --- bittensor/extrinsics/set_weights.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/bittensor/extrinsics/set_weights.py b/bittensor/extrinsics/set_weights.py index c3f3602905..b93942a677 100644 --- a/bittensor/extrinsics/set_weights.py +++ b/bittensor/extrinsics/set_weights.py @@ -73,7 +73,6 @@ def set_weights_extrinsic( wait_for_inclusion: bool = False, wait_for_finalization: bool = False, prompt: bool = False, - ttl: int = 100, ) -> bool: r"""Sets the given weights and values on chain for wallet hotkey account. @@ -96,8 +95,6 @@ def set_weights_extrinsic( If set, waits for the extrinsic to be finalized on the chain before returning ``true``, or returns ``false`` if the extrinsic fails to be finalized within the timeout. prompt (bool): If ``true``, the call waits for confirmation from the user before proceeding. - ttl (int): - The time to live of the process. If the process does not complete within this time, it is terminated. Returns: success (bool): Flag is ``true`` if extrinsic was finalized or uncluded in the block. If we did not wait for finalization / inclusion, the response is ``true``. From e9aa396e3184d0a6cad41f52f436ce73043bc486 Mon Sep 17 00:00:00 2001 From: ifrit98 Date: Fri, 2 Feb 2024 00:10:25 +0000 Subject: [PATCH 22/23] release subtensor patch version 671 --- VERSION | 2 +- bittensor/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/VERSION b/VERSION index 07a7b03c99..1d42024266 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -6.7.0 \ No newline at end of file +6.7.1 \ No newline at end of file diff --git a/bittensor/__init__.py b/bittensor/__init__.py index f27a85ebd4..71d0dec8dc 100644 --- a/bittensor/__init__.py +++ b/bittensor/__init__.py @@ -27,7 +27,7 @@ nest_asyncio.apply() # Bittensor code and protocol version. 
-__version__ = "6.7.0" +__version__ = "6.7.1" version_split = __version__.split(".") __version_as_int__ = ( From 8b2dde494738f59a5f89abedccacc837ea586815 Mon Sep 17 00:00:00 2001 From: ifrit98 Date: Fri, 2 Feb 2024 00:10:47 +0000 Subject: [PATCH 23/23] release changelogs --- CHANGELOG.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index dc46f70733..e8d6992139 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,25 @@ # Changelog +## 6.7.1 / 2024-02-02 + +## What's Changed +* Release/6.7.0 by @ifrit98 in https://github.com/opentensor/bittensor/pull/1674 +* Eighth (final) docstrings formatting PR by @rajkaramchedu in https://github.com/opentensor/bittensor/pull/1678 +* Sixth docstrings formatting PR by @rajkaramchedu in https://github.com/opentensor/bittensor/pull/1676 +* Seventh docstrings formatting PR by @rajkaramchedu in https://github.com/opentensor/bittensor/pull/1677 +* Update README.md by @unconst in https://github.com/opentensor/bittensor/pull/1679 +* Update README.md by @unconst in https://github.com/opentensor/bittensor/pull/1680 +* black formatting by @ifrit98 in https://github.com/opentensor/bittensor/pull/1685 +* burn -> recycle for public facing code by @ifrit98 in https://github.com/opentensor/bittensor/pull/1681 +* Expands test coverage and coverts python unittest classes to pure pytest by @gus-opentensor in https://github.com/opentensor/bittensor/pull/1686 +* wrap set weights in a ttl multiprocessing call so we don't hang past TTL by @ifrit98 in https://github.com/opentensor/bittensor/pull/1687 + +## New Contributors +* @gus-opentensor made their first contribution in https://github.com/opentensor/bittensor/pull/1686 + +**Full Changelog**: https://github.com/opentensor/bittensor/compare/v6.7.0...v6.7.1 + + ## 6.7.0 / 2024-01-25