From ec30b044d6b640355f0d406050f25003b077a3c0 Mon Sep 17 00:00:00 2001
From: Alexander Henkes <62153181+ahenkes1@users.noreply.github.com>
Date: Thu, 8 Jun 2023 15:38:21 +0200
Subject: [PATCH 01/18] Update leaky.py

Fixed a missing indentation, which caused a strange line break in the
documentation.
---
 snntorch/_neurons/leaky.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/snntorch/_neurons/leaky.py b/snntorch/_neurons/leaky.py
index f928aa71..5154ed66 100644
--- a/snntorch/_neurons/leaky.py
+++ b/snntorch/_neurons/leaky.py
@@ -73,8 +73,8 @@ def forward(self, x, mem1, spk1, mem2):
         optional
     :param surrogate_disable: Disables surrogate gradients regardless of
-    `spike_grad` argument. Useful for ONNX compatibility. Defaults
-    to False
+        `spike_grad` argument. Useful for ONNX compatibility. Defaults
+        to False
     :type surrogate_disable: bool, Optional

     :param init_hidden: Instantiates state variables as instance variables.

From e948203d6454ccdd65341aaf74020e70c5598810 Mon Sep 17 00:00:00 2001
From: Alexander Henkes <62153181+ahenkes1@users.noreply.github.com>
Date: Fri, 9 Jun 2023 15:57:42 +0200
Subject: [PATCH 02/18] Update leaky.py

Added the options graded_spikes_factor and learn_graded_spikes_factor.
I had to import torch to cast the float to a tensor. Feel free to discuss
this.
---
 snntorch/_neurons/leaky.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/snntorch/_neurons/leaky.py b/snntorch/_neurons/leaky.py
index 5154ed66..3a03b69e 100644
--- a/snntorch/_neurons/leaky.py
+++ b/snntorch/_neurons/leaky.py
@@ -1,5 +1,5 @@
 from .neurons import _SpikeTensor, _SpikeTorchConv, LIF
-
+import torch

 class Leaky(LIF):
     """
@@ -139,6 +139,8 @@ def __init__(
         reset_mechanism="subtract",
         state_quant=False,
         output=False,
+        graded_spikes_factor=1.0,
+        learn_graded_spikes_factor=False,
     ):
         super(Leaky, self).__init__(
             beta,
@@ -152,7 +154,12 @@ def __init__(
             reset_mechanism,
             state_quant,
             output,
+            graded_spikes_factor=1.0,
+            learn_graded_spikes_factor=False,
         )
+
+        self.graded_spikes_factor = torch.as_tensor(graded_spikes_factor)
+        self.learn_graded_spikes_factor = learn_graded_spikes_factor

         if self.init_hidden:
             self.mem = self.init_leaky()

From cee6c2482562a22cddd8ba4fdeffce86e5e2cbd7 Mon Sep 17 00:00:00 2001
From: Alexander Henkes
Date: Tue, 13 Jun 2023 19:07:13 +0200
Subject: [PATCH 03/18] Corrected the registration of 'graded_spikes_factor'.

It was previously overwritten; now it is correctly passed to the parent
class via super().
---
 snntorch/_neurons/leaky.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/snntorch/_neurons/leaky.py b/snntorch/_neurons/leaky.py
index 3a03b69e..48f8ae4b 100644
--- a/snntorch/_neurons/leaky.py
+++ b/snntorch/_neurons/leaky.py
@@ -154,12 +154,9 @@ def __init__(
             reset_mechanism,
             state_quant,
             output,
-            graded_spikes_factor=1.0,
-            learn_graded_spikes_factor=False,
+            graded_spikes_factor,
+            learn_graded_spikes_factor,
         )
-
-        self.graded_spikes_factor = torch.as_tensor(graded_spikes_factor)
-        self.learn_graded_spikes_factor = learn_graded_spikes_factor

         if self.init_hidden:
             self.mem = self.init_leaky()

From 185e74ca8e19fbe31c20593bacf5593fbda78589 Mon Sep 17 00:00:00 2001
From: Alexander Henkes
Date: Tue, 13 Jun 2023 19:22:31 +0200
Subject: [PATCH 04/18] Added a new test to check the functionality of
 learnable graded spikes.
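
A minimal usage sketch of the new arguments (hypothetical values; assumes
the graded_spikes_factor and learn_graded_spikes_factor keywords introduced
in the preceding patches, and that the parent class scales the output
spikes by this factor)::

    import torch
    import snntorch as snn

    # graded spikes: output spikes are scaled by a factor rather than being
    # strictly binary; the factor can optionally be registered as learnable
    lif = snn.Leaky(
        beta=0.9, graded_spikes_factor=2.0, learn_graded_spikes_factor=True
    )

    mem = lif.init_leaky()
    spk, mem = lif(torch.rand(1, 10), mem)

    # with learn_graded_spikes_factor=True, the factor participates in autograd
    print(lif.graded_spikes_factor.requires_grad)  # True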
---
 tests/test_snntorch/test_leaky.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/tests/test_snntorch/test_leaky.py b/tests/test_snntorch/test_leaky.py
index cb33fa98..a04ca744 100644
--- a/tests/test_snntorch/test_leaky.py
+++ b/tests/test_snntorch/test_leaky.py
@@ -42,6 +42,13 @@ def leaky_hidden_reset_none_instance():
     return snn.Leaky(beta=0.5, init_hidden=True, reset_mechanism="none")


+@pytest.fixture(scope="module")
+def leaky_hidden_learn_graded_instance():
+    return snn.Leaky(
+        beta=0.5, init_hidden=True, learn_graded_spikes_factor=True
+    )
+
+
 class TestLeaky:
     def test_leaky(self, leaky_instance, input_):
         mem = leaky_instance.init_leaky()
@@ -117,3 +124,10 @@ def test_leaky_init_hidden_reset_none(
     def test_leaky_cases(self, leaky_hidden_instance, input_):
         with pytest.raises(TypeError):
             leaky_hidden_instance(input_, input_)
+
+    def test_leaky_hidden_learn_graded_instance(
+        self, leaky_hidden_learn_graded_instance
+    ):
+        factor = leaky_hidden_learn_graded_instance.graded_spikes_factor
+
+        assert factor.requires_grad

From 097ed7061ff326664026c6a411fe50952b6956ed Mon Sep 17 00:00:00 2001
From: gianfa
Date: Sun, 2 Jul 2023 16:49:02 +0200
Subject: [PATCH 05/18] feat(neuron, synaptic): add missing args in example

The example was not running because it was missing some args (num_inputs,
num_hidden, num_outputs); thus here I just add them.
---
 snntorch/_neurons/synaptic.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/snntorch/_neurons/synaptic.py b/snntorch/_neurons/synaptic.py
index fe778448..b48aa339 100644
--- a/snntorch/_neurons/synaptic.py
+++ b/snntorch/_neurons/synaptic.py
@@ -49,7 +49,7 @@ class Synaptic(LIF):

         # Define Network
         class Net(nn.Module):
-            def __init__(self):
+            def __init__(self, num_inputs, num_hidden, num_outputs):
                 super().__init__()

                 # initialize layers

From 5bb554e9126d9790174d3339437423271594ae4f Mon Sep 17 00:00:00 2001
From: Timo Klein <36668911+timoklein@users.noreply.github.com>
Date: Wed, 5 Jul 2023 13:11:25 +0200
Subject: [PATCH 06/18] Fix path

---
 examples/tutorial_regression_2.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/examples/tutorial_regression_2.ipynb b/examples/tutorial_regression_2.ipynb
index 0753e3a2..fa84e58d 100644
--- a/examples/tutorial_regression_2.ipynb
+++ b/examples/tutorial_regression_2.ipynb
@@ -455,7 +455,7 @@
    "outputs": [],
    "source": [
     "batch_size = 128\n",
-    "data_path='/data/mnist'\n",
+    "data_path='data/mnist'\n",
     "\n",
     "# Define a transform\n",
     "transform = transforms.Compose([\n",

From 96ed5af3f8330fc1b47ad387f76c051c5309a6a3 Mon Sep 17 00:00:00 2001
From: jeshraghian
Date: Tue, 11 Jul 2023 13:20:30 -0700
Subject: [PATCH 07/18] enable NIR compatibility

---
 README.rst        | 2 ++
 setup.py          | 2 ++
 snntorch/utils.py | 1 +
 3 files changed, 5 insertions(+)

diff --git a/README.rst b/README.rst
index 562ab8de..f297bba2 100644
--- a/README.rst
+++ b/README.rst
@@ -104,6 +104,8 @@ The following packages need to be installed to use snnTorch:
 * pandas
 * matplotlib
 * math
+* nir
+* nirtorch

 They are automatically installed if snnTorch is installed using the pip command. Ensure the correct version of torch is installed for your system to enable CUDA compatibility.
diff --git a/setup.py b/setup.py
index 0c81ff6c..cb4a5c1b 100644
--- a/setup.py
+++ b/setup.py
@@ -19,6 +19,8 @@
     "pandas",
     "matplotlib",
     "numpy>=1.17",
+    "nir",
+    "nirtorch",
 ]

diff --git a/snntorch/utils.py b/snntorch/utils.py
index cd12cc00..7d2c35bc 100644
--- a/snntorch/utils.py
+++ b/snntorch/utils.py
@@ -249,3 +249,4 @@ def _final_layer_check(net):
         return 4
     else:  # if not from snn, assume from nn with 1 return
         return 1
+
\ No newline at end of file

From bb18ef6c4ab4369a57378d3514b8ca914c95db89 Mon Sep 17 00:00:00 2001
From: jeshraghian
Date: Tue, 11 Jul 2023 13:21:46 -0700
Subject: [PATCH 08/18] export functions for NIR compatibility

---
 snntorch/export.py | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 create mode 100644 snntorch/export.py

diff --git a/snntorch/export.py b/snntorch/export.py
new file mode 100644
index 00000000..e69de29b

From c124c3081a51c6b12a86abc08088bfb479e7a7b0 Mon Sep 17 00:00:00 2001
From: jeshraghian
Date: Tue, 11 Jul 2023 14:56:55 -0700
Subject: [PATCH 09/18] snntorch model to nir conversion implemented

---
 snntorch/export.py | 36 ++++++++++++++++++++++++++++++++++++
 1 file changed, 36 insertions(+)

diff --git a/snntorch/export.py b/snntorch/export.py
index e69de29b..94064767 100644
--- a/snntorch/export.py
+++ b/snntorch/export.py
@@ -0,0 +1,36 @@
+from typing import Union, Optional
+from numbers import Number
+
+import torch
+import nir
+from nirtorch import extract_nir_graph
+
+from snntorch import Leaky
+
+# eqn is assumed to be: v_t+1 = (1-1/tau)*v_t + 1/tau * v_leak + I_in / C
+def _extract_snntorch_module(module:torch.nn.Module) -> Optional[nir.NIRNode]:
+    if isinstance(module, Leaky):
+        return nir.LIF(
+            tau = -1 / (module.beta + 1).detach(),
+            v_threshold = module.threshold.detach(),
+            v_leak = torch.zeros_like(module.beta),
+            r = -1 / (module.beta + 1).detach(),
+        )
+
+    elif isinstance(module, torch.nn.Linear):
+        if module.bias is None: # Add zero bias if none is present
+            return nir.Affine(
+                module.weight.detach(), torch.zeros(*module.weight.shape[:-1])
+            )
+        else:
+            return nir.Affine(module.weight.detach(), module.bias.detach())
+
+    return None
+
+
+def to_nir(
+    module: torch.nn.Module, sample_data: torch.Tensor, model_name: str = "snntorch"
+) -> nir.NIRNode:
+    return extract_nir_graph(
+        module, _extract_snntorch_module, sample_data, model_name=model_name
+    )
\ No newline at end of file

From 6ff79496d0653197a1fe51ea2204b3dfb75b733f Mon Sep 17 00:00:00 2001
From: jeshraghian
Date: Wed, 12 Jul 2023 10:39:44 -0700
Subject: [PATCH 10/18] add docstrings to NIR export function

---
 snntorch/export.py | 41 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 41 insertions(+)

diff --git a/snntorch/export.py b/snntorch/export.py
index 94064767..1464cadc 100644
--- a/snntorch/export.py
+++ b/snntorch/export.py
@@ -31,6 +31,47 @@ def _extract_snntorch_module(module:torch.nn.Module) -> Optional[nir.NIRNode]:
 def to_nir(
     module: torch.nn.Module, sample_data: torch.Tensor, model_name: str = "snntorch"
 ) -> nir.NIRNode:
+    """Convert an snnTorch model to the Neuromorphic Intermediate Representation (NIR).
+
+    Example::
+
+        import torch, torch.nn as nn
+        import snntorch as snn
+        from snntorch import export
+
+        data_path = "untrained-snntorch.pt"
+
+        net = nn.Sequential(nn.Linear(784, 128),
+                            snn.Leaky(beta=0.8, init_hidden=True),
+                            nn.Linear(128, 10),
+                            snn.Leaky(beta=0.8, init_hidden=True, output=True))
+
+        # save model in pt format
+        torch.save(net.state_dict(), data_path)
+
+        # load model (does nothing here, but shown for completeness)
+        net.load_state_dict(torch.load(data_path))
+
+        # generate input tensor to dynamically construct graph
+        x = torch.zeros(784)
+
+        # generate NIR graph
+        nir_net = export.to_nir(net, x)
+
+
+    :param module: a saved snnTorch model as a parameter dictionary
+    :type module: torch.nn.Module
+
+    :param sample_data: sample input data to the model
+    :type sample_data: torch.Tensor
+
+    :param model_name: name of library used to train model, default: "snntorch"
+    :param model_name: str, optional
+
+    :return: NIR computational graph where torch modules are represented as NIR nodes
+    :rtype: NIRGraph
+
+    """
     return extract_nir_graph(
         module, _extract_snntorch_module, sample_data, model_name=model_name
     )
\ No newline at end of file

From 91b4e90f1922875afad178884714b99b1c2171ec Mon Sep 17 00:00:00 2001
From: jeshraghian
Date: Wed, 12 Jul 2023 13:03:52 -0700
Subject: [PATCH 11/18] update docs: snntorch.export in index

---
 README.rst     | 2 ++
 docs/index.rst | 1 +
 2 files changed, 3 insertions(+)

diff --git a/README.rst b/README.rst
index f297bba2..14c9253a 100644
--- a/README.rst
+++ b/README.rst
@@ -54,6 +54,8 @@ snnTorch contains the following components:
      - Description
    * - `snntorch `_
      - a spiking neuron library like torch.nn, deeply integrated with autograd
+   * - `snntorch `
+     - export snntorch models to a `Neuromorphic Intermediate Representation (NIR) `_
    * - `snntorch.functional `_
      - common arithmetic operations on spikes, e.g., loss, regularization etc.
    * - `snntorch.spikegen `_

diff --git a/docs/index.rst b/docs/index.rst
index 9be2cdd0..1d4a5d8d 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -12,6 +12,7 @@ snnTorch Documentation
    installation
    snntorch
    snntorch.backprop
+   snntorch.export
    snntorch.functional
    snntorch.spikegen
    snntorch.spikeplot

From dd4f776f4ab380c6c7094e19926b5e8b8fee7d01 Mon Sep 17 00:00:00 2001
From: jeshraghian
Date: Wed, 12 Jul 2023 13:15:51 -0700
Subject: [PATCH 12/18] module table url fix

---
 README.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.rst b/README.rst
index 14c9253a..94388325 100644
--- a/README.rst
+++ b/README.rst
@@ -54,7 +54,7 @@ snnTorch contains the following components:
      - Description
    * - `snntorch `_
      - a spiking neuron library like torch.nn, deeply integrated with autograd
-   * - `snntorch `
+   * - `snntorch `_
      - export snntorch models to a `Neuromorphic Intermediate Representation (NIR) `_
    * - `snntorch.functional `_
      - common arithmetic operations on spikes, e.g., loss, regularization etc.
From 4ba8b9aa14e4dc6e99b95dd4c33a72ea581e9151 Mon Sep 17 00:00:00 2001
From: jeshraghian
Date: Wed, 12 Jul 2023 13:17:16 -0700
Subject: [PATCH 13/18] module table description update

---
 README.rst | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/README.rst b/README.rst
index 94388325..2f8a3f9d 100644
--- a/README.rst
+++ b/README.rst
@@ -54,8 +54,9 @@ snnTorch contains the following components:
      - Description
    * - `snntorch `_
      - a spiking neuron library like torch.nn, deeply integrated with autograd
-   * - `snntorch `_
-     - export snntorch models to a `Neuromorphic Intermediate Representation (NIR) `_
+   * - `snntorch.export `_
+     - cross-compatibility with other SNN libraries by converting snntorch models
+       to a `Neuromorphic Intermediate Representation (NIR) `_
    * - `snntorch.functional `_
      - common arithmetic operations on spikes, e.g., loss, regularization etc.
    * - `snntorch.spikegen `_

From bcb3ce2be6e1e169e317e9139102b845fdda18aa Mon Sep 17 00:00:00 2001
From: jeshraghian
Date: Wed, 12 Jul 2023 13:28:00 -0700
Subject: [PATCH 14/18] export docs fix

---
 README.rst            | 3 +--
 docs/index.rst        | 1 -
 docs/requirements.txt | 4 +++-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/README.rst b/README.rst
index 2f8a3f9d..e8f4c1de 100644
--- a/README.rst
+++ b/README.rst
@@ -55,8 +55,7 @@ snnTorch contains the following components:
    * - `snntorch `_
      - a spiking neuron library like torch.nn, deeply integrated with autograd
    * - `snntorch.export `_
-     - cross-compatibility with other SNN libraries by converting snntorch models
-       to a `Neuromorphic Intermediate Representation (NIR) `_
+     - cross-compatibility with other SNN libraries by converting snntorch models to a `Neuromorphic Intermediate Representation (NIR) `_
    * - `snntorch.functional `_
      - common arithmetic operations on spikes, e.g., loss, regularization etc.
    * - `snntorch.spikegen `_

diff --git a/docs/index.rst b/docs/index.rst
index 1d4a5d8d..8252e685 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -11,7 +11,6 @@ snnTorch Documentation
    readme
    installation
    snntorch
-   snntorch.backprop
    snntorch.export
    snntorch.functional
    snntorch.spikegen

diff --git a/docs/requirements.txt b/docs/requirements.txt
index 9f6f4cb4..5db9afc0 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -5,4 +5,6 @@ h5py>=3.0.0
 matplotlib
 celluloid
 numpy>=1.17
-tqdm
\ No newline at end of file
+tqdm
+nir
+nirtorch
\ No newline at end of file

From 9538566686cde9730116c2d3ba929b6e6e3c6513 Mon Sep 17 00:00:00 2001
From: jeshraghian
Date: Wed, 12 Jul 2023 13:34:51 -0700
Subject: [PATCH 15/18] table and snntorch.exports index docs fix

---
 README.rst               | 2 +-
 docs/snntorch.export.rst | 8 ++++++++
 2 files changed, 9 insertions(+), 1 deletion(-)
 create mode 100644 docs/snntorch.export.rst

diff --git a/README.rst b/README.rst
index e8f4c1de..31ca4326 100644
--- a/README.rst
+++ b/README.rst
@@ -55,7 +55,7 @@ snnTorch contains the following components:
    * - `snntorch `_
      - a spiking neuron library like torch.nn, deeply integrated with autograd
    * - `snntorch.export `_
-     - cross-compatibility with other SNN libraries by converting snntorch models to a `Neuromorphic Intermediate Representation (NIR) `_
+     - enables cross-compatibility with other SNN libraries via `NIR `_
    * - `snntorch.functional `_
      - common arithmetic operations on spikes, e.g., loss, regularization etc.
   * - `snntorch.spikegen `_

diff --git a/docs/snntorch.export.rst b/docs/snntorch.export.rst
new file mode 100644
index 00000000..1ea9459a
--- /dev/null
+++ b/docs/snntorch.export.rst
@@ -0,0 +1,8 @@
+snntorch.export
+------------------------
+:mod:`snntorch.export` is a module that enables cross-compatibility with other SNN libraries by converting snntorch models to a `Neuromorphic Intermediate Representation (NIR) `_
+
+.. automodule:: snntorch.export
+   :members:
+   :undoc-members:
+   :show-inheritance:
\ No newline at end of file

From ce9bf16f01eadf4a04397bf2ded98e95ec51e18f Mon Sep 17 00:00:00 2001
From: jeshraghian
Date: Wed, 12 Jul 2023 13:45:32 -0700
Subject: [PATCH 16/18] export docstring fix

---
 snntorch/export.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/snntorch/export.py b/snntorch/export.py
index 1464cadc..3df0c701 100644
--- a/snntorch/export.py
+++ b/snntorch/export.py
@@ -66,7 +66,7 @@ def to_nir(
     :type sample_data: torch.Tensor

     :param model_name: name of library used to train model, default: "snntorch"
-    :param model_name: str, optional
+    :type model_name: str, optional

     :return: NIR computational graph where torch modules are represented as NIR nodes
     :rtype: NIRGraph

From d34a9946cfdd1b2cc1c397d73e295bbdd4a085d5 Mon Sep 17 00:00:00 2001
From: jeshraghian
Date: Wed, 12 Jul 2023 13:47:50 -0700
Subject: Bump version: 0.6.4 → 0.7.0
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 _version.py          | 2 +-
 docs/conf.py         | 2 +-
 setup.cfg            | 2 +-
 setup.py             | 2 +-
 snntorch/_version.py | 2 +-
 5 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/_version.py b/_version.py
index 66c57288..46d9bfe6 100644
--- a/_version.py
+++ b/_version.py
@@ -1,2 +1,2 @@
 # fmt: off
-__version__ = '0.6.4'
+__version__ = '0.7.0'

diff --git a/docs/conf.py b/docs/conf.py
index a93e8752..a404c254 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -6,7 +6,7 @@


 # fmt: off
-__version__ = '0.6.4'
+__version__ = '0.7.0'

 # fmt: on

diff --git a/setup.cfg b/setup.cfg
index 566d02b6..912e11e7 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 0.6.4
+current_version = 0.7.0
 commit = True
 tag = True

diff --git a/setup.py b/setup.py
index cb4a5c1b..2d552653 100644
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,7 @@
 #     history = history_file.read()

 # fmt: off
-__version__ = '0.6.4'
+__version__ = '0.7.0'
 # fmt: on

 requirements = [

diff --git a/snntorch/_version.py b/snntorch/_version.py
index 66c57288..46d9bfe6 100644
--- a/snntorch/_version.py
+++ b/snntorch/_version.py
@@ -1,2 +1,2 @@
 # fmt: off
-__version__ = '0.6.4'
+__version__ = '0.7.0'

From db858929a20880285bedace392a534f8a4852d85 Mon Sep 17 00:00:00 2001
From: Peng Zhou
Date: Wed, 12 Jul 2023 18:15:42 -0600
Subject: [PATCH 18/18] add Synaptic support for CubaLIF, update tau/r

---
 snntorch/export.py | 29 +++++++++++++++++++----------
 1 file changed, 19 insertions(+), 10 deletions(-)

diff --git a/snntorch/export.py b/snntorch/export.py
index 3df0c701..b9cb94ce 100644
--- a/snntorch/export.py
+++ b/snntorch/export.py
@@ -5,18 +5,27 @@
 import nir
 from nirtorch import extract_nir_graph

-from snntorch import Leaky
+from snntorch import Leaky, Synaptic

-# eqn is assumed to be: v_t+1 = (1-1/tau)*v_t + 1/tau * v_leak + I_in / C
+# eqn is assumed to be: v_t+1 = (1-1/tau)*v_t + 1/tau * v_leak + I_in / C
 def _extract_snntorch_module(module:torch.nn.Module) -> Optional[nir.NIRNode]:
     if isinstance(module, Leaky):
         return nir.LIF(
-            tau = -1 / (module.beta + 1).detach(),
+            tau = 1 / (1 - module.beta).detach(),
             v_threshold = module.threshold.detach(),
             v_leak = torch.zeros_like(module.beta),
-            r = -1 / (module.beta + 1).detach(),
+            r = module.beta.detach(),
         )
-
+
+    if isinstance(module, Synaptic):
+        return nir.CubaLIF(
+            tau_syn = 1 / (1 - module.beta).detach(),
+            tau_mem = 1 / (1 - module.alpha).detach(),
+            v_threshold = module.threshold.detach(),
+            v_leak = torch.zeros_like(module.beta),
+            r = module.beta.detach(),
+        )
+
     elif isinstance(module, torch.nn.Linear):
         if module.bias is None: # Add zero bias if none is present
             return nir.Affine(
                 module.weight.detach(), torch.zeros(*module.weight.shape[:-1])
@@ -32,7 +41,7 @@ def to_nir(
     module: torch.nn.Module, sample_data: torch.Tensor, model_name: str = "snntorch"
 ) -> nir.NIRNode:
     """Convert an snnTorch model to the Neuromorphic Intermediate Representation (NIR).
-
+
     Example::

         import torch, torch.nn as nn
@@ -45,7 +54,7 @@ def to_nir(
                             snn.Leaky(beta=0.8, init_hidden=True),
                             nn.Linear(128, 10),
                             snn.Leaky(beta=0.8, init_hidden=True, output=True))
-
+
         # save model in pt format
         torch.save(net.state_dict(), data_path)

@@ -58,7 +67,7 @@ def to_nir(
         # generate NIR graph
         nir_net = export.to_nir(net, x)
-
+

     :param module: a saved snnTorch model as a parameter dictionary
     :type module: torch.nn.Module

@@ -70,8 +79,8 @@ def to_nir(
     :return: NIR computational graph where torch modules are represented as NIR nodes
     :rtype: NIRGraph
-
+
     """
     return extract_nir_graph(
         module, _extract_snntorch_module, sample_data, model_name=model_name
-    )
\ No newline at end of file
+    )
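
A minimal end-to-end sketch of the export path after this patch (hypothetical
layer sizes and decay values; assumes the to_nir signature above, snnTorch's
standard Leaky and Synaptic constructors, and that the nir and nirtorch
packages are installed)::

    import torch
    import torch.nn as nn
    import snntorch as snn
    from snntorch import export

    # a small network mixing first- and second-order spiking neurons
    net = nn.Sequential(
        nn.Linear(784, 128),
        snn.Synaptic(alpha=0.9, beta=0.8, init_hidden=True),
        nn.Linear(128, 10),
        snn.Leaky(beta=0.8, init_hidden=True, output=True),
    )

    # the sample input is only used to trace the computational graph
    x = torch.zeros(784)

    # Linear layers map to nir.Affine, Leaky to nir.LIF, and Synaptic
    # (per this patch) to nir.CubaLIF
    nir_graph = export.to_nir(net, x)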