@@ -708,6 +708,9 @@ def negative(input: VSA_Model) -> VSA_Model:
 def soft_quantize(input: Tensor):
     """Applies the hyperbolic tanh function to all elements of the input tensor.
 
+    .. warning::
+        This function does not take the VSA model class into account.
+
     Args:
         input (Tensor): input tensor.
 
@@ -717,12 +720,15 @@ def soft_quantize(input: Tensor):
 
     Examples::
 
-        >>> x = functional.random_hv(2, 3)
-        >>> y = functional.bundle(x[0], x[1])
+        >>> x = torchhd.random_hv(2, 6)
+        >>> x
+        tensor([[ 1.,  1., -1.,  1.,  1.,  1.],
+                [ 1., -1., -1., -1.,  1., -1.]])
+        >>> y = torchhd.bundle(x[0], x[1])
         >>> y
-        tensor([0., 2., 0.])
-        >>> functional.soft_quantize(y)
-        tensor([0.0000, 0.9640, 0.0000])
+        tensor([ 2.,  0., -2.,  0.,  2.,  0.])
+        >>> torchhd.soft_quantize(y)
+        tensor([ 0.9640,  0.0000, -0.9640,  0.0000,  0.9640,  0.0000])
 
     """
     return torch.tanh(input)
@@ -731,6 +737,9 @@ def soft_quantize(input: Tensor):
 def hard_quantize(input: Tensor):
     """Applies binary quantization to all elements of the input tensor.
 
+    .. warning::
+        This function does not take the VSA model class into account.
+
     Args:
         input (Tensor): input tensor
 
@@ -740,12 +749,15 @@ def hard_quantize(input: Tensor):
 
     Examples::
 
-        >>> x = functional.random_hv(2, 3)
-        >>> y = functional.bundle(x[0], x[1])
+        >>> x = torchhd.random_hv(2, 6)
+        >>> x
+        tensor([[ 1.,  1., -1.,  1.,  1.,  1.],
+                [ 1., -1., -1., -1.,  1., -1.]])
+        >>> y = torchhd.bundle(x[0], x[1])
         >>> y
-        tensor([ 0., -2., -2.])
-        >>> functional.hard_quantize(y)
-        tensor([ 1., -1., -1.])
+        tensor([ 2.,  0., -2.,  0.,  2.,  0.])
+        >>> torchhd.hard_quantize(y)
+        tensor([ 1., -1., -1., -1.,  1., -1.])
 
     """
     # Make sure that the output tensor has the same dtype and device
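For reference, the updated doctest values can be reproduced with plain PyTorch: bundling these bipolar hypervectors reduces to an elementwise sum, soft_quantize is the elementwise tanh shown in the function body above, and hard_quantize maps the result to +/-1. The sign-style mapping below (zeros sent to -1) is an assumption that matches the doctest output, not necessarily the library's actual implementation, and the elementwise sum stands in for torchhd.bundle. A minimal sketch:

import torch

# The two bipolar hypervectors used in the updated doctests.
x = torch.tensor([[ 1.,  1., -1.,  1.,  1.,  1.],
                  [ 1., -1., -1., -1.,  1., -1.]])

# Stand-in for torchhd.bundle on these vectors: elementwise addition.
y = x[0] + x[1]        # tensor([ 2.,  0., -2.,  0.,  2.,  0.])

# soft_quantize: elementwise tanh, as in the diff above.
soft = torch.tanh(y)   # tensor([ 0.9640,  0.0000, -0.9640,  0.0000,  0.9640,  0.0000])

# hard_quantize: assumed sign-style quantization, zeros mapped to -1,
# consistent with the doctest output.
hard = torch.where(y > 0, torch.tensor(1.0), torch.tensor(-1.0))
# tensor([ 1., -1., -1., -1.,  1., -1.])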