
Commit a450d03

Author: hhsecond
Message: moved out model utilities
Parent: 6d4174c

File tree: 4 files changed (+10 -35 lines)

  example.py
  redisai/__init__.py
  redisai/client.py
  test-requirements.txt

example.py

Lines changed: 6 additions & 6 deletions
@@ -1,5 +1,5 @@
 from __future__ import print_function
-from redisai import Client, Tensor, ScalarTensor, \
+from redisai import Client, Tensor, \
     BlobTensor, DType, Device, Backend
 from redisai import model as raimodel
 
@@ -9,8 +9,8 @@
 print(t.value)
 
 model = raimodel.Model.load('../RedisAI/examples/models/graph.pb')
-client.tensorset('a', ScalarTensor(DType.float, 2, 3))
-client.tensorset('b', ScalarTensor(DType.float, 12, 10))
+client.tensorset('a', Tensor.scalar(DType.float, 2, 3))
+client.tensorset('b', Tensor.scalar(DType.float, 12, 10))
 client.modelset('m', Backend.tf,
                 Device.cpu,
                 input=['a', 'b'],
@@ -24,8 +24,8 @@
 client.scriptset('ket', Device.cpu, script)
 client.scriptrun('ket', 'bar', input=['a', 'b'], output='c')
 
-b1 = client.tensorget('c', astype=BlobTensor)
-b2 = client.tensorget('c', astype=BlobTensor)
+b1 = client.tensorget('c', as_type=BlobTensor)
+b2 = client.tensorget('c', as_type=BlobTensor)
 bt = BlobTensor(DType.float, b1.shape, b1, b2)
 
 print(len(bytes(bt.blob)))
@@ -34,4 +34,4 @@
 client.tensorset('d', BlobTensor(DType.float, b1.shape, b1, b2))
 
 tnp = b1.to_numpy()
-client.tensorset('e', tnp)
+client.tensorset('e', tnp)
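
For reference, a minimal usage sketch of the renamed API surface this diff introduces, assuming a RedisAI server on the default localhost:6379; Tensor.scalar and the as_type keyword are taken from the updated example above.

from redisai import Client, Tensor, BlobTensor, DType

# Minimal sketch, assuming a RedisAI server on the default localhost:6379.
client = Client()

# Scalar tensors are now built through the Tensor.scalar factory
# instead of the removed ScalarTensor class.
client.tensorset('a', Tensor.scalar(DType.float, 2, 3))

# Reading back as a blob now uses the as_type keyword (was astype).
b1 = client.tensorget('a', as_type=BlobTensor)
print(b1.to_numpy())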

redisai/__init__.py

Lines changed: 2 additions & 22 deletions
@@ -1,22 +1,2 @@
-from .version import __version__
-from .client import (Client, Tensor, BlobTensor, DType, Device, Backend)
-
-
-def save_model(*args, **kwargs):
-    """
-    Importing inside to avoid loading the TF/PyTorch/ONNX
-    into the scope unnecessary. This function wraps the
-    internal save model utility to make it user friendly
-    """
-    from .model import Model
-    Model.save(*args, **kwargs)
-
-
-def load_model(*args, **kwargs):
-    """
-    Importing inside to avoid loading the TF/PyTorch/ONNX
-    into the scope unnecessary. This function wraps the
-    internal load model utility to make it user friendly
-    """
-    from .model import Model
-    return Model.load(*args, **kwargs)
+from .version import __version__  # noqa
+from .client import (Client, Tensor, BlobTensor, DType, Device, Backend)  # noqa
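
With the top-level save_model/load_model wrappers removed from the package root, model (de)serialization is expected to go through redisai.model directly, as the updated example.py above does. A minimal sketch; the graph.pb path is the one used in the example and only illustrative here.

from redisai import model as raimodel

# Load a serialized TF graph from disk; this is what the removed
# load_model wrapper delegated to internally.
model_blob = raimodel.Model.load('../RedisAI/examples/models/graph.pb')

# Saving goes through Model.save the same way; arguments are omitted here,
# since the removed wrapper only forwarded *args/**kwargs to it.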

redisai/client.py

Lines changed: 2 additions & 2 deletions
@@ -9,7 +9,7 @@
 np = None
 
 try:
-    from typing import Union, Any, AnyStr, ByteString, Collection, Type
+    from typing import Union, Any, AnyStr, ByteString, Collection, Type  # noqa
 except ImportError:
     pass
 
@@ -165,7 +165,7 @@ def _to_numpy_type(t):
         }
         if t in mm:
             return mm[t]
-        return t
+        return t.lower()
 
     @classmethod
     def from_resp(cls, dtype, shape, value):
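
The second hunk makes _to_numpy_type fall back to a lowercased dtype string instead of returning the RESP reply verbatim, so dtypes missing from the explicit mapping still resolve to valid numpy names. A standalone sketch of that fallback; the mapping entries shown are illustrative, not copied from client.py.

import numpy as np

def to_numpy_type(t):
    # Illustrative re-creation of the fallback: map the dtype strings that
    # need renaming, otherwise lowercase the reply so numpy accepts it
    # (e.g. 'INT64' -> 'int64') rather than returning 'INT64' unchanged.
    mm = {
        'FLOAT': 'float32',
        'DOUBLE': 'float64',
    }
    if t in mm:
        return mm[t]
    return t.lower()

print(np.dtype(to_numpy_type('FLOAT')))   # float32
print(np.dtype(to_numpy_type('INT64')))   # int64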

test-requirements.txt

Lines changed: 0 additions & 5 deletions
@@ -1,6 +1 @@
 numpy
-torch
-tensorflow
-onnx
-skl2onnx
-pandas
