55 changes: 39 additions & 16 deletions tiledb/dense_array.py
@@ -447,17 +447,20 @@ def _setitem_impl(self, selection, val, nullmaps: dict):

if isinstance(val, dict):
# Create dictionary of label names and values
labels = {
name: (
data
if not isinstance(data, np.ndarray) or data.dtype == np.dtype("O")
else np.ascontiguousarray(
data, dtype=self.schema.dim_label(name).dtype
)
)
for name, data in val.items()
if self.schema.has_dim_label(name)
}
labels = {}
for name, data in val.items():
if self.schema.has_dim_label(name):
if not isinstance(data, np.ndarray) or data.dtype == np.dtype("O"):
labels[name] = data
else:
target_dtype = self.schema.dim_label(name).dtype
# Avoid unnecessary copy if already C-contiguous and correct dtype
if data.flags.c_contiguous and data.dtype == target_dtype:
labels[name] = data
else:
labels[name] = np.ascontiguousarray(
data, dtype=target_dtype
)

# Create list of attribute names and values
for attr_idx in range(self.schema.nattr):
@@ -477,7 +480,11 @@ def _setitem_impl(self, selection, val, nullmaps: dict):
nullmaps[name] = np.array(
[int(v is not None) for v in attr_val], dtype=np.uint8
)
attr_val = np.ascontiguousarray(attr_val, dtype=attr.dtype)
# Avoid unnecessary copy if already C-contiguous and correct dtype
if not (
attr_val.flags.c_contiguous and attr_val.dtype == attr.dtype
):
attr_val = np.ascontiguousarray(attr_val, dtype=attr.dtype)

try:
if attr.isvar:
@@ -516,7 +523,11 @@ def _setitem_impl(self, selection, val, nullmaps: dict):
attr_val = np.array(
[0 if v is None else v for v in attr_val]
)
attr_val = np.ascontiguousarray(attr_val, dtype=attr.dtype)
# Avoid unnecessary copy if already C-contiguous and correct dtype
if not (
attr_val.flags.c_contiguous and attr_val.dtype == attr.dtype
):
attr_val = np.ascontiguousarray(attr_val, dtype=attr.dtype)
except Exception as exc:
raise ValueError(
f"NumPy array conversion check failed for attr '{name}'"
@@ -537,7 +548,9 @@ def _setitem_impl(self, selection, val, nullmaps: dict):
attributes.append(attr._internal_name)
# object arrays are var-len and handled later
if isinstance(val, np.ndarray) and val.dtype != np.dtype("O"):
val = np.ascontiguousarray(val, dtype=attr.dtype)
# Avoid unnecessary copy if already C-contiguous and correct dtype
if not (val.flags.c_contiguous and val.dtype == attr.dtype):
val = np.ascontiguousarray(val, dtype=attr.dtype)
try:
if attr.isvar:
# ensure that the value is array-convertible, for example: pandas.Series
@@ -559,7 +572,9 @@ def _setitem_impl(self, selection, val, nullmaps: dict):
if attr.isnullable and name not in nullmaps:
nullmaps[name] = ~np.ma.masked_invalid(val).mask
val = np.nan_to_num(val)
val = np.ascontiguousarray(val, dtype=attr.dtype)
# Avoid unnecessary copy if already C-contiguous and correct dtype
if not (val.flags.c_contiguous and val.dtype == attr.dtype):
val = np.ascontiguousarray(val, dtype=attr.dtype)
except Exception as exc:
raise ValueError(
f"NumPy array conversion check failed for attr '{name}'"
@@ -581,7 +596,15 @@ def _setitem_impl(self, selection, val, nullmaps: dict):
self.uri, "r", ctx=tiledb.Ctx(self.ctx.config())
) as readable:
current = readable[selection]
current[self.view_attr] = np.ascontiguousarray(val, dtype=dtype)
# Avoid unnecessary copy if already C-contiguous and correct dtype
if (
isinstance(val, np.ndarray)
and val.flags.c_contiguous
and val.dtype == dtype
):
current[self.view_attr] = val
else:
current[self.view_attr] = np.ascontiguousarray(val, dtype=dtype)
# `current` is an OrderedDict
attributes.extend(current.keys())
values.extend(current.values())
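Every hunk above applies the same fast path: skip `np.ascontiguousarray` when the buffer is already C-contiguous with the target dtype. A minimal standalone sketch of that check, using a hypothetical helper name (`as_contiguous` is not part of this PR) and plain NumPy inputs:

```python
import numpy as np


def as_contiguous(arr, dtype):
    """Return `arr` unchanged when it is already C-contiguous with `dtype`;
    otherwise fall back to a converting copy (hypothetical helper)."""
    if isinstance(arr, np.ndarray) and arr.flags.c_contiguous and arr.dtype == dtype:
        return arr
    return np.ascontiguousarray(arr, dtype=dtype)


a = np.arange(6, dtype=np.int64)
assert as_contiguous(a, np.int64) is a      # fast path: same object, no copy
b = as_contiguous(a[::2], np.int64)         # strided view is not C-contiguous
assert b is not a and b.flags.c_contiguous  # fallback path: converted copy
```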
6 changes: 5 additions & 1 deletion tiledb/highlevel.py
@@ -145,7 +145,11 @@ def from_numpy(uri, array, config=None, ctx=None, **kwargs):
if array.dtype == object:
arr[:] = array
else:
arr.write_direct(np.ascontiguousarray(array), **kwargs)
# Avoid unnecessary copy if already C-contiguous
if array.flags.c_contiguous:
arr.write_direct(array, **kwargs)
else:
arr.write_direct(np.ascontiguousarray(array), **kwargs)

return tiledb.DenseArray(uri, mode="r", ctx=ctx)

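In `from_numpy`, the copy is now skipped only when the source array is already C-contiguous; Fortran-ordered or sliced inputs still go through `np.ascontiguousarray`. A short illustration of which inputs take each branch, with arbitrary example shapes and plain NumPy only:

```python
import numpy as np

c_ordered = np.ones((4, 3))               # default C order: eligible for the zero-copy path
f_ordered = np.asfortranarray(c_ordered)  # Fortran order: not C-contiguous
strided = c_ordered[:, ::2]               # sliced view: not C-contiguous

for arr in (c_ordered, f_ordered, strided):
    if arr.flags.c_contiguous:
        print("written directly, no copy")
    else:
        # from_numpy falls back to np.ascontiguousarray(arr) for these
        print("copied before write_direct")
```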
25 changes: 15 additions & 10 deletions tiledb/sparse_array.py
@@ -105,15 +105,18 @@ def _setitem_impl_sparse(self, selection, val, nullmaps: dict):
val = dict({self.attr(0).name: val})

# Create dictionary for label names and values from the dictionary
labels = {
name: (
data
if not isinstance(data, np.ndarray) or data.dtype == np.dtype("O")
else np.ascontiguousarray(data, dtype=self.schema.dim_label(name).dtype)
)
for name, data in val.items()
if self.schema.has_dim_label(name)
}
labels = {}
for name, data in val.items():
if self.schema.has_dim_label(name):
if not isinstance(data, np.ndarray) or data.dtype == np.dtype("O"):
labels[name] = data
else:
target_dtype = self.schema.dim_label(name).dtype
# Avoid unnecessary copy if already C-contiguous and correct dtype
if data.flags.c_contiguous and data.dtype == target_dtype:
labels[name] = data
else:
labels[name] = np.ascontiguousarray(data, dtype=target_dtype)

# must iterate in Attr order to ensure that value order matches
for attr_idx in range(self.schema.nattr):
@@ -153,7 +156,9 @@ def _setitem_impl_sparse(self, selection, val, nullmaps: dict):
else:
attr_val = np.nan_to_num(attr_val)
attr_val = np.array([0 if v is None else v for v in attr_val])
attr_val = np.ascontiguousarray(attr_val, dtype=attr.dtype)
# Avoid unnecessary copy if already C-contiguous and correct dtype
if not (attr_val.flags.c_contiguous and attr_val.dtype == attr.dtype):
attr_val = np.ascontiguousarray(attr_val, dtype=attr.dtype)

except Exception as exc:
raise ValueError(
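The `@@ -153,7 +156,9 @@` hunk runs the contiguity check only after `None`/NaN handling, so the array reaching the writer is plain numeric data. A rough standalone sketch of that preprocessing for a nullable attribute; it reorders the cleanup steps for clarity rather than mirroring the library's exact branch structure, and the attribute dtype `np.float64` and the sample values are illustrative:

```python
import numpy as np

attr_dtype = np.dtype(np.float64)
attr_val = [1.5, None, np.nan, 4.0]  # user-supplied values for a nullable attribute

# validity map: 1 where a real value was supplied, 0 where it was None
nullmap = np.array([int(v is not None) for v in attr_val], dtype=np.uint8)

# replace None with 0, then NaN with 0, before the dtype/contiguity check
cleaned = np.array([0 if v is None else v for v in attr_val])
cleaned = np.nan_to_num(cleaned)

# same fast path as the diff: copy only if layout or dtype needs fixing
if not (cleaned.flags.c_contiguous and cleaned.dtype == attr_dtype):
    cleaned = np.ascontiguousarray(cleaned, dtype=attr_dtype)

print(nullmap)  # [1 0 1 1]
print(cleaned)  # [1.5 0.  0.  4. ]
```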