12 changes: 9 additions & 3 deletions src/sempy_labs/_helper_functions.py
@@ -257,7 +257,7 @@ def create_item(
    definition: Optional[dict] = None,
    workspace: Optional[str | UUID] = None,
    folder: Optional[str | PathLike] = None,
-):
+) -> str:
    """
    Creates an item in a Fabric workspace.

@@ -278,6 +278,11 @@ def create_item(
    folder : str | os.PathLike, default=None
        The folder within the workspace where the item will be created.
        Defaults to None which places the item in the root of the workspace.
+
+    Returns
+    -------
+    str
+        The ID of the created item.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -295,17 +300,18 @@ def create_item(
        folder=folder, workspace=workspace_id
    )

-    _base_api(
+    response_json = _base_api(
        request=f"/v1/workspaces/{workspace_id}/{item_type_url}",
        method="post",
        payload=payload,
        status_codes=[201, 202],
-        lro_return_status_code=True,
+        lro_return_json=True,
        client="fabric_sp",
    )
    print(
        f"{icons.green_dot} The '{name}' {type} has been successfully created within the '{workspace_name}' workspace."
    )
+    return response_json.get("id")


@log
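With this change, `create_item` surfaces the ID of the newly created item, so callers no longer need a separate lookup by display name. A minimal sketch of how a caller might use the return value (the item name, type, and workspace below are illustrative, not taken from this PR):

```python
from sempy_labs._helper_functions import create_item

# Hypothetical example: capture the ID of a newly created item and reuse it
# in follow-up calls instead of re-resolving the item by name.
item_id = create_item(name="MyNotebook", type="Notebook", workspace="MyWorkspace")
print(f"Created item with ID: {item_id}")
```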
9 changes: 7 additions & 2 deletions src/sempy_labs/eventhouse/_items.py
@@ -20,7 +20,7 @@ def create_eventhouse(
    definition: Optional[dict],
    description: Optional[str] = None,
    workspace: Optional[str | UUID] = None,
-):
+) -> str:
    """
    Creates a Fabric eventhouse.

@@ -40,6 +40,11 @@ def create_eventhouse(
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    str
+        The ID of the created eventhouse.
    """

    if definition is not None and not isinstance(definition, dict):
@@ -59,7 +64,7 @@
        else None
    )

-    create_item(
+    return create_item(
        name=name,
        type="Eventhouse",
        workspace=workspace,
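Since `create_item` now returns the new item's ID, `create_eventhouse` can pass it straight through. A minimal sketch of how a caller might pick it up (the names are illustrative; `definition=None` is assumed to be acceptable because the parameter is typed `Optional[dict]`):

```python
from sempy_labs.eventhouse import create_eventhouse

# Hypothetical example: the eventhouse ID is now returned directly,
# so no follow-up lookup by display name is needed.
eventhouse_id = create_eventhouse(name="eh_sales", definition=None, workspace="MyWorkspace")
print(f"Created eventhouse with ID: {eventhouse_id}")
```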
11 changes: 11 additions & 0 deletions src/sempy_labs/ontology/__init__.py
@@ -0,0 +1,11 @@
from ._items import (
    list_ontologies,
    create_ontology,
    delete_ontology,
)

__all__ = [
    "list_ontologies",
    "create_ontology",
    "delete_ontology",
]
102 changes: 102 additions & 0 deletions src/sempy_labs/ontology/_items.py
@@ -0,0 +1,102 @@
from os import PathLike
import pandas as pd
from typing import Optional
from uuid import UUID
import sempy.fabric as fabric
from sempy._utils._log import log
from sempy_labs._helper_functions import (
    _base_api,
    _create_dataframe,
    resolve_workspace_id,
    resolve_workspace_name_and_id,
    create_item,
    delete_item,
)
from sempy_labs.eventhouse import create_eventhouse
from sempy_labs._kql_databases import _resolve_cluster_uri


@log
def accelerate(name: str, workspace: Optional[str | UUID] = None):
    """
    Creates a lakehouse, an eventhouse and an ontology sharing a common base name
    within the given workspace.

    Parameters
    ----------
    name : str
        The base name used for the created items.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

    eh_name = f"eh_{name}"
    lh_name = f"lh_{name}"
    ont_name = f"ont_{name}"

    # Create lakehouse
    lakehouse_id = fabric.create_lakehouse(
        display_name=lh_name, workspace=workspace_id, enable_schema=True
    )

    # Create eventhouse
    eventhouse_id = create_eventhouse(
        name=eh_name, definition={}, workspace=workspace_id
    )

    # Get the cluster URI of the eventhouse's KQL database
    cluster_uri = _resolve_cluster_uri(kql_database=eh_name, workspace=workspace_id)

    # Create ontology
    create_ontology(name=ont_name, definition={}, workspace=workspace_id)


@log
def create_ontology(
    name: str,
    definition: dict,
    workspace: Optional[str | UUID] = None,
    folder: Optional[str | PathLike] = None,
) -> str:
    """
    Creates an ontology in a Fabric workspace.

    Parameters
    ----------
    name : str
        The name of the ontology.
    definition : dict
        The definition (item payload) of the ontology.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
    folder : str | os.PathLike, default=None
        The folder within the workspace where the ontology will be created.

    Returns
    -------
    str
        The ID of the created ontology.
    """

    return create_item(
        name=name,
        type="Ontology",
        definition=definition,
        workspace=workspace,
        folder=folder,
    )


@log
def list_ontologies(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
    """
    Shows the ontologies within a workspace.

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the ontologies within the workspace.
    """

    workspace_id = resolve_workspace_id(workspace)

    columns = {
        "Ontology Name": "str",
        "Ontology Id": "str",
        "Description": "str",
    }

    df = _create_dataframe(columns=columns)

    responses = _base_api(
        request=f"/v1/workspaces/{workspace_id}/ontologies",
        uses_pagination=True,
        client="fabric_sp",
    )

    rows = []
    for r in responses:
        for v in r.get("value", []):
            rows.append(
                {
                    "Ontology Name": v.get("displayName"),
                    "Ontology Id": v.get("id"),
                    "Description": v.get("description"),
                }
            )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))

    return df


@log
def delete_ontology(
    ontology: str | UUID, workspace: Optional[str | UUID] = None
) -> None:
    """
    Deletes an ontology (referenced by name or ID) from the given Fabric workspace.
    """

    delete_item(item=ontology, type="Ontology", workspace=workspace)
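Taken together, the new module exposes a small create/list/delete surface for ontology items. A minimal, hypothetical end-to-end sketch (the workspace and ontology names are illustrative, and an empty definition is assumed to be accepted, as in `accelerate` above):

```python
from sempy_labs.ontology import create_ontology, list_ontologies, delete_ontology

workspace = "MyWorkspace"  # illustrative workspace name

# Create an ontology and capture its ID (create_item now returns it).
ontology_id = create_ontology(name="ont_sales", definition={}, workspace=workspace)

# List the ontologies in the workspace as a pandas DataFrame.
print(list_ontologies(workspace=workspace))

# Delete the ontology again, referencing it by its ID.
delete_ontology(ontology=ontology_id, workspace=workspace)
```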