diff --git a/src/sempy_labs/_helper_functions.py b/src/sempy_labs/_helper_functions.py
index 8d713a74..72c5d9ee 100644
--- a/src/sempy_labs/_helper_functions.py
+++ b/src/sempy_labs/_helper_functions.py
@@ -257,7 +257,7 @@ def create_item(
     definition: Optional[dict] = None,
     workspace: Optional[str | UUID] = None,
     folder: Optional[str | PathLike] = None,
-):
+) -> str:
     """
     Creates an item in a Fabric workspace.
 
@@ -278,6 +278,11 @@ def create_item(
     folder : str | os.PathLike, default=None
         The folder within the workspace where the item will be created.
         Defaults to None which places the item in the root of the workspace.
+
+    Returns
+    -------
+    str
+        The ID of the created item.
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -295,17 +300,18 @@ def create_item(
         folder=folder, workspace=workspace_id
     )
 
-    _base_api(
+    response_json = _base_api(
         request=f"/v1/workspaces/{workspace_id}/{item_type_url}",
         method="post",
         payload=payload,
         status_codes=[201, 202],
-        lro_return_status_code=True,
+        lro_return_json=True,
         client="fabric_sp",
     )
     print(
         f"{icons.green_dot} The '{name}' {type} has been successfully created within the '{workspace_name}' workspace."
     )
+    return response_json.get("id")
 
 
 @log
diff --git a/src/sempy_labs/eventhouse/_items.py b/src/sempy_labs/eventhouse/_items.py
index 924b6522..58736780 100644
--- a/src/sempy_labs/eventhouse/_items.py
+++ b/src/sempy_labs/eventhouse/_items.py
@@ -20,7 +20,7 @@ def create_eventhouse(
     definition: Optional[dict],
     description: Optional[str] = None,
     workspace: Optional[str | UUID] = None,
-):
+) -> str:
     """
     Creates a Fabric eventhouse.
 
@@ -40,6 +40,11 @@ def create_eventhouse(
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    str
+        The ID of the created eventhouse.
     """
 
     if definition is not None and not isinstance(definition, dict):
@@ -59,7 +64,7 @@ def create_eventhouse(
         else None
     )
 
-    create_item(
+    return create_item(
         name=name,
         type="Eventhouse",
         workspace=workspace,
diff --git a/src/sempy_labs/ontology/__init__.py b/src/sempy_labs/ontology/__init__.py
new file mode 100644
index 00000000..00c0b1de
--- /dev/null
+++ b/src/sempy_labs/ontology/__init__.py
@@ -0,0 +1,11 @@
+from ._items import (
+    list_ontologies,
+    create_ontology,
+    delete_ontology,
+)
+
+__all__ = [
+    "list_ontologies",
+    "create_ontology",
+    "delete_ontology",
+]
diff --git a/src/sempy_labs/ontology/_items.py b/src/sempy_labs/ontology/_items.py
new file mode 100644
index 00000000..00c374d3
--- /dev/null
+++ b/src/sempy_labs/ontology/_items.py
@@ -0,0 +1,180 @@
+from os import PathLike
+import pandas as pd
+from typing import Optional
+from uuid import UUID
+import sempy.fabric as fabric
+from sempy._utils._log import log
+from sempy_labs._helper_functions import (
+    _base_api,
+    _create_dataframe,
+    resolve_workspace_id,
+    resolve_workspace_name_and_id,
+    create_item,
+    delete_item,
+)
+from sempy_labs.eventhouse import create_eventhouse
+from sempy_labs._kql_databases import _resolve_cluster_uri
+
+
+@log
+def accelerate(name: str, workspace: Optional[str | UUID] = None) -> dict:
+    """
+    Creates the set of items backing an ontology: a schema-enabled lakehouse
+    ('lh_{name}'), an eventhouse ('eh_{name}') and the ontology itself
+    ('ont_{name}').
+
+    Parameters
+    ----------
+    name : str
+        The base name used to derive the names of the created items.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    dict
+        The IDs of the created items and the cluster URI of the eventhouse's
+        KQL database.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    eh_name = f"eh_{name}"
+    lh_name = f"lh_{name}"
+    ont_name = f"ont_{name}"
+
+    # Create lakehouse
+    lakehouse_id = fabric.create_lakehouse(
+        display_name=lh_name, workspace=workspace_id, enable_schema=True
+    )
+
+    # Create eventhouse
+    eventhouse_id = create_eventhouse(
+        name=eh_name, definition={}, workspace=workspace_id
+    )
+
+    # Get the cluster URI of the eventhouse's KQL database
+    cluster_uri = _resolve_cluster_uri(kql_database=eh_name, workspace=workspace_id)
+
+    # Create the ontology itself
+    ontology_id = create_ontology(name=ont_name, definition={}, workspace=workspace_id)
+
+    return {
+        "workspace_name": workspace_name,
+        "lakehouse_id": lakehouse_id,
+        "eventhouse_id": eventhouse_id,
+        "cluster_uri": cluster_uri,
+        "ontology_id": ontology_id,
+    }
+
+
+@log
+def create_ontology(
+    name: str,
+    definition: dict,
+    workspace: Optional[str | UUID] = None,
+    folder: Optional[str | PathLike] = None,
+) -> str:
+    """
+    Creates a Fabric ontology.
+
+    Parameters
+    ----------
+    name : str
+        Name of the ontology.
+    definition : dict
+        The definition (item payload) of the ontology.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    folder : str | os.PathLike, default=None
+        The folder within the workspace where the item will be created.
+        Defaults to None which places the item in the root of the workspace.
+
+    Returns
+    -------
+    str
+        The ID of the created ontology.
+    """
+
+    return create_item(
+        name=name,
+        type="Ontology",
+        definition=definition,
+        workspace=workspace,
+        folder=folder,
+    )
+
+
+@log
+def list_ontologies(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+    """
+    Shows the ontologies within a workspace.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the ontologies within a workspace.
+    """
+
+    workspace_id = resolve_workspace_id(workspace)
+
+    columns = {
+        "Ontology Name": "str",
+        "Ontology Id": "str",
+        "Description": "str",
+    }
+
+    # Empty frame with the expected columns; returned as-is when nothing is found.
+    df = _create_dataframe(columns=columns)
+
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/ontologies",
+        uses_pagination=True,
+        client="fabric_sp",
+    )
+
+    rows = [
+        {
+            "Ontology Name": v.get("displayName"),
+            "Ontology Id": v.get("id"),
+            "Description": v.get("description"),
+        }
+        for r in responses
+        for v in r.get("value", [])
+    ]
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+    return df
+
+
+@log
+def delete_ontology(
+    ontology: str | UUID, workspace: Optional[str | UUID] = None
+) -> None:
+    """
+    Deletes a Fabric ontology.
+
+    Parameters
+    ----------
+    ontology : str | uuid.UUID
+        Name or ID of the ontology.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    delete_item(item=ontology, type="Ontology", workspace=workspace)