Hello,
I have compiled a .whl file, placed it under the custom library section of an environment, and published it. When I try to import it in a notebook using that environment, I get "ModuleNotFoundError: No module named 'sun_five9_runreport'".
When I run %pip list I can see the library in the list. I can't wrap my head around why this is happening.
Here is how I constructed setup.py:
from setuptools import setup, find_packages

setup(
    name="sun_five9_runreport",
    version="1.0.0",
    packages=find_packages(),
    description="A library to fetch reports from Five9 and process them into Spark DataFrames.",
    author="xxx",
    company="xxx",
)
please help!
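(Side note for anyone hitting the same symptom: a quick way to see what actually ended up inside the wheel, assuming it was built into a local dist/ folder, is to list its contents. If only the *.dist-info entries show up and there is no sun_five9_runreport/ folder, find_packages() picked up nothing, which would explain the import error.)

import glob
import zipfile

# Assumption: the wheel was produced into ./dist by "python setup.py bdist_wheel".
whl_path = sorted(glob.glob("dist/*.whl"))[-1]
print(zipfile.ZipFile(whl_path).namelist())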
I republished it with a different name and it worked this time.
Here is an example library for refreshing the SQL analytics endpoint:
import os
import shutil
import subprocess

# Recreate the directory structure from scratch
source_dir = "/mnt/data/lakehouse_sync_new"
dist_dir = os.path.join(source_dir, "dist")
if os.path.exists(source_dir):
    shutil.rmtree(source_dir)
os.makedirs(os.path.join(source_dir, "lakehouse_sync_new"))

# Create an empty __init__.py file
with open(os.path.join(source_dir, "lakehouse_sync_new", "__init__.py"), "w") as f:
    f.write("")

# Write the updated script without logging
script_content_no_logging = """
import json
import time

from sempy.fabric import FabricRestClient
from sempy.fabric.exceptions import FabricHTTPException, WorkspaceNotFoundException


class LakehouseSync:
    def __init__(self, tenant_id, workspace_id, lakehouse_id):
        self.tenant_id = tenant_id
        self.workspace_id = workspace_id
        self.lakehouse_id = lakehouse_id
        self.client = FabricRestClient()
        self.max_retries = 60  # Timeout after 60 seconds
        self.sleep_duration = 1  # Sleep 1 second between retries

    def _make_get_request(self, uri):
        \"\"\"Helper to make a GET request and return JSON data.\"\"\"
        response = self.client.get(uri)
        response.raise_for_status()
        return response.json()

    def _make_post_request(self, uri, payload):
        \"\"\"Helper to make a POST request and return JSON data.\"\"\"
        response = self.client.post(uri, json=payload)
        response.raise_for_status()
        return response.json()

    def get_sql_endpoint(self):
        \"\"\"Retrieve the SQL endpoint ID for the lakehouse.\"\"\"
        uri = f"/v1/workspaces/{self.workspace_id}/lakehouses/{self.lakehouse_id}"
        data = self._make_get_request(uri)
        return data['properties']['sqlEndpointProperties']['id']

    def sync_metadata(self, sql_endpoint_id):
        \"\"\"Initiate metadata synchronization and monitor progress.\"\"\"
        sync_uri = f"/v1.0/myorg/lhdatamarts/{sql_endpoint_id}"
        payload = {"commands": [{"$type": "MetadataRefreshExternalCommand"}]}

        # Start the sync
        response = self._make_post_request(sync_uri, payload)
        batch_id = response["batchId"]
        progress_state = response["progressState"]

        # Monitor sync progress
        status_uri = f"/v1.0/myorg/lhdatamarts/{sql_endpoint_id}/batches/{batch_id}"
        retries = 0
        while progress_state == 'inProgress' and retries < self.max_retries:
            time.sleep(self.sleep_duration)
            retries += 1
            status_response = self._make_get_request(status_uri)
            progress_state = status_response["progressState"]

        if retries >= self.max_retries:
            raise TimeoutError("Sync process timed out.")
        if progress_state == 'failure':
            raise Exception(f"Sync failed. Details: {status_response}")

    def run(self):
        \"\"\"Main method to orchestrate the synchronization process.\"\"\"
        sql_endpoint_id = self.get_sql_endpoint()
        self.sync_metadata(sql_endpoint_id)
"""

with open(os.path.join(source_dir, "lakehouse_sync_new", "lakehouse_sync_new.py"), "w") as f:
    f.write(script_content_no_logging)

# Create a minimal setup.py file
setup_content_minimal = """
from setuptools import setup, find_packages

setup(
    name="lakehouse_sync_new",
    version="1.0.0",
    packages=find_packages(),
    author="Sai Boyanapalli",
)
"""

with open(os.path.join(source_dir, "setup.py"), "w") as f:
    f.write(setup_content_minimal)

# Rebuild the .whl file
subprocess.run(["python3", "setup.py", "bdist_wheel"], cwd=source_dir, capture_output=True, text=True)
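Once the rebuilt wheel from dist/ is published to the environment as a custom library, using it from a notebook would look roughly like this (the IDs below are placeholders, not real values):

from lakehouse_sync_new.lakehouse_sync_new import LakehouseSync

# Placeholder GUIDs -- substitute your own tenant, workspace, and lakehouse IDs.
sync = LakehouseSync(
    tenant_id="00000000-0000-0000-0000-000000000000",
    workspace_id="00000000-0000-0000-0000-000000000000",
    lakehouse_id="00000000-0000-0000-0000-000000000000",
)
sync.run()  # fetches the SQL endpoint ID and waits for the metadata sync to finish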
Wow. This is useful!
Hi @sun-sboyanapall, thank you for reaching out to the Microsoft Fabric Community Forum.
Please try the following:
Run print(sys.executable) in the notebook to confirm which Python interpreter the attached environment is actually using.
Also confirm that the wheel was built from a proper package layout, with an __init__.py inside the package folder:
sun_five9_runreport/
    __init__.py
    ...
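As a quick check from the notebook itself, something along these lines (assuming the import name matches the package name in setup.py) will show whether the package is visible to the active interpreter at all:

import sys
import importlib.util

print(sys.executable)  # which interpreter the notebook session is running

# None here means no package named 'sun_five9_runreport' is importable from sys.path,
# even if pip lists a distribution with that name.
print(importlib.util.find_spec("sun_five9_runreport"))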
If this helps, please consider marking it 'Accept as Solution' so others with similar queries may find it more easily. If not, please share the details.
Thank you.
Hi @sun-sboyanapall,
Can you try republishing it once more? I have previously used a custom wheel in Spark without issues.
Also, can you try installing the wheel into a virtual environment on your local machine to check that the module installs correctly and imports without errors?
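A minimal local test along those lines, assuming the wheel sits in a dist/ folder next to where the script runs, might look like:

import glob
import subprocess
import venv

# Create a throwaway virtual environment with pip available.
venv.EnvBuilder(with_pip=True).create("whl_test_env")
py = "whl_test_env/bin/python"  # on Windows: whl_test_env\Scripts\python.exe

# Assumption: the built wheel is in ./dist -- adjust the path if needed.
wheel = sorted(glob.glob("dist/*.whl"))[-1]
subprocess.run([py, "-m", "pip", "install", wheel], check=True)
subprocess.run([py, "-c", "import sun_five9_runreport; print('import OK')"], check=True)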