Join us at FabCon Atlanta from March 16 - 20, 2026, for the ultimate Fabric, Power BI, AI and SQL community-led event. Save $200 with code FABCOMM.
Register now!Calling all Data Engineers! Fabric Data Engineer (Exam DP-700) live sessions are back! Starting October 16th. Sign up.
Hi everyone,
So recently I was experimenting with whether we can create reports through a script, apart from Power BI Desktop, and I found out that we can do it by using the '.pbip' file extension. I did find some success: I was able to create the 'model.bim' file (which contains the data source — tables and their columns; this data was extracted from an Excel file). For this I used a Python script. Initially I had a zip template that had the basic structure, which is first extracted in the Python script (I created a function to do the same, which extracts the zip and renames the files: I take the report name from the user and then rename the files, like Report_Name.pbip, Report_Name.SemanticModel, Report_Name.Report and .platform).
import os
import zipfile
import json
import shutil
import pandas as pd
import zipfile
from datetime import datetime
import copy
import uuid
import time
# Paths
STATIC_DIR = os.path.join(os.path.dirname(__file__), 'Static')
MODEL_TEMPLATE_PATH = os.path.join(STATIC_DIR, 'model_template.json')
DATA_TYPE_MAPPING_PATH = os.path.join(STATIC_DIR, 'data_type_mapping.json')
REPORT_CONFIG_TEMPLATE_PATH = os.path.join(STATIC_DIR, 'report_config_template.json')
# Load static templates
def load_json_template(path):
with open(path, 'r') as f:
return json.load(f)
model_template = load_json_template(MODEL_TEMPLATE_PATH)
data_type_map = load_json_template(DATA_TYPE_MAPPING_PATH)
report_static_config = load_json_template(REPORT_CONFIG_TEMPLATE_PATH)
# Function to map data types using external mapping
def map_data_type(value):
value = str(value).strip().lower()
mapped_type = data_type_map.get(value, "string")
valid_types = {"int64", "double", "boolean", "string", "dateTime", "decimal"}
return mapped_type if mapped_type in valid_types else "string"
def prepare_report_structure(report_name):
print(" Preparing report structure...")
ZIP_FILENAME = "Template.zip"
EXTRACTION_FOLDER = "Extraction"
original_report = "reportname"
# Clear previous extraction if exists
if os.path.exists(EXTRACTION_FOLDER):
shutil.rmtree(EXTRACTION_FOLDER)
# Extract the zip
with zipfile.ZipFile(ZIP_FILENAME, 'r') as zip_ref:
zip_ref.extractall(EXTRACTION_FOLDER)
# Define paths
paths = {
"report_folder_old": os.path.join(EXTRACTION_FOLDER, f"{original_report}.Report"),
"semantic_folder_old": os.path.join(EXTRACTION_FOLDER, f"{original_report}.SemanticModel"),
"pbip_file_old": os.path.join(EXTRACTION_FOLDER, f"{original_report}.pbip"),
"report_folder_new": os.path.join(EXTRACTION_FOLDER, f"{report_name}.Report"),
"semantic_folder_new": os.path.join(EXTRACTION_FOLDER, f"{report_name}.SemanticModel"),
"pbip_file_new": os.path.join(EXTRACTION_FOLDER, f"{report_name}.pbip"),
}
# Rename folders and pbip file
os.rename(paths["report_folder_old"], paths["report_folder_new"])
os.rename(paths["semantic_folder_old"], paths["semantic_folder_new"])
os.rename(paths["pbip_file_old"], paths["pbip_file_new"])
# Update .pbip file
try:
with open(paths["pbip_file_new"], 'r', encoding='utf-8') as file:
data = json.load(file)
data["artifacts"][0]["report"]["path"] = f"{report_name}.Report"
with open(paths["pbip_file_new"], 'w', encoding='utf-8') as file:
json.dump(data, file, indent=4)
except Exception as e:
print(f"Error updating .pbip: {e}")
# Update .platform in .Report
try:
platform_path = os.path.join(paths["report_folder_new"], ".platform")
with open(platform_path, 'r', encoding='utf-8') as file:
data = json.load(file)
data["metadata"]["displayName"] = report_name
with open(platform_path, 'w', encoding='utf-8') as file:
json.dump(data, file, indent=4)
except Exception as e:
print(f"Error updating .platform in .Report: {e}")
# Update definition.pbir
try:
definition_path = os.path.join(paths["report_folder_new"], "definition.pbir")
with open(definition_path, 'r', encoding='utf-8') as file:
data = json.load(file)
data["datasetReference"]["byPath"]["path"] = f"../{report_name}.SemanticModel"
with open(definition_path, 'w', encoding='utf-8') as file:
json.dump(data, file, indent=4)
except Exception as e:
print(f"Error updating definition.pbir: {e}")
# Update .platform in .SemanticModel
try:
semantic_platform_path = os.path.join(paths["semantic_folder_new"], ".platform")
with open(semantic_platform_path, 'r', encoding='utf-8') as file:
data = json.load(file)
data["metadata"]["displayName"] = report_name
with open(semantic_platform_path, 'w', encoding='utf-8') as file:
json.dump(data, file, indent=4)
except Exception as e:
print(f"Error updating .platform in SemanticModel: {e}")
print(" Folder extraction, renaming, and file updates completed successfully.")
return paths["semantic_folder_new"]
def create_model_bim(tables, output_folder):
print(" Creating model.bim...")
start_time = time.time()
model_bim = copy.deepcopy(model_template)
query_order = [tbl["name"] for tbl in tables]
model_bim["model"]["annotations"][0]["value"] = str(query_order)
updated_tables = []
for tbl in tables:
table_name = tbl["name"]
partition = {
"name": f"{table_name}_Partition",
"mode": "import",
"source": {
"type": "m",
"expression": (
f"let\n"
f" Source = Sql.Database(\"server_name\", \"database_name\")\n"
f"in\n"
f" Source{{[Schema=\"dbo\", Item=\"{table_name}\"]}}[Data]"
)
}
}
# Add sourceColumn to each column
for col in tbl["columns"]:
col["sourceColumn"] = col["name"]
tbl["partitions"] = [partition]
updated_tables.append(tbl)
model_bim["model"]["tables"] = updated_tables
bim_path = os.path.join(output_folder, "model.bim")
with open(bim_path, "w", encoding="utf-8") as file:
json.dump(model_bim, file, indent=4)
print(f" model.bim created at: {bim_path} in {time.time() - start_time:.2f} seconds.")
def create_visual(visual_type, rows, columns, title, x, y):
visual_id = str(uuid.uuid4())
projections = {}
if columns:
projections["Y"] = [{"queryRef": f"Sum({columns[0][0]}.{columns[0][1]})"}]
if rows:
projections["Category"] = [{"queryRef": f"{rows[0][0]}.{rows[0][1]}", "active": True}]
config = {
"name": visual_id,
"layouts": [
{
"id": 0,
"position": {
"x": x,
"y": y,
"z": 0,
"width": 400.0,
"height": 300.0,
"tabOrder": 0
}
}
],
"singleVisual": {
"visualType": visual_type.lower().replace(" ", ""),
"projections": projections,
"prototypeQuery": {},
"drillFilterOtherVisuals": True,
"hasDefaultSort": True,
"objects": {},
"vcObjects": {
"title": [
{
"properties": {
"text": {
"expr": {
"Literal": {"Value": f"'{title}'"}
}
}
}
}
]
}
}
}
return config
def create_dynamic_report_json(report_folder_path, metadata_path):
print(" Generating report.json from metadata...")
start_time = time.time()
df_types = pd.read_excel(metadata_path, sheet_name="chart_types")
df_axes = pd.read_excel(metadata_path, sheet_name="chart_axes")
visuals = []
x, y = 100.0, 100.0
visuals_per_page = 3
visual_index = 0
pages = []
for _, row in df_types.iterrows():
worksheet = row['worksheet']
plot_type = row['plot_type']
filtered_axes = df_axes[df_axes['worksheet'] == worksheet]
# Gather row and column fields
rows = filtered_axes[filtered_axes['type'].str.lower() == 'rows']
cols = filtered_axes[filtered_axes['type'].str.lower() == 'cols']
row_fields = list(rows[['table_name', 'column']].dropna().itertuples(index=False, name=None))
col_fields = list(cols[['table_name', 'column']].dropna().itertuples(index=False, name=None))
# Skip visual if missing axis info
if not row_fields or not col_fields:
print(f" Skipping visual '{worksheet}' due to missing row/column axis data.")
continue
config = create_visual(plot_type, row_fields, col_fields, worksheet, x, y)
visuals.append({
"config": json.dumps(config),
"filters": "[]",
"height": 300.0,
"width": 400.0,
"x": x,
"y": y,
"z": 0.0
})
visual_index += 1
if visual_index % visuals_per_page == 0:
x, y = 100.0, 100.0
else:
x += 420.0
for i in range(0, len(visuals), visuals_per_page):
section_id = str(uuid.uuid4())
pages.append({
"config": "{}",
"displayName": f"Page {i//visuals_per_page + 1}",
"displayOption": 1,
"filters": "[]",
"height": 720.0,
"name": section_id,
"visualContainers": visuals[i:i+visuals_per_page],
"width": 1280.0
})
report_json = {
"config": json.dumps(report_static_config["config"]),
"layoutOptimization": report_static_config.get("layoutOptimization", 0),
"resourcePackages": report_static_config.get("resourcePackages", []),
"sections": pages
}
output_path = os.path.join(report_folder_path, "report.json")
with open(output_path, "w", encoding="utf-8") as f:
json.dump(report_json, f, indent=4)
print(f" report.json created with {len(visuals)} visuals across {len(pages)} page(s) in {time.time() - start_time:.2f} seconds.")
# ---------- Main Execution ----------
report_name = input("Enter report name: ").strip()
# Step 1: Prepare report structure
semantic_model_path = prepare_report_structure(report_name)
# Step 2: Load Metadata.xlsx (only 'columns' sheet)
print(" Loading table column metadata...")
metadata_path = os.path.join("Files", "MetaData.xlsx")
df = pd.read_excel(metadata_path, sheet_name="columns")
# Group by table_name to create list of tables
tables = []
for table_name, group in df.groupby("table_name"):
columns = [
{
"name": row["caption"],
"dataType": map_data_type(row.get("type", row.get("data_type", "string")))
} for _, row in group.iterrows()
]
tables.append({"name": table_name, "columns": columns})
# Step 3: Create model.bim
create_model_bim(tables, semantic_model_path)
# Step 4: Create report.json
dynamic_report_folder_path = os.path.join("Extraction", f"{report_name}.Report")
create_dynamic_report_json(dynamic_report_folder_path, metadata_path)
General structure of the zip file: Includes a Report, Semantic Model and the PBIP file
This is the model.bim file that is created (which is done from the script's second function, 'create_model_bim'; the data tables and columns are extracted, i.e. read, from a metadata file which looks like):
Here I am generally focusing on three columns: table_name, caption (i.e. column name) and type (column type), which generate the model.bim structure (the skeleton of the tables).
Here I was able to get the desired result like on opening the pbip file could see the data source and tables. Then I moved to create a single visual using the script (initially I explicitly specified that I want to create a bar chart with these columns and executed, it did the job)
def create_valid_report_json(report_folder_path):
import uuid
section_id = str(uuid.uuid4())
visual_id = str(uuid.uuid4())
# Copy base config
base_config = copy.deepcopy(report_static_config["config"])
# Build the visual configuration
visual_config = {
"name": visual_id,
"layouts": [
{
"id": 0,
"position": {
"x": 100.0,
"y": 100.0,
"z": 0,
"width": 400.0,
"height": 300.0,
"tabOrder": 0
}
}
],
"singleVisual": {
"visualType": "columnChart",
"projections": {
"Y": [{"queryRef": "Sum(Orders.Sales)"}],
"Category": [{"queryRef": "Orders.Customer Name", "active": True}]
},
"prototypeQuery": {
"Version": 2,
"From": [
{"Name": "Orders", "Entity": "Orders", "Type": 0}
],
"Select": [
{
"Aggregation": {
"Expression": {
"Column": {
"Expression": {"SourceRef": {"Source": "Orders"}},
"Property": "Sales"
}
},
"Function": 0
},
"Name": "Sum(Orders.Sales)",
"NativeReferenceName": "Sum of Sales"
},
{
"Column": {
"Expression": {"SourceRef": {"Source": "Orders"}},
"Property": "Customer Name"
},
"Name": "Orders.Customer Name",
"NativeReferenceName": "Customer Name"
}
],
"OrderBy": [
{
"Direction": 2,
"Expression": {
"Aggregation": {
"Expression": {
"Column": {
"Expression": {"SourceRef": {"Source": "Orders"}},
"Property": "Sales"
}
},
"Function": 0
}
}
}
]
},
"drillFilterOtherVisuals": True,
"hasDefaultSort": True,
"objects": {},
"vcObjects": {
"title": [
{
"properties": {
"text": {
"expr": {
"Literal": {"Value": "'Sales by Customer'"}
}
}
}
}
]
}
}
}
report_json = {
"config": json.dumps(base_config),
"layoutOptimization": report_static_config.get("layoutOptimization", 0),
"resourcePackages": report_static_config.get("resourcePackages", []),
"sections": [
{
"config": "{}",
"displayName": "Page 1",
"displayOption": 1,
"filters": "[]",
"height": 720.0,
"name": section_id,
"visualContainers": [
{
"config": json.dumps(visual_config),
"filters": "[]",
"height": 300.0,
"width": 400.0,
"x": 100.0,
"y": 100.0,
"z": 0.0
}
],
"width": 1280.0
}
]
}
output_path = os.path.join(report_folder_path, "report.json")
with open(output_path, "w", encoding="utf-8") as f:
json.dump(report_json, f, indent=4)
print(f" report.json created at: {output_path}")
So now I thought to progress to doing this dynamically through the metadata file (wherein I am majorly focusing on two sheets: one is chart_axes, which tells which columns are used as rows and which as columns (in the sheet it is denoted as cols) and what the tooltips are; the other is chart_types, which denotes which chart types we have). So basically I have just modified the above function (which you can see in the main script that I shared at the start) to read from the metadata file and check the chart_types (source and source_reporting_system are common, so those are ignored).
chart_types and chart_axes (sheets in MetaData)
So initially in the code, if there was no relevant information found for a chart in chart_axes, I thought of creating a blank visual and continuing, but when I tried opening the pbip file it gave a fatal error: "Failed to open report". So I thought it might be due to the blank visuals, so I decided to skip the chart if no axis information is found in chart_axes for the respective chart and try again (which is what I have done in my main script's last function), but I am still getting the same issue:
So if anyone has any inputs or suggestion on how to debug this please do share the same and if you need any more, do let me know.
Regards,
Sidhant
Solved! Go to Solution.
Hi everyone,
A quick update: so I am able to create reports in a programmatic fashion; it looks like:
def create_model_bim(tables, output_folder):
# Load a fresh copy of the model template
model_bim = copy.deepcopy(model_template)
# Replace {{query_order}} annotation
for annotation in model_bim["model"].get("annotations", []):
if annotation.get("value") == "{{query_order}}":
annotation["value"] = str([tbl["name"] for tbl in tables])
# Build table_column_map to validate relationships later
table_column_map = {}
with open("Static/data_type_mapping.json") as f:
type_mapping = json.load(f)
model_bim["model"]["tables"] = []
for table in tables:
column_names = [col["name"] for col in table["columns"]]
table_dict = {
"name": table["name"],
"columns": [
{
"name": col["name"],
"dataType": type_mapping.get(col["type"].lower(), "string"),
"sourceColumn": col["name"]
} for col in table["columns"]
],
"partitions": [
{
"name": f"{table['name']}_Partition",
"mode": "import",
"source": {
"type": "m",
"expression": (
f"let Source = Sql.Database(\"server_name\", \"database_name\") "
f"in Source{{[Schema=\"dbo\", Item=\"{table['name']}\"]}}[Data]"
)
}
}
]
}
model_bim["model"]["tables"].append(table_dict)
# table_column_map[table["name"]] = [col["name"] for col in table["columns"]]
table_column_map[table["name"]] = column_names
# ✅ Load relationships sheet and parse valid entries
# metadata_path = os.path.join(os.path.dirname(output_folder), "Files", "MetaData.xlsx")
metadata_path = os.path.abspath(os.path.join(output_folder, "..", "..", "Files", "MetaData.xlsx"))
# relationships = extract_relationships_from_metadata(metadata_path)
relationships = extract_relationships_from_metadata(metadata_path, table_column_map)
if relationships:
model_bim["model"]["relationships"] = relationships
measures = extract_measures_from_metadata(metadata_path)
if measures:
measures_table = {
"name": "__Measures",
"columns": [
{
"name": "Dummy",
"dataType": "string"
}
],
"measures": measures,
"partitions": [
{
"name": "__Measures_Partition",
"mode": "import",
"source":
{
"type": "m",
"expression": "let Source = #table({\"Dummy\"}, {}) in Source"
}
}
]
}
model_bim["model"]["tables"].append(measures_table)
# Write model.bim to file
bim_path = os.path.join(output_folder, "model.bim")
with open(bim_path, "w", encoding="utf-8") as file:
json.dump(model_bim, file, indent=4)
print(f"✅ model.bim created at: {bim_path}")
#-- Report.json file creation:
def create_valid_report_json(report_folder_path, chart_types_df, chart_axes_df):
base_config = copy.deepcopy(report_static_config["config"])
def create_visual_config(visual_type, visual_index, axes):
visual_id = str(uuid.uuid4())
layout = {
"id": 0,
"position": {
"x": 100.0 + (visual_index % 2) * 450.0,
"y": 100.0 + (visual_index // 2) * 350.0,
"z": 0,
"width": 400.0,
"height": 300.0,
"tabOrder": 0
}
}
projections = {}
selects = []
from_tables = set()
for axis_type, axis_list in axes.items():
projections[axis_type] = []
for axis in axis_list:
table = axis['table_name']
column = axis['column']
full_ref = f"{table}.{column}"
from_tables.add(table)
if axis.get('aggregation'):
agg_func = 0 # sum
projections[axis_type].append({"queryRef": f"Sum({full_ref})"})
selects.append({
"Aggregation": {
"Expression": {
"Column": {
"Expression": {"SourceRef": {"Source": table}},
"Property": column
}
},
"Function": agg_func
},
"Name": f"Sum({full_ref})",
"NativeReferenceName": f"Sum of {column}"
})
else:
projections[axis_type].append({"queryRef": full_ref, "active": True})
selects.append({
"Column": {
"Expression": {"SourceRef": {"Source": table}},
"Property": column
},
"Name": full_ref,
"NativeReferenceName": column
})
visual_config = {
"name": visual_id,
"layouts": [layout],
"singleVisual": {
"visualType": visual_type,
"projections": projections,
"prototypeQuery": {
"Version": 2,
"From": [{"Name": t, "Entity": t, "Type": 0} for t in from_tables],
"Select": selects
},
"drillFilterOtherVisuals": True,
"hasDefaultSort": True,
"objects": {},
"vcObjects": {
"title": [
{
"properties": {
"text": {
"expr": {
"Literal": {
"Value": f"'{visual_type.title()} Visual {visual_index + 1}'"
}
}
}
}
}
]
}
}
}
return visual_config
visual_containers = []
for idx, row in chart_types_df.iterrows():
visual_type = row.get('plot_type')
worksheet = row.get('worksheet')
if not visual_type or not worksheet:
continue # Skip incomplete rows
relevant_axes = chart_axes_df[
(chart_axes_df['worksheet'] == worksheet) &
(chart_axes_df['type'].isin(['rows', 'cols'])) &
# (chart_axes_df['order_id'] == 0) &
(chart_axes_df['table_name'].notna())
]
axes_dict = {}
for axis_type in ['rows', 'cols']:
group = relevant_axes[relevant_axes['type'] == axis_type]
if not group.empty:
axes_dict['Category' if axis_type == 'rows' else 'Y'] = group.apply(
lambda axis_row: {
'table_name': axis_row['table_name'],
'column': axis_row['column'],
'aggregation': str(axis_row.get('aggregation', '')).strip().lower() == 'sum'
},
axis=1
).tolist()
if axes_dict:
config = create_visual_config(visual_type, idx, axes_dict)
container = {
"config": json.dumps(config),
"filters": "[]",
"height": 300.0,
"width": 400.0,
"x": 100.0 + (idx % 2) * 450.0,
"y": 100.0 + (idx // 2) * 350.0,
"z": 0.0
}
visual_containers.append(container)
report_json = {
"config": json.dumps(base_config),
"layoutOptimization": 0,
"resourcePackages": [],
"sections": [
{
"config": "{}",
"displayName": "Auto Page",
"displayOption": 1,
"filters": "[]",
"height": 720.0,
"name": str(uuid.uuid4()),
"visualContainers": visual_containers,
"width": 1280.0
}
]
}
output_path = os.path.join(report_folder_path, "report.json")
with open(output_path, "w", encoding="utf-8") as f:
json.dump(report_json, f, indent=4)
print(f"✅ report.json with {len(visual_containers)} visuals written at: {output_path}")
Hi @v-dineshya ,
Just to explain in short: I am exploring whether we can create an entire report programmatically. For that I understood we can (using the .pbip file extension), so I created a kind of boilerplate zip package that can be used (I have shared the main code in my post — you can check that out). From the user I am taking the report name as input, and then the script is divided into 3 functions:
Function-1: Unzips the zip package & renames few files and folders:
.platform:
And inside, a few files (.platform, report_name.pbip) and the folders where 'report_name' is replaced by the value provided by the user. Then comes the table schema creation (it includes table names and the respective columns and their types, plus relationships and measures creation — all of this inferred from a MetaData.xlsx Excel sheet, and the file is created accordingly). Some of the content in these files is static; for that I have created a few templates that are referred to (in order to access them again and again in the main code).
Like: For report.json
Once this is done then the report.json (which contains visuals i.e. which visuals are being used is also inferred from the sheet (chart_axes), once done I usually open the pbip file
(Since I am using .bim I need to remove files with .tmdl extension which is present inside 'definition folder'-> cultures (en-US.tmdl)
And now I need the skeleton to be present, that is the visuals and the schema (there won't be any data, but as soon as I connect the appropriate DB, the visuals will show the data based on the respective columns). The issue: I think the report.json file is not being created properly, due to which the visuals are not being rendered properly.
So for now in report.json I have explicitly specified two visuals (what the rows, columns and the visual type should be), so those are created, but this needs to be dynamic.
Do you need any more inputs.
Regards,
Sidhant.
Hi @Sidhant ,
If my response has resolved your query, please mark it as the Accepted Solution to assist others. Additionally, a 'Kudos' would be appreciated if you found my response helpful.
Thank you
For reference I am even sharing the model.bim file and report.json (could not share them in the main post as the character limit was exceeded).
// model.bim
{
"compatibilityLevel": 1550,
"model": {
"annotations": [
{
"name": "PBI_QueryOrder",
"value": "['Orders', 'People', 'Returns', 'Sales Commission.csv', 'Sales Target']"
},
{
"name": "__PBI_TimeIntelligenceEnabled",
"value": "1"
},
{
"name": "PBIDesktopVersion",
"value": "2.128.1380.0 (24.04)"
},
{
"name": "PBI_ProTooling",
"value": "[\"DevMode\"]"
}
],
"culture": "en-US",
"cultures": [
{
"name": "en-US",
"linguisticMetadata": {
"content": {
"Language": "en-US",
"Version": "1.0.0"
},
"contentType": "json"
}
}
],
"defaultPowerBIDataSourceVersion": "powerBI_V3",
"tables": [
{
"name": "Orders",
"columns": [
{
"name": "Row ID",
"dataType": "int64",
"sourceColumn": "Row ID"
},
{
"name": "Order ID",
"dataType": "string",
"sourceColumn": "Order ID"
},
{
"name": "Order Date",
"dataType": "dateTime",
"sourceColumn": "Order Date"
},
{
"name": "Ship Date",
"dataType": "dateTime",
"sourceColumn": "Ship Date"
},
{
"name": "Ship Mode",
"dataType": "string",
"sourceColumn": "Ship Mode"
},
{
"name": "Customer ID",
"dataType": "string",
"sourceColumn": "Customer ID"
},
{
"name": "Customer Name",
"dataType": "string",
"sourceColumn": "Customer Name"
},
{
"name": "Segment",
"dataType": "string",
"sourceColumn": "Segment"
},
{
"name": "Country/Region",
"dataType": "string",
"sourceColumn": "Country/Region"
},
{
"name": "City",
"dataType": "string",
"sourceColumn": "City"
},
{
"name": "State/Province",
"dataType": "string",
"sourceColumn": "State/Province"
},
{
"name": "Postal Code",
"dataType": "string",
"sourceColumn": "Postal Code"
},
{
"name": "Region",
"dataType": "string",
"sourceColumn": "Region"
},
{
"name": "Product ID",
"dataType": "string",
"sourceColumn": "Product ID"
},
{
"name": "Category",
"dataType": "string",
"sourceColumn": "Category"
},
{
"name": "Sub-Category",
"dataType": "string",
"sourceColumn": "Sub-Category"
},
{
"name": "Product Name",
"dataType": "string",
"sourceColumn": "Product Name"
},
{
"name": "Sales",
"dataType": "string",
"sourceColumn": "Sales"
},
{
"name": "Quantity",
"dataType": "int64",
"sourceColumn": "Quantity"
},
{
"name": "Discount",
"dataType": "string",
"sourceColumn": "Discount"
},
{
"name": "Profit",
"dataType": "string",
"sourceColumn": "Profit"
}
],
"partitions": [
{
"name": "Orders_Partition",
"mode": "import",
"source": {
"type": "m",
"expression": "let\n Source = Sql.Database(\"server_name\", \"database_name\")\nin\n Source{[Schema=\"dbo\", Item=\"Orders\"]}[Data]"
}
}
]
},
{
"name": "People",
"columns": [
{
"name": "Regional Manager",
"dataType": "string",
"sourceColumn": "Regional Manager"
},
{
"name": "Region (People)",
"dataType": "string",
"sourceColumn": "Region (People)"
}
],
"partitions": [
{
"name": "People_Partition",
"mode": "import",
"source": {
"type": "m",
"expression": "let\n Source = Sql.Database(\"server_name\", \"database_name\")\nin\n Source{[Schema=\"dbo\", Item=\"People\"]}[Data]"
}
}
]
},
{
"name": "Returns",
"columns": [
{
"name": "Returned",
"dataType": "string",
"sourceColumn": "Returned"
},
{
"name": "Order ID (Returns)",
"dataType": "string",
"sourceColumn": "Order ID (Returns)"
}
],
"partitions": [
{
"name": "Returns_Partition",
"mode": "import",
"source": {
"type": "m",
"expression": "let\n Source = Sql.Database(\"server_name\", \"database_name\")\nin\n Source{[Schema=\"dbo\", Item=\"Returns\"]}[Data]"
}
}
]
},
{
"name": "Sales Commission.csv",
"columns": [
{
"name": "Indian - Region",
"dataType": "string",
"sourceColumn": "Indian - Region"
},
{
"name": "Indian Sales Person",
"dataType": "string",
"sourceColumn": "Indian Sales Person"
},
{
"name": "Order Date",
"dataType": "dateTime",
"sourceColumn": "Order Date"
},
{
"name": "Sales",
"dataType": "int64",
"sourceColumn": "Sales"
}
],
"partitions": [
{
"name": "Sales Commission.csv_Partition",
"mode": "import",
"source": {
"type": "m",
"expression": "let\n Source = Sql.Database(\"server_name\", \"database_name\")\nin\n Source{[Schema=\"dbo\", Item=\"Sales Commission.csv\"]}[Data]"
}
}
]
},
{
"name": "Sales Target",
"columns": [
{
"name": "Category",
"dataType": "string",
"sourceColumn": "Category"
},
{
"name": "Order Date",
"dataType": "dateTime",
"sourceColumn": "Order Date"
},
{
"name": "Segment",
"dataType": "string",
"sourceColumn": "Segment"
},
{
"name": "Sales Target (Sales Target)",
"dataType": "int64",
"sourceColumn": "Sales Target (Sales Target)"
}
],
"partitions": [
{
"name": "Sales Target_Partition",
"mode": "import",
"source": {
"type": "m",
"expression": "let\n Source = Sql.Database(\"server_name\", \"database_name\")\nin\n Source{[Schema=\"dbo\", Item=\"Sales Target\"]}[Data]"
}
}
]
}
]
}
}
//report.json
{
"config": "{\"version\": \"5.59\", \"themeCollection\": {\"baseTheme\": {\"name\": \"CY24SU10\", \"version\": \"5.61\", \"type\": 2}}, \"activeSectionIndex\": 0, \"defaultDrillFilterOtherVisuals\": true, \"linguisticSchemaSyncVersion\": 2, \"settings\": {\"useNewFilterPaneExperience\": true, \"allowChangeFilterTypes\": true, \"useStylableVisualContainerHeader\": true, \"queryLimitOption\": 6, \"exportDataMode\": 1, \"useDefaultAggregateDisplayName\": true, \"useEnhancedTooltips\": true}, \"objects\": {\"section\": [{\"properties\": {\"verticalAlignment\": {\"expr\": {\"Literal\": {\"Value\": \"'Top'\"}}}}}]}}",
"layoutOptimization": 0,
"resourcePackages": [
{
"resourcePackage": {
"disabled": false,
"items": [
{
"name": "CY24SU10",
"path": "BaseThemes/CY24SU10.json",
"type": 202
}
],
"name": "SharedResources",
"type": 2
}
}
],
"sections": [
{
"config": "{}",
"displayName": "Page 1",
"displayOption": 1,
"filters": "[]",
"height": 720.0,
"name": "129c05b3-0727-4949-a6a7-1cf20c410f6b",
"visualContainers": [
{
"config": "{\"name\": \"47ac643a-6bbd-4ee2-804c-2e3a74c008ac\", \"layouts\": [{\"id\": 0, \"position\": {\"x\": 100.0, \"y\": 100.0, \"z\": 0, \"width\": 400.0, \"height\": 300.0, \"tabOrder\": 0}}], \"singleVisual\": {\"visualType\": \"barchart\", \"projections\": {\"Y\": [{\"queryRef\": \"Sum(Orders.ctd:Customer Name)\"}], \"Category\": [{\"queryRef\": \"Orders.Region\", \"active\": true}]}, \"prototypeQuery\": {}, \"drillFilterOtherVisuals\": true, \"hasDefaultSort\": true, \"objects\": {}, \"vcObjects\": {\"title\": [{\"properties\": {\"text\": {\"expr\": {\"Literal\": {\"Value\": \"'CustomerOverview'\"}}}}}]}}}",
"filters": "[]",
"height": 300.0,
"width": 400.0,
"x": 100.0,
"y": 100.0,
"z": 0.0
},
{
"config": "{\"name\": \"e19cc948-f79d-4a63-b46a-9186694241e8\", \"layouts\": [{\"id\": 0, \"position\": {\"x\": 520.0, \"y\": 100.0, \"z\": 0, \"width\": 400.0, \"height\": 300.0, \"tabOrder\": 0}}], \"singleVisual\": {\"visualType\": \"barchart\", \"projections\": {\"Y\": [{\"queryRef\": \"Sum(Orders.sum:Sales)\"}], \"Category\": [{\"queryRef\": \"Orders.Customer Name\", \"active\": true}]}, \"prototypeQuery\": {}, \"drillFilterOtherVisuals\": true, \"hasDefaultSort\": true, \"objects\": {}, \"vcObjects\": {\"title\": [{\"properties\": {\"text\": {\"expr\": {\"Literal\": {\"Value\": \"'CustomerRank'\"}}}}}]}}}",
"filters": "[]",
"height": 300.0,
"width": 400.0,
"x": 520.0,
"y": 100.0,
"z": 0.0
},
{
"config": "{\"name\": \"3f715633-d011-4aa2-b58b-9a83a3c69fa7\", \"layouts\": [{\"id\": 0, \"position\": {\"x\": 940.0, \"y\": 100.0, \"z\": 0, \"width\": 400.0, \"height\": 300.0, \"tabOrder\": 0}}], \"singleVisual\": {\"visualType\": \"linechart\", \"projections\": {\"Y\": [{\"queryRef\": \"Sum(Orders.tmn:Order Date)\"}], \"Category\": [{\"queryRef\": \"Orders.Segment\", \"active\": true}]}, \"prototypeQuery\": {}, \"drillFilterOtherVisuals\": true, \"hasDefaultSort\": true, \"objects\": {}, \"vcObjects\": {\"title\": [{\"properties\": {\"text\": {\"expr\": {\"Literal\": {\"Value\": \"'Forecast'\"}}}}}]}}}",
"filters": "[]",
"height": 300.0,
"width": 400.0,
"x": 940.0,
"y": 100.0,
"z": 0.0
}
],
"width": 1280.0
},
{
"config": "{}",
"displayName": "Page 2",
"displayOption": 1,
"filters": "[]",
"height": 720.0,
"name": "b0e1ffae-8fe3-45d3-a7e4-e0c721c7485b",
"visualContainers": [
{
"config": "{\"name\": \"662b389d-64e7-4a84-bafe-8e44c6fe765a\", \"layouts\": [{\"id\": 0, \"position\": {\"x\": 100.0, \"y\": 100.0, \"z\": 0, \"width\": 400.0, \"height\": 300.0, \"tabOrder\": 0}}], \"singleVisual\": {\"visualType\": \"circlechart\", \"projections\": {\"Y\": [{\"queryRef\": \"Sum(Orders.Segment)\"}], \"Category\": [{\"queryRef\": \"Orders.Category\", \"active\": true}]}, \"prototypeQuery\": {}, \"drillFilterOtherVisuals\": true, \"hasDefaultSort\": true, \"objects\": {}, \"vcObjects\": {\"title\": [{\"properties\": {\"text\": {\"expr\": {\"Literal\": {\"Value\": \"'ProductDetails'\"}}}}}]}}}",
"filters": "[]",
"height": 300.0,
"width": 400.0,
"x": 100.0,
"y": 100.0,
"z": 0.0
},
{
"config": "{\"name\": \"1d2f7c56-4f41-4868-ad14-47e4f2a4e109\", \"layouts\": [{\"id\": 0, \"position\": {\"x\": 520.0, \"y\": 100.0, \"z\": 0, \"width\": 400.0, \"height\": 300.0, \"tabOrder\": 0}}], \"singleVisual\": {\"visualType\": \"areachart\", \"projections\": {\"Y\": [{\"queryRef\": \"Sum(Orders.tmn:Order Date)\"}], \"Category\": [{\"queryRef\": \"Orders.Category\", \"active\": true}]}, \"prototypeQuery\": {}, \"drillFilterOtherVisuals\": true, \"hasDefaultSort\": true, \"objects\": {}, \"vcObjects\": {\"title\": [{\"properties\": {\"text\": {\"expr\": {\"Literal\": {\"Value\": \"'Sales by Product'\"}}}}}]}}}",
"filters": "[]",
"height": 300.0,
"width": 400.0,
"x": 520.0,
"y": 100.0,
"z": 0.0
},
{
"config": "{\"name\": \"235549be-befa-45fa-a6e0-02eedee36c27\", \"layouts\": [{\"id\": 0, \"position\": {\"x\": 940.0, \"y\": 100.0, \"z\": 0, \"width\": 400.0, \"height\": 300.0, \"tabOrder\": 0}}], \"singleVisual\": {\"visualType\": \"areachart\", \"projections\": {\"Y\": [{\"queryRef\": \"Sum(Orders.tmn:Order Date)\"}], \"Category\": [{\"queryRef\": \"Orders.Segment\", \"active\": true}]}, \"prototypeQuery\": {}, \"drillFilterOtherVisuals\": true, \"hasDefaultSort\": true, \"objects\": {}, \"vcObjects\": {\"title\": [{\"properties\": {\"text\": {\"expr\": {\"Literal\": {\"Value\": \"'Sales by Segment'\"}}}}}]}}}",
"filters": "[]",
"height": 300.0,
"width": 400.0,
"x": 940.0,
"y": 100.0,
"z": 0.0
}
],
"width": 1280.0
}
]
}
Join the Fabric FabCon Global Hackathon—running virtually through Nov 3. Open to all skill levels. $10,000 in prizes!
Check out the October 2025 Power BI update to learn about new features.