Back to Blog
3 min read

Microsoft Fabric April 2024 Updates: What’s New

Microsoft Fabric continues to evolve as a unified analytics platform. April 2024 brings significant updates across data engineering, science, and business intelligence capabilities.

Key Updates Overview

# Summary of the April 2024 Fabric release, grouped by workload area.
APRIL_2024_UPDATES = dict(
    copilot_improvements=[
        "Enhanced natural language to SQL",
        "Better context awareness",
        "Multi-turn conversations",
    ],
    data_engineering=[
        "Improved Spark performance",
        "New connectors",
        "Better Delta Lake support",
    ],
    data_science=[
        "MLflow integration enhancements",
        "AutoML improvements",
        "New experiment tracking",
    ],
    power_bi=[
        "Copilot in reports",
        "Smart narratives",
        "Improved Q&A",
    ],
)

Accessing Fabric Programmatically

from azure.identity import DefaultAzureCredential
import requests

class FabricClient:
    """Thin client for the Microsoft Fabric REST APIs, scoped to one workspace.

    Authenticates with ``DefaultAzureCredential`` (environment variables,
    managed identity, Azure CLI login, ...) and acquires a fresh token per
    request so a long-lived client never sends an expired token.
    """

    # Fail fast instead of hanging forever when the service is unreachable.
    REQUEST_TIMEOUT = 30  # seconds

    def __init__(self, workspace_id: str):
        self.workspace_id = workspace_id
        self.credential = DefaultAzureCredential()
        self.base_url = "https://api.fabric.microsoft.com/v1"

    def _get_token(self) -> str:
        """Acquire an access token for the Fabric API scope."""
        token = self.credential.get_token(
            "https://api.fabric.microsoft.com/.default"
        )
        return token.token

    def _headers(self) -> dict:
        """Build Authorization + Content-Type headers for a JSON API call."""
        return {
            "Authorization": f"Bearer {self._get_token()}",
            "Content-Type": "application/json"
        }

    def list_items(self, item_type: str = None) -> list:
        """List items in the workspace.

        Args:
            item_type: Optional Fabric item type (e.g. ``"Lakehouse"``)
                used to filter results server-side.

        Returns:
            The list of item dicts from the API (empty if none).

        Raises:
            requests.HTTPError: If the API responds with a 4xx/5xx status.
        """
        url = f"{self.base_url}/workspaces/{self.workspace_id}/items"
        if item_type:
            url += f"?type={item_type}"

        response = requests.get(
            url, headers=self._headers(), timeout=self.REQUEST_TIMEOUT
        )
        # Surface HTTP errors instead of parsing an error payload as success.
        response.raise_for_status()
        return response.json().get("value", [])

    def get_item(self, item_id: str) -> dict:
        """Fetch details for a single item by ID.

        Raises:
            requests.HTTPError: If the API responds with a 4xx/5xx status.
        """
        url = f"{self.base_url}/workspaces/{self.workspace_id}/items/{item_id}"
        response = requests.get(
            url, headers=self._headers(), timeout=self.REQUEST_TIMEOUT
        )
        response.raise_for_status()
        return response.json()

# Usage: enumerate every Lakehouse item in the workspace.
fabric_client = FabricClient("your-workspace-id")
for lakehouse in fabric_client.list_items("Lakehouse"):
    print(f"Name: {lakehouse['displayName']}, Type: {lakehouse['type']}")

New Lakehouse Features

from pyspark.sql import SparkSession

# Enhanced Delta Lake support in Fabric
# Inside a Fabric notebook this returns the session the runtime already
# started; elsewhere it builds a default local session.
spark = SparkSession.builder.getOrCreate()

# New: Liquid clustering for better performance
# CLUSTER BY replaces static partitioning: Delta co-locates rows by the
# listed columns without creating a rigid directory layout.
spark.sql("""
    CREATE TABLE sales_data
    USING DELTA
    CLUSTER BY (region, product_category)
    AS SELECT * FROM staging_sales
""")

# New: Improved OPTIMIZE with predictive optimization
# The WHERE clause limits compaction to recent files (last 7 days here),
# avoiding a full-table rewrite.
spark.sql("""
    OPTIMIZE sales_data
    WHERE date >= current_date() - INTERVAL 7 DAYS
""")

# New: Enhanced Z-ordering
# NOTE(review): ZORDER BY and CLUSTER BY are normally alternatives for the
# same table — shown together here for illustration; confirm before reuse.
spark.sql("""
    OPTIMIZE sales_data
    ZORDER BY (customer_id, order_date)
""")

Data Pipeline Improvements

# New pipeline activities and improved orchestration

# Copy activity: land blobs into a Delta table in the Lakehouse, with the
# new built-in data quality checks applied during the copy.
_ingest_activity = {
    "name": "ingest_data",
    "type": "Copy",
    "inputs": [{
        "referenceName": "source_dataset",
        "type": "DatasetReference"
    }],
    "outputs": [{
        "referenceName": "lakehouse_table",
        "type": "DatasetReference"
    }],
    "typeProperties": {
        "source": {
            "type": "AzureBlobSource",
            "recursive": True
        },
        "sink": {
            "type": "LakehouseSink",
            "tableType": "Delta"
        },
        # New: Built-in data quality checks
        "dataQualityRules": {
            "columnCompleteness": {
                "columns": ["id", "timestamp"],
                "threshold": 0.99
            }
        }
    }
}

# Notebook activity: runs only after the copy succeeds, and receives the
# trigger time as a parameter.
_transform_activity = {
    "name": "transform_data",
    "type": "Notebook",
    "dependsOn": [
        {
            "activity": "ingest_data",
            "dependencyConditions": ["Succeeded"]
        }
    ],
    "typeProperties": {
        "notebookPath": "/notebooks/transform_sales",
        "parameters": {
            "date": {
                "value": "@pipeline().TriggerTime",
                "type": "Expression"
            }
        }
    }
}

pipeline_definition = {
    "name": "daily_etl_pipeline",
    "activities": [_ingest_activity, _transform_activity]
}

Semantic Model Updates

# Working with semantic models using Semantic Link (sempy)
# NOTE(review): sempy is only available inside Fabric notebook runtimes —
# this snippet will not run locally without that environment.
import sempy.fabric as fabric

# List semantic models in the workspace
datasets = fabric.list_datasets()
for ds in datasets:
    # presumably each row exposes at least 'Name' and 'Id' columns; verify
    # against the sempy version in use
    print(f"Dataset: {ds['Name']}, ID: {ds['Id']}")

# Read data from a semantic model using DAX
# evaluate_dax runs the query against the model and returns the result as
# a tabular dataframe.
df = fabric.evaluate_dax(
    dataset="SalesModel",
    dax_string="""
    EVALUATE
    SUMMARIZECOLUMNS(
        Sales[ProductId],
        "Total Sales", SUM(Sales[Amount]),
        "YTD Sales", TOTALYTD(SUM(Sales[Amount]), 'Calendar'[Date])
    )
    """
)

# For creating semantic models programmatically, use the Fabric REST API
from azure.identity import DefaultAzureCredential
import requests

# DefaultAzureCredential resolves from environment variables, managed
# identity, or an Azure CLI login.
credential = DefaultAzureCredential()
token = credential.get_token("https://api.fabric.microsoft.com/.default").token

headers = {
    "Authorization": f"Bearer {token}",
    "Content-Type": "application/json"
}

workspace_id = "your-workspace-id"
base_url = f"https://api.fabric.microsoft.com/v1/workspaces/{workspace_id}"

# Create a semantic model item
model_payload = {
    "displayName": "SalesModel",
    "type": "SemanticModel",
    "description": "Sales semantic model with measures"
}

# timeout= prevents the call from hanging forever; raise_for_status()
# surfaces auth/permission errors (401/403) instead of printing an error
# payload as if creation had succeeded.
response = requests.post(
    f"{base_url}/items", headers=headers, json=model_payload, timeout=30
)
response.raise_for_status()
print(f"Created semantic model: {response.json()}")

What’s Coming Next

  • Enhanced Copilot capabilities across all workloads
  • Better integration between Data Engineering and Data Science
  • New real-time analytics features
  • Improved governance and compliance tools

Conclusion

Microsoft Fabric’s April 2024 updates strengthen its position as a unified analytics platform. The improvements in Copilot, data engineering, and semantic models make it easier to build end-to-end analytics solutions.

Michael John Peña

Michael John Peña

Senior Data Engineer based in Sydney. Writing about data, cloud, and technology.