Back to Blog
6 min read

Edge Computing with Azure Stack Edge

Azure Stack Edge brings Azure compute, storage, and AI capabilities to edge locations. It enables scenarios where data needs to be processed locally due to latency, bandwidth, or regulatory requirements, while maintaining cloud connectivity for management and analytics.

Azure Stack Edge Devices

| Model      | GPU / Hardware  | Use Case                       |
|------------|-----------------|--------------------------------|
| Pro (GPU)  | NVIDIA T4       | AI inference, video analytics  |
| Pro (FPGA) | Intel Arria 10  | Hardware acceleration          |
| Pro R      | Rugged          | Harsh environments             |
| Mini R     | Portable        | Mobile edge                    |

Architecture Overview

+-------------------+      +------------------+      +---------------+
|   Local Apps      | ---> |  Azure Stack     | ---> |   Azure       |
|   IoT Devices     |      |  Edge Device     |      |   Cloud       |
|   Sensors         |      +------------------+      +---------------+
+-------------------+      |  - Kubernetes    |      |  - IoT Hub    |
                           |  - VM Workloads  |      |  - Storage    |
                           |  - AI Inference  |      |  - ML Service |
                           |  - Local Storage |      |  - Analytics  |
                           +------------------+      +---------------+

Managing Azure Stack Edge

from azure.mgmt.databoxedge import DataBoxEdgeManagementClient
from azure.mgmt.databoxedge.models import (
    DataBoxEdgeDevice,
    Sku,
    Share,
    StorageAccountCredential,
    User,
    Role,
    IoTRole,
    AsymmetricEncryptedSecret
)
from azure.identity import DefaultAzureCredential

class StackEdgeManager:
    """Thin wrapper around DataBoxEdgeManagementClient scoped to one
    subscription and resource group."""

    def __init__(self, subscription_id: str, resource_group: str):
        # DefaultAzureCredential resolves env vars / managed identity / CLI login.
        self.credential = DefaultAzureCredential()
        self.client = DataBoxEdgeManagementClient(self.credential, subscription_id)
        self.resource_group = resource_group

    def create_device(self, device_name: str, location: str,
                     sku: str = "Edge") -> DataBoxEdgeDevice:
        """Create an Azure Stack Edge device resource and block until done."""
        poller = self.client.devices.begin_create_or_update(
            device_name,
            self.resource_group,
            DataBoxEdgeDevice(location=location, sku=Sku(name=sku)),
        )
        return poller.result()

    def get_activation_key(self, device_name: str) -> str:
        """Return the one-time activation key used during physical device setup."""
        generated = self.client.devices.generate_activation_key(
            device_name,
            self.resource_group,
        )
        return generated.activation_key

    def create_storage_account_credential(self, device_name: str,
                                         name: str,
                                         storage_account_name: str,
                                         storage_account_key: str):
        """Register a storage-account credential on the device for data sync.

        The account key must be wrapped as an AsymmetricEncryptedSecret;
        the cert thumbprint here is a placeholder to fill in per device.
        """
        secret = AsymmetricEncryptedSecret(
            value=storage_account_key,
            encryption_cert_thumbprint="...",
            encryption_algorithm="RSA1_5",
        )

        poller = self.client.storage_account_credentials.begin_create_or_update(
            device_name,
            name,
            self.resource_group,
            StorageAccountCredential(
                alias=name,
                storage_account_name=storage_account_name,
                storage_account_type="GeneralPurposeStorage",
                account_key=secret,
                ssl_status="Enabled",
            ),
        )
        return poller.result()

    def create_share(self, device_name: str, share_name: str,
                    credential_name: str, container_name: str):
        """Create an SMB share whose contents tier to an Azure blob container."""
        container_info = {
            "storage_account_credential_id": f"/subscriptions/.../storageAccountCredentials/{credential_name}",
            "container_name": container_name,
            "data_format": "BlockBlob",
        }

        poller = self.client.shares.begin_create_or_update(
            device_name,
            share_name,
            self.resource_group,
            Share(
                share_status="Online",
                monitoring_status="Enabled",
                azure_container_info=container_info,
                access_protocol="SMB",
                data_policy="Cloud",  # data written to the share tiers to cloud
            ),
        )
        return poller.result()

# Usage — requires an authenticated Azure session (DefaultAzureCredential)
# and an existing resource group; calls below hit live ARM endpoints.
edge_manager = StackEdgeManager("subscription-id", "edge-rg")

# Create device resource (ARM resource only; the physical device ships separately)
device = edge_manager.create_device("factory-edge-01", "westus")

# Get activation key — entered in the device's local web UI to pair it with this resource
activation_key = edge_manager.get_activation_key("factory-edge-01")
print(f"Activate device with: {activation_key}")

Deploying Kubernetes Workloads

from azure.mgmt.databoxedge.models import (
    KubernetesRole,
    KubernetesRoleCompute,
    KubernetesRoleStorage,
    KubernetesRoleNetwork,
    MountPointMap,
    LoadBalancerConfig
)

def configure_kubernetes(edge_manager: StackEdgeManager, device_name: str):
    """Create the Kubernetes role on an Azure Stack Edge device.

    Builds the compute / storage / network resource profiles separately,
    then submits them as a single role named "kubernetes-role".
    """
    compute = KubernetesRoleCompute(
        vm_profile="DS3_v2"
    )
    storage = KubernetesRoleStorage(
        endpoints=[
            {"storage_account_name": "edgestorage", "container_name": "data"}
        ]
    )
    # MetalLB hands out service IPs from a fixed local range.
    network = KubernetesRoleNetwork(
        load_balancer_config=LoadBalancerConfig(
            type="MetalLB",
            ip_ranges=[
                {"start": "10.0.0.100", "end": "10.0.0.110"}
            ],
        )
    )

    role = KubernetesRole(
        host_platform="Linux",
        host_platform_type="KubernetesCluster",
        kubernetes_cluster_info={"version": "1.21"},
        kubernetes_role_resources={
            "compute": compute,
            "storage": storage,
            "network": network,
        },
    )

    poller = edge_manager.client.roles.begin_create_or_update(
        device_name,
        "kubernetes-role",
        edge_manager.resource_group,
        role,
    )
    return poller.result()

Deploying IoT Edge Modules

from azure.mgmt.databoxedge.models import (
    IoTRole,
    IoTDeviceInfo,
    MountPointMap
)

def configure_iot_edge(edge_manager: StackEdgeManager, device_name: str,
                       iot_hub_resource_id: str):
    """Create the IoT Edge role on an Azure Stack Edge device.

    Registers two IoT Hub identities (a device identity and an edge
    identity), and maps a local share into modules at /data.
    """
    # Placeholder connection string — supply the real device credential.
    device_identity = IoTDeviceInfo(
        device_id=f"{device_name}-iot",
        iot_host_hub=iot_hub_resource_id,
        authentication={
            "symmetric_key": {
                "connection_string": "HostName=..."
            }
        },
    )
    edge_identity = IoTDeviceInfo(
        device_id=f"{device_name}-edge",
        iot_host_hub=iot_hub_resource_id,
    )
    data_mount = MountPointMap(
        share_id=f"/subscriptions/.../shares/data-share",
        role_type="IoT",
        mount_point="/data",
    )

    role = IoTRole(
        host_platform="Linux",
        iot_device_details=device_identity,
        iot_edge_device_details=edge_identity,
        share_mappings=[data_mount],
    )

    poller = edge_manager.client.roles.begin_create_or_update(
        device_name,
        "iot-role",
        edge_manager.resource_group,
        role,
    )
    return poller.result()

Running AI Workloads

# Kubernetes deployment for AI inference
# Deployment: one replica requesting a single NVIDIA GPU (nvidia.com/gpu: 1),
# mounting the "edge-pvc" claim at /data. Service: type LoadBalancer exposing
# port 80 -> containerPort 8080 (served by the device's MetalLB IP pool).
ai_deployment_yaml = """
apiVersion: apps/v1
kind: Deployment
metadata:
  name: ai-inference
  namespace: default
spec:
  replicas: 1
  selector:
    matchLabels:
      app: ai-inference
  template:
    metadata:
      labels:
        app: ai-inference
    spec:
      containers:
      - name: inference
        image: myregistry.azurecr.io/inference:latest
        ports:
        - containerPort: 8080
        resources:
          limits:
            nvidia.com/gpu: 1
        volumeMounts:
        - name: data-volume
          mountPath: /data
      volumes:
      - name: data-volume
        persistentVolumeClaim:
          claimName: edge-pvc
---
apiVersion: v1
kind: Service
metadata:
  name: ai-inference-service
spec:
  type: LoadBalancer
  ports:
  - port: 80
    targetPort: 8080
  selector:
    app: ai-inference
"""

# PVC for edge storage
# 100 GiB ReadWriteMany claim on the device's built-in "ase-default"
# storage class; referenced by claimName "edge-pvc" in the AI deployment.
edge_pvc_yaml = """
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: edge-pvc
spec:
  accessModes:
    - ReadWriteMany
  storageClassName: ase-default
  resources:
    requests:
      storage: 100Gi
"""

Local Data Processing

# edge_processor.py - Runs on Azure Stack Edge
import os
import json
from datetime import datetime
from azure.storage.blob import BlobServiceClient
from azure.iot.device import IoTHubModuleClient

class EdgeDataProcessor:
    """Processes raw JSON files landed on the device's mounted share.

    For each file in /data/raw: compute a summary, write it to
    /data/processed, send the summary to the cloud via the IoT Edge module
    output "processed_output", then archive the original to /data/archive.
    """

    def __init__(self):
        # Local storage paths (mounted share on the device).
        self.local_path = "/data/raw"
        self.processed_path = "/data/processed"

        # IoT Edge module client — only works inside the IoT Edge runtime.
        self.module_client = IoTHubModuleClient.create_from_edge_environment()

        # Azure Storage client for cloud sync.
        # NOTE(review): from_connection_string raises if the env var is unset —
        # confirm STORAGE_CONNECTION_STRING is provisioned on the device.
        self.blob_client = BlobServiceClient.from_connection_string(
            os.environ.get("STORAGE_CONNECTION_STRING")
        )

    def process_local_data(self):
        """Process each raw file: summarize, persist, upload, archive."""
        for filename in os.listdir(self.local_path):
            filepath = os.path.join(self.local_path, filename)

            # Skip subdirectories and other non-regular entries.
            if not os.path.isfile(filepath):
                continue

            # Process the file.
            processed_data = self._process_file(filepath)

            # Save locally. (Fix: output name previously dropped the source
            # filename, so every file overwrote the same output.)
            output_path = os.path.join(self.processed_path, f"processed_{filename}")
            with open(output_path, 'w') as f:
                json.dump(processed_data, f)

            # Send summary to cloud via IoT Hub.
            self._send_to_cloud(processed_data)

            # Move raw file to archive. (Fix: destination previously lacked
            # the filename, which would fail or clobber a single path.)
            os.rename(filepath, f"/data/archive/{filename}")

    def _process_file(self, filepath: str) -> dict:
        """Load one JSON file (expected: a list of records) and summarize it."""
        with open(filepath, 'r') as f:
            raw_data = json.load(f)

        # Perform local processing (filtering, aggregation, etc.)
        processed = {
            "timestamp": datetime.utcnow().isoformat(),
            "source_file": filepath,
            "record_count": len(raw_data),
            "summary": self._compute_summary(raw_data)
        }

        return processed

    def _compute_summary(self, data: list) -> dict:
        """Return min/max/avg/count over each record's 'value' field.

        Records missing 'value' contribute 0. Empty input returns {}.
        """
        if not data:
            return {}

        values = [d.get('value', 0) for d in data]
        return {
            "min": min(values),
            "max": max(values),
            "avg": sum(values) / len(values),
            "count": len(values)
        }

    def _send_to_cloud(self, data: dict):
        """Send a processed summary upstream on module output 'processed_output'."""
        message = json.dumps(data)
        self.module_client.send_message_to_output(message, "processed_output")

# Run processor — intended to execute on the device inside an IoT Edge module;
# create_from_edge_environment() in __init__ fails outside that runtime.
processor = EdgeDataProcessor()
processor.process_local_data()

Monitoring Edge Devices

from azure.mgmt.databoxedge import DataBoxEdgeManagementClient

def get_device_metrics(edge_manager: StackEdgeManager, device_name: str) -> dict:
    """Collect status, active alerts, and storage-account info for a device.

    Returns a plain dict suitable for generate_edge_report().
    """
    client = edge_manager.client
    rg = edge_manager.resource_group

    # Device status / SKU / configuration state.
    device = client.devices.get(device_name, rg)

    # Active alerts raised on the device.
    alerts = client.alerts.list_by_data_box_edge_device(device_name, rg)

    # Edge-local storage accounts and their sync status.
    storage_accounts = client.storage_accounts.list_by_data_box_edge_device(
        device_name, rg
    )

    return {
        "device_status": device.status,
        "device_type": device.sku.name,
        "configuration_status": device.configuration_status,
        "alerts": [
            {"title": a.title, "severity": a.severity} for a in alerts
        ],
        "storage": [
            {"name": s.name, "status": s.storage_account_status}
            for s in storage_accounts
        ],
    }

def generate_edge_report(metrics: dict) -> str:
    """Render a human-readable status report from a get_device_metrics() dict.

    Alert and storage sections are included only when non-empty.
    """
    lines = [
        "=== Azure Stack Edge Status Report ===\n",
        f"Device Status: {metrics['device_status']}",
        f"Device Type: {metrics['device_type']}",
        f"Configuration: {metrics['configuration_status']}",
    ]

    if metrics['alerts']:
        lines.append("\nAlerts:")
        lines.extend(
            f"  [{alert['severity']}] {alert['title']}"
            for alert in metrics['alerts']
        )

    if metrics['storage']:
        lines.append("\nStorage Accounts:")
        lines.extend(
            f"  {acct['name']}: {acct['status']}"
            for acct in metrics['storage']
        )

    return "\n".join(lines)

Best Practices

  1. Plan Capacity: Size device based on workload requirements
  2. Enable Data Tiering: Configure automatic cloud sync
  3. Implement Redundancy: Use multiple devices for critical workloads
  4. Monitor Continuously: Set up alerts for device health
  5. Update Regularly: Keep device software up to date
  6. Secure Access: Use RBAC and network segmentation

Azure Stack Edge extends Azure to the edge, enabling scenarios from manufacturing analytics to retail intelligence with consistent Azure services and management.

Michael John Pena

Michael John Pena

Senior Data Engineer based in Sydney. Writing about data, cloud, and technology.