Azure Databricks · Capability

Azure Databricks Data Engineering

Manage Azure Databricks clusters, jobs, and workspace objects for data engineering workflows. Used by data engineers and platform administrators.

Run with Naftiko · Tags: Azure, Databricks, Data Engineering, Apache Spark

What You Can Do

GET
List clusters — List all clusters
/v1/clusters
POST
Create cluster — Create a new cluster
/v1/clusters
GET
List jobs — List all jobs
/v1/jobs
POST
Create job — Create a new job
/v1/jobs
GET
List workspace objects — List workspace objects
/v1/workspace

MCP Tools

create-cluster

Create a new Databricks cluster

list-clusters

List all Databricks clusters

read-only
get-cluster

Get details of a specific cluster

read-only
edit-cluster

Edit cluster configuration

idempotent
start-cluster

Start a terminated cluster

restart-cluster

Restart a running cluster

terminate-cluster

Terminate a running cluster

delete-cluster

Permanently delete a cluster

idempotent
list-spark-versions

List available Spark runtime versions

read-only
list-node-types

List available node types

read-only
create-job

Create a new Databricks job

list-jobs

List all Databricks jobs

read-only
get-job

Get job details

read-only
update-job

Partially update job settings

delete-job

Delete a job

idempotent
run-job-now

Trigger a one-time job run

list-job-runs

List job runs

read-only
get-job-run

Get details of a specific job run

read-only
cancel-job-run

Cancel an in-progress job run

get-job-run-output

Get the output of a completed job run

read-only
list-workspace-objects

List workspace objects in a directory

read-only
get-workspace-object-status

Get status of a workspace object

read-only
create-workspace-directory

Create a directory in the workspace

delete-workspace-object

Delete a workspace object

idempotent
import-workspace-object

Import a notebook or file into the workspace

export-workspace-object

Export a notebook or file from the workspace

read-only

APIs Used

databricks

Capability Spec

data-engineering.yaml
# Naftiko capability spec: Azure Databricks data engineering.
# Explicit document-start marker per YAML convention (yamllint document-start).
---
# Spec-format version consumed by the Naftiko runtime; quoted so the
# value stays a string rather than being re-typed by a YAML loader.
naftiko: "1.0.0-alpha1"

info:
  label: "Azure Databricks Data Engineering"
  description: "Manage Azure Databricks clusters, jobs, and workspace objects for data engineering workflows. Used by data engineers and platform administrators."
  tags:
    - Azure
    - Databricks
    - Data Engineering
    - Apache Spark
  # ISO dates quoted so loaders keep them as strings, not date objects.
  created: "2026-04-18"
  modified: "2026-04-18"

# Runtime bindings: credentials are injected from environment variables
# rather than stored in the spec.
binds:
  - namespace: env
    keys:
      # Access token used to authenticate against the Databricks workspace.
      DATABRICKS_TOKEN: DATABRICKS_TOKEN
      # Workspace endpoint — presumably the workspace URL; confirm expected format.
      DATABRICKS_HOST: DATABRICKS_HOST

capability:
  # Upstream API definition this capability delegates to; every `call:`
  # below references an operation imported from this spec.
  consumes:
    - import: databricks
      location: ./shared/databricks.yaml

  # Outward-facing surfaces. Each operation is a thin pass-through to the
  # imported `databricks` operation named in `call`.
  exposes:
    - type: rest
      port: 8080
      namespace: databricks-engineering-api
      description: "Unified REST API for Azure Databricks data engineering."
      resources:
        - path: /v1/clusters
          name: clusters
          description: "Manage Databricks clusters"
          operations:
            - method: GET
              name: list-clusters
              description: "List all clusters"
              call: "databricks.list-clusters"
              # Upstream response forwarded as-is; "$." presumably selects
              # the whole document — confirm against Naftiko mapping syntax.
              outputParameters:
                - type: object
                  mapping: "$."
            - method: POST
              name: create-cluster
              description: "Create a new cluster"
              call: "databricks.create-cluster"
              outputParameters:
                - type: object
                  mapping: "$."
        - path: /v1/jobs
          name: jobs
          description: "Manage Databricks jobs"
          operations:
            - method: GET
              name: list-jobs
              description: "List all jobs"
              call: "databricks.list-jobs"
              outputParameters:
                - type: object
                  mapping: "$."
            - method: POST
              name: create-job
              description: "Create a new job"
              call: "databricks.create-job"
              outputParameters:
                - type: object
                  mapping: "$."
        - path: /v1/workspace
          name: workspace
          description: "Manage workspace objects"
          operations:
            - method: GET
              name: list-workspace-objects
              description: "List workspace objects"
              call: "databricks.list-workspace-objects"
              # Maps the REST request's path input onto the upstream
              # operation's `path` parameter.
              with:
                path: "rest.path"
              outputParameters:
                - type: object
                  mapping: "$."

    - type: mcp
      port: 9090
      namespace: databricks-engineering-mcp
      transport: http
      description: "MCP server for AI-assisted Azure Databricks data engineering."
      # Each tool is a direct pass-through to the imported `databricks`
      # operation in `call`. `hints` advertise side-effect semantics
      # (readOnly / destructive / idempotent) to MCP clients; id-addressed
      # tools map their identifier input via `with`.
      tools:
        # --- Cluster lifecycle ---
        - name: create-cluster
          description: "Create a new Databricks cluster"
          hints:
            readOnly: false
          call: "databricks.create-cluster"
          outputParameters:
            - type: object
              mapping: "$."
        - name: list-clusters
          description: "List all Databricks clusters"
          hints:
            readOnly: true
          call: "databricks.list-clusters"
          outputParameters:
            - type: object
              mapping: "$."
        - name: get-cluster
          description: "Get details of a specific cluster"
          hints:
            readOnly: true
          call: "databricks.get-cluster"
          with:
            cluster_id: "tools.cluster_id"
          outputParameters:
            - type: object
              mapping: "$."
        - name: edit-cluster
          description: "Edit cluster configuration"
          hints:
            readOnly: false
            idempotent: true
          call: "databricks.edit-cluster"
          # No `with` mapping — presumably the full cluster spec is passed
          # through the tool arguments; confirm against the upstream spec.
          outputParameters:
            - type: object
              mapping: "$."
        - name: start-cluster
          description: "Start a terminated cluster"
          hints:
            readOnly: false
          call: "databricks.start-cluster"
          with:
            cluster_id: "tools.cluster_id"
          outputParameters:
            - type: object
              mapping: "$."
        - name: restart-cluster
          description: "Restart a running cluster"
          hints:
            readOnly: false
          call: "databricks.restart-cluster"
          with:
            cluster_id: "tools.cluster_id"
          outputParameters:
            - type: object
              mapping: "$."
        - name: terminate-cluster
          description: "Terminate a running cluster"
          hints:
            destructive: true
          call: "databricks.terminate-cluster"
          with:
            cluster_id: "tools.cluster_id"
          outputParameters:
            - type: object
              mapping: "$."
        - name: delete-cluster
          description: "Permanently delete a cluster"
          hints:
            destructive: true
            idempotent: true
          call: "databricks.delete-cluster"
          with:
            cluster_id: "tools.cluster_id"
          outputParameters:
            - type: object
              mapping: "$."
        # --- Environment discovery (read-only lookups) ---
        - name: list-spark-versions
          description: "List available Spark runtime versions"
          hints:
            readOnly: true
          call: "databricks.list-spark-versions"
          outputParameters:
            - type: object
              mapping: "$."
        - name: list-node-types
          description: "List available node types"
          hints:
            readOnly: true
          call: "databricks.list-node-types"
          outputParameters:
            - type: object
              mapping: "$."
        # --- Jobs ---
        - name: create-job
          description: "Create a new Databricks job"
          hints:
            readOnly: false
          call: "databricks.create-job"
          outputParameters:
            - type: object
              mapping: "$."
        - name: list-jobs
          description: "List all Databricks jobs"
          hints:
            readOnly: true
          call: "databricks.list-jobs"
          outputParameters:
            - type: object
              mapping: "$."
        - name: get-job
          description: "Get job details"
          hints:
            readOnly: true
          call: "databricks.get-job"
          with:
            job_id: "tools.job_id"
          outputParameters:
            - type: object
              mapping: "$."
        - name: update-job
          description: "Partially update job settings"
          hints:
            readOnly: false
          call: "databricks.update-job"
          # No `with` mapping — presumably the partial settings are passed
          # through the tool arguments; confirm against the upstream spec.
          outputParameters:
            - type: object
              mapping: "$."
        # Permanently delete a job, addressed by id like its cluster
        # counterpart (delete-cluster).
        - name: delete-job
          description: "Delete a job"
          hints:
            destructive: true
            idempotent: true
          call: "databricks.delete-job"
          # Consistency fix: sibling id-addressed destructive tools
          # (terminate-cluster, delete-cluster) map their id via `with`;
          # this mapping was previously missing here.
          with:
            job_id: "tools.job_id"
          outputParameters:
            - type: object
              mapping: "$."
        # --- Job runs ---
        - name: run-job-now
          description: "Trigger a one-time job run"
          hints:
            readOnly: false
          call: "databricks.run-job-now"
          # No `with` mapping — presumably job id plus run parameters are
          # passed through the tool arguments; confirm upstream.
          outputParameters:
            - type: object
              mapping: "$."
        - name: list-job-runs
          description: "List job runs"
          hints:
            readOnly: true
          call: "databricks.list-job-runs"
          outputParameters:
            - type: object
              mapping: "$."
        - name: get-job-run
          description: "Get details of a specific job run"
          hints:
            readOnly: true
          call: "databricks.get-job-run"
          with:
            run_id: "tools.run_id"
          outputParameters:
            - type: object
              mapping: "$."
        # Cancel an in-flight job run, addressed by run id like the other
        # run-scoped tools (get-job-run, get-job-run-output).
        - name: cancel-job-run
          # Fixed description: this tool targets a job *run*, not a job.
          description: "Cancel an in-progress job run"
          hints:
            destructive: true
          call: "databricks.cancel-job-run"
          # Consistency fix: sibling run-scoped tools map the run id via
          # `with`; this mapping was previously missing here.
          with:
            run_id: "tools.run_id"
          outputParameters:
            - type: object
              mapping: "$."
        - name: get-job-run-output
          description: "Get the output of a completed job run"
          hints:
            readOnly: true
          call: "databricks.get-job-run-output"
          with:
            run_id: "tools.run_id"
          outputParameters:
            - type: object
              mapping: "$."
        # --- Workspace objects (path-addressed) ---
        - name: list-workspace-objects
          description: "List workspace objects in a directory"
          hints:
            readOnly: true
          call: "databricks.list-workspace-objects"
          with:
            path: "tools.path"
          outputParameters:
            - type: object
              mapping: "$."
        - name: get-workspace-object-status
          description: "Get status of a workspace object"
          hints:
            readOnly: true
          call: "databricks.get-workspace-object-status"
          with:
            path: "tools.path"
          outputParameters:
            - type: object
              mapping: "$."
        - name: create-workspace-directory
          description: "Create a directory in the workspace"
          hints:
            readOnly: false
          call: "databricks.create-workspace-directory"
          outputParameters:
            - type: object
              mapping: "$."
        # Delete a workspace object by path, matching the path-addressed
        # sibling tools (list-workspace-objects, get-workspace-object-status).
        - name: delete-workspace-object
          description: "Delete a workspace object"
          hints:
            destructive: true
            idempotent: true
          call: "databricks.delete-workspace-object"
          # Consistency fix: sibling workspace tools map `path` via `with`;
          # this mapping was previously missing here.
          with:
            path: "tools.path"
          outputParameters:
            - type: object
              mapping: "$."
        - name: import-workspace-object
          description: "Import a notebook or file into the workspace"
          hints:
            readOnly: false
          call: "databricks.import-workspace-object"
          # No `with` mapping — presumably path and content are passed
          # through the tool arguments; confirm against the upstream spec.
          outputParameters:
            - type: object
              mapping: "$."
        - name: export-workspace-object
          description: "Export a notebook or file from the workspace"
          hints:
            readOnly: true
          call: "databricks.export-workspace-object"
          with:
            path: "tools.path"
          outputParameters:
            - type: object
              mapping: "$."