Skip to content

Commit

Permalink
Move all dbconnect run options to Databricks Run Icon (#1066)
Browse files Browse the repository at this point in the history
## Changes
* The run icon has new options to run the current python file and debug
it.
* The run and debug options run with a bootstrap script
(`dbconnect_bootstrap.py`) that
* Sets up the env vars by recursing up the file tree and searching for
.databricks.env
  * Sets up the Databricks globals
 
## Tests
<!-- How is this tested? -->
  • Loading branch information
kartikgupta-db authored Feb 28, 2024
1 parent bb63006 commit c28620a
Show file tree
Hide file tree
Showing 16 changed files with 600 additions and 391 deletions.
3 changes: 2 additions & 1 deletion packages/databricks-vscode/.vscodeignore
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,5 @@ scripts/**
coverage/
.nyc_output/
logs/
extension/
extension/
**/*.vsix
62 changes: 45 additions & 17 deletions packages/databricks-vscode/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -135,17 +135,14 @@
"command": "databricks.run.runEditorContentsAsWorkflow",
"title": "Run File as Workflow",
"category": "Databricks",
"enablement": "!inDebugMode",
"icon": {
"dark": "resources/dark/logo.svg",
"light": "resources/light/logo.svg"
}
"enablement": "!inDebugMode && databricks.context.activated && databricks.context.loggedIn",
"icon": "$(play)"
},
{
"command": "databricks.run.runEditorContents",
"title": "Upload and Run File",
"category": "Databricks",
"enablement": "!inDebugMode",
"enablement": "!inDebugMode && databricks.context.activated && databricks.context.loggedIn",
"icon": "$(play)"
},
{
Expand Down Expand Up @@ -255,6 +252,18 @@
"enablement": "databricks.context.activated",
"category": "Databricks"
},
{
"command": "databricks.run.dbconnect.debug",
"title": "Databricks Connect: Debug current file",
"enablement": "databricks.context.activated && databricks.context.loggedIn",
"category": "Databricks"
},
{
"command": "databricks.run.dbconnect.run",
"title": "Databricks Connect: Run current file",
"enablement": "databricks.context.activated && databricks.context.loggedIn",
"category": "Databricks"
},
{
"command": "databricks.bundle.showLogs",
"title": "Show bundle logs",
Expand Down Expand Up @@ -444,6 +453,16 @@
"command": "databricks.run.runEditorContentsAsWorkflow",
"when": "resourceLangId == python || resourceLangId == scala || resourceLangId == r || resourceLangId == sql || resourceExtname == .ipynb",
"group": "1_remote@2"
},
{
"command": "databricks.run.dbconnect.debug",
"when": "resourceLangId == python",
"group": "2_local@1"
},
{
"command": "databricks.run.dbconnect.run",
"when": "resourceLangId == python",
"group": "2_local@2"
}
],
"commandPalette": [
Expand Down Expand Up @@ -474,11 +493,7 @@
],
"explorer/context": [
{
"command": "databricks.run.runEditorContents",
"when": "resourceLangId == python"
},
{
"command": "databricks.run.runEditorContentsAsWorkflow",
"submenu": "databricks.run",
"when": "resourceLangId == python"
}
]
Expand All @@ -493,7 +508,7 @@
"id": "databricks.run",
"label": "Run on Databricks",
"icon": {
"dark": "resources/dark/logo.svg",
"dark": "resources/dark/databricks-run-icon.svg",
"light": "resources/light/logo.svg"
}
}
Expand Down Expand Up @@ -637,6 +652,19 @@
}
}
]
},
{
"type": "python",
"configurationAttributes": {
"launch": {
"properties": {
"databricks": {
"type": "boolean",
"description": "Setup databricks environment variables and globals."
}
}
}
}
}
],
"configuration": [
Expand Down Expand Up @@ -808,11 +836,11 @@
"@typescript-eslint/parser": "^6.14.0",
"@typescript-eslint/utils": "^6.14.0",
"@vscode/test-electron": "^2.3.8",
"@wdio/cli": "^8.26.1",
"@wdio/local-runner": "^8.26.1",
"@wdio/mocha-framework": "^8.24.12",
"@wdio/spec-reporter": "^8.24.12",
"@wdio/types": "^8.24.12",
"@wdio/cli": "^8.32.3",
"@wdio/local-runner": "^8.32.3",
"@wdio/mocha-framework": "^8.32.3",
"@wdio/spec-reporter": "^8.32.2",
"@wdio/types": "^8.32.2",
"chai": "^4.3.10",
"esbuild": "^0.19.9",
"eslint": "^8.55.0",
Expand Down
18 changes: 18 additions & 0 deletions packages/databricks-vscode/resources/dark/databricks-run-icon.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
18 changes: 18 additions & 0 deletions packages/databricks-vscode/resources/light/databricks-run-icon.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
61 changes: 61 additions & 0 deletions packages/databricks-vscode/resources/python/dbconnect-bootstrap.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
import os

# Load environment variables from .databricks/.databricks.env
# We only look for the folder in the current working directory,
# since that is where commands launched by the extension run from.
def load_env_file_from_cwd(path: str):
    """Load KEY=VALUE pairs from <path>/.databricks/.databricks.env into os.environ.

    Silently no-ops when the directory or the env file does not exist, so the
    bootstrap still works for projects the extension never configured.
    """
    if not os.path.isdir(path):
        return

    env_file_path = os.path.join(path, ".databricks", ".databricks.env")
    # Check the env file itself, not just its parent directory: a .databricks
    # folder without the env file would otherwise crash on open().
    if not os.path.isfile(env_file_path):
        return

    with open(env_file_path, "r") as f:
        for line in f:
            line = line.strip()
            # Skip blank lines, comments, and malformed lines: unpacking a
            # line without '=' would raise ValueError.
            if not line or line.startswith("#") or "=" not in line:
                continue
            key, value = line.split("=", 1)
            os.environ[key] = value
    return


load_env_file_from_cwd(os.getcwd())

# Honor the log level the extension exported; default to WARN otherwise.
log_level = os.environ.get("DATABRICKS_VSCODE_LOG_LEVEL")
log_level = log_level if log_level is not None else "WARN"

import logging
logging.basicConfig(level=log_level)

# Globals injected into the user's script when it is executed via run_path
# below, mirroring the convenience globals of a Databricks notebook.
db_globals = {}

from databricks.sdk.runtime import dbutils
db_globals['dbutils'] = dbutils

# "table", "sc", "sqlContext" are missing
try:
    # Build a Databricks Connect session and expose spark/sql/udf globals.
    # NOTE(review): `functions as udf` binds the pyspark functions MODULE to
    # the name `udf` — presumably intentional to mirror notebook globals;
    # confirm against the notebook runtime.
    from pyspark.sql import functions as udf, SparkSession
    from databricks.connect import DatabricksSession
    spark: SparkSession = DatabricksSession.builder.getOrCreate()
    sql = spark.sql
    db_globals['spark'] = spark
    db_globals['sql'] = sql
    db_globals['udf'] = udf
except Exception as e:
    # Best-effort: the user's script can still run without a Spark session,
    # so failures here are only logged at debug level, never raised.
    logging.debug(f"Failed to create DatabricksSession: {e}")

# Touch dbutils.widgets only at call time: importing the widgets
# implementation eagerly would bypass the prompt steering users toward the
# variant with ipywidget support.
def getArgument(*args, **kwargs):
    """Lazy proxy for dbutils.widgets.getArgument."""
    return dbutils.widgets.getArgument(*args, **kwargs)


db_globals['getArgument'] = getArgument

from runpy import run_path
import sys

# argv[0] is this bootstrap script and argv[1] is the user's script. Shift
# argv so the user's script sees itself as argv[0], as in a direct invocation.
script = sys.argv[1]
sys.argv = sys.argv[1:]
# Python f-strings interpolate with {}, not JS-style ${}; the latter would
# log a spurious literal '$' before the value.
logging.debug(f"Running {script}")
logging.debug(f"args: {sys.argv[1:]}")

# Execute the user's script in-process with the Databricks globals injected,
# behaving like `python script.py` (__name__ == "__main__").
run_path(script, init_globals=db_globals, run_name="__main__")
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import {BaseModelWithStateCache} from "../../configuration/models/BaseModelWithS
import {UrlUtils} from "../../utils";
import {Mutex} from "../../locking";
import * as lodash from "lodash";
import {withOnErrorHandler} from "../../utils/onErrorDecorator";

export type BundlePreValidateState = {
host?: URL;
Expand All @@ -26,9 +27,14 @@ export class BundlePreValidateModel extends BaseModelWithStateCache<BundlePreVal
) {
super();
this.disposables.push(
this.bunldeFileWatcher.onDidChange(async () => {
await this.stateCache.refresh();
})
this.bunldeFileWatcher.onDidChange(
withOnErrorHandler(
async () => {
await this.stateCache.refresh();
},
{popup: false, log: true, throw: false}
)
)
);
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
import {WorkspaceClient, ApiClient, logging} from "@databricks/databricks-sdk";
import {
WorkspaceClient,
ApiClient,
logging,
AuthType as SdkAuthType,
} from "@databricks/databricks-sdk";
import {Cluster} from "../sdk-extensions";
import {EventEmitter, Uri, window, Disposable, commands} from "vscode";
import {CliWrapper, ProcessError} from "../cli/CliWrapper";
Expand Down Expand Up @@ -187,6 +192,9 @@ export class ConnectionManager implements Disposable {
return this._workspaceClient?.apiClient;
}

/**
 * Auth type reported by the SDK config of the current API client, or
 * `undefined` when no workspace client exists yet (e.g. before login).
 */
get authType(): SdkAuthType | undefined {
    return this.apiClient?.config.authType;
}
private async loginWithSavedAuth() {
await this.disconnect();
const authProvider = await this.resolveAuth();
Expand Down
52 changes: 37 additions & 15 deletions packages/databricks-vscode/src/extension.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ import {BundleRunStatusManager} from "./bundle/run/BundleRunStatusManager";
import {BundleProjectManager} from "./bundle/BundleProjectManager";
import {TreeItemDecorationProvider} from "./ui/bundle-resource-explorer/DecorationProvider";
import {BundleInitWizard} from "./bundle/BundleInitWizard";
import {DatabricksDebugConfigurationProvider} from "./run/DatabricksDebugConfigurationProvider";

const customWhenContext = new CustomWhenContext();

Expand Down Expand Up @@ -319,33 +320,31 @@ export async function activate(
featureManager
);

const databricksEnvFileManager = new DatabricksEnvFileManager(
workspaceUri,
featureManager,
connectionManager,
configModel
);

const notebookInitScriptManager = new NotebookInitScriptManager(
workspaceUri,
context,
connectionManager,
featureManager,
pythonExtensionWrapper
pythonExtensionWrapper,
databricksEnvFileManager,
configModel
);

context.subscriptions.push(
dbConnectStatusBarButton,
notebookInitScriptManager,
telemetry.registerCommand(
"databricks.notebookInitScript.verify",
notebookInitScriptManager.verifyInitScriptCommand,
notebookInitScriptManager
)
);

const databricksEnvFileManager = new DatabricksEnvFileManager(
workspaceUri,
featureManager,
dbConnectStatusBarButton,
connectionManager,
context,
notebookInitScriptManager
);

context.subscriptions.push(
),
workspace.onDidOpenNotebookDocument(() =>
featureManager.isEnabled("notebooks.dbconnect")
),
Expand Down Expand Up @@ -572,7 +571,16 @@ export async function activate(
);

// Run/debug group
const runCommands = new RunCommands(connectionManager);
const databricksDebugConfigurationProvider =
new DatabricksDebugConfigurationProvider(context);

const runCommands = new RunCommands(
connectionManager,
workspace.workspaceFolders[0],
pythonExtensionWrapper,
featureManager,
context
);
const debugFactory = new DatabricksDebugAdapterFactory(
connectionManager,
configModel,
Expand All @@ -587,6 +595,20 @@ export async function activate(
);

context.subscriptions.push(
debug.registerDebugConfigurationProvider(
"python",
databricksDebugConfigurationProvider
),
telemetry.registerCommand(
"databricks.run.dbconnect.debug",
runCommands.debugFileUsingDbconnect,
runCommands
),
telemetry.registerCommand(
"databricks.run.dbconnect.run",
runCommands.runFileUsingDbconnect,
runCommands
),
telemetry.registerCommand(
"databricks.run.runEditorContents",
runCommands.runEditorContentsCommand(),
Expand Down
Loading

0 comments on commit c28620a

Please sign in to comment.