Skip to content

Commit

Permalink
Export SPARK_REMOTE env var for profile authentication type (#825)
Browse files Browse the repository at this point in the history
## Changes
Export the `SPARK_REMOTE` environment variable (a Spark Connect `sc://` URI carrying a PAT and the cluster id) only when the authentication type is `profile`, so the plain `SparkSession` builder in OSS Spark Connect (and Databricks Connect) can authenticate. Fetching the token is asynchronous, so `getDbConnectEnvVars` and its callers now `await` it.

## Tests
<!-- How is this tested? -->
  • Loading branch information
kartikgupta-db authored Aug 3, 2023
1 parent e0d4307 commit 736a8bf
Show file tree
Hide file tree
Showing 3 changed files with 36 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -179,10 +179,10 @@ export class DatabricksEnvFileManager implements Disposable {

const data = Object.entries({
...(this.getDatabrickseEnvVars() || {}),
...(EnvVarGenerators.getDbConnectEnvVars(
...((await EnvVarGenerators.getDbConnectEnvVars(
this.connectionManager,
this.workspacePath
) || {}),
)) || {}),
...this.getIdeEnvVars(),
...((await this.getUserEnvVars()) || {}),
...(await this.getNotebookEnvVars()),
Expand Down Expand Up @@ -219,10 +219,10 @@ export class DatabricksEnvFileManager implements Disposable {
Object.entries({
...(this.getDatabrickseEnvVars() || {}),
...this.getIdeEnvVars(),
...(EnvVarGenerators.getDbConnectEnvVars(
...((await EnvVarGenerators.getDbConnectEnvVars(
this.connectionManager,
this.workspacePath
) || {}),
)) || {}),
...(await this.getNotebookEnvVars()),
}).forEach(([key, value]) => {
if (value === undefined) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -226,10 +226,10 @@ export class NotebookInitScriptManager implements Disposable {
...(EnvVarGenerators.getCommonDatabricksEnvVars(
this.connectionManager
) ?? {}),
...(EnvVarGenerators.getDbConnectEnvVars(
...((await EnvVarGenerators.getDbConnectEnvVars(
this.connectionManager,
this.workspacePath
) ?? {}),
)) ?? {}),
...(EnvVarGenerators.getIdeEnvVars() ?? {}),
...((await this.getUserEnvVars()) ?? {}),
};
Expand Down
32 changes: 30 additions & 2 deletions packages/databricks-vscode/src/utils/envVarGenerators.ts
Original file line number Diff line number Diff line change
Expand Up @@ -90,16 +90,44 @@ export function getCommonDatabricksEnvVars(
/* eslint-enable @typescript-eslint/naming-convention */
}

export function getDbConnectEnvVars(
/**
 * Extract a personal access token by asking the workspace client's auth
 * config to populate auth headers, then stripping the "Bearer " scheme
 * from the Authorization header. Resolves to undefined when no client or
 * header is available.
 */
async function getPatToken(connectionManager: ConnectionManager) {
    const authHeaders: Record<string, string> = {};
    await connectionManager.workspaceClient?.apiClient.config.authenticate(
        authHeaders
    );
    // "Bearer <token>" -> "<token>"; undefined when the header is absent.
    return authHeaders["Authorization"]?.split(" ")[1];
}

/**
 * Build the SPARK_REMOTE env var pointing Spark Connect at the attached
 * cluster.
 *
 * We export spark remote only for the "profile" auth type. This is to
 * support the SparkSession builder in oss spark connect (and also
 * dbconnect). For all other auth types we don't export spark remote and
 * expect users to use DatabricksSession for full functionality.
 *
 * Resolves to undefined whenever the variable should not be exported.
 */
async function getSparkRemoteEnvVar(connectionManager: ConnectionManager) {
    const workspace = connectionManager.databricksWorkspace;
    const cluster = connectionManager.cluster;
    const host = workspace?.host.authority;

    if (!host || !cluster || workspace?.authProvider.authType !== "profile") {
        return undefined;
    }

    const pat = await getPatToken(connectionManager);
    if (!pat) {
        return undefined;
    }

    return {
        // eslint-disable-next-line @typescript-eslint/naming-convention
        SPARK_REMOTE: `sc://${host}:443/;token=${pat};use_ssl=true;x-databricks-cluster-id=${cluster.id}`,
    };
}

/**
 * Collect the environment variables needed by DbConnect / Spark Connect:
 * the Spark Connect user agent, the project root path, and — for the
 * "profile" auth type only — SPARK_REMOTE (see getSparkRemoteEnvVar).
 */
export async function getDbConnectEnvVars(
    connectionManager: ConnectionManager,
    workspacePath: Uri
) {
    const userAgent = getUserAgent(connectionManager);
    // Empty object when SPARK_REMOTE should not be exported for this auth type.
    const sparkRemote = (await getSparkRemoteEnvVar(connectionManager)) || {};

    /* eslint-disable @typescript-eslint/naming-convention */
    return {
        SPARK_CONNECT_USER_AGENT: userAgent,
        DATABRICKS_PROJECT_ROOT: workspacePath.fsPath,
        ...sparkRemote,
    };
    /* eslint-enable @typescript-eslint/naming-convention */
}
Expand Down

0 comments on commit 736a8bf

Please sign in to comment.