diff --git a/packages/databricks-vscode-types/index.ts b/packages/databricks-vscode-types/index.ts index 39a145070..ab129cdf3 100644 --- a/packages/databricks-vscode-types/index.ts +++ b/packages/databricks-vscode-types/index.ts @@ -1,4 +1,5 @@ -import {ApiClient, WorkspaceClient, Cluster} from "@databricks/databricks-sdk"; +import {ApiClient, WorkspaceClient} from "@databricks/databricks-sdk"; +import {Cluster} from "databricks/src/sdk-extensions"; import {Event} from "vscode"; export type ConnectionState = "CONNECTED" | "CONNECTING" | "DISCONNECTED"; diff --git a/packages/databricks-vscode-types/package.json b/packages/databricks-vscode-types/package.json index c183eb5d8..7a46d8fbe 100644 --- a/packages/databricks-vscode-types/package.json +++ b/packages/databricks-vscode-types/package.json @@ -29,6 +29,7 @@ "typescript": "~5.1.6" }, "dependencies": { - "@databricks/databricks-sdk": "../../vendor/databricks-sdk.tgz" + "@databricks/databricks-sdk": "../../vendor/databricks-sdk.tgz", + "databricks": "*" } } diff --git a/packages/databricks-vscode/package.json b/packages/databricks-vscode/package.json index 47328ee24..3a9abfe25 100644 --- a/packages/databricks-vscode/package.json +++ b/packages/databricks-vscode/package.json @@ -677,7 +677,8 @@ "test:unit": "yarn run build && node ./out/test/runTest.js", "test:integ:prepare": "yarn run package", "test:integ:run": "wdio run src/test/e2e/wdio.conf.ts", - "test:integ": "yarn run test:integ:prepare && yarn run test:integ:run", + "test:integ:sdk": "ts-mocha --type-check 'src/**/*.integ.ts'", + "test:integ": "yarn run test:integ:prepare && yarn run test:integ:run && yarn run test:integ:sdk", "test:cov": "nyc yarn run test:unit", "test": "yarn run test:lint && yarn run test:unit", "clean": "rm -rf node_modules out .vscode-test", @@ -727,6 +728,7 @@ "nyc": "^15.1.0", "prettier": "^3.0.0", "tmp-promise": "^3.0.3", + "ts-mocha": "^10.0.0", "ts-mockito": "^2.6.1", "ts-node": "^10.9.1", "typescript": "~5.1.6", diff --git a/packages/databricks-vscode/src/cli/CliWrapper.test.ts b/packages/databricks-vscode/src/cli/CliWrapper.test.ts index 215467544..cfa57db57 100644 --- a/packages/databricks-vscode/src/cli/CliWrapper.test.ts +++ b/packages/databricks-vscode/src/cli/CliWrapper.test.ts @@ -13,7 +13,7 @@ import {writeFile} from "node:fs/promises"; import {CliWrapper} from "./CliWrapper"; import path from "node:path"; import {Context} from "@databricks/databricks-sdk/dist/context"; -import {NamedLogger} from "@databricks/databricks-sdk/dist/logging"; +import {logging} from "@databricks/databricks-sdk"; const execFile = promisify(execFileCb); @@ -152,15 +152,18 @@ host = example.com const profiles = await cli.listProfiles( path, new Context({ - logger: NamedLogger.getOrCreate("cli-wrapper-test", { - factory: () => { - return { - log: (level, msg, meta) => { - logs.push({level, msg, meta}); - }, - }; - }, - }), + logger: logging.NamedLogger.getOrCreate( + "cli-wrapper-test", + { + factory: () => { + return { + log: (level, msg, meta) => { + logs.push({level, msg, meta}); + }, + }; + }, + } + ), }) ); assert.equal(profiles.length, 2); diff --git a/packages/databricks-vscode/src/cli/CliWrapper.ts b/packages/databricks-vscode/src/cli/CliWrapper.ts index 2758c7653..04f5ab7ae 100644 --- a/packages/databricks-vscode/src/cli/CliWrapper.ts +++ b/packages/databricks-vscode/src/cli/CliWrapper.ts @@ -3,11 +3,12 @@ import {ExtensionContext, window, commands} from "vscode"; import {SyncDestinationMapper} from "../sync/SyncDestination"; import {workspaceConfigs} from 
"../vscode-objs/WorkspaceConfigs"; import {promisify} from "node:util"; -import {withLogContext} from "@databricks/databricks-sdk/dist/logging"; +import {logging} from "@databricks/databricks-sdk"; import {Loggers} from "../logger"; import {Context, context} from "@databricks/databricks-sdk/dist/context"; import {Cloud} from "../utils/constants"; +const withLogContext = logging.withLogContext; const execFile = promisify(execFileCb); export interface Command { diff --git a/packages/databricks-vscode/src/cli/DatabricksCliSyncParser.ts b/packages/databricks-vscode/src/cli/DatabricksCliSyncParser.ts index ec7e6fbb6..32d51be2a 100644 --- a/packages/databricks-vscode/src/cli/DatabricksCliSyncParser.ts +++ b/packages/databricks-vscode/src/cli/DatabricksCliSyncParser.ts @@ -1,13 +1,13 @@ -import {LEVELS, NamedLogger} from "@databricks/databricks-sdk/dist/logging"; +import {logging} from "@databricks/databricks-sdk"; import {EventEmitter} from "vscode"; import {Loggers} from "../logger"; import {SyncState} from "../sync"; -const databricksLogLevelToSdk = new Map([ - ["DEBUG", LEVELS.debug], - ["INFO", LEVELS.info], - ["WARN", LEVELS.warn], - ["ERROR", LEVELS.error], +const databricksLogLevelToSdk = new Map([ + ["DEBUG", logging.LEVELS.debug], + ["INFO", logging.LEVELS.info], + ["WARN", logging.LEVELS.warn], + ["ERROR", logging.LEVELS.error], ]); type EventBase = { @@ -96,7 +96,7 @@ export class DatabricksCliSyncParser { public processStderr(data: string) { const logLines = data.split("\n"); - let currentLogLevel: LEVELS = LEVELS.info; + let currentLogLevel: logging.LEVELS = logging.LEVELS.info; for (let i = 0; i < logLines.length; i++) { const line = logLines[i].trim(); if (line.length === 0) { @@ -111,9 +111,13 @@ export class DatabricksCliSyncParser { databricksLogLevelToSdk.get(typeMatch[1]) ?? 
currentLogLevel; } - NamedLogger.getOrCreate(Loggers.CLI).log(currentLogLevel, line, { - outfile: "stderr", - }); + logging.NamedLogger.getOrCreate(Loggers.CLI).log( + currentLogLevel, + line, + { + outfile: "stderr", + } + ); this.writeEmitter.fire(line.trim() + "\r\n"); if (this.matchForErrors(line)) { return; @@ -150,14 +154,14 @@ export class DatabricksCliSyncParser { if (line.length === 0) { continue; } - NamedLogger.getOrCreate(Loggers.CLI).info(line, { + logging.NamedLogger.getOrCreate(Loggers.CLI).info(line, { outfile: "stdout", }); try { this.processLine(line); } catch (error: any) { - NamedLogger.getOrCreate(Loggers.Extension).error( + logging.NamedLogger.getOrCreate(Loggers.Extension).error( "Error parsing JSON line from databricks sync stdout: " + error ); diff --git a/packages/databricks-vscode/src/cli/SyncTasks.ts b/packages/databricks-vscode/src/cli/SyncTasks.ts index a383d716a..29dfdcd45 100644 --- a/packages/databricks-vscode/src/cli/SyncTasks.ts +++ b/packages/databricks-vscode/src/cli/SyncTasks.ts @@ -14,13 +14,15 @@ import {CliWrapper, Command, SyncType} from "./CliWrapper"; import {ChildProcess, spawn, SpawnOptions} from "node:child_process"; import {SyncState} from "../sync/CodeSynchronizer"; import {DatabricksCliSyncParser} from "./DatabricksCliSyncParser"; -import {withLogContext} from "@databricks/databricks-sdk/dist/logging"; +import {logging} from "@databricks/databricks-sdk"; import {Loggers} from "../logger"; import {Context, context} from "@databricks/databricks-sdk/dist/context"; import {PackageMetaData} from "../utils/packageJsonUtils"; import {RWLock} from "../locking"; import {EnvVarGenerators} from "../utils"; +const {withLogContext} = logging; + export const TASK_SYNC_TYPE = { syncFull: "sync-full", sync: "sync", diff --git a/packages/databricks-vscode/src/cluster/ClusterListDataProvider.test.ts b/packages/databricks-vscode/src/cluster/ClusterListDataProvider.test.ts index 3068b9ad3..a3fb15ddc 100644 --- a/packages/databricks-vscode/src/cluster/ClusterListDataProvider.test.ts +++ b/packages/databricks-vscode/src/cluster/ClusterListDataProvider.test.ts @@ -5,7 +5,8 @@ import {mock, when, instance} from "ts-mockito"; import {ClusterModel} from "./ClusterModel"; import {Disposable} from "vscode"; import {ClusterListDataProvider} from "./ClusterListDataProvider"; -import {ApiClient, Cluster, compute} from "@databricks/databricks-sdk"; +import {ApiClient, compute} from "@databricks/databricks-sdk"; +import {Cluster} from "../sdk-extensions/Cluster"; import {resolveProviderResult} from "../test/utils"; const mockListClustersResponse: compute.ListClustersResponse = { diff --git a/packages/databricks-vscode/src/cluster/ClusterListDataProvider.ts b/packages/databricks-vscode/src/cluster/ClusterListDataProvider.ts index 754986679..2173f4cac 100644 --- a/packages/databricks-vscode/src/cluster/ClusterListDataProvider.ts +++ b/packages/databricks-vscode/src/cluster/ClusterListDataProvider.ts @@ -9,7 +9,7 @@ import { TreeItem, TreeItemCollapsibleState, } from "vscode"; -import {Cluster} from "@databricks/databricks-sdk"; +import {Cluster} from "../sdk-extensions"; import {ClusterModel} from "./ClusterModel"; /** diff --git a/packages/databricks-vscode/src/cluster/ClusterLoader.ts b/packages/databricks-vscode/src/cluster/ClusterLoader.ts index 54ed3a604..5e2b5e43f 100644 --- a/packages/databricks-vscode/src/cluster/ClusterLoader.ts +++ b/packages/databricks-vscode/src/cluster/ClusterLoader.ts @@ -1,6 +1,6 @@ /* eslint-disable @typescript-eslint/naming-convention */ 
-import {Cluster, Time, TimeUnits} from "@databricks/databricks-sdk"; -import {NamedLogger} from "@databricks/databricks-sdk/dist/logging"; +import {Time, TimeUnits, logging} from "@databricks/databricks-sdk"; +import {Cluster} from "../sdk-extensions"; import {Disposable, Event, EventEmitter} from "vscode"; import {ConnectionManager} from "../configuration/ConnectionManager"; import {Loggers} from "../logger"; @@ -142,7 +142,9 @@ export class ClusterLoader implements Disposable { resolve(); }) .catch((e) => { - NamedLogger.getOrCreate(Loggers.Extension).error( + logging.NamedLogger.getOrCreate( + Loggers.Extension + ).error( `Error fetching permission for cluster ${c.name}`, e ); @@ -175,7 +177,7 @@ export class ClusterLoader implements Disposable { try { await this._load(); } catch (e) { - NamedLogger.getOrCreate(Loggers.Extension).error( + logging.NamedLogger.getOrCreate(Loggers.Extension).error( "Error loading clusters", e ); diff --git a/packages/databricks-vscode/src/cluster/ClusterManager.test.ts b/packages/databricks-vscode/src/cluster/ClusterManager.test.ts index 4e7a5fbed..dfe01718b 100644 --- a/packages/databricks-vscode/src/cluster/ClusterManager.test.ts +++ b/packages/databricks-vscode/src/cluster/ClusterManager.test.ts @@ -3,11 +3,11 @@ import {ClusterManager} from "./ClusterManager"; import { ApiClient, compute, - Cluster, Time, TimeUnits, retries, } from "@databricks/databricks-sdk"; +import {Cluster} from "../sdk-extensions"; import {ClusterFixture} from "@databricks/databricks-sdk/dist/test/fixtures"; import { anything, diff --git a/packages/databricks-vscode/src/cluster/ClusterManager.ts b/packages/databricks-vscode/src/cluster/ClusterManager.ts index df8018422..55030124e 100644 --- a/packages/databricks-vscode/src/cluster/ClusterManager.ts +++ b/packages/databricks-vscode/src/cluster/ClusterManager.ts @@ -1,4 +1,5 @@ -import {compute, Cluster, Time, TimeUnits} from "@databricks/databricks-sdk"; +import {compute, Time, TimeUnits} from "@databricks/databricks-sdk"; +import {Cluster} from "../sdk-extensions"; import {CancellationTokenSource, Disposable} from "vscode"; export class ClusterManager implements Disposable { diff --git a/packages/databricks-vscode/src/cluster/ClusterModel.test.ts b/packages/databricks-vscode/src/cluster/ClusterModel.test.ts index 0ff282209..a5da7440f 100644 --- a/packages/databricks-vscode/src/cluster/ClusterModel.test.ts +++ b/packages/databricks-vscode/src/cluster/ClusterModel.test.ts @@ -2,12 +2,8 @@ import assert from "assert"; import {mock, when, anything, anyString, instance, spy} from "ts-mockito"; -import { - ApiClient, - Cluster, - WorkspaceClient, - compute, -} from "@databricks/databricks-sdk"; +import {ApiClient, WorkspaceClient, compute} from "@databricks/databricks-sdk"; +import {Cluster} from "../sdk-extensions"; import {ClusterModel} from "./ClusterModel"; import {ConnectionManager} from "../configuration/ConnectionManager"; import {Disposable} from "vscode"; diff --git a/packages/databricks-vscode/src/cluster/ClusterModel.ts b/packages/databricks-vscode/src/cluster/ClusterModel.ts index 715a8a872..866e3f811 100644 --- a/packages/databricks-vscode/src/cluster/ClusterModel.ts +++ b/packages/databricks-vscode/src/cluster/ClusterModel.ts @@ -1,6 +1,7 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {Cluster, compute} from "@databricks/databricks-sdk"; +import {compute} from "@databricks/databricks-sdk"; +import {Cluster} from "../sdk-extensions"; import {Disposable, Event, EventEmitter} from "vscode"; import 
{ConnectionManager} from "../configuration/ConnectionManager"; import {ClusterLoader} from "./ClusterLoader"; diff --git a/packages/databricks-vscode/src/configuration/ConfigurationDataProvider.test.ts b/packages/databricks-vscode/src/configuration/ConfigurationDataProvider.test.ts index 235488968..177374faa 100644 --- a/packages/databricks-vscode/src/configuration/ConfigurationDataProvider.test.ts +++ b/packages/databricks-vscode/src/configuration/ConfigurationDataProvider.test.ts @@ -4,7 +4,8 @@ import assert from "assert"; import {mock, when, instance, anything} from "ts-mockito"; import {Disposable} from "vscode"; import {ConfigurationDataProvider} from "./ConfigurationDataProvider"; -import {ApiClient, Cluster} from "@databricks/databricks-sdk"; +import {ApiClient} from "@databricks/databricks-sdk"; +import {Cluster} from "../sdk-extensions"; import {ConnectionManager} from "./ConnectionManager"; import {resolveProviderResult} from "../test/utils"; import {SyncDestinationMapper} from "../sync/SyncDestination"; diff --git a/packages/databricks-vscode/src/configuration/ConnectionCommands.test.ts b/packages/databricks-vscode/src/configuration/ConnectionCommands.test.ts index d0f769e50..4206d3f2c 100644 --- a/packages/databricks-vscode/src/configuration/ConnectionCommands.test.ts +++ b/packages/databricks-vscode/src/configuration/ConnectionCommands.test.ts @@ -1,5 +1,6 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {ApiClient, Cluster} from "@databricks/databricks-sdk"; +import {ApiClient} from "@databricks/databricks-sdk"; +import {Cluster} from "../sdk-extensions"; import assert from "assert"; import {mock} from "ts-mockito"; import {formatQuickPickClusterDetails} from "./ConnectionCommands"; diff --git a/packages/databricks-vscode/src/configuration/ConnectionCommands.ts b/packages/databricks-vscode/src/configuration/ConnectionCommands.ts index 7ee1a8198..9c65f8f48 100644 --- a/packages/databricks-vscode/src/configuration/ConnectionCommands.ts +++ b/packages/databricks-vscode/src/configuration/ConnectionCommands.ts @@ -1,8 +1,4 @@ -import { - Cluster, - WorkspaceFsEntity, - WorkspaceFsUtils, -} from "@databricks/databricks-sdk"; +import {Cluster, WorkspaceFsEntity, WorkspaceFsUtils} from "../sdk-extensions"; import {homedir} from "node:os"; import { Disposable, diff --git a/packages/databricks-vscode/src/configuration/ConnectionManager.ts b/packages/databricks-vscode/src/configuration/ConnectionManager.ts index 95f02136a..296d4fa6f 100644 --- a/packages/databricks-vscode/src/configuration/ConnectionManager.ts +++ b/packages/databricks-vscode/src/configuration/ConnectionManager.ts @@ -1,10 +1,5 @@ -import { - WorkspaceClient, - Cluster, - WorkspaceFsEntity, - WorkspaceFsUtils, - ApiClient, -} from "@databricks/databricks-sdk"; +import {WorkspaceClient, ApiClient, logging} from "@databricks/databricks-sdk"; +import {Cluster, WorkspaceFsEntity, WorkspaceFsUtils} from "../sdk-extensions"; import { env, EventEmitter, @@ -26,12 +21,13 @@ import { import {configureWorkspaceWizard} from "./configureWorkspaceWizard"; import {ClusterManager} from "../cluster/ClusterManager"; import {DatabricksWorkspace} from "./DatabricksWorkspace"; -import {NamedLogger} from "@databricks/databricks-sdk/dist/logging"; import {Loggers} from "../logger"; import {CustomWhenContext} from "../vscode-objs/CustomWhenContext"; import {workspaceConfigs} from "../vscode-objs/WorkspaceConfigs"; import {WorkspaceStateManager} from "../vscode-objs/WorkspaceState"; +// eslint-disable-next-line 
@typescript-eslint/naming-convention +const {NamedLogger} = logging; export type ConnectionState = "CONNECTED" | "CONNECTING" | "DISCONNECTED"; /** diff --git a/packages/databricks-vscode/src/configuration/DatabricksWorkspace.ts b/packages/databricks-vscode/src/configuration/DatabricksWorkspace.ts index 03ce10986..37fe410ec 100644 --- a/packages/databricks-vscode/src/configuration/DatabricksWorkspace.ts +++ b/packages/databricks-vscode/src/configuration/DatabricksWorkspace.ts @@ -1,12 +1,6 @@ -import { - WorkspaceClient, - Cluster, - iam, - WorkspaceConf, - WorkspaceConfProps, -} from "@databricks/databricks-sdk"; +import {WorkspaceClient, iam, logging} from "@databricks/databricks-sdk"; +import {Cluster, WorkspaceConf, WorkspaceConfProps} from "../sdk-extensions"; import {Context, context} from "@databricks/databricks-sdk/dist/context"; -import {withLogContext} from "@databricks/databricks-sdk/dist/logging"; import {Uri} from "vscode"; import {Loggers} from "../logger"; import {workspaceConfigs} from "../vscode-objs/WorkspaceConfigs"; @@ -93,7 +87,7 @@ export class DatabricksWorkspace { } } - @withLogContext(Loggers.Extension, "DatabricksWorkspace.load") + @logging.withLogContext(Loggers.Extension, "DatabricksWorkspace.load") static async load( client: WorkspaceClient, authProvider: AuthProvider, diff --git a/packages/databricks-vscode/src/configuration/auth/AzureCliCheck.ts b/packages/databricks-vscode/src/configuration/auth/AzureCliCheck.ts index 7298931ae..52e85751f 100644 --- a/packages/databricks-vscode/src/configuration/auth/AzureCliCheck.ts +++ b/packages/databricks-vscode/src/configuration/auth/AzureCliCheck.ts @@ -2,13 +2,16 @@ import { ExecUtils, ProductVersion, WorkspaceClient, + logging, } from "@databricks/databricks-sdk"; -import {NamedLogger} from "@databricks/databricks-sdk/dist/logging"; import {commands, Disposable, Uri, window} from "vscode"; import {Loggers} from "../../logger"; import {AzureCliAuthProvider} from "./AuthProvider"; import {orchestrate, OrchestrationLoopError, Step} from "./orchestrate"; +// eslint-disable-next-line @typescript-eslint/naming-convention +const {NamedLogger} = logging; + // eslint-disable-next-line @typescript-eslint/no-var-requires const extensionVersion = require("../../../package.json") .version as ProductVersion; @@ -25,7 +28,7 @@ type AzureStepName = export class AzureCliCheck implements Disposable { private disposables: Disposable[] = []; private isCodeSpaces: boolean; - private logger: NamedLogger; + private logger: logging.NamedLogger; tenantId: string | undefined; azureLoginAppId: string | undefined; diff --git a/packages/databricks-vscode/src/configuration/auth/MetadataService.test.ts b/packages/databricks-vscode/src/configuration/auth/MetadataService.test.ts index 481beb77c..fe25b496d 100644 --- a/packages/databricks-vscode/src/configuration/auth/MetadataService.test.ts +++ b/packages/databricks-vscode/src/configuration/auth/MetadataService.test.ts @@ -1,7 +1,5 @@ /* eslint-disable @typescript-eslint/naming-convention */ import {MetadataService} from "./MetadataService"; -import {fetch} from "@databricks/databricks-sdk"; - import * as assert from "assert"; import { ApiClient, @@ -11,8 +9,10 @@ import { MetadataServiceVersion, MetadataServiceVersionHeader, RequestVisitor, + logging, + fetch, } from "@databricks/databricks-sdk"; -import {NamedLogger} from "@databricks/databricks-sdk/dist/logging"; +const {NamedLogger} = logging; describe(__filename, function () { this.timeout(10_000); diff --git 
a/packages/databricks-vscode/src/configuration/auth/MetadataService.ts b/packages/databricks-vscode/src/configuration/auth/MetadataService.ts
index 8ef8a38ec..6ecb4645e 100644
--- a/packages/databricks-vscode/src/configuration/auth/MetadataService.ts
+++ b/packages/databricks-vscode/src/configuration/auth/MetadataService.ts
@@ -9,8 +9,8 @@ import {
     MetadataServiceVersion,
     MetadataServiceVersionHeader,
     ServerResponse,
+    logging,
 } from "@databricks/databricks-sdk";
-import {NamedLogger} from "@databricks/databricks-sdk/dist/logging";
 
 export class MetadataService implements Disposable {
     private server:
@@ -22,7 +22,7 @@ export class MetadataService implements Disposable {
 
     constructor(
         apiClient: ApiClient | undefined,
-        private logger: NamedLogger
+        private logger: logging.NamedLogger
     ) {
         this.updateMagic();
         this._apiClient = apiClient;
diff --git a/packages/databricks-vscode/src/configuration/auth/MetadataServiceManager.ts b/packages/databricks-vscode/src/configuration/auth/MetadataServiceManager.ts
index c88f63706..ea62ad24e 100644
--- a/packages/databricks-vscode/src/configuration/auth/MetadataServiceManager.ts
+++ b/packages/databricks-vscode/src/configuration/auth/MetadataServiceManager.ts
@@ -1,4 +1,4 @@
-import {NamedLogger} from "@databricks/databricks-sdk/dist/logging";
+import {logging} from "@databricks/databricks-sdk";
 import {Disposable} from "vscode";
 import {ConnectionManager} from "../ConnectionManager";
 import {MetadataService} from "./MetadataService";
@@ -10,7 +10,7 @@ export class MetadataServiceManager implements Disposable {
     constructor(private readonly connctionManager: ConnectionManager) {
         this.metadataSerivce = new MetadataService(
             undefined,
-            NamedLogger.getOrCreate("Extension")
+            logging.NamedLogger.getOrCreate("Extension")
         );
 
         this.disposables.push(
diff --git a/packages/databricks-vscode/src/configuration/auth/orchestrate.ts b/packages/databricks-vscode/src/configuration/auth/orchestrate.ts
index be77af349..1f262f0d4 100644
--- a/packages/databricks-vscode/src/configuration/auth/orchestrate.ts
+++ b/packages/databricks-vscode/src/configuration/auth/orchestrate.ts
@@ -1,4 +1,4 @@
-import {NamedLogger} from "@databricks/databricks-sdk/dist/logging";
+import {logging} from "@databricks/databricks-sdk";
 
 export type Step<S, KEYS> = () => Promise<
     SuccessResult<S> | NextResult<KEYS> | ErrorResult
 >;
@@ -31,7 +31,7 @@ export async function orchestrate<S, KEYS extends string>(
     steps: Record<KEYS, Step<S, KEYS>>,
     start: KEYS,
     maxSteps = 20,
-    logger?: NamedLogger
+    logger?: logging.NamedLogger
 ): Promise<S> {
     let counter = 0;
 
diff --git a/packages/databricks-vscode/src/extension.ts b/packages/databricks-vscode/src/extension.ts
index 8d80debd6..490e15277 100644
--- a/packages/databricks-vscode/src/extension.ts
+++ b/packages/databricks-vscode/src/extension.ts
@@ -23,7 +23,7 @@ import {QuickstartCommands} from "./quickstart/QuickstartCommands";
 import {showQuickStartOnFirstUse} from "./quickstart/QuickStart";
 import {PublicApi} from "@databricks/databricks-vscode-types";
 import {LoggerManager, Loggers} from "./logger";
-import {NamedLogger} from "@databricks/databricks-sdk/dist/logging";
+import {logging} from "@databricks/databricks-sdk";
 import {workspaceConfigs} from "./vscode-objs/WorkspaceConfigs";
 import {PackageJsonUtils, UtilsCommands} from "./utils";
 import {ConfigureAutocomplete} from "./language/ConfigureAutocomplete";
@@ -100,7 +100,7 @@ export async function activate(
     const telemetry = Telemetry.createDefault();
     const packageMetadata = await PackageJsonUtils.getMetadata(context);
-    NamedLogger.getOrCreate(Loggers.Extension).debug("Metadata", {
+    logging.NamedLogger.getOrCreate(Loggers.Extension).debug("Metadata", {
         metadata: packageMetadata,
     });
 
@@ -299,7 +299,7 @@ export async function activate(
     );
 
     notebookInitScriptManager.updateInitScript().catch((e) => {
-        NamedLogger.getOrCreate(Loggers.Extension).error(
+        logging.NamedLogger.getOrCreate(Loggers.Extension).error(
             "Failed to update init script",
             e
         );
@@ -517,7 +517,10 @@ export async function activate(
     );
 
     showQuickStartOnFirstUse(context).catch((e) => {
-        NamedLogger.getOrCreate("Extension").error("Quick Start error", e);
+        logging.NamedLogger.getOrCreate("Extension").error(
+            "Quick Start error",
+            e
+        );
     });
 
     // Utils
@@ -538,21 +541,24 @@ export async function activate(
     // generate a json schema for bundle root and load a custom provider into
     // redhat.vscode-yaml extension to validate bundle config files with this schema
     generateBundleSchema(cli).catch((e) => {
-        NamedLogger.getOrCreate("Extension").error(
+        logging.NamedLogger.getOrCreate("Extension").error(
             "Failed to load bundle schema: ",
             e
         );
     });
 
     connectionManager.login(false).catch((e) => {
-        NamedLogger.getOrCreate(Loggers.Extension).error("Login error", e);
+        logging.NamedLogger.getOrCreate(Loggers.Extension).error(
+            "Login error",
+            e
+        );
     });
 
     setDbnbCellLimits(
         workspace.workspaceFolders[0].uri,
         connectionManager
     ).catch((e) => {
-        NamedLogger.getOrCreate(Loggers.Extension).error(
+        logging.NamedLogger.getOrCreate(Loggers.Extension).error(
             "Error while setting jupyter configs for parsing databricks notebooks",
             e
         );
diff --git a/packages/databricks-vscode/src/feature-manager/FeatureManager.ts b/packages/databricks-vscode/src/feature-manager/FeatureManager.ts
index e52ab71a6..cc35e4970 100644
--- a/packages/databricks-vscode/src/feature-manager/FeatureManager.ts
+++ b/packages/databricks-vscode/src/feature-manager/FeatureManager.ts
@@ -3,7 +3,7 @@ import {Mutex} from "../locking";
 import {workspaceConfigs} from "../vscode-objs/WorkspaceConfigs";
 import {DisabledFeature} from "./DisabledFeature";
 import {EnabledFeature} from "./EnabledFeature";
-import {NamedLogger} from "@databricks/databricks-sdk/dist/logging";
+import {logging} from "@databricks/databricks-sdk";
 import {Loggers} from "../logger";
 
 export type FeatureEnableAction = (...args: any[]) => Promise<void>;
@@ -136,7 +136,7 @@ export class FeatureManager implements Disposable {
                     reject(e);
                 });
             }).catch((e) => {
-                NamedLogger.getOrCreate(Loggers.Extension).error(
+                logging.NamedLogger.getOrCreate(Loggers.Extension).error(
                     `Error checking feature state ${id}`,
                     e
                 );
diff --git a/packages/databricks-vscode/src/file-managers/DatabricksEnvFileManager.ts b/packages/databricks-vscode/src/file-managers/DatabricksEnvFileManager.ts
index c11c8f12d..bb7e36345 100644
--- a/packages/databricks-vscode/src/file-managers/DatabricksEnvFileManager.ts
+++ b/packages/databricks-vscode/src/file-managers/DatabricksEnvFileManager.ts
@@ -15,7 +15,6 @@ import {SystemVariables} from "../vscode-objs/SystemVariables";
 import {logging} from "@databricks/databricks-sdk";
 import {Loggers} from "../logger";
 import {Context, context} from "@databricks/databricks-sdk/dist/context";
-import {NamedLogger} from "@databricks/databricks-sdk/dist/logging";
 import {DbConnectStatusBarButton} from "../language/DbConnectStatusBarButton";
 import {EnvVarGenerators, FileUtils} from "../utils";
 import {NotebookInitScriptManager} from "../language/notebooks/NotebookInitScriptManager";
@@ -73,11 +72,14 @@ export class DatabricksEnvFileManager
implements Disposable { this.userEnvPath = Uri.file( systemVariableResolver.resolve(this.unresolvedUserEnvFile) ); - NamedLogger.getOrCreate(Loggers.Extension).debug("Env file locations", { - unresolvedDatabricksEnvFile: this.unresolvedDatabricksEnvFile, - unresolvedUserEnvFile: this.unresolvedUserEnvFile, - msEnvFile: workspaceConfigs.msPythonEnvFile, - }); + logging.NamedLogger.getOrCreate(Loggers.Extension).debug( + "Env file locations", + { + unresolvedDatabricksEnvFile: this.unresolvedDatabricksEnvFile, + unresolvedUserEnvFile: this.unresolvedUserEnvFile, + msEnvFile: workspaceConfigs.msPythonEnvFile, + } + ); } public async init() { diff --git a/packages/databricks-vscode/src/file-managers/ProjectConfigFile.ts b/packages/databricks-vscode/src/file-managers/ProjectConfigFile.ts index f636a2dbd..6d7ea89fa 100644 --- a/packages/databricks-vscode/src/file-managers/ProjectConfigFile.ts +++ b/packages/databricks-vscode/src/file-managers/ProjectConfigFile.ts @@ -5,9 +5,8 @@ import { ProfileAuthProvider, } from "../configuration/auth/AuthProvider"; import {Uri} from "vscode"; -import {NamedLogger} from "@databricks/databricks-sdk/dist/logging"; import {Loggers} from "../logger"; -import {Config} from "@databricks/databricks-sdk"; +import {Config, logging} from "@databricks/databricks-sdk"; import {workspaceConfigs} from "../vscode-objs/WorkspaceConfigs"; export interface ProjectConfig { @@ -129,7 +128,7 @@ export class ProjectConfigFile { authProvider = AuthProvider.fromJSON(config, cliPath); } } catch (e: any) { - NamedLogger.getOrCreate(Loggers.Extension).error( + logging.NamedLogger.getOrCreate(Loggers.Extension).error( "Error parsing project config file", e ); diff --git a/packages/databricks-vscode/src/language/ConfigureAutocomplete.ts b/packages/databricks-vscode/src/language/ConfigureAutocomplete.ts index b8ebb5a95..954b32b20 100644 --- a/packages/databricks-vscode/src/language/ConfigureAutocomplete.ts +++ b/packages/databricks-vscode/src/language/ConfigureAutocomplete.ts @@ -1,4 +1,4 @@ -import {NamedLogger} from "@databricks/databricks-sdk/dist/logging"; +import {logging} from "@databricks/databricks-sdk"; import {appendFile, mkdir, readdir, readFile} from "fs/promises"; import path from "path"; import { @@ -75,7 +75,7 @@ export class ConfigureAutocomplete implements Disposable { try { return await fn(); } catch (e) { - NamedLogger.getOrCreate(Loggers.Extension).error( + logging.NamedLogger.getOrCreate(Loggers.Extension).error( "Error configuring autocomplete", e ); diff --git a/packages/databricks-vscode/src/language/DbConnectAccessVerifier.ts b/packages/databricks-vscode/src/language/DbConnectAccessVerifier.ts index 0ec641a68..e3c79d2ef 100644 --- a/packages/databricks-vscode/src/language/DbConnectAccessVerifier.ts +++ b/packages/databricks-vscode/src/language/DbConnectAccessVerifier.ts @@ -1,4 +1,5 @@ -import {Cluster, logging} from "@databricks/databricks-sdk"; +import {logging} from "@databricks/databricks-sdk"; +import {Cluster} from "../sdk-extensions"; import {window, commands} from "vscode"; import {ConnectionManager} from "../configuration/ConnectionManager"; import {MultiStepAccessVerifier} from "../feature-manager/MultiStepAccessVerfier"; diff --git a/packages/databricks-vscode/src/language/notebooks/NotebookInitScriptManager.ts b/packages/databricks-vscode/src/language/notebooks/NotebookInitScriptManager.ts index 9d9d4f05a..096f8561a 100644 --- a/packages/databricks-vscode/src/language/notebooks/NotebookInitScriptManager.ts +++ 
b/packages/databricks-vscode/src/language/notebooks/NotebookInitScriptManager.ts @@ -13,7 +13,7 @@ import {mkdir, cp, rm, readdir} from "fs/promises"; import {glob} from "glob"; import {ConnectionManager} from "../../configuration/ConnectionManager"; import {FeatureManager} from "../../feature-manager/FeatureManager"; -import {withLogContext} from "@databricks/databricks-sdk/dist/logging"; +import {logging} from "@databricks/databricks-sdk"; import {Loggers} from "../../logger"; import {Context, context} from "@databricks/databricks-sdk/dist/context"; import {Mutex} from "../../locking"; @@ -24,7 +24,9 @@ import {EnvVarGenerators, FileUtils} from "../../utils"; import {workspaceConfigs} from "../../vscode-objs/WorkspaceConfigs"; import {SystemVariables} from "../../vscode-objs/SystemVariables"; import {LocalUri} from "../../sync/SyncDestination"; + const execFile = promisify(ef); +const withLogContext = logging.withLogContext; async function isDbnbTextEditor(editor?: TextEditor) { return ( diff --git a/packages/databricks-vscode/src/logger/LoggerManager.test.ts b/packages/databricks-vscode/src/logger/LoggerManager.test.ts index 1f02c5181..af688a4b2 100644 --- a/packages/databricks-vscode/src/logger/LoggerManager.test.ts +++ b/packages/databricks-vscode/src/logger/LoggerManager.test.ts @@ -1,6 +1,4 @@ -import "@databricks/databricks-sdk/dist"; -import {Time, TimeUnits} from "@databricks/databricks-sdk/dist"; -import {NamedLogger} from "@databricks/databricks-sdk/dist/logging"; +import {Time, TimeUnits, logging} from "@databricks/databricks-sdk"; import assert from "assert"; import {mkdtemp, readFile} from "fs/promises"; import {remove} from "fs-extra"; @@ -25,8 +23,10 @@ describe(__filename, function () { const manager = new LoggerManager(instance(mockContext)); await manager.initLoggers(); - NamedLogger.getOrCreate(Loggers.Extension).debug("test message"); - NamedLogger.getOrCreate(Loggers.CLI).debug("test message"); + logging.NamedLogger.getOrCreate(Loggers.Extension).debug( + "test message" + ); + logging.NamedLogger.getOrCreate(Loggers.CLI).debug("test message"); await new Promise((resolve) => setTimeout( diff --git a/packages/databricks-vscode/src/logger/LoggerManager.ts b/packages/databricks-vscode/src/logger/LoggerManager.ts index 951aaed7c..43ba4b110 100644 --- a/packages/databricks-vscode/src/logger/LoggerManager.ts +++ b/packages/databricks-vscode/src/logger/LoggerManager.ts @@ -1,13 +1,13 @@ -import { - NamedLogger, - ExposedLoggers, -} from "@databricks/databricks-sdk/dist/logging"; +import {logging} from "@databricks/databricks-sdk"; import {env, ExtensionContext, window} from "vscode"; import {loggers, format, transports} from "winston"; import {getOutputConsoleTransport} from "./outputConsoleTransport"; import {unlink, access, mkdir} from "fs/promises"; import path from "path"; +// eslint-disable-next-line @typescript-eslint/naming-convention +const {NamedLogger, ExposedLoggers} = logging; + export class LoggerManager { constructor(readonly context: ExtensionContext) {} diff --git a/packages/databricks-vscode/src/logger/utils.ts b/packages/databricks-vscode/src/logger/utils.ts index 6567d3803..c83f3df75 100644 --- a/packages/databricks-vscode/src/logger/utils.ts +++ b/packages/databricks-vscode/src/logger/utils.ts @@ -1,4 +1,4 @@ -import {NamedLogger} from "@databricks/databricks-sdk/dist/logging"; +import {logging} from "@databricks/databricks-sdk"; import {Loggers} from "./LoggerManager"; export interface TryAndLogErrorOpts { @@ -25,7 +25,10 @@ export async function 
tryAndLogErrorAsync(
     try {
         return await fn();
     } catch (e) {
-        NamedLogger.getOrCreate(mergedOpts.logger).error(mergedOpts.message, e);
+        logging.NamedLogger.getOrCreate(mergedOpts.logger).error(
+            mergedOpts.message,
+            e
+        );
         if (mergedOpts.shouldThrow) {
             throw e;
         }
@@ -44,7 +47,10 @@ export function tryAndLogError(
     try {
         return fn();
     } catch (e) {
-        NamedLogger.getOrCreate(mergedOpts.logger).error(mergedOpts.message, e);
+        logging.NamedLogger.getOrCreate(mergedOpts.logger).error(
+            mergedOpts.message,
+            e
+        );
         if (mergedOpts.shouldThrow) {
             throw e;
         }
diff --git a/packages/databricks-vscode/src/run/DatabricksRuntime.test.ts b/packages/databricks-vscode/src/run/DatabricksRuntime.test.ts
index 7139556d4..164d63857 100644
--- a/packages/databricks-vscode/src/run/DatabricksRuntime.test.ts
+++ b/packages/databricks-vscode/src/run/DatabricksRuntime.test.ts
@@ -3,7 +3,7 @@ import assert from "assert";
 
 import {mock, when, instance, anything, verify, capture} from "ts-mockito";
 import {Disposable, ExtensionContext, Uri} from "vscode";
-import {Cluster, Command, ExecutionContext} from "@databricks/databricks-sdk";
+import {Cluster, Command, ExecutionContext} from "../sdk-extensions";
 import {DatabricksRuntime, OutputEvent} from "./DatabricksRuntime";
 import {ConnectionManager} from "../configuration/ConnectionManager";
 import {
diff --git a/packages/databricks-vscode/src/run/WorkflowOutputPanel.ts b/packages/databricks-vscode/src/run/WorkflowOutputPanel.ts
index 5e7eac3bc..956552829 100644
--- a/packages/databricks-vscode/src/run/WorkflowOutputPanel.ts
+++ b/packages/databricks-vscode/src/run/WorkflowOutputPanel.ts
@@ -1,4 +1,5 @@
-import {Cluster, WorkflowRun, jobs} from "@databricks/databricks-sdk";
+import {jobs} from "@databricks/databricks-sdk";
+import {Cluster, WorkflowRun} from "../sdk-extensions";
 import * as fs from "node:fs/promises";
 import {Disposable, Uri, WebviewPanel} from "vscode";
 
diff --git a/packages/databricks-vscode/src/run/WorkflowRunner.ts b/packages/databricks-vscode/src/run/WorkflowRunner.ts
index 1204f673a..ab701856d 100644
--- a/packages/databricks-vscode/src/run/WorkflowRunner.ts
+++ b/packages/databricks-vscode/src/run/WorkflowRunner.ts
@@ -1,4 +1,5 @@
-import {Cluster, WorkflowRun, jobs, ApiError} from "@databricks/databricks-sdk";
+import {jobs, ApiError} from "@databricks/databricks-sdk";
+import {Cluster, WorkflowRun} from "../sdk-extensions";
 import {basename} from "node:path";
 import {
     CancellationToken,
diff --git a/packages/databricks-vscode/src/sdk-extensions/Cluster.integ.ts b/packages/databricks-vscode/src/sdk-extensions/Cluster.integ.ts
new file mode 100644
index 000000000..dc252d91e
--- /dev/null
+++ b/packages/databricks-vscode/src/sdk-extensions/Cluster.integ.ts
@@ -0,0 +1,72 @@
+/* eslint-disable @typescript-eslint/naming-convention */
+
+import {CancellationToken} from "@databricks/databricks-sdk";
+import {Cluster} from "./Cluster";
+import assert from "node:assert";
+import {IntegrationTestSetup} from "./test/IntegrationTestSetup";
+
+describe(__filename, function () {
+    let integSetup: IntegrationTestSetup;
+
+    this.timeout(10 * 60 * 1000);
+
+    before(async () => {
+        integSetup = await IntegrationTestSetup.getInstance();
+    });
+
+    it("should create an execution context", async () => {
+        assert(await integSetup.cluster.canExecute());
+
+        const ctx = await integSetup.cluster.createExecutionContext();
+        const {result} = await ctx.execute("print('hello')");
+
+        assert(result.results);
+        assert(result.results.resultType === "text");
+        assert.equal(result.results.data, "hello");
+    });
+
+    it("should load a cluster by name", async () => {
+        const clusterA = integSetup.cluster;
+
+        const clusterB = await Cluster.fromClusterName(
+            integSetup.client.apiClient,
+            clusterA.details.cluster_name!
+        );
+
+        assert(clusterA.id);
+        assert.equal(clusterA.id, clusterB?.id);
+    });
+
+    // skipping because running the test takes too long
+    it.skip("should start a stopping cluster", async () => {
+        let listener: any;
+        const token: CancellationToken = {
+            isCancellationRequested: false,
+            onCancellationRequested: (_listener) => {
+                listener = _listener;
+            },
+        };
+
+        const cluster = integSetup.cluster;
+        // stop cluster
+        await Promise.race([
+            cluster.stop(token, async (info) =>
+                // eslint-disable-next-line no-console
+                console.log(`Stopping - ${info.state}`)
+            ),
+            new Promise<void>((resolve) => {
+                // cancel stop
+                setTimeout(() => {
+                    token.isCancellationRequested = true;
+                    listener();
+                    resolve();
+                }, 500);
+            }),
+        ]);
+
+        // start cluster
+        await cluster.start(undefined, (state) =>
+            // eslint-disable-next-line no-console
+            console.log(`Starting ${state}`)
+        );
+    });
+});
diff --git a/packages/databricks-vscode/src/sdk-extensions/Cluster.test.ts b/packages/databricks-vscode/src/sdk-extensions/Cluster.test.ts
new file mode 100644
index 000000000..18a959668
--- /dev/null
+++ b/packages/databricks-vscode/src/sdk-extensions/Cluster.test.ts
@@ -0,0 +1,321 @@
+/* eslint-disable @typescript-eslint/naming-convention */
+
+import {ApiClient, Time, TimeUnits, compute} from "@databricks/databricks-sdk";
+import {Cluster} from "./Cluster";
+import * as assert from "node:assert";
+import {mock, when, instance, deepEqual, verify, anything} from "ts-mockito";
+import {getMockTestCluster} from "./test/ClusterFixtures";
+import {TokenFixture} from "./test/TokenFixtures";
+import FakeTimers from "@sinonjs/fake-timers";
+
+describe(__filename, function () {
+    this.timeout(new Time(10, TimeUnits.minutes).toMillSeconds().value);
+
+    let mockedClient: ApiClient;
+    let mockedCluster: Cluster;
+    let testClusterDetails: compute.ClusterDetails;
+    let fakeTimer: FakeTimers.InstalledClock;
+
+    beforeEach(async () => {
+        ({mockedCluster, mockedClient, testClusterDetails} =
+            await getMockTestCluster());
+
+        fakeTimer = FakeTimers.install();
+    });
+
+    afterEach(() => {
+        fakeTimer.uninstall();
+    });
+
+    it("calling start on a non terminated state should not throw an error", async () => {
+        when(
+            mockedClient.request(
+                "/api/2.0/clusters/get",
+                "GET",
+                deepEqual({
+                    cluster_id: testClusterDetails.cluster_id,
+                }),
+                anything()
+            )
+        ).thenResolve(
+            {
+                ...testClusterDetails,
+                state: "PENDING",
+            },
+            {
+                ...testClusterDetails,
+                state: "PENDING",
+            },
+            {
+                ...testClusterDetails,
+                state: "PENDING",
+            },
+            {
+                ...testClusterDetails,
+                state: "PENDING",
+            },
+            {
+                ...testClusterDetails,
+                state: "PENDING",
+            },
+            {
+                ...testClusterDetails,
+                state: "RUNNING",
+            }
+        );
+
+        await mockedCluster.refresh();
+        assert.notEqual(mockedCluster.state, "RUNNING");
+
+        const startPromise = mockedCluster.start();
+        await fakeTimer.runToLastAsync();
+        await startPromise;
+        assert.equal(mockedCluster.state, "RUNNING");
+
+        verify(
+            mockedClient.request(
+                "/api/2.0/clusters/get",
+                anything(),
+                anything(),
+                anything()
+            )
+        ).times(6);
+
+        verify(
+            mockedClient.request(
+                "/api/2.0/clusters/start",
+                anything(),
+                anything(),
+                anything()
+            )
+        ).never();
+    });
+
+    it("should terminate cluster", async () => {
+        when(
+            mockedClient.request(
+                "/api/2.0/clusters/get",
+                "GET",
+                deepEqual({
+                    cluster_id:
testClusterDetails.cluster_id, + }), + anything() + ) + ).thenResolve( + { + ...testClusterDetails, + state: "RUNNING", + }, + { + ...testClusterDetails, + state: "TERMINATING", + }, + { + ...testClusterDetails, + state: "TERMINATED", + } + ); + + when( + mockedClient.request( + "/api/2.0/clusters/delete", + "POST", + deepEqual({ + cluster_id: testClusterDetails.cluster_id, + }), + anything() + ) + ).thenResolve({}); + + assert.equal(mockedCluster.state, "RUNNING"); + + const stopPromise = mockedCluster.stop(); + await fakeTimer.runToLastAsync(); + await stopPromise; + + assert.equal(mockedCluster.state, "TERMINATED"); + + verify( + mockedClient.request( + "/api/2.0/clusters/get", + anything(), + anything(), + anything() + ) + ).times(3); + + verify( + mockedClient.request( + "/api/2.0/clusters/delete", + anything(), + anything(), + anything() + ) + ).once(); + }); + + it("should terminate non running clusters", async () => { + when( + mockedClient.request( + "/api/2.0/clusters/get", + "GET", + deepEqual({ + cluster_id: testClusterDetails.cluster_id, + }), + anything() + ) + ).thenResolve( + { + ...testClusterDetails, + state: "PENDING", + }, + { + ...testClusterDetails, + state: "TERMINATING", + }, + { + ...testClusterDetails, + state: "TERMINATED", + } + ); + + await mockedCluster.refresh(); + assert.notEqual(mockedCluster.state, "RUNNING"); + + const stopPromise = mockedCluster.stop(); + await fakeTimer.runToLastAsync(); + await stopPromise; + + assert.equal(mockedCluster.state, "TERMINATED"); + + verify( + mockedClient.request( + "/api/2.0/clusters/get", + anything(), + anything(), + anything() + ) + ).times(3); + + verify( + mockedClient.request( + "/api/2.0/clusters/delete", + anything(), + anything(), + anything() + ) + ).once(); + }); + + it("should cancel cluster start", async () => { + const whenMockGetCluster = when( + mockedClient.request( + "/api/2.0/clusters/get", + "GET", + deepEqual({ + cluster_id: testClusterDetails.cluster_id, + }), + anything() + ) + ); + + whenMockGetCluster.thenResolve({ + ...testClusterDetails, + state: "PENDING", + }); + + when( + mockedClient.request( + "/api/2.0/clusters/delete", + "POST", + deepEqual({ + cluster_id: testClusterDetails.cluster_id, + }), + anything() + ) + ).thenCall(() => { + whenMockGetCluster.thenResolve( + { + ...testClusterDetails, + state: "TERMINATING", + }, + { + ...testClusterDetails, + state: "TERMINATED", + } + ); + return {}; + }); + + const token = mock(TokenFixture); + when(token.isCancellationRequested).thenReturn(false, false, true); + //mocked cluster is initially in running state, this gets it to pending state + await mockedCluster.refresh(); + + assert.equal(mockedCluster.state, "PENDING"); + const startPromise = mockedCluster.start(instance(token)); + await fakeTimer.runToLastAsync(); + await startPromise; + + verify(token.isCancellationRequested).thrice(); + }); + + it("should parse DBR from spark_version", () => { + const mockedClient = mock(ApiClient); + const clusterDetails = { + spark_version: "7.3.x-scala2.12", + }; + const cluster = new Cluster(instance(mockedClient), clusterDetails); + + const versions = [ + ["11.x-snapshot-aarch64-scala2.12", [11, "x", "x"]], + ["10.4.x-scala2.12", [10, 4, "x"]], + ["7.3.x-scala2.12", [7, 3, "x"]], + [ + "custom:custom-local__11.3.x-snapshot-cpu-ml-scala2.12__unknown__head__7335a01__cb1aa83__jenkins__641f1a5__format-2.lz4", + [11, 3, "x"], + ], + ]; + + for (const [sparkVersion, expectedDbr] of versions) { + clusterDetails.spark_version = sparkVersion as string; + 
assert.deepEqual(cluster.dbrVersion, expectedDbr);
+        }
+    });
+
+    it("should return correct URLs", async () => {
+        const mockedClient = mock(ApiClient);
+        when(mockedClient.host).thenResolve(
+            new URL("https://test.cloud.databricks.com")
+        );
+        const clusterDetails = {
+            cluster_id: "1118-013127-82wynr8t",
+        };
+        const cluster = new Cluster(instance(mockedClient), clusterDetails);
+
+        assert.equal(
+            await cluster.url,
+            "https://test.cloud.databricks.com/#setting/clusters/1118-013127-82wynr8t/configuration"
+        );
+
+        assert.equal(
+            await cluster.driverLogsUrl,
+            "https://test.cloud.databricks.com/#setting/clusters/1118-013127-82wynr8t/driverLogs"
+        );
+
+        assert.equal(
+            await cluster.metricsUrl,
+            "https://test.cloud.databricks.com/#setting/clusters/1118-013127-82wynr8t/metrics"
+        );
+
+        assert.equal(
+            await cluster.getSparkUiUrl(),
+            "https://test.cloud.databricks.com/#setting/clusters/1118-013127-82wynr8t/sparkUi"
+        );
+
+        assert.equal(
+            await cluster.getSparkUiUrl("7189805239423176682"),
+            "https://test.cloud.databricks.com/#setting/sparkui/1118-013127-82wynr8t/driver-7189805239423176682"
+        );
+    });
+});
diff --git a/packages/databricks-vscode/src/sdk-extensions/Cluster.ts b/packages/databricks-vscode/src/sdk-extensions/Cluster.ts
new file mode 100644
index 000000000..cb2c44f7e
--- /dev/null
+++ b/packages/databricks-vscode/src/sdk-extensions/Cluster.ts
@@ -0,0 +1,457 @@
+/* eslint-disable @typescript-eslint/naming-convention */
+
+import {
+    ApiClient,
+    Time,
+    TimeUnits,
+    retry,
+    retries,
+    jobs,
+    iam,
+    compute,
+    CancellationToken,
+    logging,
+} from "@databricks/databricks-sdk";
+import {ExecutionContext} from "./ExecutionContext";
+import {WorkflowRun} from "./WorkflowRun";
+import {Context, context} from "@databricks/databricks-sdk/dist/context";
+
+const {ExposedLoggers, withLogContext} = logging;
+
+export class ClusterRetriableError extends retries.RetriableError {}
+export class ClusterError extends Error {}
+
+export class Cluster {
+    private clusterApi: compute.ClustersService;
+    private _canExecute?: boolean;
+    private _hasExecutePerms?: boolean;
+
+    constructor(
+        private client: ApiClient,
+        private clusterDetails: compute.ClusterDetails
+    ) {
+        this.clusterApi = new compute.ClustersService(client);
+    }
+
+    get id(): string {
+        return this.clusterDetails.cluster_id!;
+    }
+
+    get name(): string {
+        return this.clusterDetails.cluster_name!;
+    }
+
+    get url(): Promise<string> {
+        return (async () =>
+            `https://${(await this.client.host).host}/#setting/clusters/${
+                this.id
+            }/configuration`)();
+    }
+
+    get driverLogsUrl(): Promise<string> {
+        return (async () =>
+            `https://${(await this.client.host).host}/#setting/clusters/${
+                this.id
+            }/driverLogs`)();
+    }
+
+    get metricsUrl(): Promise<string> {
+        return (async () =>
+            `https://${(await this.client.host).host}/#setting/clusters/${
+                this.id
+            }/metrics`)();
+    }
+
+    async getSparkUiUrl(sparkContextId?: string): Promise<string> {
+        const host = (await this.client.host).host;
+
+        if (sparkContextId) {
+            return `https://${host}/#setting/sparkui/${this.id}/driver-${sparkContextId}`;
+        } else {
+            return `https://${host}/#setting/clusters/${this.id}/sparkUi`;
+        }
+    }
+
+    get memoryMb(): number | undefined {
+        return this.clusterDetails.cluster_memory_mb;
+    }
+
+    get cores(): number | undefined {
+        return this.clusterDetails.cluster_cores;
+    }
+
+    get sparkVersion(): string {
+        return this.clusterDetails.spark_version!;
+    }
+
+    get dbrVersion(): Array<number | "x"> {
+        const sparkVersion = this.clusterDetails.spark_version!;
+        const match =
sparkVersion.match(/^(custom:.*?__)?(.*?)-/); + if (!match) { + return ["x", "x", "x"]; + } + const parts = match[2].split("."); + return [ + parseInt(parts[0], 10) || "x", + parseInt(parts[1], 10) || "x", + parseInt(parts[2], 10) || "x", + ]; + } + + get creator(): string { + return this.clusterDetails.creator_user_name || ""; + } + + get state(): compute.State { + return this.clusterDetails.state!; + } + + get stateMessage(): string { + return this.clusterDetails.state_message || ""; + } + + get source(): compute.ClusterSource { + return this.clusterDetails.cluster_source!; + } + + get details() { + return this.clusterDetails; + } + set details(details: compute.ClusterDetails) { + this.clusterDetails = details; + } + + get accessMode(): + | compute.DataSecurityMode + | "SHARED" + | "LEGACY_SINGLE_USER_PASSTHROUGH" + | "LEGACY_SINGLE_USER_STANDARD" { + //TODO: deprecate data_security_mode once access_mode is available everywhere + return ( + (this.details as any).access_mode ?? this.details.data_security_mode + ); + } + + isUc() { + return ["SINGLE_USER", "SHARED", "USER_ISOLATION"].includes( + this.accessMode + ); + } + + isSingleUser() { + const modeProperty = this.accessMode; + + return ( + modeProperty !== undefined && + [ + "SINGLE_USER", + "LEGACY_SINGLE_USER_PASSTHROUGH", + "LEGACY_SINGLE_USER_STANDARD", + //enums unique to data_security_mode + "LEGACY_SINGLE_USER", + ].includes(modeProperty) + ); + } + + isValidSingleUser(userName?: string) { + return ( + this.isSingleUser() && this.details.single_user_name === userName + ); + } + + get hasExecutePermsCached() { + return this._hasExecutePerms; + } + + async hasExecutePerms(userDetails?: iam.User) { + if (userDetails === undefined) { + return (this._hasExecutePerms = false); + } + + if (this.isSingleUser()) { + return (this._hasExecutePerms = this.isValidSingleUser( + userDetails.userName + )); + } + + const permissionApi = new iam.PermissionsService(this.client); + const perms = await permissionApi.get({ + request_object_id: this.id, + request_object_type: "clusters", + }); + + return (this._hasExecutePerms = + (perms.access_control_list ?? []).find((ac) => { + return ( + ac.user_name === userDetails.userName || + userDetails.groups + ?.map((v) => v.display) + .includes(ac.group_name ?? 
"") + ); + }) !== undefined); + } + + async refresh() { + this.details = await this.clusterApi.get({ + cluster_id: this.clusterDetails.cluster_id!, + }); + } + + async start( + token?: CancellationToken, + onProgress: (state: compute.State) => void = () => {} + ) { + await this.refresh(); + onProgress(this.state); + + if (this.state === "RUNNING") { + return; + } + + if ( + this.state === "TERMINATED" || + this.state === "ERROR" || + this.state === "UNKNOWN" + ) { + await this.clusterApi.start({ + cluster_id: this.id, + }); + } + + // wait for cluster to be stopped before re-starting + if (this.state === "TERMINATING") { + await retry({ + timeout: new Time(1, TimeUnits.minutes), + retryPolicy: new retries.LinearRetryPolicy( + new Time(1, TimeUnits.seconds) + ), + fn: async () => { + if (token?.isCancellationRequested) { + return; + } + await this.refresh(); + onProgress(this.state); + + if (this.state === "TERMINATING") { + throw new retries.RetriableError(); + } + }, + }); + await this.clusterApi.start({ + cluster_id: this.id, + }); + } + + this._canExecute = undefined; + await retry({ + fn: async () => { + if (token?.isCancellationRequested) { + return; + } + + await this.refresh(); + onProgress(this.state); + + switch (this.state) { + case "RUNNING": + return; + case "TERMINATED": + throw new ClusterError( + `Cluster[${ + this.name + }]: CurrentState - Terminated; Reason - ${JSON.stringify( + this.clusterDetails.termination_reason + )}` + ); + case "ERROR": + throw new ClusterError( + `Cluster[${this.name}]: Error in starting the cluster (${this.clusterDetails.state_message})` + ); + default: + throw new ClusterRetriableError( + `Cluster[${this.name}]: CurrentState - ${this.state}; Reason - ${this.clusterDetails.state_message}` + ); + } + }, + }); + } + + async stop( + token?: CancellationToken, + onProgress?: (newPollResponse: compute.ClusterDetails) => Promise + ) { + this.details = await ( + await this.clusterApi.delete( + { + cluster_id: this.id, + }, + new Context({cancellationToken: token}) + ) + ).wait({ + onProgress: async (clusterInfo) => { + this.details = clusterInfo; + if (onProgress) { + await onProgress(clusterInfo); + } + }, + }); + } + + async createExecutionContext( + language: compute.Language = "python" + ): Promise { + return await ExecutionContext.create(this.client, this, language); + } + + get canExecuteCached() { + return this._canExecute; + } + + @withLogContext(ExposedLoggers.SDK) + async canExecute(@context ctx?: Context): Promise { + let executionContext: ExecutionContext | undefined; + try { + executionContext = await this.createExecutionContext(); + const result = await executionContext.execute("1==1"); + this._canExecute = + result.result?.results?.resultType === "error" ? false : true; + } catch (e) { + ctx?.logger?.error(`Can't execute code on cluster ${this.id}`, e); + this._canExecute = false; + } finally { + if (executionContext) { + await executionContext.destroy(); + } + } + return this._canExecute ?? 
false; + } + + static async fromClusterName( + client: ApiClient, + clusterName: string + ): Promise { + const clusterApi = new compute.ClustersService(client); + + for await (const clusterInfo of clusterApi.list({can_use_client: ""})) { + if (clusterInfo.cluster_name === clusterName) { + const cluster = await clusterApi.get({ + cluster_id: clusterInfo.cluster_id!, + }); + return new Cluster(client, cluster); + } + } + + return; + } + + static async fromClusterId( + client: ApiClient, + clusterId: string + ): Promise { + const clusterApi = new compute.ClustersService(client); + const response = await clusterApi.get({cluster_id: clusterId}); + return new Cluster(client, response); + } + + static async *list(client: ApiClient): AsyncIterable { + const clusterApi = new compute.ClustersService(client); + + for await (const clusterInfo of clusterApi.list({can_use_client: ""})) { + yield new Cluster(client, clusterInfo); + } + } + + async submitRun(submitRunRequest: jobs.SubmitRun): Promise { + const jobsService = new jobs.JobsService(this.client); + const res = await jobsService.submit(submitRunRequest); + return await WorkflowRun.fromId(this.client, res.run_id!); + } + + /** + * Run a notebook as a workflow on a cluster and export result as HTML + */ + async runNotebookAndWait({ + path, + parameters = {}, + onProgress, + token, + }: { + path: string; + parameters?: Record; + onProgress?: (state: jobs.RunLifeCycleState, run: WorkflowRun) => void; + token?: CancellationToken; + }) { + const run = await this.submitRun({ + tasks: [ + { + task_key: "js_sdk_job_run", + existing_cluster_id: this.id, + notebook_task: { + notebook_path: path, + base_parameters: parameters, + }, + depends_on: [], + libraries: [], + }, + ], + }); + + await this.waitForWorkflowCompletion(run, onProgress, token); + return await run.export(); + } + + /** + * Run a python file as a workflow on a cluster + */ + async runPythonAndWait({ + path, + args = [], + onProgress, + token, + }: { + path: string; + args?: string[]; + onProgress?: (state: jobs.RunLifeCycleState, run: WorkflowRun) => void; + token?: CancellationToken; + }): Promise { + const run = await this.submitRun({ + tasks: [ + { + task_key: "js_sdk_job_run", + existing_cluster_id: this.id, + spark_python_task: { + python_file: path, + parameters: args, + }, + }, + ], + }); + + await this.waitForWorkflowCompletion(run, onProgress, token); + return await run.getOutput(); + } + + private async waitForWorkflowCompletion( + run: WorkflowRun, + onProgress?: (state: jobs.RunLifeCycleState, run: WorkflowRun) => void, + token?: CancellationToken + ): Promise { + while (true) { + if (run.lifeCycleState === "INTERNAL_ERROR") { + return; + } + if (run.lifeCycleState === "TERMINATED") { + return; + } + + await new Promise((resolve) => setTimeout(resolve, 3000)); + + if (token && token.isCancellationRequested) { + await run.cancel(); + return; + } + + await run.update(); + onProgress && onProgress(run.lifeCycleState!, run); + } + } +} diff --git a/packages/databricks-vscode/src/sdk-extensions/Command.ts b/packages/databricks-vscode/src/sdk-extensions/Command.ts new file mode 100644 index 000000000..977984a69 --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/Command.ts @@ -0,0 +1,174 @@ +import {EventEmitter} from "events"; +import { + CancellationToken, + Time, + retries, + retry, + compute, +} from "@databricks/databricks-sdk"; +import {ExecutionContext} from "./ExecutionContext"; + +interface CommandErrorParams { + commandId: string; + clusterId: string; + 
contextId: string; + message?: string; +} + +function getCommandErrorMessage(errorParams: CommandErrorParams): string { + return `Command [${errorParams.commandId}] Context [${errorParams.contextId}] Cluster [${errorParams.clusterId}]: ${errorParams.message}`; +} +class CommandRetriableError extends retries.RetriableError { + constructor(errorParams: CommandErrorParams) { + super(getCommandErrorMessage(errorParams)); + } +} +class CommandError extends Error { + constructor(errorParams: CommandErrorParams) { + super(getCommandErrorMessage(errorParams)); + } +} + +export interface CommandWithResult { + cmd: Command; + result: compute.CommandStatusResponse; +} + +export type StatusUpdateListener = ( + result: compute.CommandStatusResponse +) => void; + +export class Command extends EventEmitter { + readonly context: ExecutionContext; + readonly commandsApi: compute.CommandExecutionService; + result?: compute.CommandStatusResponse; + id?: string; + + private static statusUpdateEvent = "statusUpdate"; + + private constructor(context: ExecutionContext) { + super(); + this.context = context; + this.commandsApi = new compute.CommandExecutionService(context.client); + } + + private get commandErrorParams(): CommandErrorParams { + return { + commandId: this.id!, + clusterId: this.context.cluster.id, + contextId: this.context.id!, + }; + } + + async refresh() { + this.result = await this.commandsApi.commandStatus({ + clusterId: this.context.cluster.id, + contextId: this.context.id!, + commandId: this.id!, + }); + } + + async cancel() { + await this.commandsApi.cancel({ + commandId: this.id!, + contextId: this.context.id!, + clusterId: this.context.cluster.id!, + }); + + await retry({ + fn: async () => { + await this.refresh(); + // The API surfaces an exception when a command is cancelled + // The cancellation itself proceeds as expected, but the status + // is FINISHED instead of CANCELLED. + if ( + this.result!.results?.resultType === "error" && + !this.result!.results!.cause!.includes( + "CommandCancelledException" + ) + ) { + throw new CommandError({ + ...this.commandErrorParams, + message: this.result!.results.cause, + }); + } + + if (["Cancelled", "Finished"].includes(this.result!.status!)) { + return; + } + + if (this.result!.status === "Error") { + throw new CommandError({ + ...this.commandErrorParams, + message: "Error while cancelling the command", + }); + } + + throw new CommandRetriableError({ + ...this.commandErrorParams, + message: `Current state of command is ${ + this.result!.status + }`, + }); + }, + }); + } + + async response( + cancellationToken?: CancellationToken, + timeout: Time = retries.DEFAULT_MAX_TIMEOUT + ): Promise { + await retry({ + timeout: timeout, + fn: async () => { + await this.refresh(); + + this.emit(Command.statusUpdateEvent, this.result!); + + if ( + !["Cancelled", "Error", "Finished"].includes( + this.result!.status! 
diff --git a/packages/databricks-vscode/src/sdk-extensions/ExecutionContext.integ.ts b/packages/databricks-vscode/src/sdk-extensions/ExecutionContext.integ.ts
new file mode 100644
index 000000000..165438c63
--- /dev/null
+++ b/packages/databricks-vscode/src/sdk-extensions/ExecutionContext.integ.ts
@@ -0,0 +1,71 @@
+/* eslint-disable @typescript-eslint/naming-convention */
+
+import {retries} from "@databricks/databricks-sdk";
+import {ExecutionContext} from "./ExecutionContext";
+import assert from "assert";
+
+import {IntegrationTestSetup} from "./test/IntegrationTestSetup";
+import {TokenFixture} from "./test/TokenFixtures";
+import {mock, when, instance} from "ts-mockito";
+
+describe(__filename, function () {
+    let integSetup: IntegrationTestSetup;
+
+    this.timeout(10 * 60 * 1000);
+
+    before(async () => {
+        integSetup = await IntegrationTestSetup.getInstance();
+    });
+
+    it("should run python with high level API", async () => {
+        const context = await ExecutionContext.create(
+            integSetup.client.apiClient,
+            integSetup.cluster
+        );
+
+        let statusUpdateCalled = false;
+        let {cmd, result} = await context.execute(
+            "print('juhu')",
+            () => (statusUpdateCalled = true)
+        );
+        assert(cmd);
+        assert(statusUpdateCalled);
+        assert(result.results);
+        assert(result.results.resultType === "text");
+        assert.equal(result.results.data, "juhu");
+
+        statusUpdateCalled = false;
+        ({cmd, result} = await context.execute("print('kinners')"));
+        assert(cmd);
+        assert(!statusUpdateCalled);
+        assert(result.results);
+        assert(result.results.resultType === "text");
+        assert.equal(result.results.data, "kinners");
+
+        await context.destroy();
+    });
+
+    it("should cancel running command", async () => {
+        const context = await ExecutionContext.create(
+            integSetup.client.apiClient,
+            integSetup.cluster
+        );
+
+        const token = mock(TokenFixture);
+        when(token.isCancellationRequested).thenReturn(false, false, true);
+
+        const {cmd, result} = await context.execute(
+            "while True: pass",
+            undefined,
+            instance(token),
+            retries.DEFAULT_MAX_TIMEOUT
+        );
+        // The API surfaces an exception when a command is cancelled
+        // The cancellation itself proceeds as expected, but the status
+        // is FINISHED instead of CANCELLED
+        assert(cmd);
+        assert.equal(result.status, "Finished");
+        assert(result.results?.resultType === "error");
+        assert(result.results!.cause!.includes("CommandCancelledException"));
+    });
+});
diff --git a/packages/databricks-vscode/src/sdk-extensions/ExecutionContext.ts b/packages/databricks-vscode/src/sdk-extensions/ExecutionContext.ts
new file mode 100644
index
000000000..040031d73 --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/ExecutionContext.ts @@ -0,0 +1,65 @@ +import { + ApiClient, + CancellationToken, + Time, + compute, + retries, +} from "@databricks/databricks-sdk"; +import {Cluster} from "./Cluster"; +import {Command, CommandWithResult, StatusUpdateListener} from "./Command"; + +export class ExecutionContext { + readonly executionContextApi: compute.CommandExecutionService; + id?: string; + + private constructor( + readonly client: ApiClient, + readonly cluster: Cluster, + readonly language: compute.Language + ) { + this.executionContextApi = new compute.CommandExecutionService(client); + } + + static async create( + client: ApiClient, + cluster: Cluster, + language: compute.Language = "python" + ): Promise { + const context = new ExecutionContext(client, cluster, language); + const response = await ( + await context.executionContextApi.create({ + clusterId: context.cluster.id, + language: context.language, + }) + ).wait(); + + context.id = response.id; + return context; + } + + async execute( + command: string, + onStatusUpdate: StatusUpdateListener = () => {}, + token?: CancellationToken, + timeout: Time = retries.DEFAULT_MAX_TIMEOUT + ): Promise { + return await Command.execute( + this, + command, + onStatusUpdate, + token, + timeout + ); + } + + async destroy() { + if (!this.id) { + return; + } + + await this.executionContextApi.destroy({ + clusterId: this.cluster.id, + contextId: this.id, + }); + } +} diff --git a/packages/databricks-vscode/src/sdk-extensions/Repos.integ.ts b/packages/databricks-vscode/src/sdk-extensions/Repos.integ.ts new file mode 100644 index 000000000..92f2a5469 --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/Repos.integ.ts @@ -0,0 +1,116 @@ +/* eslint-disable @typescript-eslint/naming-convention */ + +import {CancellationToken, workspace} from "@databricks/databricks-sdk"; +import {IntegrationTestSetup} from "./test/IntegrationTestSetup"; +import {Context} from "@databricks/databricks-sdk/dist/context"; +import * as assert from "node:assert"; +import {Repo} from "./Repos"; +import {randomUUID} from "node:crypto"; + +describe(__filename, function () { + let integSetup: IntegrationTestSetup; + const repoDir = "/Repos/js-sdk-tests"; + let testRepoDetails: workspace.RepoInfo; + + this.timeout(10 * 60 * 1000); + + async function createRandomRepo( + repoService?: workspace.ReposService + ): Promise { + repoService = + repoService ?? 
+ new workspace.ReposService(integSetup.client.apiClient); + const id = randomUUID(); + const resp = await repoService.create({ + path: `${repoDir}/test-${id}`, + url: "https://github.com/fjakobs/empty-repo.git", + provider: "github", + }); + assert.equal(resp.path, `${repoDir}/test-${id}`); + + return resp; + } + + before(async () => { + integSetup = await IntegrationTestSetup.getInstance(); + const workspaceService = new workspace.WorkspaceService( + integSetup.client.apiClient + ); + await workspaceService.mkdirs({ + path: repoDir, + }); + + testRepoDetails = await createRandomRepo( + new workspace.ReposService(integSetup.client.apiClient) + ); + }); + + after(async () => { + const repos = new workspace.ReposService(integSetup.client.apiClient); + await repos.delete({repo_id: testRepoDetails.id!}); + }); + + it("should list repos by prefix", async () => { + const repos = []; + for await (const repo of Repo.list(integSetup.client.apiClient, { + path_prefix: repoDir, + })) { + repos.push(repo); + } + + assert.ok(repos.length > 0); + }); + + // skip test as it takes too long to run + it.skip("should list all repos", async () => { + const repos = []; + for await (const repo of Repo.list(integSetup.client.apiClient, {})) { + repos.push(repo); + } + + assert.ok(repos.length > 0); + }); + + it("should cancel listing repos", async () => { + let listener: any; + const token: CancellationToken = { + isCancellationRequested: false, + onCancellationRequested: (_listener) => { + listener = _listener; + }, + }; + + const response = Repo.list( + integSetup.client.apiClient, + { + path_prefix: repoDir, + }, + new Context({cancellationToken: token}) + ); + + setTimeout(() => { + token.isCancellationRequested = true; + listener && listener(); + }, 100); + + // reponse should finish soon after cancellation + const start = Date.now(); + try { + for await (const repo of response) { + assert.ok(repo); + } + } catch (err: any) { + assert.equal(err.name, "AbortError"); + } + + assert.ok(Date.now() - start < 300); + }); + + it("Should find the exact matching repo if multiple repos with same prefix in fromPath", async () => { + const actual = await Repo.fromPath( + integSetup.client.apiClient, + testRepoDetails.path! 
+ ); + assert.equal(actual.path, testRepoDetails.path); + }); +}); diff --git a/packages/databricks-vscode/src/sdk-extensions/Repos.ts b/packages/databricks-vscode/src/sdk-extensions/Repos.ts new file mode 100644 index 000000000..5fb629229 --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/Repos.ts @@ -0,0 +1,91 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import {ApiClient, workspace, logging} from "@databricks/databricks-sdk"; +// eslint-disable-next-line @typescript-eslint/no-unused-vars +import {context, Context} from "@databricks/databricks-sdk/dist/context"; + +export interface RepoList { + repos: Repo[]; + next_page_token: any; +} + +export class RepoError extends Error {} + +export class Repo { + private readonly reposApi; + + constructor( + private readonly client: ApiClient, + private details: workspace.RepoInfo + ) { + this.reposApi = new workspace.ReposService(this.client); + } + + async refresh() { + this.details = await this.reposApi.get({repo_id: this.id}); + return this.details; + } + + get id(): number { + return this.details.id!; + } + + get path(): string { + return this.details.path!; + } + + get url(): Promise { + return (async () => + `${(await this.client.host).host}#folder/${this.id}`)(); + } + + @logging.withLogContext(logging.ExposedLoggers.SDK) + static async create( + client: ApiClient, + req: workspace.CreateRepo, + @context context?: Context + ) { + const repoService = new workspace.ReposService(client); + return new Repo(client, await repoService.create(req, context)); + } + + @logging.withLogContext(logging.ExposedLoggers.SDK) + static async *list( + client: ApiClient, + req: workspace.ListReposRequest, + @context context?: Context + ): AsyncIterable { + const reposApi = new workspace.ReposService(client); + + for await (const repo of reposApi.list(req, context)) { + yield new Repo(client, repo); + } + } + + @logging.withLogContext(logging.ExposedLoggers.SDK) + static async fromPath( + client: ApiClient, + path: string, + @context context?: Context + ) { + const repos: Array = []; + let exactRepo: Repo | undefined; + for await (const repo of this.list( + client, + { + path_prefix: path, + }, + context + )) { + if (repo.path === path) { + exactRepo = repo; + } + repos.push(repo); + } + + if (repos.length !== 1 && !exactRepo) { + throw new RepoError(`${repos.length} repos match prefix ${path}`); + } + + return exactRepo ?? 
repos[0]; + } +} diff --git a/packages/databricks-vscode/src/sdk-extensions/WorkflowRun.integ.ts b/packages/databricks-vscode/src/sdk-extensions/WorkflowRun.integ.ts new file mode 100644 index 000000000..b9b397a47 --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/WorkflowRun.integ.ts @@ -0,0 +1,162 @@ +import assert from "assert"; +import {jobs} from "@databricks/databricks-sdk"; +import {IntegrationTestSetup} from "./test/IntegrationTestSetup"; +import {Cluster} from "./Cluster"; +import {WorkflowRun} from "./WorkflowRun"; + +describe(__filename, function () { + let integSetup: IntegrationTestSetup; + + this.timeout(10 * 60 * 1000); + + before(async () => { + integSetup = await IntegrationTestSetup.getInstance(); + }); + + it("should run a python job", async () => { + const cluster = await Cluster.fromClusterId( + integSetup.client.apiClient, + integSetup.cluster.id + ); + + const dbfsApi = integSetup.client.dbfs; + const jobPath = `/tmp/sdk-js-integ-${integSetup.testRunId}.py`; + + await dbfsApi.put({ + path: jobPath, + contents: Buffer.from( + "# Databricks notebook source\nprint('hello from job')" + ).toString("base64"), + overwrite: true, + }); + + try { + const progress: Array = []; + const output = await cluster.runPythonAndWait({ + path: `dbfs:${jobPath}`, + onProgress: ( + _state: jobs.RunLifeCycleState, + run: WorkflowRun + ) => { + progress.push(run); + }, + }); + + assert(progress.length > 1); + assert.equal( + progress[progress.length - 1].lifeCycleState, + "TERMINATED" + ); + assert.equal(output.logs?.trim(), "hello from job"); + } finally { + await dbfsApi.delete({path: jobPath}); + } + }); + + it("should run a notebook job", async () => { + const cluster = await Cluster.fromClusterId( + integSetup.client.apiClient, + integSetup.cluster.id + ); + + const jobPath = `/tmp/js-sdk-jobs-tests/sdk-js-integ-${integSetup.testRunId}.py`; + await integSetup.client.workspace.mkdirs({ + path: "/tmp/js-sdk-jobs-tests", + }); + + await integSetup.client.workspace.import({ + path: jobPath, + format: "SOURCE", + language: "PYTHON", + content: Buffer.from( + "# Databricks notebook source\nprint('hello from job')" + ).toString("base64"), + overwrite: true, + }); + + try { + const progress: Array = []; + const output = await cluster.runNotebookAndWait({ + path: `${jobPath}`, + onProgress: ( + _state: jobs.RunLifeCycleState, + run: WorkflowRun + ) => { + progress.push(run); + }, + }); + + assert(progress.length > 1); + assert.equal( + progress[progress.length - 1].lifeCycleState, + "TERMINATED" + ); + + assert( + output.views && + output.views.length > 0 && + output.views[0].content + ); + assert(output.views[0].content.startsWith("")); + } finally { + await integSetup.client.workspace.delete({path: jobPath}); + } + }); + + it("should run a broken notebook job", async () => { + const cluster = await Cluster.fromClusterId( + integSetup.client.apiClient, + integSetup.cluster.id + ); + + const jobPath = `/tmp/js-sdk-jobs-tests/sdk-js-integ-${integSetup.testRunId}.py`; + await integSetup.client.workspace.mkdirs({ + path: "/tmp/js-sdk-jobs-tests", + }); + + await integSetup.client.workspace.import({ + path: jobPath, + format: "SOURCE", + language: "PYTHON", + content: Buffer.from( + `# Databricks notebook source +# COMMAND ---------- + +pr int("Cell 1") + +# COMMAND ---------- + +print("Cell 2")` + ).toString("base64"), + overwrite: true, + }); + + try { + const progress: Array = []; + const output = await cluster.runNotebookAndWait({ + path: `${jobPath}`, + onProgress: ( + _state: 
jobs.RunLifeCycleState, + run: WorkflowRun + ) => { + progress.push(run); + }, + }); + + assert(progress.length > 1); + assert.equal( + progress[progress.length - 1].lifeCycleState, + "INTERNAL_ERROR" + ); + + assert( + output.views && + output.views.length > 0 && + output.views[0].content + ); + assert(output.views[0].content.startsWith("")); + } finally { + await integSetup.client.workspace.delete({path: jobPath}); + } + }); +}); diff --git a/packages/databricks-vscode/src/sdk-extensions/WorkflowRun.ts b/packages/databricks-vscode/src/sdk-extensions/WorkflowRun.ts new file mode 100644 index 000000000..3a6ebfde2 --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/WorkflowRun.ts @@ -0,0 +1,74 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import {ApiClient, jobs} from "@databricks/databricks-sdk"; + +export class WorkflowRun { + constructor( + readonly client: ApiClient, + private details: jobs.Run + ) {} + + static async fromId( + client: ApiClient, + runId: number + ): Promise { + const jobsService = new jobs.JobsService(client); + return new WorkflowRun( + client, + await jobsService.getRun({run_id: runId}) + ); + } + + get lifeCycleState(): jobs.RunLifeCycleState { + return this.details.state?.life_cycle_state || "INTERNAL_ERROR"; + } + + get state(): jobs.RunState | undefined { + return this.details.state; + } + + get tasks(): Array | undefined { + return this.details.tasks; + } + + get runPageUrl(): string { + return this.details.run_page_url || ""; + } + + async cancel(): Promise { + const jobsService = new jobs.JobsService(this.client); + await jobsService.cancelRun({run_id: this.details.run_id!}); + } + + async update(): Promise { + const jobsService = new jobs.JobsService(this.client); + this.details = await jobsService.getRun({run_id: this.details.run_id!}); + } + + async getOutput(task?: jobs.RunTask): Promise { + task = task || this.tasks![0]; + if (!task) { + throw new Error("Run has no tasks"); + } + + const jobsService = new jobs.JobsService(this.client); + return jobsService.getRunOutput({run_id: task.run_id!}); + } + + async export(task?: jobs.RunTask): Promise { + task = task || this.tasks![0]; + if ( + this.lifeCycleState !== "TERMINATED" && + this.lifeCycleState !== "INTERNAL_ERROR" + ) { + throw new Error("Run is not terminated"); + } + if (!this.tasks || !this.tasks.length) { + throw new Error("Run has no tasks"); + } + + const jobsService = new jobs.JobsService(this.client); + return await jobsService.exportRun({ + run_id: task.run_id!, + }); + } +} diff --git a/packages/databricks-vscode/src/sdk-extensions/WorkspaceConf.integ.ts b/packages/databricks-vscode/src/sdk-extensions/WorkspaceConf.integ.ts new file mode 100644 index 000000000..37ca4c604 --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/WorkspaceConf.integ.ts @@ -0,0 +1,39 @@ +import {ApiError} from "@databricks/databricks-sdk"; +import {WorkspaceConf} from "./WorkspaceConf"; +import assert from "assert"; + +import {IntegrationTestSetup} from "./test/IntegrationTestSetup"; + +describe(__filename, function () { + let integSetup: IntegrationTestSetup; + + this.timeout(10 * 60 * 1000); + + before(async function () { + integSetup = await IntegrationTestSetup.getInstance(); + try { + const wsConf = new WorkspaceConf(integSetup.client.apiClient); + await wsConf.getStatus(["enableProjectTypeInWorkspace"]); + } catch (e: unknown) { + if (e instanceof ApiError && e.statusCode === 403) { + // eslint-disable-next-line no-console + console.log( + "Workspace conf tests 
require administrator permissions" + ); + this.skip(); + } + } + }); + + it("should read configuration properties", async () => { + const wsConf = new WorkspaceConf(integSetup.client.apiClient); + + const state = await wsConf.getStatus([ + "enableProjectTypeInWorkspace", + "enableWorkspaceFilesystem", + ]); + + assert("enableProjectTypeInWorkspace" in state); + assert("enableWorkspaceFilesystem" in state); + }); +}); diff --git a/packages/databricks-vscode/src/sdk-extensions/WorkspaceConf.ts b/packages/databricks-vscode/src/sdk-extensions/WorkspaceConf.ts new file mode 100644 index 000000000..8f6061b25 --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/WorkspaceConf.ts @@ -0,0 +1,65 @@ +import {ApiClient, settings, logging} from "@databricks/databricks-sdk"; +import {context, Context} from "@databricks/databricks-sdk/dist/context"; + +type StringBool = "true" | "false" | ""; + +/** + * Partial list of workspace conf properties. + */ +export interface WorkspaceConfProps { + /** + * Enable or disable Repos. You should see a new Repos icon in your workspace's left navigation when this feature is enabled. + */ + enableProjectTypeInWorkspace: StringBool; + + /** + * Enable or disable the Files in Repos feature. + * + * When Files in Repos is set to 'DBR 8.4+', arbitrary files will be + * included in Repo operations and can be accessed from clusters + * running DBR 8.4 and above. + * + * When Files in Repos is set to 'DBR 11.0+', arbitrary files will be + * included in Repo operations and can be accessed from clusters + * running DBR 11.0 and above. + * + * When Files in Repos is disabled, arbitrary files will not be included + * in Repo operations and cannot be accessed from clusters. + */ + enableWorkspaceFilesystem: "dbr8.4+" | "dbr11.0+" | "false" | "true"; +} + +/** + * Types interface to the workspace conf service. + * + * This class provides strong typing for a subset of the workspace conf + * properties. + * + * In order to set arbitrary properties use the API wrapper directly. 
diff --git a/packages/databricks-vscode/src/sdk-extensions/WorkspaceConf.ts b/packages/databricks-vscode/src/sdk-extensions/WorkspaceConf.ts
new file mode 100644
index 000000000..8f6061b25
--- /dev/null
+++ b/packages/databricks-vscode/src/sdk-extensions/WorkspaceConf.ts
@@ -0,0 +1,65 @@
+import {ApiClient, settings, logging} from "@databricks/databricks-sdk";
+import {context, Context} from "@databricks/databricks-sdk/dist/context";
+
+type StringBool = "true" | "false" | "";
+
+/**
+ * Partial list of workspace conf properties.
+ */
+export interface WorkspaceConfProps {
+    /**
+     * Enable or disable Repos. You should see a new Repos icon in your
+     * workspace's left navigation when this feature is enabled.
+     */
+    enableProjectTypeInWorkspace: StringBool;
+
+    /**
+     * Enable or disable the Files in Repos feature.
+     *
+     * When Files in Repos is set to 'DBR 8.4+', arbitrary files will be
+     * included in Repo operations and can be accessed from clusters
+     * running DBR 8.4 and above.
+     *
+     * When Files in Repos is set to 'DBR 11.0+', arbitrary files will be
+     * included in Repo operations and can be accessed from clusters
+     * running DBR 11.0 and above.
+     *
+     * When Files in Repos is disabled, arbitrary files will not be included
+     * in Repo operations and cannot be accessed from clusters.
+     */
+    enableWorkspaceFilesystem: "dbr8.4+" | "dbr11.0+" | "false" | "true";
+}
+
+/**
+ * Typed interface to the workspace conf service.
+ *
+ * This class provides strong typing for a subset of the workspace conf
+ * properties.
+ *
+ * In order to set arbitrary properties, use the API wrapper directly.
+ */
+export class WorkspaceConf {
+    constructor(private readonly client: ApiClient) {}
+
+    @logging.withLogContext(logging.ExposedLoggers.SDK)
+    async getStatus(
+        keys: Array<keyof WorkspaceConfProps>,
+        @context ctx?: Context
+    ): Promise<Record<string, string>> {
+        const wsConfApi = new settings.WorkspaceConfService(this.client);
+        return await wsConfApi.getStatus(
+            {
+                keys: keys.join(","),
+            },
+            ctx
+        );
+    }
+
+    @logging.withLogContext(logging.ExposedLoggers.SDK)
+    async setStatus(
+        request: Partial<WorkspaceConfProps>,
+        @context ctx?: Context
+    ): Promise<Record<string, string>> {
+        const wsConfApi = new settings.WorkspaceConfService(this.client);
+        return await wsConfApi.setStatus(request, ctx);
+    }
+}
diff --git a/packages/databricks-vscode/src/sdk-extensions/index.ts b/packages/databricks-vscode/src/sdk-extensions/index.ts
new file mode 100644
index 000000000..52bd746b1
--- /dev/null
+++ b/packages/databricks-vscode/src/sdk-extensions/index.ts
@@ -0,0 +1,8 @@
+export * from "./wsfs";
+export * as WorkspaceFsUtils from "./wsfs/utils";
+export * from "./Cluster";
+export * from "./Command";
+export * from "./ExecutionContext";
+export * from "./WorkflowRun";
+export * from "./WorkspaceConf";
+export * from "./Repos";
diff --git a/packages/databricks-vscode/src/sdk-extensions/test/ClusterFixtures.ts b/packages/databricks-vscode/src/sdk-extensions/test/ClusterFixtures.ts
new file mode 100644
index 000000000..711e49811
--- /dev/null
+++ b/packages/databricks-vscode/src/sdk-extensions/test/ClusterFixtures.ts
@@ -0,0 +1,34 @@
+/* eslint-disable @typescript-eslint/naming-convention */
+
+import {Cluster} from "../Cluster";
+import {mock, when, resetCalls, instance, anything} from "ts-mockito";
+import {compute, ApiClient} from "@databricks/databricks-sdk";
+
+const testClusterDetails: compute.ClusterDetails = {
+    cluster_id: "testClusterId",
+    cluster_name: "testClusterName",
+};
+
+export async function getMockTestCluster() {
+    const mockedClient = mock(ApiClient);
+    when(
+        mockedClient.request(
+            "/api/2.0/clusters/get",
+            "GET",
+            anything(),
+            anything()
+        )
+    ).thenResolve({
+        ...testClusterDetails,
+        state: "RUNNING",
+    });
+
+    const mockedCluster = await Cluster.fromClusterId(
+        instance(mockedClient),
+        testClusterDetails.cluster_id!
+    );
+
+    resetCalls(mockedClient);
+
+    return {mockedCluster, mockedClient, testClusterDetails};
+}
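A sketch of how getMockTestCluster above can back a fast unit test without reaching a real workspace. Mocha and ts-mockito are already dev dependencies of this package; the test body itself is illustrative.

import assert from "assert";
import {getMockTestCluster} from "./ClusterFixtures";

describe("Cluster (mocked)", () => {
    it("hydrates details from the mocked get endpoint", async () => {
        // fromClusterId hits the stubbed /api/2.0/clusters/get request.
        const {mockedCluster, testClusterDetails} = await getMockTestCluster();
        assert.equal(mockedCluster.id, testClusterDetails.cluster_id);
    });
});
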
diff --git a/packages/databricks-vscode/src/sdk-extensions/test/IntegrationTestSetup.ts b/packages/databricks-vscode/src/sdk-extensions/test/IntegrationTestSetup.ts
new file mode 100644
index 000000000..36e54d3eb
--- /dev/null
+++ b/packages/databricks-vscode/src/sdk-extensions/test/IntegrationTestSetup.ts
@@ -0,0 +1,53 @@
+/* eslint-disable @typescript-eslint/naming-convention */
+
+import * as crypto from "crypto";
+import {Cluster} from "../Cluster";
+import {WorkspaceClient} from "@databricks/databricks-sdk";
+
+export class IntegrationTestSetup {
+    readonly testRunId: string;
+
+    constructor(
+        readonly client: WorkspaceClient,
+        readonly cluster: Cluster
+    ) {
+        this.testRunId = crypto.randomUUID();
+    }
+
+    private static _instance: IntegrationTestSetup;
+    static async getInstance(): Promise<IntegrationTestSetup> {
+        if (!this._instance) {
+            const client = new WorkspaceClient(
+                {},
+                {
+                    product: "integration-tests",
+                    productVersion: "0.0.1",
+                }
+            );
+
+            if (!process.env["TEST_DEFAULT_CLUSTER_ID"]) {
+                throw new Error(
+                    "Environment variable 'TEST_DEFAULT_CLUSTER_ID' must be set"
+                );
+            }
+
+            const clusterId =
+                process.env["TEST_DEFAULT_CLUSTER_ID"]!.split("'").join("");
+
+            const cluster = await Cluster.fromClusterId(
+                client.apiClient,
+                clusterId
+            );
+            await cluster.start();
+
+            this._instance = new IntegrationTestSetup(client, cluster);
+        }
+        return this._instance;
+    }
+}
+
+export function sleep(timeout: number): Promise<void> {
+    return new Promise((resolve) => {
+        setTimeout(resolve, timeout);
+    });
+}
diff --git a/packages/databricks-vscode/src/sdk-extensions/test/TokenFixtures.ts b/packages/databricks-vscode/src/sdk-extensions/test/TokenFixtures.ts
new file mode 100644
index 000000000..f208892b1
--- /dev/null
+++ b/packages/databricks-vscode/src/sdk-extensions/test/TokenFixtures.ts
@@ -0,0 +1,13 @@
+import {CancellationToken} from "@databricks/databricks-sdk";
+
+export class TokenFixture implements CancellationToken {
+    private listeners: Array<(e?: any) => any> = [];
+
+    get isCancellationRequested() {
+        return false;
+    }
+
+    onCancellationRequested(f: (e?: any) => any) {
+        this.listeners.push(f);
+    }
+}
diff --git a/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFs.integ.ts b/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFs.integ.ts
new file mode 100644
index 000000000..d3654fb37
--- /dev/null
+++ b/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFs.integ.ts
@@ -0,0 +1,103 @@
+import assert from "node:assert";
+import {randomUUID} from "node:crypto";
+import {posix} from "path";
+import path from "node:path/posix";
+import {IntegrationTestSetup} from "../test/IntegrationTestSetup";
+import {isDirectory} from "./utils";
+import {WorkspaceFsDir} from "./WorkspaceFsDir";
+import {WorkspaceFsEntity} from "./WorkspaceFsEntity";
+
+describe(__filename, function () {
+    let integSetup: IntegrationTestSetup;
+    let testDirPath: string;
+    let rootDir: WorkspaceFsDir;
+
+    this.timeout(10 * 60 * 1000);
+
+    before(async () => {
+        integSetup = await IntegrationTestSetup.getInstance();
+        const me = (await integSetup.client.currentUser.me()).userName;
+        assert.ok(me !== undefined, "No currentUser.userName");
+
+        testDirPath = `/Users/${me}/vscode-integ-tests/${randomUUID()}`;
+        await integSetup.client.workspace.mkdirs({
+            path: testDirPath,
+        });
+    });
+
+    after(async () => {
+        try {
+            await integSetup.client.workspace.delete({
+                path: testDirPath,
+                recursive: true,
+            });
+ } catch (e: unknown) { + // eslint-disable-next-line no-console + console.error(`Can't cleanup ${testDirPath}`, e); + } + }); + + beforeEach(async () => { + const dir = await WorkspaceFsEntity.fromPath( + integSetup.client, + testDirPath + ); + assert.ok(dir !== undefined); + assert.ok(isDirectory(dir)); + rootDir = dir; + }); + + it("should should create a directory", async () => { + const dirPath = `test-${randomUUID()}`; + const createdDir = await rootDir.mkdir(dirPath); + + assert.ok(createdDir !== undefined); + assert.ok(createdDir.type === "DIRECTORY"); + assert.ok(createdDir.path === posix.join(testDirPath, dirPath)); + assert.ok((await createdDir.parent)?.path === testDirPath); + }); + + it("should list a directory", async () => { + const newDirs = []; + for (let i = 0; i < 5; i++) { + const dirName = `test-${randomUUID()}`; + newDirs.push(dirName); + await rootDir.mkdir(dirName); + } + + const actual = await rootDir.children; + + newDirs.forEach((dirName) => { + assert.ok( + actual.find( + (e) => e.path === posix.join(testDirPath, dirName) + ) !== undefined + ); + }); + }); + + it("should not allow creation of directory in invalid paths", async () => { + const dirName = `test-${randomUUID()}`; + const dir = await rootDir.mkdir(dirName); + assert.ok(dir !== undefined); + + await assert.rejects(async () => await dir.mkdir("/a")); + await assert.rejects(async () => await dir.mkdir("../../a")); + await assert.doesNotReject( + async () => await dir.mkdir(`../${dirName}/a`) + ); + }); + + it("should create a file", async () => { + const file = await rootDir.createFile("test.txt", "some content"); + assert.ok(file?.details.path === path.join(rootDir.path, "test.txt")); + + const content = await integSetup.client.workspace.export({ + path: file.details.path, + }); + + assert.ok(content.content !== undefined); + const buff = Buffer.from(content.content, "base64"); + assert.equal(buff.toString("utf-8"), "some content"); + }); +}); diff --git a/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFsDir.test.ts b/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFsDir.test.ts new file mode 100644 index 000000000..d1ccb8bb0 --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFsDir.test.ts @@ -0,0 +1,60 @@ +import assert from "assert"; +import {posix} from "path"; +import {anything, deepEqual, instance, mock, when} from "ts-mockito"; +import {WorkspaceClient, workspace} from "@databricks/databricks-sdk"; +import {isDirectory} from "./utils"; +import {WorkspaceFsEntity} from "./WorkspaceFsEntity"; + +describe(__filename, () => { + let mockWorkspaceClient: WorkspaceClient; + let mockWorkspaceService: workspace.WorkspaceService; + + before(() => { + mockWorkspaceClient = mock(WorkspaceClient); + mockWorkspaceService = mock(workspace.WorkspaceService); + when(mockWorkspaceClient.workspace).thenReturn( + instance(mockWorkspaceService) + ); + }); + + function mockDirectory(path: string) { + when( + mockWorkspaceService.getStatus(deepEqual({path}), anything()) + ).thenResolve({ + // eslint-disable-next-line @typescript-eslint/naming-convention + object_type: "DIRECTORY", + // eslint-disable-next-line @typescript-eslint/naming-convention + object_id: 123, + path: path, + }); + } + + it("should return correct absolute child path", async () => { + const path = "/root/a/b"; + mockDirectory(path); + + const root = await WorkspaceFsEntity.fromPath( + instance(mockWorkspaceClient), + path + ); + assert.ok(isDirectory(root)); + + 
assert.equal(root.getAbsoluteChildPath(path), path); + assert.equal( + root.getAbsoluteChildPath(posix.resolve(path, "..", "..")), + undefined + ); + assert.equal( + root.getAbsoluteChildPath(posix.resolve(path, "..")), + undefined + ); + assert.ok( + root.getAbsoluteChildPath(posix.resolve(path, "c", "..", "..")) === + undefined + ); + assert.ok( + root.getAbsoluteChildPath(posix.resolve(path, "c", "d")) === + posix.resolve(path, "c", "d") + ); + }); +}); diff --git a/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFsDir.ts b/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFsDir.ts new file mode 100644 index 000000000..e8258acbf --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFsDir.ts @@ -0,0 +1,128 @@ +import {posix} from "path"; +import {Context, context} from "@databricks/databricks-sdk/dist/context"; +import {WorkspaceFsEntity} from "./WorkspaceFsEntity"; +import {logging} from "@databricks/databricks-sdk"; +import {isDirectory, isFile} from "./utils"; + +export class WorkspaceFsDir extends WorkspaceFsEntity { + override async generateUrl(host: URL): Promise { + return `${host.host}#folder/${this.details.object_id}`; + } + + public getAbsoluteChildPath(path: string) { + //Since this.path returns path value from details returned by the API, + //it is always absolute. So we can directly use it here. + const resolved = posix.resolve(this.path, path); + const relative = posix.relative(this.path, resolved); + + if ( + !posix.isAbsolute(relative) && + !relative.startsWith(".." + posix.sep) && + relative !== ".." + ) { + return resolved; + } + + return undefined; + } + + @logging.withLogContext(logging.ExposedLoggers.SDK) + async mkdir(path: string, @context ctx?: Context) { + const validPath = this.getAbsoluteChildPath(path); + if (!validPath) { + const err = new Error( + `Can't create ${path} as child of ${this.path}: Invalid path` + ); + ctx?.logger?.error( + `Can't create ${path} as child of ${this.path}`, + err + ); + throw err; + } + + try { + await this._workspaceFsService.mkdirs({path: validPath}); + } catch (e: unknown) { + let err: any = e; + if (e instanceof Error) { + if (e.message.includes("RESOURCE_ALREADY_EXISTS")) { + err = new Error( + `Can't create ${path} as child of ${this.path}: A file with same path exists` + ); + } + } + ctx?.logger?.error( + `Can't create ${path} as child of ${this.path}`, + err + ); + throw err; + } + + const entity = await WorkspaceFsEntity.fromPath( + this.wsClient, + validPath, + ctx + ); + if (isDirectory(entity)) { + return entity; + } + + return undefined; + } + + @logging.withLogContext(logging.ExposedLoggers.SDK) + async createFile( + path: string, + content: string, + overwrite = true, + @context ctx?: Context + ) { + const validPath = this.getAbsoluteChildPath(path); + if (!validPath) { + const err = new Error( + `Can't create ${path} as child of ${this.path}: Invalid path` + ); + ctx?.logger?.error( + `Can't create ${path} as child of ${this.path}`, + err + ); + throw err; + } + + try { + await this._workspaceFsService.import( + { + path: validPath, + overwrite, + format: "AUTO", + content: Buffer.from(content).toString("base64"), + }, + ctx + ); + } catch (e) { + ctx?.logger?.error("Error writing ${validPath} file", e); + throw e; + } + + let entity = await WorkspaceFsEntity.fromPath( + this.wsClient, + validPath, + ctx + ); + + if (entity === undefined) { + //try to read notebook + entity = await WorkspaceFsEntity.fromPath( + this.wsClient, + 
validPath.replace(/^(\/.*)\.(py|ipynb|scala|r|sql)/g, "$1"), + ctx + ); + } + + if (isFile(entity)) { + return entity; + } + } +} + +export class WorkspaceFsRepo extends WorkspaceFsDir {} diff --git a/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFsEntity.ts b/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFsEntity.ts new file mode 100644 index 000000000..36ec0240f --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFsEntity.ts @@ -0,0 +1,197 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import {posix} from "path"; +import { + workspace, + WorkspaceClient, + ApiError, + logging, +} from "@databricks/databricks-sdk"; +import {context, Context} from "@databricks/databricks-sdk/dist/context"; + +const {ExposedLoggers, withLogContext} = logging; + +export class ObjectInfoValidationError extends Error { + constructor( + message: string, + readonly details: workspace.ObjectInfo + ) { + super(message); + } +} + +class RequiredFields { + "object_id" = ""; + "object_type" = ""; + "path" = ""; +} + +type RequiredObjectInfo = workspace.ObjectInfo & RequiredFields; + +export abstract class WorkspaceFsEntity { + protected _workspaceFsService: workspace.WorkspaceService; + private _children?: Array; + private _details: RequiredObjectInfo; + + constructor( + protected readonly wsClient: WorkspaceClient, + details: workspace.ObjectInfo + ) { + this._workspaceFsService = wsClient.workspace; + this._details = this.validateDetails(details); + } + + @withLogContext(ExposedLoggers.SDK) + private validateDetails( + details: workspace.ObjectInfo, + @context ctx?: Context + ): RequiredObjectInfo { + Object.keys(new RequiredFields()).forEach((field) => { + if (details[field as keyof workspace.ObjectInfo] === undefined) { + const err = new ObjectInfoValidationError( + `These fields are required for fs objects (${Object.keys( + new RequiredFields() + ).join(", ")})`, + details + ); + ctx?.logger?.error("ObjectInfo validation error", err); + throw err; + } + }); + + return details as RequiredObjectInfo; + } + + set details(details: workspace.ObjectInfo) { + this._details = this.validateDetails(details); + } + + get details() { + return this._details; + } + + get path() { + return this._details.path; + } + + protected abstract generateUrl(host: URL): Promise; + + get url() { + return new Promise((resolve) => { + this.wsClient.apiClient.host.then((host) => + resolve(this.generateUrl(host)) + ); + }); + } + + get type() { + return this._details.object_type; + } + + get id() { + return this._details.object_id; + } + + protected async fetchChildren() { + const children: Array = []; + + for await (const child of this._workspaceFsService.list({ + path: this.path, + })) { + const entity = await entityFromObjInfo(this.wsClient, child); + if (entity) { + children.push(entity); + } + } + + this._children = children; + } + + @withLogContext(ExposedLoggers.SDK) + async refresh(@context ctx?: Context) { + this._children = undefined; + + try { + const details = await this._workspaceFsService.getStatus( + { + path: this.path, + }, + ctx + ); + + this.details = details; + return this; + } catch (e: unknown) { + if (e instanceof ApiError) { + if (e.errorCode === "RESOURCE_DOES_NOT_EXIST") { + return undefined; + } + } + } + } + + get children() { + return new Promise>((resolve) => { + if (this._children === undefined) { + this.fetchChildren().then(() => resolve(this._children ?? 
[])); + } else { + resolve(this._children); + } + }); + } + + @withLogContext(ExposedLoggers.SDK, "WorkspaceFsEntity.fromPath") + static async fromPath( + wsClient: WorkspaceClient, + path: string, + @context ctx?: Context + ) { + try { + const entity = entityFromObjInfo( + wsClient, + await wsClient.workspace.getStatus({path}, ctx) + ); + return entity; + } catch (e) { + if ( + e instanceof ApiError && + e.errorCode === "RESOURCE_DOES_NOT_EXIST" + ) { + return undefined; + } + + throw e; + } + } + + get parent(): Promise { + const parentPath = posix.dirname(this.path); + return WorkspaceFsEntity.fromPath(this.wsClient, parentPath); + } + + get basename(): string { + return posix.basename(this.path); + } +} + +async function entityFromObjInfo( + wsClient: WorkspaceClient, + details: workspace.ObjectInfo +) { + // lazy import to avoid circular dependency + const {WorkspaceFsDir, WorkspaceFsRepo} = await import("./WorkspaceFsDir"); + const {WorkspaceFsFile, WorkspaceFsNotebook} = await import( + "./WorkspaceFsFile" + ); + + switch (details.object_type) { + case "DIRECTORY": + return new WorkspaceFsDir(wsClient, details); + case "REPO": + return new WorkspaceFsRepo(wsClient, details); + case "FILE": + return new WorkspaceFsFile(wsClient, details); + case "NOTEBOOK": + return new WorkspaceFsNotebook(wsClient, details); + } + return undefined; +} diff --git a/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFsFile.ts b/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFsFile.ts new file mode 100644 index 000000000..f60523d07 --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/wsfs/WorkspaceFsFile.ts @@ -0,0 +1,17 @@ +import {WorkspaceFsEntity} from "./WorkspaceFsEntity"; + +export class WorkspaceFsFile extends WorkspaceFsEntity { + override get children() { + return Promise.resolve([]); + } + + override async generateUrl(host: URL): Promise { + return `${host.host}#folder/${(await this.parent)?.id ?? 
""}`; + } +} + +export class WorkspaceFsNotebook extends WorkspaceFsFile { + get language() { + return this.details.language; + } +} diff --git a/packages/databricks-vscode/src/sdk-extensions/wsfs/index.ts b/packages/databricks-vscode/src/sdk-extensions/wsfs/index.ts new file mode 100644 index 000000000..aeafedf2c --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/wsfs/index.ts @@ -0,0 +1,4 @@ +export * from "./WorkspaceFsEntity"; +export * from "./WorkspaceFsDir"; +export * from "./WorkspaceFsFile"; +export * as WorkspaceFsUtils from "./utils"; diff --git a/packages/databricks-vscode/src/sdk-extensions/wsfs/utils.test.ts b/packages/databricks-vscode/src/sdk-extensions/wsfs/utils.test.ts new file mode 100644 index 000000000..cfdc44f02 --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/wsfs/utils.test.ts @@ -0,0 +1,109 @@ +/* eslint-disable @typescript-eslint/naming-convention */ +import assert from "node:assert"; +import {anything, deepEqual, instance, mock, when} from "ts-mockito"; +import {WorkspaceClient, workspace} from "@databricks/databricks-sdk"; +import {isDirectory, isFile, isNotebook, isRepo} from "./utils"; +import {WorkspaceFsEntity} from "./WorkspaceFsEntity"; + +describe(__filename, () => { + let mockWorkspaceClient: WorkspaceClient; + + beforeEach(() => { + mockWorkspaceClient = mock(WorkspaceClient); + const mockWorkspaceService = mock(workspace.WorkspaceService); + when(mockWorkspaceClient.workspace).thenReturn( + instance(mockWorkspaceService) + ); + + when( + mockWorkspaceService.getStatus( + deepEqual({path: "/file"}), + anything() + ) + ).thenResolve({ + path: "/file", + object_id: 12345, + object_type: "FILE", + language: "PYTHON", + }); + + when( + mockWorkspaceService.getStatus( + deepEqual({path: "/notebook"}), + anything() + ) + ).thenResolve({ + path: "/notebook", + object_id: 12345, + object_type: "NOTEBOOK", + language: "PYTHON", + }); + + when( + mockWorkspaceService.getStatus( + deepEqual({path: "/dir"}), + anything() + ) + ).thenResolve({ + path: "/dir", + object_id: 12345, + object_type: "DIRECTORY", + }); + + when( + mockWorkspaceService.getStatus( + deepEqual({path: "/repo"}), + anything() + ) + ).thenResolve({ + path: "/repo", + object_id: 12345, + object_type: "REPO", + }); + }); + + it("should type discriminate files", async () => { + const file = await WorkspaceFsEntity.fromPath( + instance(mockWorkspaceClient), + "/file" + ); + assert.ok(isFile(file)); + assert.ok(!isNotebook(file)); + assert.ok(!isDirectory(file)); + assert.ok(!isRepo(file)); + }); + + it("should type discriminate notebook", async () => { + const file = await WorkspaceFsEntity.fromPath( + instance(mockWorkspaceClient), + "/notebook" + ); + + assert.ok(isFile(file)); + assert.ok(isNotebook(file)); + assert.ok(!isDirectory(file)); + assert.ok(!isRepo(file)); + }); + + it("should type discriminate dir", async () => { + const file = await WorkspaceFsEntity.fromPath( + instance(mockWorkspaceClient), + "/dir" + ); + assert.ok(!isFile(file)); + assert.ok(!isNotebook(file)); + assert.ok(isDirectory(file)); + assert.ok(!isRepo(file)); + }); + + it("should type discriminate repo", async () => { + const file = await WorkspaceFsEntity.fromPath( + instance(mockWorkspaceClient), + "/repo" + ); + assert.ok(!isFile(file)); + assert.ok(!isNotebook(file)); + assert.ok(isDirectory(file)); + assert.ok(isRepo(file)); + }); +}); diff --git a/packages/databricks-vscode/src/sdk-extensions/wsfs/utils.ts b/packages/databricks-vscode/src/sdk-extensions/wsfs/utils.ts new file mode 
100644 index 000000000..4c8a9e527 --- /dev/null +++ b/packages/databricks-vscode/src/sdk-extensions/wsfs/utils.ts @@ -0,0 +1,35 @@ +import {WorkspaceFsEntity} from "./WorkspaceFsEntity"; +import {WorkspaceFsDir, WorkspaceFsRepo} from "./WorkspaceFsDir"; +import {WorkspaceFsFile, WorkspaceFsNotebook} from "./WorkspaceFsFile"; + +export function isDirectory( + entity?: WorkspaceFsEntity +): entity is WorkspaceFsDir { + if (entity?.type === "DIRECTORY" || entity?.type === "REPO") { + return true; + } + return false; +} + +export function isRepo(entity?: WorkspaceFsEntity): entity is WorkspaceFsRepo { + if (entity?.type === "REPO") { + return true; + } + return false; +} + +export function isFile(entity?: WorkspaceFsEntity): entity is WorkspaceFsFile { + if (entity?.type === "FILE" || entity?.type === "NOTEBOOK") { + return true; + } + return false; +} + +export function isNotebook( + entity?: WorkspaceFsEntity +): entity is WorkspaceFsNotebook { + if (entity?.type === "NOTEBOOK") { + return true; + } + return false; +} diff --git a/packages/databricks-vscode/src/sync/SyncDestination.ts b/packages/databricks-vscode/src/sync/SyncDestination.ts index 626e9af7c..36638619e 100644 --- a/packages/databricks-vscode/src/sync/SyncDestination.ts +++ b/packages/databricks-vscode/src/sync/SyncDestination.ts @@ -1,9 +1,5 @@ -import { - WorkspaceClient, - WorkspaceFsEntity, - WorkspaceFsUtils, -} from "@databricks/databricks-sdk"; -import {NamedLogger} from "@databricks/databricks-sdk/dist/logging"; +import {WorkspaceClient, logging} from "@databricks/databricks-sdk"; +import {WorkspaceFsEntity, WorkspaceFsUtils} from "../sdk-extensions"; import path = require("path"); import {Uri} from "vscode"; import {Loggers} from "../logger"; @@ -40,7 +36,10 @@ export class RemoteUri extends DatabricksUri { const err = new Error( `Remote file URI scheme must be wsfs. Found ${uri.scheme} (${uri.path})` ); - NamedLogger.getOrCreate(Loggers.Extension).error(err.message, err); + logging.NamedLogger.getOrCreate(Loggers.Extension).error( + err.message, + err + ); throw err; } @@ -125,7 +124,10 @@ export class LocalUri extends DatabricksUri { const err = new Error( `Local file URI scheme must be file. 
Found ${uri.scheme} (${uri.fsPath})` ); - NamedLogger.getOrCreate(Loggers.Extension).error(err.message, err); + logging.NamedLogger.getOrCreate(Loggers.Extension).error( + err.message, + err + ); throw err; } diff --git a/packages/databricks-vscode/src/test/e2e/tsconfig.json b/packages/databricks-vscode/src/test/e2e/tsconfig.json index 1d9cc20b3..e67756e32 100644 --- a/packages/databricks-vscode/src/test/e2e/tsconfig.json +++ b/packages/databricks-vscode/src/test/e2e/tsconfig.json @@ -16,6 +16,7 @@ "isolatedModules": true, "sourceMap": true, "noImplicitAny": false, + "experimentalDecorators": true, "strict": true /* enable all strict type-checking options */, "forceConsistentCasingInFileNames": true, "esModuleInterop": true diff --git a/packages/databricks-vscode/src/test/e2e/wdio.conf.ts b/packages/databricks-vscode/src/test/e2e/wdio.conf.ts index cc7d3663c..7d8ca9659 100644 --- a/packages/databricks-vscode/src/test/e2e/wdio.conf.ts +++ b/packages/databricks-vscode/src/test/e2e/wdio.conf.ts @@ -8,13 +8,7 @@ import path from "node:path"; import {fileURLToPath} from "url"; import assert from "assert"; import fs from "fs/promises"; -import { - WorkspaceClient, - Cluster, - Repo, - WorkspaceFsEntity, - WorkspaceFsUtils, -} from "@databricks/databricks-sdk"; +import {Config, WorkspaceClient, workspace} from "@databricks/databricks-sdk"; import * as ElementCustomCommands from "./customCommands/elementCustomCommands.ts"; import {execFile, ExecFileOptions} from "node:child_process"; import {cpSync, mkdirSync, rmSync} from "node:fs"; @@ -251,10 +245,13 @@ export const config: Options.Testrunner = { onPrepare: async function () { try { mkdirSync(EXTENSION_DIR, {recursive: true}); - assert( - process.env["DATABRICKS_TOKEN"], - "Environment variable DATABRICKS_TOKEN must be set" - ); + + const config = new Config({}); + await config.ensureResolved(); + + assert(config.host, "Config host must be set"); + assert(config.token, "Config token must be set"); + assert( process.env["TEST_DEFAULT_CLUSTER_ID"], "Environment variable TEST_DEFAULT_CLUSTER_ID must be set" @@ -263,15 +260,13 @@ export const config: Options.Testrunner = { await fs.rm(WORKSPACE_PATH, {recursive: true, force: true}); await fs.mkdir(WORKSPACE_PATH); - const client = getWorkspaceClient( - getHost(), - process.env["DATABRICKS_TOKEN"] - ); + const client = getWorkspaceClient(config); const repoPath = await createRepo(client); const workspaceFolderPath = await createWsFolder(client); - const configFile = await writeDatabricksConfig(); + const configFile = await writeDatabricksConfig(config); await startCluster(client, process.env["TEST_DEFAULT_CLUSTER_ID"]); + process.env.DATABRICKS_HOST = config.host!; process.env.DATABRICKS_CONFIG_FILE = configFile; process.env.WORKSPACE_PATH = WORKSPACE_PATH; process.env.TEST_REPO_PATH = repoPath; @@ -530,11 +525,7 @@ export const config: Options.Testrunner = { // } }; -async function writeDatabricksConfig() { - assert( - process.env["DATABRICKS_TOKEN"], - "Environment variable DATABRICKS_TOKEN must be set" - ); +async function writeDatabricksConfig(config: Config) { assert( process.env["TEST_DEFAULT_CLUSTER_ID"], "Environment variable TEST_DEFAULT_CLUSTER_ID must be set" @@ -544,25 +535,18 @@ async function writeDatabricksConfig() { await fs.writeFile( configFile, `[DEFAULT] -host = ${getHost()} -token = ${process.env["DATABRICKS_TOKEN"]}` +host = ${config.host!} +token = ${config.token!}` ); return configFile; } -function getWorkspaceClient(host: string, token: string) { - const client = new 
WorkspaceClient( - { - host, - token, - authType: "pat", - }, - { - product: "integration-tests", - productVersion: "0.0.1", - } - ); +function getWorkspaceClient(config: Config) { + const client = new WorkspaceClient(config, { + product: "integration-tests", + productVersion: "0.0.1", + }); return client; } @@ -576,18 +560,26 @@ async function createRepo(workspaceClient: WorkspaceClient): Promise { console.log(`Creating test Repo: ${repoPath}`); - let repo: Repo; + let repo: workspace.RepoInfo | undefined; try { - repo = await Repo.fromPath(workspaceClient.apiClient, repoPath); + for await (const r of workspaceClient.repos.list({ + path_prefix: repoPath, + })) { + if (r.path === repoPath) { + repo = r; + break; + } + } + assert(repo, `Couldn't find repo at ${repoPath}`); } catch (e) { - repo = await Repo.create(workspaceClient.apiClient, { + repo = await workspaceClient.repos.create({ path: repoPath, url: "https://github.com/fjakobs/empty-repo.git", provider: "github", }); } - return repo.path; + return repo.path!; } /** @@ -602,14 +594,12 @@ async function createWsFolder( console.log(`Creating test Workspace Folder: ${wsFolderPath}`); await workspaceClient.workspace.mkdirs({path: wsFolderPath}); - const repo = await WorkspaceFsEntity.fromPath( - workspaceClient, - wsFolderPath - ); - if (WorkspaceFsUtils.isDirectory(repo)) { - return repo.path; - } - throw Error(`Couldn't create worspace folder at ${wsFolderPath}`); + const status = await workspaceClient.workspace.getStatus({ + path: wsFolderPath, + }); + + assert.equal(status.object_type, "DIRECTORY"); + return status.path!; } async function startCluster( @@ -617,26 +607,18 @@ async function startCluster( clusterId: string ) { console.log(`Starting cluster: ${clusterId}`); - const cluster = await Cluster.fromClusterId( - workspaceClient.apiClient, - clusterId - ); - await cluster.start(undefined, (state) => - console.log(`Cluster state: ${state}`) - ); - console.log(`Cluster started`); -} -function getHost() { - assert( - process.env["DATABRICKS_HOST"], - "Environment variable DATABRICKS_HOST must be set" - ); - - let host = process.env["DATABRICKS_HOST"]; - if (!host.startsWith("http")) { - host = `https://${host}`; + try { + await ( + await workspaceClient.clusters.start({ + cluster_id: clusterId, + }) + ).wait(); + } catch (e: any) { + if (e.errorCode !== "INVALID_STATE") { + throw e; + } } - return host; + console.log(`Cluster started`); } diff --git a/packages/databricks-vscode/src/test/suite.ts b/packages/databricks-vscode/src/test/suite.ts index d4336190e..2d0a05894 100644 --- a/packages/databricks-vscode/src/test/suite.ts +++ b/packages/databricks-vscode/src/test/suite.ts @@ -11,7 +11,9 @@ export async function run(): Promise { // Add files to the test suite const testsRoot = path.resolve(__dirname, ".."); - const files = await glob("**/**.test.js", {cwd: testsRoot}); + const files = await glob("**/**.test.js", { + cwd: testsRoot, + }); files.forEach((f) => mocha.addFile(path.resolve(testsRoot, f))); return await new Promise((resolve, reject) => { diff --git a/packages/databricks-vscode/src/utils/UtilsCommands.ts b/packages/databricks-vscode/src/utils/UtilsCommands.ts index 3aacc84e2..b1f50f9aa 100644 --- a/packages/databricks-vscode/src/utils/UtilsCommands.ts +++ b/packages/databricks-vscode/src/utils/UtilsCommands.ts @@ -1,4 +1,4 @@ -import {Cluster} from "@databricks/databricks-sdk"; +import {Cluster} from "../sdk-extensions"; import {Disposable} from "vscode"; import {ConfigurationTreeItem} from 
"../configuration/ConfigurationDataProvider"; import {openExternal} from "./urlUtils"; diff --git a/packages/databricks-vscode/src/utils/envVarGenerators.test.ts b/packages/databricks-vscode/src/utils/envVarGenerators.test.ts index eed2cfa95..301f7b6c0 100644 --- a/packages/databricks-vscode/src/utils/envVarGenerators.test.ts +++ b/packages/databricks-vscode/src/utils/envVarGenerators.test.ts @@ -2,7 +2,8 @@ import {anything, instance, mock, when} from "ts-mockito"; import {ConnectionManager} from "../configuration/ConnectionManager"; import {DatabricksWorkspace} from "../configuration/DatabricksWorkspace"; -import {ApiClient, Cluster, Config} from "@databricks/databricks-sdk"; +import {ApiClient, Config} from "@databricks/databricks-sdk"; +import {Cluster} from "../sdk-extensions"; import { getAuthEnvVars, getDbConnectEnvVars, diff --git a/packages/databricks-vscode/src/utils/envVarGenerators.ts b/packages/databricks-vscode/src/utils/envVarGenerators.ts index ca54306e1..d08c0d6c7 100644 --- a/packages/databricks-vscode/src/utils/envVarGenerators.ts +++ b/packages/databricks-vscode/src/utils/envVarGenerators.ts @@ -2,7 +2,7 @@ import {Loggers} from "../logger"; import {readFile} from "fs/promises"; import {Uri} from "vscode"; import {FeatureManager} from "../feature-manager/FeatureManager"; -import {NamedLogger} from "@databricks/databricks-sdk/dist/logging"; +import {logging} from "@databricks/databricks-sdk"; import {NotebookInitScriptManager} from "../language/notebooks/NotebookInitScriptManager"; import {ConnectionManager} from "../configuration/ConnectionManager"; @@ -23,7 +23,7 @@ export async function getUserEnvVars(userEnvPath: Uri) { return prev; }, {}); } catch (e: unknown) { - NamedLogger.getOrCreate(Loggers.Extension).error( + logging.NamedLogger.getOrCreate(Loggers.Extension).error( "Can't load .env file", e ); diff --git a/packages/databricks-vscode/src/utils/fileUtils.test.ts b/packages/databricks-vscode/src/utils/fileUtils.test.ts index c3dee1f3a..6b9486fc6 100644 --- a/packages/databricks-vscode/src/utils/fileUtils.test.ts +++ b/packages/databricks-vscode/src/utils/fileUtils.test.ts @@ -1,18 +1,25 @@ import assert from "assert"; import fs from "fs/promises"; -import {withFile} from "tmp-promise"; +import path from "path"; +import {withDir} from "tmp-promise"; import {LocalUri} from "../sync/SyncDestination"; import {isNotebook} from "./fileUtils"; describe(__filename, async () => { it("should detect notebook", async () => { - withFile(async (file) => { - await fs.writeFile( - file.path, - Buffer.from("# Databricks notebook source\ncontent") - ); - assert.ok(await isNotebook(new LocalUri(file.path))); - }); + await withDir( + async (dir) => { + const notebookPath = path.join(dir.path, "notebook.py"); + await fs.writeFile( + notebookPath, + Buffer.from("# Databricks notebook source\ncontent") + ); + assert.ok(await isNotebook(new LocalUri(notebookPath))); + }, + { + unsafeCleanup: true, + } + ); }); it("should detect ipynb files", async () => { @@ -20,9 +27,15 @@ describe(__filename, async () => { }); it("should detect if not notebook", async () => { - withFile(async (file) => { - await fs.writeFile(file.path, Buffer.from("content")); - assert.ok(!(await isNotebook(new LocalUri(file.path)))); - }); + await withDir( + async (dir) => { + const notebookPath = path.join(dir.path, "notebook.py"); + await fs.writeFile(notebookPath, Buffer.from("content")); + assert.ok(!(await isNotebook(new LocalUri(notebookPath)))); + }, + { + unsafeCleanup: true, + } + ); }); }); diff --git 
a/packages/databricks-vscode/src/workspace-fs/WorkspaceFsAccessVerifier.ts b/packages/databricks-vscode/src/workspace-fs/WorkspaceFsAccessVerifier.ts index e31db3a13..42126ec90 100644 --- a/packages/databricks-vscode/src/workspace-fs/WorkspaceFsAccessVerifier.ts +++ b/packages/databricks-vscode/src/workspace-fs/WorkspaceFsAccessVerifier.ts @@ -1,8 +1,4 @@ -import { - Cluster, - WorkspaceFsEntity, - WorkspaceFsUtils, -} from "@databricks/databricks-sdk"; +import {Cluster, WorkspaceFsEntity, WorkspaceFsUtils} from "../sdk-extensions"; import {commands, Disposable, window, EventEmitter} from "vscode"; import {ConnectionManager} from "../configuration/ConnectionManager"; import {CodeSynchronizer} from "../sync"; diff --git a/packages/databricks-vscode/src/workspace-fs/WorkspaceFsCommands.ts b/packages/databricks-vscode/src/workspace-fs/WorkspaceFsCommands.ts index 3dbbae8d9..74aac5a20 100644 --- a/packages/databricks-vscode/src/workspace-fs/WorkspaceFsCommands.ts +++ b/packages/databricks-vscode/src/workspace-fs/WorkspaceFsCommands.ts @@ -1,11 +1,10 @@ +import {ApiError, logging} from "@databricks/databricks-sdk"; import { - ApiError, WorkspaceFsDir, WorkspaceFsEntity, WorkspaceFsUtils, -} from "@databricks/databricks-sdk"; +} from "../sdk-extensions"; import {context, Context} from "@databricks/databricks-sdk/dist/context"; -import {withLogContext} from "@databricks/databricks-sdk/dist/logging"; import {Disposable, Uri, window} from "vscode"; import {ConnectionManager} from "../configuration/ConnectionManager"; import {RemoteUri, REPO_NAME_SUFFIX} from "../sync/SyncDestination"; @@ -16,6 +15,8 @@ import {WorkspaceFsDataProvider} from "./WorkspaceFsDataProvider"; import path from "node:path"; import {WorkspaceStateManager} from "../vscode-objs/WorkspaceState"; +const withLogContext = logging.withLogContext; + export class WorkspaceFsCommands implements Disposable { private disposables: Disposable[] = []; diff --git a/packages/databricks-vscode/src/workspace-fs/WorkspaceFsDataProvider.ts b/packages/databricks-vscode/src/workspace-fs/WorkspaceFsDataProvider.ts index b30b704b9..29a04987b 100644 --- a/packages/databricks-vscode/src/workspace-fs/WorkspaceFsDataProvider.ts +++ b/packages/databricks-vscode/src/workspace-fs/WorkspaceFsDataProvider.ts @@ -1,4 +1,4 @@ -import {WorkspaceFsEntity} from "@databricks/databricks-sdk"; +import {WorkspaceFsEntity} from "../sdk-extensions"; import {posix} from "path"; import { Disposable, diff --git a/packages/databricks-vscode/src/workspace-fs/WorkspaceFsWorkflowWrapper.test.ts b/packages/databricks-vscode/src/workspace-fs/WorkspaceFsWorkflowWrapper.test.ts index 86c96de8d..8320a1ad3 100644 --- a/packages/databricks-vscode/src/workspace-fs/WorkspaceFsWorkflowWrapper.test.ts +++ b/packages/databricks-vscode/src/workspace-fs/WorkspaceFsWorkflowWrapper.test.ts @@ -1,6 +1,5 @@ /* eslint-disable @typescript-eslint/naming-convention */ -import {ApiError, WorkspaceClient} from "@databricks/databricks-sdk"; -import {WorkspaceService} from "@databricks/databricks-sdk/dist/apis/workspace"; +import {ApiError, WorkspaceClient, workspace} from "@databricks/databricks-sdk"; import { instance, mock, @@ -18,7 +17,7 @@ import {readFile, writeFile} from "fs/promises"; import {withFile} from "tmp-promise"; describe(__filename, async () => { - let mockWorkspaceService: WorkspaceService; + let mockWorkspaceService: workspace.WorkspaceService; let mockConnectionManager: ConnectionManager; let mockExtensionContext: ExtensionContext; const testDirPath = "/Users/me/testdir"; @@ -45,7 
diff --git a/packages/databricks-vscode/src/workspace-fs/WorkspaceFsWorkflowWrapper.test.ts b/packages/databricks-vscode/src/workspace-fs/WorkspaceFsWorkflowWrapper.test.ts
index 86c96de8d..8320a1ad3 100644
--- a/packages/databricks-vscode/src/workspace-fs/WorkspaceFsWorkflowWrapper.test.ts
+++ b/packages/databricks-vscode/src/workspace-fs/WorkspaceFsWorkflowWrapper.test.ts
@@ -1,6 +1,5 @@
 /* eslint-disable @typescript-eslint/naming-convention */
-import {ApiError, WorkspaceClient} from "@databricks/databricks-sdk";
-import {WorkspaceService} from "@databricks/databricks-sdk/dist/apis/workspace";
+import {ApiError, WorkspaceClient, workspace} from "@databricks/databricks-sdk";
 import {
     instance,
     mock,
@@ -18,7 +17,7 @@ import {readFile, writeFile} from "fs/promises";
 import {withFile} from "tmp-promise";

 describe(__filename, async () => {
-    let mockWorkspaceService: WorkspaceService;
+    let mockWorkspaceService: workspace.WorkspaceService;
     let mockConnectionManager: ConnectionManager;
     let mockExtensionContext: ExtensionContext;
     const testDirPath = "/Users/me/testdir";
@@ -45,7 +44,7 @@ describe(__filename, async () => {
     }

     function createMocks() {
-        mockWorkspaceService = mock(WorkspaceService);
+        mockWorkspaceService = mock(workspace.WorkspaceService);
         when(
             mockWorkspaceService.getStatus(
                 objectContaining({
diff --git a/packages/databricks-vscode/src/workspace-fs/WorkspaceFsWorkflowWrapper.ts b/packages/databricks-vscode/src/workspace-fs/WorkspaceFsWorkflowWrapper.ts
index 70d84dc96..5540bf043 100644
--- a/packages/databricks-vscode/src/workspace-fs/WorkspaceFsWorkflowWrapper.ts
+++ b/packages/databricks-vscode/src/workspace-fs/WorkspaceFsWorkflowWrapper.ts
@@ -1,8 +1,5 @@
-import {
-    WorkspaceFsEntity,
-    logging,
-    WorkspaceFsUtils,
-} from "@databricks/databricks-sdk";
+import {logging} from "@databricks/databricks-sdk";
+import {WorkspaceFsEntity, WorkspaceFsUtils} from "../sdk-extensions";
 import {Context, context} from "@databricks/databricks-sdk/dist/context";
 import {readFile} from "fs/promises";
 import path from "path";
diff --git a/packages/databricks-vscode/src/workspace-fs/createDirectoryWizard.ts b/packages/databricks-vscode/src/workspace-fs/createDirectoryWizard.ts
index c8dea89ec..a53468b8d 100644
--- a/packages/databricks-vscode/src/workspace-fs/createDirectoryWizard.ts
+++ b/packages/databricks-vscode/src/workspace-fs/createDirectoryWizard.ts
@@ -1,6 +1,6 @@
 import {Uri, window} from "vscode";
 import path from "path";
-import {WorkspaceFsDir} from "@databricks/databricks-sdk";
+import {WorkspaceFsDir} from "../sdk-extensions";
 import {workspaceConfigs} from "../vscode-objs/WorkspaceConfigs";

 export async function createDirWizard(
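With `WorkspaceService` re-exported through the SDK's `workspace` namespace, test doubles no longer need the `dist/apis/workspace` deep path. A sketch of the resulting ts-mockito setup (the `getStatus` response fields shown are an assumption for illustration):

```typescript
import {workspace} from "@databricks/databricks-sdk";
import {instance, mock, objectContaining, when} from "ts-mockito";

const testDirPath = "/Users/me/testdir";

// Mock the namespaced service class directly.
const mockWorkspaceService = mock(workspace.WorkspaceService);
when(
    mockWorkspaceService.getStatus(objectContaining({path: testDirPath}))
).thenResolve({
    path: testDirPath,
    object_type: "DIRECTORY", // assumed response shape for this sketch
});

// Hand the real-looking instance to code under test.
const service = instance(mockWorkspaceService);
```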
diff --git a/yarn.lock b/yarn.lock
index 4b41e6897..d4ce768e5 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -376,7 +376,7 @@ __metadata:
     inversify: ^6.0.1
     reflect-metadata: ^0.1.13
     semver: ^7.5.4
-  checksum: 2e28fb154da0743e5b26cd3bdb1dba6f286e1f34f41d64291fca872ae25678f44a7780bd965f8166632f5a2303a00ecc6def5a9570d09e8d37bcd6546d489cd3
+  checksum: 2fbf7327009b6c0dd7e04ca2980c1a2245e115fcd60701d756824be128697b78307f4f857182810772df4a70e7fdab73313ff75a6034f153371f9f9434a73fa3
   languageName: node
   linkType: hard
@@ -391,7 +391,7 @@ __metadata:
     inversify: ^6.0.1
     reflect-metadata: ^0.1.13
     semver: ^7.5.4
-  checksum: 2e28fb154da0743e5b26cd3bdb1dba6f286e1f34f41d64291fca872ae25678f44a7780bd965f8166632f5a2303a00ecc6def5a9570d09e8d37bcd6546d489cd3
+  checksum: 2fbf7327009b6c0dd7e04ca2980c1a2245e115fcd60701d756824be128697b78307f4f857182810772df4a70e7fdab73313ff75a6034f153371f9f9434a73fa3
   languageName: node
   linkType: hard
@@ -401,6 +401,7 @@ __metadata:
   dependencies:
     "@databricks/databricks-sdk": ../../vendor/databricks-sdk.tgz
     "@types/vscode": ^1.69.1
+    databricks: "*"
     eslint: ^8.44.0
     prettier: ^3.0.0
     typescript: ~5.1.6
@@ -1478,6 +1479,13 @@ __metadata:
   languageName: node
   linkType: hard

+"@types/json5@npm:^0.0.29":
+  version: 0.0.29
+  resolution: "@types/json5@npm:0.0.29"
+  checksum: e60b153664572116dfea673c5bda7778dbff150498f44f998e34b5886d8afc47f16799280e4b6e241c0472aef1bc36add771c569c68fc5125fc2ae519a3eb9ac
+  languageName: node
+  linkType: hard
+
 "@types/jsonfile@npm:*":
   version: 6.1.1
   resolution: "@types/jsonfile@npm:6.1.1"
@@ -2526,7 +2534,7 @@ __metadata:
   languageName: node
   linkType: hard

-"arrify@npm:^1.0.1":
+"arrify@npm:^1.0.0, arrify@npm:^1.0.1":
   version: 1.0.1
   resolution: "arrify@npm:1.0.1"
   checksum: 745075dd4a4624ff0225c331dacb99be501a515d39bcb7c84d24660314a6ec28e68131b137e6f7e16318170842ce97538cd298fc4cd6b2cc798e0b957f2747e7
@@ -2751,6 +2759,13 @@ __metadata:
   languageName: node
   linkType: hard

+"buffer-from@npm:^1.0.0, buffer-from@npm:^1.1.0":
+  version: 1.1.2
+  resolution: "buffer-from@npm:1.1.2"
+  checksum: 0448524a562b37d4d7ed9efd91685a5b77a50672c556ea254ac9a6d30e3403a517d8981f10e565db24e8339413b43c97ca2951f10e399c6125a0d8911f5679bb
+  languageName: node
+  linkType: hard
+
 "buffer@npm:^5.2.1, buffer@npm:^5.5.0":
   version: 5.7.1
   resolution: "buffer@npm:5.7.1"
@@ -3651,7 +3666,7 @@ __metadata:
   languageName: node
   linkType: hard

-"databricks@workspace:packages/databricks-vscode":
+"databricks@*, databricks@workspace:packages/databricks-vscode":
   version: 0.0.0-use.local
   resolution: "databricks@workspace:packages/databricks-vscode"
   dependencies:
@@ -3694,6 +3709,7 @@ __metadata:
     prettier: ^3.0.0
     tmp-promise: ^3.0.3
     triple-beam: ^1.4.1
+    ts-mocha: ^10.0.0
     ts-mockito: ^2.6.1
     ts-node: ^10.9.1
     typescript: ~5.1.6
@@ -4079,6 +4095,13 @@ __metadata:
   languageName: node
   linkType: hard

+"diff@npm:^3.1.0":
+  version: 3.5.0
+  resolution: "diff@npm:3.5.0"
+  checksum: 00842950a6551e26ce495bdbce11047e31667deea546527902661f25cc2e73358967ebc78cf86b1a9736ec3e14286433225f9970678155753a6291c3bca5227b
+  languageName: node
+  linkType: hard
+
 "diff@npm:^4.0.1":
   version: 4.0.2
   resolution: "diff@npm:4.0.2"
@@ -7619,7 +7642,7 @@ __metadata:
   languageName: node
   linkType: hard

-"mkdirp@npm:^0.5.5":
+"mkdirp@npm:^0.5.1, mkdirp@npm:^0.5.5":
   version: 0.5.6
   resolution: "mkdirp@npm:0.5.6"
   dependencies:
@@ -9605,7 +9628,17 @@ __metadata:
   languageName: node
   linkType: hard

-"source-map@npm:^0.6.1":
+"source-map-support@npm:^0.5.6":
+  version: 0.5.21
+  resolution: "source-map-support@npm:0.5.21"
+  dependencies:
+    buffer-from: ^1.0.0
+    source-map: ^0.6.0
+  checksum: 43e98d700d79af1d36f859bdb7318e601dfc918c7ba2e98456118ebc4c4872b327773e5a1df09b0524e9e5063bb18f0934538eace60cca2710d1fa687645d137
+  languageName: node
+  linkType: hard
+
+"source-map@npm:^0.6.0, source-map@npm:^0.6.1":
   version: 0.6.1
   resolution: "source-map@npm:0.6.1"
   checksum: 59ce8640cf3f3124f64ac289012c2b8bd377c238e316fb323ea22fbfe83da07d81e000071d7242cad7a23cd91c7de98e4df8830ec3f133cb6133a5f6e9f67bc2
@@ -10240,6 +10273,23 @@ __metadata:
   languageName: node
   linkType: hard

+"ts-mocha@npm:^10.0.0":
+  version: 10.0.0
+  resolution: "ts-mocha@npm:10.0.0"
+  dependencies:
+    ts-node: 7.0.1
+    tsconfig-paths: ^3.5.0
+  peerDependencies:
+    mocha: ^3.X.X || ^4.X.X || ^5.X.X || ^6.X.X || ^7.X.X || ^8.X.X || ^9.X.X || ^10.X.X
+  dependenciesMeta:
+    tsconfig-paths:
+      optional: true
+  bin:
+    ts-mocha: bin/ts-mocha
+  checksum: b11f2a8ceecf195b0db724da429159982fef12e4357088fe900289223587217e8c126ead7929679edd58bf19ad96c5da5911535d26f535386632e18fbff10c40
+  languageName: node
+  linkType: hard
+
 "ts-mockito@npm:^2.6.1":
   version: 2.6.1
   resolution: "ts-mockito@npm:2.6.1"
@@ -10249,6 +10299,24 @@ __metadata:
   languageName: node
   linkType: hard

+"ts-node@npm:7.0.1":
+  version: 7.0.1
+  resolution: "ts-node@npm:7.0.1"
+  dependencies:
+    arrify: ^1.0.0
+    buffer-from: ^1.1.0
+    diff: ^3.1.0
+    make-error: ^1.1.1
+    minimist: ^1.2.0
+    mkdirp: ^0.5.1
+    source-map-support: ^0.5.6
+    yn: ^2.0.0
+  bin:
+    ts-node: dist/bin.js
+  checksum: 07ed6ea1805361828737a767cfd6c57ea6e267ee8679282afb933610af02405e1a87c1f2aea1d38ed8e66b34fcbf6272b6021ab95d78849105d2e57fc283870b
+  languageName: node
+  linkType: hard
+
 "ts-node@npm:^10.9.1":
   version: 10.9.1
   resolution: "ts-node@npm:10.9.1"
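The lockfile entries above pin ts-mocha's vendored runtime: it drives mocha through its own `ts-node@7.0.1` (which in turn pulls in the `arrify`, `buffer-from`, `diff@^3.1.0`, `mkdirp`, `source-map-support`, and `yn` entries), compiling TypeScript test files on the fly rather than requiring a prior build. A minimal suite it could execute with an invocation like `ts-mocha -p tsconfig.json <test glob>` (file name, module path, and assertions are illustrative assumptions):

```typescript
// example.integ.ts — compiled on the fly by ts-mocha; no separate build step.
import * as assert from "node:assert";

describe("sdk-extensions re-exports", () => {
    it("exposes the WorkspaceFs helpers", async () => {
        // Hypothetical smoke test: the local module and its exports resolve.
        const ext = await import("./sdk-extensions");
        assert.ok(ext.WorkspaceFsEntity);
        assert.ok(ext.WorkspaceFsUtils);
    });
});
```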
@@ -10287,6 +10355,18 @@ __metadata:
   languageName: node
   linkType: hard

+"tsconfig-paths@npm:^3.5.0":
+  version: 3.14.2
+  resolution: "tsconfig-paths@npm:3.14.2"
+  dependencies:
+    "@types/json5": ^0.0.29
+    json5: ^1.0.2
+    minimist: ^1.2.6
+    strip-bom: ^3.0.0
+  checksum: a6162eaa1aed680537f93621b82399c7856afd10ec299867b13a0675e981acac4e0ec00896860480efc59fc10fd0b16fdc928c0b885865b52be62cadac692447
+  languageName: node
+  linkType: hard
+
 "tslib@npm:^1.13.0":
   version: 1.14.1
   resolution: "tslib@npm:1.14.1"
@@ -11235,6 +11315,13 @@ __metadata:
   languageName: node
   linkType: hard

+"yn@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "yn@npm:2.0.0"
+  checksum: 9d49527cb3e9a0948cc057223810bf30607bf04b9ff7666cc1681a6501d660b60d90000c16f9e29311b0f28d8a06222ada565ccdca5f1049cdfefb1908217572
+  languageName: node
+  linkType: hard
+
 "yocto-queue@npm:^0.1.0":
   version: 0.1.0
   resolution: "yocto-queue@npm:0.1.0"
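`tsconfig-paths` rounds out the chain: ts-mocha lists it as an optional dependency so that `compilerOptions.paths` aliases from a project's tsconfig can be resolved when modules are loaded at runtime. A sketch of the library's programmatic hook (the alias mapping shown is hypothetical, not from this repository):

```typescript
import * as tsConfigPaths from "tsconfig-paths";

// Install a Node module-resolution hook for the given path mappings;
// register() returns a function that removes the hook again.
const cleanup = tsConfigPaths.register({
    baseUrl: "./src",
    paths: {"@ext/*": ["sdk-extensions/*"]}, // hypothetical alias
});

// ...load modules that import via "@ext/..." specifiers...

cleanup(); // remove the resolution hook when finished
```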