Skip to content

Commit 453a51a

Browse files
committed
feat(databricks-jdbc-driver): Implement connection checking without waking up SQL warehouse
1 parent 940c30f commit 453a51a

File tree

2 files changed

+65
-28
lines changed

2 files changed

+65
-28
lines changed

packages/cubejs-databricks-jdbc-driver/src/DatabricksDriver.ts

+34-28
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ import { JDBCDriver, JDBCDriverConfiguration, } from '@cubejs-backend/jdbc-drive
2020
import { DatabricksQuery } from './DatabricksQuery';
2121
import {
2222
extractAndRemoveUidPwdFromJdbcUrl,
23+
parseDatabricksJdbcUrl,
2324
resolveJDBCDriver
2425
} from './helpers';
2526

@@ -124,6 +125,11 @@ type ColumnInfo = {
124125
type: GenericDataBaseType;
125126
};
126127

128+
export type ParsedConnectionProperties = {
129+
host: string,
130+
warehouseId: string,
131+
};
132+
127133
const DatabricksToGenericType: Record<string, string> = {
128134
binary: 'hll_datasketches',
129135
'decimal(10,0)': 'bigint',
@@ -143,6 +149,8 @@ export class DatabricksDriver extends JDBCDriver {
143149
*/
144150
protected readonly config: DatabricksDriverConfiguration;
145151

152+
private readonly parsedConnectionProperties: ParsedConnectionProperties;
153+
146154
public static dialectClass() {
147155
return DatabricksQuery;
148156
}
@@ -262,38 +270,45 @@ export class DatabricksDriver extends JDBCDriver {
262270

263271
super(config);
264272
this.config = config;
273+
this.parsedConnectionProperties = parseDatabricksJdbcUrl(url);
265274
this.showSparkProtocolWarn = showSparkProtocolWarn;
266275
}
267276

268-
/**
269-
* @override
270-
*/
271-
public readOnly() {
277+
public override readOnly() {
272278
return !!this.config.readOnly;
273279
}
274280

275-
/**
276-
* @override
277-
*/
278-
public capabilities(): DriverCapabilities {
281+
public override capabilities(): DriverCapabilities {
279282
return {
280283
unloadWithoutTempTable: true,
281284
incrementalSchemaLoading: true
282285
};
283286
}
284287

285-
/**
286-
* @override
287-
*/
288-
public setLogger(logger: any) {
288+
public override setLogger(logger: any) {
289289
super.setLogger(logger);
290290
this.showDeprecations();
291291
}
292292

293-
/**
294-
* @override
295-
*/
296-
public async loadPreAggregationIntoTable(
/**
 * Checks connectivity by querying the SQL warehouse state through the
 * Databricks REST API instead of executing a SQL statement, so the
 * check does not wake up (auto-start) a stopped warehouse.
 *
 * @throws Error when the access token is missing, the API call fails,
 *   or the warehouse is in any state other than RUNNING.
 */
public override async testConnection() {
  const pwd = this.config.properties.PWD;
  if (!pwd) {
    // Fail fast with a clear message instead of sending "Bearer undefined".
    throw new Error('Missing PWD (personal access token) in connection properties');
  }

  const { host, warehouseId } = this.parsedConnectionProperties;
  const res = await fetch(`https://${host}/api/2.0/sql/warehouses/${warehouseId}`, {
    headers: { Authorization: `Bearer ${pwd}` },
  });

  if (!res.ok) {
    // statusText may be empty under HTTP/2, so always include the numeric code.
    throw new Error(`Databricks API error: ${res.status} ${res.statusText}`);
  }

  // Unvalidated external JSON: narrow only the field we read.
  const data = (await res.json()) as { state?: string };

  if (data.state !== 'RUNNING') {
    throw new Error(`Warehouse not running (current state: ${data.state})`);
  }
}
310+
311+
public override async loadPreAggregationIntoTable(
297312
preAggregationTableName: string,
298313
loadSql: string,
299314
params: unknown[],
@@ -320,10 +335,7 @@ export class DatabricksDriver extends JDBCDriver {
320335
}
321336
}
322337

323-
/**
324-
* @override
325-
*/
326-
public async query<R = unknown>(
338+
public override async query<R = unknown>(
327339
query: string,
328340
values: unknown[],
329341
): Promise<R[]> {
@@ -357,10 +369,7 @@ export class DatabricksDriver extends JDBCDriver {
357369
}
358370
}
359371

360-
/**
361-
* @override
362-
*/
363-
public dropTable(tableName: string, options?: QueryOptions): Promise<unknown> {
372+
public override dropTable(tableName: string, options?: QueryOptions): Promise<unknown> {
364373
const tableFullName = `${
365374
this.config?.catalog ? `${this.config.catalog}.` : ''
366375
}${tableName}`;
@@ -392,10 +401,7 @@ export class DatabricksDriver extends JDBCDriver {
392401
}
393402
}
394403

395-
/**
396-
* @override
397-
*/
398-
protected async getCustomClassPath() {
404+
protected override async getCustomClassPath() {
399405
return resolveJDBCDriver();
400406
}
401407

packages/cubejs-databricks-jdbc-driver/src/helpers.ts

+31
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@ import fs from 'fs';
22
import path from 'path';
33

44
import { downloadJDBCDriver, OSS_DRIVER_VERSION } from './installer';
5+
import type { ParsedConnectionProperties } from './DatabricksDriver';
56

67
async function fileExistsOr(
78
fsPath: string,
@@ -51,3 +52,33 @@ export function extractAndRemoveUidPwdFromJdbcUrl(jdbcUrl: string): [uid: string
5152

5253
return [uid, pwd, cleanedUrl];
5354
}
55+
56+
export function parseDatabricksJdbcUrl(jdbcUrl: string): ParsedConnectionProperties {
57+
const jdbcPrefix = 'jdbc:databricks://';
58+
const urlWithoutPrefix = jdbcUrl.slice(jdbcPrefix.length);
59+
60+
const [hostPortAndPath, ...params] = urlWithoutPrefix.split(';');
61+
const [host] = hostPortAndPath.split(':');
62+
63+
const paramMap = new Map<string, string>();
64+
for (const param of params) {
65+
const [key, value] = param.split('=');
66+
if (key && value) {
67+
paramMap.set(key, value);
68+
}
69+
}
70+
71+
const httpPath = paramMap.get('httpPath');
72+
if (!httpPath) {
73+
throw new Error('Missing httpPath in JDBC URL');
74+
}
75+
76+
const warehouseMatch = httpPath.match(/\/warehouses\/([a-zA-Z0-9]+)/);
77+
if (!warehouseMatch) {
78+
throw new Error('Could not extract warehouseId from httpPath');
79+
}
80+
81+
const warehouseId = warehouseMatch[1];
82+
83+
return { host, warehouseId };
84+
}

0 commit comments

Comments
 (0)