@@ -20,6 +20,7 @@ import { JDBCDriver, JDBCDriverConfiguration, } from '@cubejs-backend/jdbc-drive
 import { DatabricksQuery } from './DatabricksQuery';
 import {
   extractAndRemoveUidPwdFromJdbcUrl,
+  parseDatabricksJdbcUrl,
   resolveJDBCDriver
 } from './helpers';
 
@@ -124,6 +125,11 @@ type ColumnInfo = {
   type: GenericDataBaseType;
 };
 
+export type ParsedConnectionProperties = {
+  host: string,
+  warehouseId: string,
+};
+
 const DatabricksToGenericType: Record<string, string> = {
   binary: 'hll_datasketches',
   'decimal(10,0)': 'bigint',
@@ -143,6 +149,8 @@ export class DatabricksDriver extends JDBCDriver {
    */
   protected readonly config: DatabricksDriverConfiguration;
 
+  private readonly parsedConnectionProperties: ParsedConnectionProperties;
+
   public static dialectClass() {
     return DatabricksQuery;
   }
@@ -262,38 +270,45 @@ export class DatabricksDriver extends JDBCDriver {
 
     super(config);
     this.config = config;
+    this.parsedConnectionProperties = parseDatabricksJdbcUrl(url);
     this.showSparkProtocolWarn = showSparkProtocolWarn;
   }
 
-  /**
-   * @override
-   */
-  public readOnly() {
+  public override readOnly() {
     return !!this.config.readOnly;
   }
 
-  /**
-   * @override
-   */
-  public capabilities(): DriverCapabilities {
+  public override capabilities(): DriverCapabilities {
     return {
       unloadWithoutTempTable: true,
       incrementalSchemaLoading: true
     };
   }
 
-  /**
-   * @override
-   */
-  public setLogger(logger: any) {
+  public override setLogger(logger: any) {
     super.setLogger(logger);
     this.showDeprecations();
   }
 
-  /**
-   * @override
-   */
-  public async loadPreAggregationIntoTable(
+  public override async testConnection() {
+    const token = `Bearer ${this.config.properties.PWD}`;
+
+    const res = await fetch(`https://${this.parsedConnectionProperties.host}/api/2.0/sql/warehouses/${this.parsedConnectionProperties.warehouseId}`, {
+      headers: { Authorization: token },
+    });
+
+    if (!res.ok) {
+      throw new Error(`Databricks API error: ${res.statusText}`);
+    }
+
+    const data = await res.json();
+
+    if (data.state !== 'RUNNING') {
+      throw new Error(`Warehouse not running (current state: ${data.state})`);
+    }
+  }
+
+  public override async loadPreAggregationIntoTable(
     preAggregationTableName: string,
     loadSql: string,
     params: unknown[],
@@ -320,10 +335,7 @@ export class DatabricksDriver extends JDBCDriver {
     }
   }
 
-  /**
-   * @override
-   */
-  public async query<R = unknown>(
+  public override async query<R = unknown>(
     query: string,
     values: unknown[],
   ): Promise<R[]> {
@@ -357,10 +369,7 @@ export class DatabricksDriver extends JDBCDriver {
     }
   }
 
-  /**
-   * @override
-   */
-  public dropTable(tableName: string, options?: QueryOptions): Promise<unknown> {
+  public override dropTable(tableName: string, options?: QueryOptions): Promise<unknown> {
     const tableFullName = `${
       this.config?.catalog ? `${this.config.catalog}.` : ''
     }${tableName}`;
@@ -392,10 +401,7 @@ export class DatabricksDriver extends JDBCDriver {
     }
   }
 
-  /**
-   * @override
-   */
-  protected async getCustomClassPath() {
+  protected override async getCustomClassPath() {
     return resolveJDBCDriver();
   }
 
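The diff imports a new parseDatabricksJdbcUrl helper from './helpers' and uses it to populate parsedConnectionProperties, but the helper's body is not shown here. Below is a minimal sketch of what such a parser could look like, assuming the usual Databricks JDBC URL shape in which httpPath ends in /sql/1.0/warehouses/<warehouse-id>; the names and regular expressions are illustrative, not the actual implementation in './helpers'.

// Hypothetical sketch only: the real parseDatabricksJdbcUrl in './helpers' is not part of
// this diff and may parse the URL differently.

// Mirrors the ParsedConnectionProperties type added in the diff above.
type ParsedConnectionProperties = {
  host: string,
  warehouseId: string,
};

// Assumed URL shape:
//   jdbc:databricks://<host>:443/default;transportMode=http;httpPath=/sql/1.0/warehouses/<id>;AuthMech=3;UID=token;PWD=<token>
function parseDatabricksJdbcUrl(url: string): ParsedConnectionProperties {
  // Host: the text between '//' and the next ':', '/' or ';'.
  const hostMatch = /jdbc:databricks:\/\/([^:\/;]+)/.exec(url);
  // Warehouse id: the last segment of the httpPath connection property.
  const warehouseMatch = /httpPath=[^;]*\/warehouses\/([^;\/]+)/i.exec(url);

  if (!hostMatch || !warehouseMatch) {
    throw new Error(`Unable to parse Databricks JDBC URL: ${url}`);
  }

  return { host: hostMatch[1], warehouseId: warehouseMatch[1] };
}

Extracting the host and warehouse id up front is what lets the new testConnection override probe warehouse health over the REST API (GET /api/2.0/sql/warehouses/{id}, authorized with the PWD token as a bearer credential) and fail fast when the warehouse is not RUNNING, instead of opening a JDBC connection just to verify connectivity.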