4
4
* @fileoverview The `DatabricksDriver` and related types declaration.
5
5
*/
6
6
7
+ import fetch from 'node-fetch' ;
7
8
import { assertDataSource , getEnv , } from '@cubejs-backend/shared' ;
8
9
import {
9
10
DatabaseStructure ,
@@ -20,6 +21,7 @@ import { JDBCDriver, JDBCDriverConfiguration, } from '@cubejs-backend/jdbc-drive
20
21
import { DatabricksQuery } from './DatabricksQuery' ;
21
22
import {
22
23
extractAndRemoveUidPwdFromJdbcUrl ,
24
+ parseDatabricksJdbcUrl ,
23
25
resolveJDBCDriver
24
26
} from './helpers' ;
25
27
@@ -124,6 +126,11 @@ type ColumnInfo = {
124
126
type : GenericDataBaseType ;
125
127
} ;
126
128
129
/**
 * Connection coordinates extracted from the Databricks JDBC URL by
 * `parseDatabricksJdbcUrl`, used to reach the Databricks REST API.
 */
export type ParsedConnectionProperties = {
  // Workspace hostname (no scheme), e.g. `xxx.cloud.databricks.com`.
  host: string,
  // SQL warehouse identifier — presumably taken from the JDBC URL's
  // `httpPath`; confirm against `parseDatabricksJdbcUrl` in ./helpers.
  warehouseId: string,
};
133
+
127
134
const DatabricksToGenericType : Record < string , string > = {
128
135
binary : 'hll_datasketches' ,
129
136
'decimal(10,0)' : 'bigint' ,
@@ -143,6 +150,8 @@ export class DatabricksDriver extends JDBCDriver {
143
150
*/
144
151
protected readonly config : DatabricksDriverConfiguration ;
145
152
153
+ private readonly parsedConnectionProperties : ParsedConnectionProperties ;
154
+
146
155
public static dialectClass ( ) {
147
156
return DatabricksQuery ;
148
157
}
@@ -262,38 +271,50 @@ export class DatabricksDriver extends JDBCDriver {
262
271
263
272
super ( config ) ;
264
273
this . config = config ;
274
+ this . parsedConnectionProperties = parseDatabricksJdbcUrl ( url ) ;
265
275
this . showSparkProtocolWarn = showSparkProtocolWarn ;
266
276
}
267
277
268
- /**
269
- * @override
270
- */
271
- public readOnly ( ) {
278
+ public override readOnly ( ) {
272
279
return ! ! this . config . readOnly ;
273
280
}
274
281
275
- /**
276
- * @override
277
- */
278
- public capabilities ( ) : DriverCapabilities {
282
+ public override capabilities ( ) : DriverCapabilities {
279
283
return {
280
284
unloadWithoutTempTable : true ,
281
285
incrementalSchemaLoading : true
282
286
} ;
283
287
}
284
288
285
  /**
   * Installs the logger on the base JDBC driver, then emits any
   * deprecation warnings applicable to the current configuration.
   *
   * @param logger logger callback forwarded to the base class
   *   (typed `any` to match the base-class signature).
   */
  public override setLogger(logger: any) {
    super.setLogger(logger);
    this.showDeprecations();
  }
292
293
293
- /**
294
- * @override
295
- */
296
- public async loadPreAggregationIntoTable (
294
+ public override async testConnection ( ) {
295
+ const token = `Bearer ${ this . config . properties . PWD } ` ;
296
+
297
+ const res = await fetch ( `https://${ this . parsedConnectionProperties . host } /api/2.0/sql/warehouses/${ this . parsedConnectionProperties . warehouseId } ` , {
298
+ headers : { Authorization : token } ,
299
+ } ) ;
300
+
301
+ if ( ! res . ok ) {
302
+ throw new Error ( `Databricks API error: ${ res . statusText } ` ) ;
303
+ }
304
+
305
+ const data = await res . json ( ) ;
306
+
307
+ if ( [ 'DELETING' , 'DELETED' ] . includes ( data . state ) ) {
308
+ throw new Error ( `Warehouse is being deleted (current state: ${ data . state } )` ) ;
309
+ }
310
+
311
+ // There is also DEGRADED status, but it doesn't mean that cluster is 100% not working...
312
+ if ( data . health ?. status === 'FAILED' ) {
313
+ throw new Error ( `Warehouse is unhealthy: ${ data . health ?. summary } . Details: ${ data . health ?. details } ` ) ;
314
+ }
315
+ }
316
+
317
+ public override async loadPreAggregationIntoTable (
297
318
preAggregationTableName : string ,
298
319
loadSql : string ,
299
320
params : unknown [ ] ,
@@ -320,10 +341,7 @@ export class DatabricksDriver extends JDBCDriver {
320
341
}
321
342
}
322
343
323
- /**
324
- * @override
325
- */
326
- public async query < R = unknown > (
344
+ public override async query < R = unknown > (
327
345
query : string ,
328
346
values : unknown [ ] ,
329
347
) : Promise < R [ ] > {
@@ -357,10 +375,7 @@ export class DatabricksDriver extends JDBCDriver {
357
375
}
358
376
}
359
377
360
- /**
361
- * @override
362
- */
363
- public dropTable ( tableName : string , options ?: QueryOptions ) : Promise < unknown > {
378
+ public override dropTable ( tableName : string , options ?: QueryOptions ) : Promise < unknown > {
364
379
const tableFullName = `${
365
380
this . config ?. catalog ? `${ this . config . catalog } .` : ''
366
381
} ${ tableName } `;
@@ -392,10 +407,7 @@ export class DatabricksDriver extends JDBCDriver {
392
407
}
393
408
}
394
409
395
- /**
396
- * @override
397
- */
398
- protected async getCustomClassPath ( ) {
410
+ protected override async getCustomClassPath ( ) {
399
411
return resolveJDBCDriver ( ) ;
400
412
}
401
413
0 commit comments