Skip to content

Commit f1bf20d

Browse files
committed
test: add unit and integration tests for new discovery methods
1 parent 2076b8c commit f1bf20d

File tree

1 file changed

+53
-0
lines changed

1 file changed

+53
-0
lines changed

test/integration/test.discovery.js

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,12 +22,14 @@ describe('discovery_integration', function() {
2222
let configuration_id;
2323
let collection_id;
2424
let collection_id2;
25+
let japanese_collection_id;
2526

2627
before(function() {
2728
environment_id = auth.discovery.environment_id;
2829
configuration_id = auth.discovery.configuration_id;
2930
collection_id = auth.discovery.collection_id;
3031
collection_id2 = auth.discovery.collection_id_2;
32+
japanese_collection_id = auth.discovery.japanese_collection_id;
3133

3234
nock.enableNetConnect();
3335
discovery = new DiscoveryV1(
@@ -481,4 +483,55 @@ describe('discovery_integration', function() {
481483
});
482484
});
483485
});
486+
487+
// Integration coverage for the tokenization dictionary lifecycle
// (create -> poll status -> delete) against a Japanese-language collection.
// Tagged @slow so these network-bound cases can be excluded from quick runs.
describe('tokenization dictionary tests @slow', function() {
  it('should createTokenizationDictionary', function(done) {
    // One custom rule teaching the tokenizer how すしネコ should be split.
    const customRule = {
      text: 'すしネコ',
      tokens: ['すし', 'ネコ'],
      readings: ['寿司', 'ネコ'],
      part_of_speech: 'カスタム名詞',
    };
    const params = {
      environment_id,
      collection_id: japanese_collection_id,
      tokenization_rules: [customRule],
    };

    discovery.createTokenizationDictionary(params, (error, response) => {
      assert.ifError(error);
      // The service reports the dictionary's build status and type.
      assert(response.status);
      assert(response.type);
      done();
    });
  });

  it('should getTokenizationDictionaryStatus', function(done) {
    const params = {
      environment_id,
      collection_id: japanese_collection_id,
    };

    discovery.getTokenizationDictionaryStatus(params, (error, response) => {
      assert.ifError(error);
      assert(response.status);
      assert(response.type);
      done();
    });
  });

  it('should deleteTokenizationDictionary', function(done) {
    const params = {
      environment_id,
      collection_id: japanese_collection_id,
    };

    discovery.deleteTokenizationDictionary(params, (error, response) => {
      assert.ifError(error);
      // A successful delete yields an empty response body.
      assert.equal(response, '');
      done();
    });
  });
});
484537
});

0 commit comments

Comments
 (0)