// tokenizer.test.js
  1. var runBenchmarks = require('../support/benchmark-runner');
  2. var Tokenizer = require('../../index').query.tokenizer;
  3. // ╔╗ ╔═╗╔╗╔╔═╗╦ ╦╔╦╗╔═╗╦═╗╦╔═╔═╗
  4. // ╠╩╗║╣ ║║║║ ╠═╣║║║╠═╣╠╦╝╠╩╗╚═╗
  5. // ╚═╝╚═╝╝╚╝╚═╝╩ ╩╩ ╩╩ ╩╩╚═╩ ╩╚═╝
  6. describe('Benchmark :: Tokenizer', function() {
  7. // Set "timeout" and "slow" thresholds incredibly high
  8. // to avoid running into issues.
  9. this.slow(240000);
  10. this.timeout(240000);
  11. it('should be performant enough', function() {
  12. runBenchmarks('Tokenizer', [
  13. function buildSelectTokenSet() {
  14. Tokenizer({
  15. select: '*',
  16. from: 'books'
  17. });
  18. },
  19. function buildInsertTokenSet() {
  20. Tokenizer({
  21. insert: {
  22. title: 'Slaughterhouse Five'
  23. },
  24. into: 'books'
  25. });
  26. },
  27. function buildUpdateTokenSet() {
  28. Tokenizer({
  29. update: {
  30. status: 'archived'
  31. },
  32. where: {
  33. publishedDate: { '>': 2000 }
  34. },
  35. using: 'books'
  36. });
  37. },
  38. function buildDeleteTokenSet() {
  39. Tokenizer({
  40. del: true,
  41. from: 'accounts',
  42. where: {
  43. activated: false
  44. }
  45. });
  46. }
  47. ]);
  48. });
  49. });