analyzer.test.js

var runBenchmarks = require('../support/benchmark-runner');
var Tokenizer = require('../../index').query.tokenizer;
var Analyzer = require('../../index').query.analyzer;

//  ╔╗ ╔═╗╔╗╔╔═╗╦ ╦╔╦╗╔═╗╦═╗╦╔═╔═╗
//  ╠╩╗║╣ ║║║║  ╠═╣║║║╠═╣╠╦╝╠╩╗╚═╗
//  ╚═╝╚═╝╝╚╝╚═╝╩ ╩╩ ╩╩ ╩╩╚═╩ ╩╚═╝
describe('Benchmark :: Analyzer', function() {
  // Set "timeout" and "slow" thresholds incredibly high so Mocha
  // never aborts or flags these long-running benchmarks.
  this.slow(240000);
  this.timeout(240000);
  var tokens = {};

  // Tokenize all the test inputs before running the benchmarks.
  before(function() {
    tokens.select = Tokenizer({
      select: '*',
      from: 'books'
    });

    tokens.insert = Tokenizer({
      insert: {
        title: 'Slaughterhouse Five'
      },
      into: 'books'
    });

    tokens.update = Tokenizer({
      update: {
        status: 'archived'
      },
      where: {
        publishedDate: { '>': 2000 }
      },
      using: 'books'
    });
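
    // Note the `del: true` key: that is how this statement object
    // expresses a DELETE (there is no `delete` clause in the input).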
    tokens.delete = Tokenizer({
      del: true,
      from: 'accounts',
      where: {
        activated: false
      }
    });
  });
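
  // Benchmark the analyzer against each set of tokens prepared above.
  // (`runBenchmarks` is the shared helper required from
  // ../support/benchmark-runner; it runs each labeled function below.)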
  it('should be performant enough', function() {
    runBenchmarks('Analyzer', [
      function analyzeSelectSet() {
        Analyzer(tokens.select);
      },

      function analyzeInsertSet() {
        Analyzer(tokens.insert);
      },

      function analyzeUpdateSet() {
        Analyzer(tokens.update);
      },

      function analyzeDeleteSet() {
        Analyzer(tokens.delete);
      }
    ]);
  });
});