// parse.test.js
  1. var runBenchmarks = require('../support/benchmark-runner');
  2. var Tokenizer = require('../../index').query.tokenizer;
  3. var Analyzer = require('../../index').query.analyzer;
  4. // ╔╗ ╔═╗╔╗╔╔═╗╦ ╦╔╦╗╔═╗╦═╗╦╔═╔═╗
  5. // ╠╩╗║╣ ║║║║ ╠═╣║║║╠═╣╠╦╝╠╩╗╚═╗
  6. // ╚═╝╚═╝╝╚╝╚═╝╩ ╩╩ ╩╩ ╩╩╚═╩ ╩╚═╝
  7. describe('Benchmark :: Parse', function() {
  8. // Set "timeout" and "slow" thresholds incredibly high
  9. // to avoid running into issues.
  10. this.slow(240000);
  11. this.timeout(240000);
  12. it('should be performant enough', function() {
  13. runBenchmarks('Parse query and return token set', [
  14. function analyzeSelectSet() {
  15. var tokens = Tokenizer({
  16. select: '*',
  17. from: 'books'
  18. });
  19. Analyzer(tokens);
  20. },
  21. function analyzeInsertSet() {
  22. var tokens = Tokenizer({
  23. insert: {
  24. title: 'Slaughterhouse Five'
  25. },
  26. into: 'books'
  27. });
  28. Analyzer(tokens);
  29. },
  30. function analyzeUpdateSet() {
  31. var tokens = Tokenizer({
  32. update: {
  33. status: 'archived'
  34. },
  35. where: {
  36. publishedDate: { '>': 2000 }
  37. },
  38. using: 'books'
  39. });
  40. Analyzer(tokens);
  41. },
  42. function analyzeDeleteSet() {
  43. var tokens = Tokenizer({
  44. del: true,
  45. from: 'accounts',
  46. where: {
  47. activated: false
  48. }
  49. });
  50. Analyzer(tokens);
  51. }
  52. ]);
  53. });
  54. });