// tokenizer.update.test.js
  1. var Tokenizer = require('../../../index').query.tokenizer;
  2. var assert = require('assert');
  3. describe('Tokenizer ::', function() {
  4. describe('UPDATE statements', function() {
  5. it('should generate a valid token array for an UPDATE is used', function() {
  6. var result = Tokenizer({
  7. update: {
  8. status: 'archived'
  9. },
  10. where: {
  11. and: [
  12. {
  13. publishedDate: {
  14. '>': 2000
  15. }
  16. }
  17. ]
  18. },
  19. using: 'books'
  20. });
  21. assert.deepEqual(result, [
  22. { type: 'IDENTIFIER', value: 'UPDATE' },
  23. { type: 'KEY', value: 'status' },
  24. { type: 'VALUE', value: 'archived' },
  25. { type: 'ENDIDENTIFIER', value: 'UPDATE' },
  26. { type: 'IDENTIFIER', value: 'WHERE' },
  27. { type: 'CONDITION', value: 'AND' },
  28. { type: 'GROUP', value: 0 },
  29. { type: 'KEY', value: 'publishedDate' },
  30. { type: 'OPERATOR', value: '>' },
  31. { type: 'VALUE', value: 2000 },
  32. { type: 'ENDOPERATOR', value: '>' },
  33. { type: 'ENDGROUP', value: 0 },
  34. { type: 'ENDCONDITION', value: 'AND' },
  35. { type: 'ENDIDENTIFIER', value: 'WHERE' },
  36. { type: 'IDENTIFIER', value: 'USING' },
  37. { type: 'VALUE', value: 'books' },
  38. { type: 'ENDIDENTIFIER', value: 'USING' }
  39. ]);
  40. });
  41. });
  42. });