@@ -100,49 +100,61 @@ describe "Javascript grammar", ->
       expect(lines[1][0]).toEqual value: '/ ', scopes: ['source.js']
       expect(lines[1][1]).toEqual value: '2', scopes: ['source.js', 'constant.numeric.js']

-    it "should tokenizes = correctly", ->
+    it "tokenizes = correctly", ->
       {tokens} = grammar.tokenizeLine('test = 2')
       expect(tokens[0]).toEqual value: 'test ', scopes: ['source.js']
       expect(tokens[1]).toEqual value: '=', scopes: ['source.js', 'keyword.operator.js']
       expect(tokens[2]).toEqual value: ' ', scopes: ['source.js']
       expect(tokens[3]).toEqual value: '2', scopes: ['source.js', 'constant.numeric.js']

-    it "should tokenizes + correctly", ->
+    it "tokenizes + correctly", ->
       {tokens} = grammar.tokenizeLine('test + 2')
       expect(tokens[0]).toEqual value: 'test ', scopes: ['source.js']
       expect(tokens[1]).toEqual value: '+', scopes: ['source.js', 'keyword.operator.js']
       expect(tokens[2]).toEqual value: ' ', scopes: ['source.js']
       expect(tokens[3]).toEqual value: '2', scopes: ['source.js', 'constant.numeric.js']

     describe "operators with 2 characters", ->
-      it "should tokenizes += correctly", ->
+      it "tokenizes += correctly", ->
         {tokens} = grammar.tokenizeLine('test += 2')
         expect(tokens[0]).toEqual value: 'test ', scopes: ['source.js']
         expect(tokens[1]).toEqual value: '+=', scopes: ['source.js', 'keyword.operator.js']
         expect(tokens[2]).toEqual value: ' ', scopes: ['source.js']
         expect(tokens[3]).toEqual value: '2', scopes: ['source.js', 'constant.numeric.js']

-      it "should tokenizes -= correctly", ->
+      it "tokenizes -= correctly", ->
         {tokens} = grammar.tokenizeLine('test -= 2')
         expect(tokens[0]).toEqual value: 'test ', scopes: ['source.js']
         expect(tokens[1]).toEqual value: '-=', scopes: ['source.js', 'keyword.operator.js']
         expect(tokens[2]).toEqual value: ' ', scopes: ['source.js']
         expect(tokens[3]).toEqual value: '2', scopes: ['source.js', 'constant.numeric.js']

-      it "should tokenizes *= correctly", ->
+      it "tokenizes *= correctly", ->
         {tokens} = grammar.tokenizeLine('test *= 2')
         expect(tokens[0]).toEqual value: 'test ', scopes: ['source.js']
         expect(tokens[1]).toEqual value: '*=', scopes: ['source.js', 'keyword.operator.js']
         expect(tokens[2]).toEqual value: ' ', scopes: ['source.js']
         expect(tokens[3]).toEqual value: '2', scopes: ['source.js', 'constant.numeric.js']

-      it "should tokenizes /= correctly", ->
+      it "tokenizes /= correctly", ->
         {tokens} = grammar.tokenizeLine('test /= 2')
         expect(tokens[0]).toEqual value: 'test ', scopes: ['source.js']
         expect(tokens[1]).toEqual value: '/=', scopes: ['source.js', 'keyword.operator.js']
         expect(tokens[2]).toEqual value: ' ', scopes: ['source.js']
         expect(tokens[3]).toEqual value: '2', scopes: ['source.js', 'constant.numeric.js']

+  describe "constants", ->
+    it "tokenizes ALL_CAPS variables correctly", ->
+      {tokens} = grammar.tokenizeLine('var MY_COOL_VAR = 42;')
+      expect(tokens[0]).toEqual value: 'var', scopes: ['source.js', 'storage.modifier.js']
+      expect(tokens[1]).toEqual value: ' ', scopes: ['source.js']
+      expect(tokens[2]).toEqual value: 'MY_COOL_VAR', scopes: ['source.js', 'constant.other.js']
+      expect(tokens[3]).toEqual value: ' ', scopes: ['source.js']
+      expect(tokens[4]).toEqual value: '=', scopes: ['source.js', 'keyword.operator.js']
+      expect(tokens[5]).toEqual value: ' ', scopes: ['source.js']
+      expect(tokens[6]).toEqual value: '42', scopes: ['source.js', 'constant.numeric.js']
+      expect(tokens[7]).toEqual value: ';', scopes: ['source.js', 'punctuation.terminator.statement.js']
+
   describe "ES6 string templates", ->
     it "tokenizes them as strings", ->
       {tokens} = grammar.tokenizeLine('`hey ${name}`')