/**
 * CoffeeScript mode for CodeMirror.
 * Link to the project's GitHub page:
 * https://github.com/pickhardt/coffeescript-codemirror-mode
 */
5 CodeMirror.defineMode('coffeescript', function(conf) {
6 var ERRORCLASS = 'error';
8 function wordRegexp(words) {
9 return new RegExp("^((" + words.join(")|(") + "))\\b");
12 var singleOperators = new RegExp("^[\\+\\-\\*/%&|\\^~<>!\?]");
13 var singleDelimiters = new RegExp('^[\\(\\)\\[\\]\\{\\},:`=;\\.]');
14 var doubleOperators = new RegExp("^((\->)|(\=>)|(\\+\\+)|(\\+\\=)|(\\-\\-)|(\\-\\=)|(\\*\\*)|(\\*\\=)|(\\/\\/)|(\\/\\=)|(==)|(!=)|(<=)|(>=)|(<>)|(<<)|(>>)|(//))");
15 var doubleDelimiters = new RegExp("^((\\.\\.)|(\\+=)|(\\-=)|(\\*=)|(%=)|(/=)|(&=)|(\\|=)|(\\^=))");
16 var tripleDelimiters = new RegExp("^((\\.\\.\\.)|(//=)|(>>=)|(<<=)|(\\*\\*=))");
17 var identifiers = new RegExp("^[_A-Za-z$][_A-Za-z$0-9]*");
18 var properties = new RegExp("^(@|this\.)[_A-Za-z$][_A-Za-z$0-9]*");
20 var wordOperators = wordRegexp(['and', 'or', 'not',
22 'instanceof', 'typeof']);
23 var indentKeywords = ['for', 'while', 'loop', 'if', 'unless', 'else',
24 'switch', 'try', 'catch', 'finally', 'class'];
25 var commonKeywords = ['break', 'by', 'continue', 'debugger', 'delete',
26 'do', 'in', 'of', 'new', 'return', 'then',
27 'this', 'throw', 'when', 'until'];
29 var keywords = wordRegexp(indentKeywords.concat(commonKeywords));
31 indentKeywords = wordRegexp(indentKeywords);
34 var stringPrefixes = new RegExp("^('{3}|\"{3}|['\"])");
35 var regexPrefixes = new RegExp("^(/{3}|/)");
36 var commonConstants = ['Infinity', 'NaN', 'undefined', 'null', 'true', 'false', 'on', 'off', 'yes', 'no'];
37 var constants = wordRegexp(commonConstants);
40 function tokenBase(stream, state) {
41 // Handle scope changes
43 var scopeOffset = state.scopes[0].offset;
44 if (stream.eatSpace()) {
45 var lineOffset = stream.indentation();
46 if (lineOffset > scopeOffset) {
48 } else if (lineOffset < scopeOffset) {
53 if (scopeOffset > 0) {
54 dedent(stream, state);
58 if (stream.eatSpace()) {
62 var ch = stream.peek();
64 // Handle docco title comment (single line)
65 if (stream.match("####")) {
70 // Handle multi line comments
71 if (stream.match("###")) {
72 state.tokenize = longComment;
73 return state.tokenize(stream, state);
76 // Single line comment
82 // Handle number literals
83 if (stream.match(/^-?[0-9\.]/, false)) {
84 var floatLiteral = false;
86 if (stream.match(/^-?\d*\.\d+(e[\+\-]?\d+)?/i)) {
89 if (stream.match(/^-?\d+\.\d*/)) {
92 if (stream.match(/^-?\.\d+/)) {
97 // prevent from getting extra . on 1..
98 if (stream.peek() == "."){
104 var intLiteral = false;
106 if (stream.match(/^-?0x[0-9a-f]+/i)) {
110 if (stream.match(/^-?[1-9]\d*(e[\+\-]?\d+)?/)) {
113 // Zero by itself with no other piece of number.
114 if (stream.match(/^-?0(?![\dx])/i)) {
123 if (stream.match(stringPrefixes)) {
124 state.tokenize = tokenFactory(stream.current(), 'string');
125 return state.tokenize(stream, state);
127 // Handle regex literals
128 if (stream.match(regexPrefixes)) {
129 if (stream.current() != '/' || stream.match(/^.*\//, false)) { // prevent highlight of division
130 state.tokenize = tokenFactory(stream.current(), 'string-2');
131 return state.tokenize(stream, state);
137 // Handle operators and delimiters
138 if (stream.match(tripleDelimiters) || stream.match(doubleDelimiters)) {
139 return 'punctuation';
141 if (stream.match(doubleOperators)
142 || stream.match(singleOperators)
143 || stream.match(wordOperators)) {
146 if (stream.match(singleDelimiters)) {
147 return 'punctuation';
150 if (stream.match(constants)) {
154 if (stream.match(keywords)) {
158 if (stream.match(identifiers)) {
162 if (stream.match(properties)) {
166 // Handle non-detected items
171 function tokenFactory(delimiter, outclass) {
172 var singleline = delimiter.length == 1;
173 return function(stream, state) {
174 while (!stream.eol()) {
175 stream.eatWhile(/[^'"\/\\]/);
176 if (stream.eat('\\')) {
178 if (singleline && stream.eol()) {
181 } else if (stream.match(delimiter)) {
182 state.tokenize = tokenBase;
185 stream.eat(/['"\/]/);
189 if (conf.mode.singleLineStringErrors) {
190 outclass = ERRORCLASS;
192 state.tokenize = tokenBase;
199 function longComment(stream, state) {
200 while (!stream.eol()) {
201 stream.eatWhile(/[^#]/);
202 if (stream.match("###")) {
203 state.tokenize = tokenBase;
206 stream.eatWhile("#");
211 function indent(stream, state, type) {
212 type = type || 'coffee';
214 if (type === 'coffee') {
215 for (var i = 0; i < state.scopes.length; i++) {
216 if (state.scopes[i].type === 'coffee') {
217 indentUnit = state.scopes[i].offset + conf.indentUnit;
222 indentUnit = stream.column() + stream.current().length;
224 state.scopes.unshift({
230 function dedent(stream, state) {
231 if (state.scopes.length == 1) return;
232 if (state.scopes[0].type === 'coffee') {
233 var _indent = stream.indentation();
234 var _indent_index = -1;
235 for (var i = 0; i < state.scopes.length; ++i) {
236 if (_indent === state.scopes[i].offset) {
241 if (_indent_index === -1) {
244 while (state.scopes[0].offset !== _indent) {
245 state.scopes.shift();
249 state.scopes.shift();
254 function tokenLexer(stream, state) {
255 var style = state.tokenize(stream, state);
256 var current = stream.current();
258 // Handle '.' connected identifiers
259 if (current === '.') {
260 style = state.tokenize(stream, state);
261 current = stream.current();
262 if (style === 'variable') {
269 // Handle scope changes.
270 if (current === 'return') {
273 if (((current === '->' || current === '=>') &&
275 state.scopes[0].type == 'coffee' &&
276 stream.peek() === '')
277 || style === 'indent') {
278 indent(stream, state);
280 var delimiter_index = '[({'.indexOf(current);
281 if (delimiter_index !== -1) {
282 indent(stream, state, '])}'.slice(delimiter_index, delimiter_index+1));
284 if (indentKeywords.exec(current)){
285 indent(stream, state);
287 if (current == 'then'){
288 dedent(stream, state);
292 if (style === 'dedent') {
293 if (dedent(stream, state)) {
297 delimiter_index = '])}'.indexOf(current);
298 if (delimiter_index !== -1) {
299 if (dedent(stream, state)) {
303 if (state.dedent > 0 && stream.eol() && state.scopes[0].type == 'coffee') {
304 if (state.scopes.length > 1) state.scopes.shift();
312 startState: function(basecolumn) {
315 scopes: [{offset:basecolumn || 0, type:'coffee'}],
322 token: function(stream, state) {
323 var style = tokenLexer(stream, state);
325 state.lastToken = {style:style, content: stream.current()};
327 if (stream.eol() && stream.lambda) {
328 state.lambda = false;
334 indent: function(state) {
335 if (state.tokenize != tokenBase) {
339 return state.scopes[0].offset;
348 CodeMirror.defineMIME('text/x-coffeescript', 'coffeescript');