// CodeMirror mode factory for Sass (the indentation-based syntax, not SCSS).
// `config.indentUnit` drives the scope-offset bookkeeping used for auto-indent.
CodeMirror.defineMode("sass", function(config) {
// Build a regexp matching any of `words` anchored at the start of the stream.
// The alternation is wrapped in a non-capturing group: "^" + join("|") alone
// would anchor only the FIRST alternative (regex `|` has lower precedence
// than `^`), letting later alternatives match mid-string.
var tokenRegexp = function(words){
  return new RegExp("^(?:" + words.join("|") + ")");
};
// Literal value keywords highlighted as keywords in property values.
var keywords = ["true", "false", "null", "auto"];
// Built via tokenRegexp for consistency with opRegexp below (the original
// duplicated the "^" + join("|") construction by hand).
var keywordsRegexp = tokenRegexp(keywords);

// Operators and punctuation used in SassScript expressions.
// NOTE(review): "<"/">"/"=" precede "<="/">="/"==" in the alternation, so the
// one-char forms win; two-char operators tokenize as two tokens — preserved.
var operators = ["\\(", "\\)", "=", ">", "<", "==", ">=", "<=", "\\+", "-", "\\!=", "/", "\\*", "%", "and", "or", "not"];
var opRegexp = tokenRegexp(operators);

// Pseudo-class / pseudo-element selectors: ":hover", "::before", etc.
var pseudoElementsRegexp = /^::?[\w\-]+/;
// Sub-tokenizer active inside a url(...) term. Hands control back to
// tokenBase once the closing ")" is consumed; quoted and unquoted url
// bodies are delegated to a string tokenizer.
var urlTokens = function(stream, state){
  var ch = stream.peek();

  if (ch === ")"){
    stream.next();
    state.tokenizer = tokenBase;
    return "operator";
  }else if (ch === "("){
    stream.next();

    return "operator";
  }else if (ch === "'" || ch === '"'){
    // Quoted url: tokenize up to the matching quote.
    state.tokenizer = buildStringTokenizer(stream.next());
    return "string";
  }else{
    // Unquoted url: treat everything up to ")" as the string, but do not
    // consume the ")" itself (greedy = false).
    state.tokenizer = buildStringTokenizer(")", false);
    return "string";
  }
};
// Sub-tokenizer active inside a /* ... */ comment. Scans for the closing
// marker; when found, consumes the two closer characters and restores
// tokenBase, otherwise advances one character and stays in comment mode.
var multilineComment = function(stream, state) {
  if (stream.skipTo("*/")){
    // skipTo stops just before "*/": consume both characters.
    stream.next();
    stream.next();
    state.tokenizer = tokenBase;
  }else {
    stream.next();
  }

  return "comment";
};
// Build a tokenizer for a string delimited by `quote`. When `greedy`
// (default true) the closing quote is consumed as part of the string token;
// url(...) bodies pass greedy=false so ")" is left for urlTokens.
// Fix: removed leftover console.log debug statements that fired on every
// character of every string token.
var buildStringTokenizer = function(quote, greedy){
  if(greedy == null){ greedy = true; }

  function stringTokenizer(stream, state){
    var nextChar = stream.next();
    var peekChar = stream.peek();
    var previousChar = stream.string.charAt(stream.pos-2);

    // The string ends when the (unescaped) closing quote is next, or when we
    // just consumed an unescaped closing quote.
    var endingString = ((nextChar !== "\\" && peekChar === quote) || (nextChar === quote && previousChar !== "\\"));

    if (endingString){
      // Greedy mode swallows the closing quote into this token.
      if (nextChar !== quote && greedy) { stream.next(); }
      state.tokenizer = tokenBase;
      return "string";
    }else if (nextChar === "#" && peekChar === "{"){
      // #{...} interpolation inside the string; resume here afterwards.
      state.tokenizer = buildInterpolationTokenizer(stringTokenizer);
      stream.next();
      return "operator";
    }else {
      return "string";
    }
  }

  return stringTokenizer;
};
// Build a tokenizer for a #{...} interpolation block. Inside the braces,
// normal Sass tokenizing (tokenBase) applies; on "}" control returns to
// `currentTokenizer` (the tokenizer that was active when #{ was seen).
var buildInterpolationTokenizer = function(currentTokenizer){
  return function(stream, state){
    if (stream.peek() === "}"){
      stream.next();
      state.tokenizer = currentTokenizer;
      return "operator";
    }else{
      return tokenBase(stream, state);
    }
  };
};
// Open one new indentation scope (at most once per line: guarded by
// indentCount, which tokenLexer resets to 0 at the start of each line).
var indent = function(state){
  if (state.indentCount == 0){
    state.indentCount++;
    var lastScopeOffset = state.scopes[0].offset;
    var currentOffset = lastScopeOffset + config.indentUnit;
    state.scopes.unshift({ offset:currentOffset });
  }
};
// Close the innermost indentation scope; the outermost (base) scope is
// never removed.
var dedent = function(state){
  if (state.scopes.length == 1) { return; }

  state.scopes.shift();
};
// Main tokenizer: dispatches on the next character / pattern and returns a
// CodeMirror style string ("comment", "atom", "number", "keyword", ...).
// Some branches hand off to a sub-tokenizer via state.tokenizer.
var tokenBase = function(stream, state) {
  var ch = stream.peek();

  // Single line Comment
  if (stream.match('//')) {
    stream.skipToEnd();
    return "comment";
  }

  // Multiline Comment
  if (stream.match('/*')){
    state.tokenizer = multilineComment;
    return state.tokenizer(stream, state);
  }

  // Interpolation
  if (stream.match('#{')){
    state.tokenizer = buildInterpolationTokenizer(tokenBase);
    return "operator";
  }

  if (ch === "."){
    stream.next();

    // Match class selectors
    if (stream.match(/^[\w-]+/)){
      indent(state);
      return "atom";
    }else if (stream.peek() === "#"){
      indent(state);
      return "atom";
    }else{
      return "operator";
    }
  }

  if (ch === "#"){
    stream.next();

    // Hex color literals (#abc / #aabbcc)
    if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/)){
      return "number";
    }

    // ID selectors
    if (stream.match(/^[\w-]+/)){
      indent(state);
      return "atom";
    }

    if (stream.peek() === "#"){
      indent(state);
      return "atom";
    }
  }

  // Numbers
  if (stream.match(/^-?[0-9\.]+/)){
    return "number";
  }

  // Units
  if (stream.match(/^(px|em|in)\b/)){
    return "unit";
  }

  if (stream.match(keywordsRegexp)){
    return "keyword";
  }

  if (stream.match(/^url/) && stream.peek() === "("){
    state.tokenizer = urlTokens;
    return "atom";
  }

  // Variables ($name: definition vs. $name use)
  if (ch === "$"){
    stream.next();
    stream.eatWhile(/[\w-]/);

    if (stream.peek() === ":"){
      stream.next();
      return "variable-2";
    }else{
      return "variable-3";
    }
  }

  // !important / !default style flags
  if (ch === "!"){
    stream.next();

    if (stream.match(/^[\w]+/)){
      return "keyword";
    }

    return "operator";
  }

  if (ch === "="){
    stream.next();

    // Match shortcut mixin definition
    if (stream.match(/^[\w-]+/)){
      indent(state);
      return "meta";
    }else{
      return "operator";
    }
  }

  if (ch === "+"){
    stream.next();

    // Match shortcut mixin definition
    if (stream.match(/^[\w-]+/)){
      return "variable-3";
    }else{
      return "operator";
    }
  }

  // Indenting directives open a scope
  if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)){
    indent(state);
    return "meta";
  }

  // Other @-directives
  if (ch === "@"){
    stream.next();
    stream.eatWhile(/[\w-]/);
    return "meta";
  }

  // Strings
  if (ch === '"' || ch === "'"){
    stream.next();
    state.tokenizer = buildStringTokenizer(ch);
    return "string";
  }

  // Pseudo element selectors
  if (ch == ':' && stream.match(pseudoElementsRegexp)){
    return "keyword";
  }

  // atoms
  if (stream.eatWhile(/[\w-&]/)){
    // matches a property definition
    if (stream.peek() === ":" && !stream.match(pseudoElementsRegexp, false))
      return "property";
    else
      return "atom";
  }

  if (stream.match(opRegexp)){
    return "operator";
  }

  // If we haven't returned by now, we move 1 character
  // and return an error
  stream.next();
  return "error";
};
// Wraps state.tokenizer: runs it, then applies indent/dedent bookkeeping and
// prunes any scopes deeper than the current token's effective indentation.
var tokenLexer = function(stream, state) {
  if (stream.sol()){
    // New line: allow indent() to open at most one new scope on this line.
    state.indentCount = 0;
  }

  var style = state.tokenizer(stream, state);
  var current = stream.current();

  if (current === "@return"){
    dedent(state);
  }

  if (style === "atom"){
    indent(state);
  }

  if (style !== null){
    var startOfToken = stream.pos - current.length;
    var withCurrentIndent = startOfToken + (config.indentUnit * state.indentCount);

    // Drop scopes whose offset exceeds the token's indentation.
    var newScopes = [];

    for (var i = 0; i < state.scopes.length; i++){
      var scope = state.scopes[i];

      if (scope.offset <= withCurrentIndent){
        newScopes.push(scope);
      }
    }

    state.scopes = newScopes;
  }

  return style;
};
308 startState: function() {
310 tokenizer: tokenBase,
311 scopes: [{offset: 0, type: 'sass'}],
316 token: function(stream, state) {
317 var style = tokenLexer(stream, state);
319 state.lastToken = { style: style, content: stream.current() };
324 indent: function(state) {
325 return state.scopes[0].offset;
// Register the conventional MIME type for Sass with this mode.
CodeMirror.defineMIME("text/x-sass", "sass");