// CodeMirror, copyright (c) by Marijn Haverbeke and others
// Distributed under an MIT license: http://codemirror.net/LICENSE
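
// Indentation-based Sass (.sass syntax) mode. The active tokenizer is kept in
// state.tokenizer, so the helpers for strings, comments, url(...) and #{...}
// interpolation can temporarily take over the stream and then hand control
// back to tokenBase.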

(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
"use strict";

CodeMirror.defineMode("sass", function(config) {
  function tokenRegexp(words) {
    return new RegExp("^" + words.join("|"));
  }

  var keywords = ["true", "false", "null", "auto"];
  var keywordsRegexp = new RegExp("^" + keywords.join("|"));

  var operators = ["\\(", "\\)", "=", ">", "<", "==", ">=", "<=", "\\+", "-",
                   "\\!=", "/", "\\*", "%", "and", "or", "not", ";", "\\{", "\\}", ":"];
  var opRegexp = tokenRegexp(operators);

  var pseudoElementsRegexp = /^::?[a-zA-Z_][\w\-]*/;
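
  // Tokenizer used inside url(...): the opening "(" and closing ")" are styled
  // as operators, and everything in between is treated as a string, whether it
  // is quoted or not.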
  function urlTokens(stream, state) {
    var ch = stream.peek();

    if (ch === ")") {
      stream.next();
      state.tokenizer = tokenBase;
      return "operator";
    } else if (ch === "(") {
      stream.next();
      stream.eatSpace();
      return "operator";
    } else if (ch === "'" || ch === '"') {
      state.tokenizer = buildStringTokenizer(stream.next());
      return "string";
    } else {
      state.tokenizer = buildStringTokenizer(")", false);
      return "string";
    }
  }
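
  // Builds a tokenizer for "//" and "/* ... */" comments. The comment ends
  // when a new line is indented at or below `indentation`, or at "*/" for
  // multi-line comments.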
  function comment(indentation, multiLine) {
    return function(stream, state) {
      if (stream.sol() && stream.indentation() <= indentation) {
        state.tokenizer = tokenBase;
        return tokenBase(stream, state);
      }

      if (multiLine && stream.skipTo("*/")) {
        stream.next();
        stream.next();
        state.tokenizer = tokenBase;
      } else {
        stream.skipToEnd();
      }

      return "comment";
    };
  }
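
  // Builds a tokenizer for a string delimited by `quote`. A greedy tokenizer
  // also consumes the closing quote; the non-greedy variant (used for unquoted
  // url(...) arguments) stops in front of it. "#{" switches to the
  // interpolation tokenizer.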
  function buildStringTokenizer(quote, greedy) {
    if (greedy == null) { greedy = true; }

    function stringTokenizer(stream, state) {
      var nextChar = stream.next();
      var peekChar = stream.peek();
      var previousChar = stream.string.charAt(stream.pos - 2);

      var endingString = ((nextChar !== "\\" && peekChar === quote) ||
                          (nextChar === quote && previousChar !== "\\"));

      if (endingString) {
        if (nextChar !== quote && greedy) { stream.next(); }
        state.tokenizer = tokenBase;
        return "string";
      } else if (nextChar === "#" && peekChar === "{") {
        state.tokenizer = buildInterpolationTokenizer(stringTokenizer);
        stream.next();
        return "operator";
      } else {
        return "string";
      }
    }

    return stringTokenizer;
  }
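
  // Builds a tokenizer for a #{...} interpolation: the contents are tokenized
  // with tokenBase, and "}" hands control back to `currentTokenizer`.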
  function buildInterpolationTokenizer(currentTokenizer) {
    return function(stream, state) {
      if (stream.peek() === "}") {
        stream.next();
        state.tokenizer = currentTokenizer;
        return "operator";
      } else {
        return tokenBase(stream, state);
      }
    };
  }
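
  // indent()/dedent() push and pop indentation scopes. indent() adds at most
  // one scope per line (guarded by state.indentCount, which tokenLexer resets
  // at the start of each line); dedent() never removes the outermost scope.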
  function indent(state) {
    if (state.indentCount == 0) {
      state.indentCount++;
      var lastScopeOffset = state.scopes[0].offset;
      var currentOffset = lastScopeOffset + config.indentUnit;
      state.scopes.unshift({ offset: currentOffset });
    }
  }

  function dedent(state) {
    if (state.scopes.length == 1) return;
    state.scopes.shift();
  }
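
  // Main tokenizer. state.cursorHalf tracks whether the current position is
  // before (0) or after (1) the colon of a property, which determines how
  // identifiers, "#", "$" and friends are styled.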
  function tokenBase(stream, state) {
    var ch = stream.peek();

    // Comment
    if (stream.match("/*")) {
      state.tokenizer = comment(stream.indentation(), true);
      return state.tokenizer(stream, state);
    }
    if (stream.match("//")) {
      state.tokenizer = comment(stream.indentation(), false);
      return state.tokenizer(stream, state);
    }

    // Interpolation
    if (stream.match("#{")) {
      state.tokenizer = buildInterpolationTokenizer(tokenBase);
      return "operator";
    }

    // Strings
    if (ch === '"' || ch === "'") {
      stream.next();
      state.tokenizer = buildStringTokenizer(ch);
      return "string";
    }

    if (!state.cursorHalf) { // state.cursorHalf === 0
      // first half i.e. before ":" for key-value pairs,
      // including selectors

      if (ch === ".") {
        stream.next();
        if (stream.match(/^[\w-]+/)) {
          indent(state);
          return "atom";
        } else if (stream.peek() === "#") {
          indent(state);
          return "atom";
        }
      }

      if (ch === "#") {
        stream.next();
        // ID selectors
        if (stream.match(/^[\w-]+/)) {
          indent(state);
          return "atom";
        }
        if (stream.peek() === "#") {
          indent(state);
          return "atom";
        }
      }

      // Variables
      if (ch === "$") {
        stream.next();
        stream.eatWhile(/[\w-]/);
        return "variable-2";
      }

      // Numbers
      if (stream.match(/^-?[0-9\.]+/))
        return "number";

      // Units
      if (stream.match(/^(px|em|in)\b/))
        return "unit";

      if (stream.match(keywordsRegexp))
        return "keyword";

      if (stream.match(/^url/) && stream.peek() === "(") {
        state.tokenizer = urlTokens;
        return "atom";
      }

      if (ch === "=") {
        // Match shortcut mixin definition
        if (stream.match(/^=[\w-]+/)) {
          indent(state);
          return "meta";
        }
      }

      if (ch === "+") {
        // Match shortcut mixin include
        if (stream.match(/^\+[\w-]+/)) {
          return "variable-3";
        }
      }

      if (ch === "@") {
        if (stream.match(/@extend/)) {
          if (!stream.match(/\s*[\w]/))
            dedent(state);
        }
      }

      // Indent Directives
      if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)) {
        indent(state);
        return "meta";
      }

      // Other Directives
      if (ch === "@") {
        stream.next();
        stream.eatWhile(/[\w-]/);
        return "meta";
      }

      if (stream.eatWhile(/[\w-]/)) {
        if (stream.match(/ *: *[\w-\+\$#!\("']/, false)) {
          return "property";
        } else if (stream.match(/ *:/, false)) {
          indent(state);
          state.cursorHalf = 1;
          return "atom";
        } else if (stream.match(/ *,/, false)) {
          return "atom";
        } else {
          indent(state);
          return "atom";
        }
      }

      if (ch === ":") {
        if (stream.match(pseudoElementsRegexp)) { // could be a pseudo-element
          return "keyword";
        }
        stream.next();
        state.cursorHalf = 1;
        return "operator";
      }
    } // cursorHalf === 0 ends here
    else {
      // second half i.e. after ":" for key-value pairs: property values

      if (ch === "#") {
        stream.next();
        // Hex numbers
        if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/)) {
          if (!stream.peek()) {
            state.cursorHalf = 0;
          }
          return "number";
        }
      }

      // Numbers
      if (stream.match(/^-?[0-9\.]+/)) {
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "number";
      }

      // Units
      if (stream.match(/^(px|em|in)\b/)) {
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "unit";
      }

      if (stream.match(keywordsRegexp)) {
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "keyword";
      }

      if (stream.match(/^url/) && stream.peek() === "(") {
        state.tokenizer = urlTokens;
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "atom";
      }

      // Variables
      if (ch === "$") {
        stream.next();
        stream.eatWhile(/[\w-]/);
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "variable-3";
      }

      // bang character for !important, !default, etc.
      if (ch === "!") {
        stream.next();
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return stream.match(/^[\w]+/) ? "keyword" : "operator";
      }

      if (stream.match(opRegexp)) {
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "operator";
      }

      // attributes
      if (stream.eatWhile(/[\w-]/)) {
        if (!stream.peek()) {
          state.cursorHalf = 0;
        }
        return "attribute";
      }

      //stream.eatSpace();
      if (!stream.peek()) {
        state.cursorHalf = 0;
        return null;
      }
    } // else ends here

    if (stream.match(opRegexp))
      return "operator";

    // If we haven't returned by now, we move 1 character
    // and return an error
    stream.next();
    return null;
  }
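
  // Runs the active tokenizer for one token, resets the per-line indent guard,
  // dedents on "@return" and "}", and drops any scopes that are deeper than
  // the indentation implied by the current token.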
  function tokenLexer(stream, state) {
    if (stream.sol()) state.indentCount = 0;
    var style = state.tokenizer(stream, state);
    var current = stream.current();

    if (current === "@return" || current === "}") {
      dedent(state);
    }

    if (style !== null) {
      var startOfToken = stream.pos - current.length;
      var withCurrentIndent = startOfToken + (config.indentUnit * state.indentCount);

      var newScopes = [];
      for (var i = 0; i < state.scopes.length; i++) {
        var scope = state.scopes[i];
        if (scope.offset <= withCurrentIndent)
          newScopes.push(scope);
      }

      state.scopes = newScopes;
    }

    return style;
  }
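
  // The mode object returned to CodeMirror: fresh per-document state, the
  // token function, and indentation taken from the innermost open scope.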
  return {
    startState: function() {
      return {
        tokenizer: tokenBase,
        scopes: [{offset: 0, type: "sass"}],
        indentCount: 0,
        cursorHalf: 0,  // cursor half tells us if cursor lies after (1)
                        // or before (0) colon (well... more or less)
        definedVars: [],
        definedMixins: []
      };
    },

    token: function(stream, state) {
      var style = tokenLexer(stream, state);
      state.lastToken = { style: style, content: stream.current() };
      return style;
    },

    indent: function(state) {
      return state.scopes[0].offset;
    }
  };
});

CodeMirror.defineMIME("text/x-sass", "sass");
});
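
// Example usage (a minimal sketch, not part of this mode): assuming CodeMirror
// and this script are loaded on the page and a <textarea id="code"> exists,
// an editor using the mode could be created like this:
//
//   var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
//     mode: "text/x-sass",
//     indentUnit: 2,
//     lineNumbers: true
//   });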