sass.js

(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
"use strict";

CodeMirror.defineMode("sass", function(config) {
  var tokenRegexp = function(words){
    return new RegExp("^" + words.join("|"));
  };

  var keywords = ["true", "false", "null", "auto"];
  var keywordsRegexp = new RegExp("^" + keywords.join("|"));

  var operators = ["\\(", "\\)", "=", ">", "<", "==", ">=", "<=", "\\+", "-", "\\!=", "/", "\\*", "%", "and", "or", "not"];
  var opRegexp = tokenRegexp(operators);

  var pseudoElementsRegexp = /^::?[\w\-]+/;
  // Tokenizer for the contents of url(...): handles the parentheses and
  // delegates quoted or unquoted URLs to a string tokenizer.
  var urlTokens = function(stream, state){
    var ch = stream.peek();

    if (ch === ")"){
      stream.next();
      state.tokenizer = tokenBase;
      return "operator";
    }else if (ch === "("){
      stream.next();
      stream.eatSpace();

      return "operator";
    }else if (ch === "'" || ch === '"'){
      state.tokenizer = buildStringTokenizer(stream.next());
      return "string";
    }else{
      state.tokenizer = buildStringTokenizer(")", false);
      return "string";
    }
  };
  // Tokenizer for the inside of a /* ... */ comment; hands control back to
  // tokenBase once the closing delimiter has been consumed.
  var multilineComment = function(stream, state) {
    if (stream.skipTo("*/")){
      stream.next();
      stream.next();
      state.tokenizer = tokenBase;
    }else {
      stream.next();
    }

    return "comment";
  };
  // Returns a tokenizer for a string delimited by `quote`. When `greedy` is
  // false the closing delimiter is left on the stream (used for unquoted
  // url() arguments, which are terminated by ")"). Hands off to the
  // interpolation tokenizer when "#{" appears inside the string.
  var buildStringTokenizer = function(quote, greedy){
    if(greedy == null){ greedy = true; }

    function stringTokenizer(stream, state){
      var nextChar = stream.next();
      var peekChar = stream.peek();
      var previousChar = stream.string.charAt(stream.pos-2);

      // The string ends when the delimiter is reached and is not escaped.
      var endingString = ((nextChar !== "\\" && peekChar === quote) || (nextChar === quote && previousChar !== "\\"));

      if (endingString){
        if (nextChar !== quote && greedy) { stream.next(); }
        state.tokenizer = tokenBase;
        return "string";
      }else if (nextChar === "#" && peekChar === "{"){
        state.tokenizer = buildInterpolationTokenizer(stringTokenizer);
        stream.next();
        return "operator";
      }else {
        return "string";
      }
    }

    return stringTokenizer;
  };
  // Returns a tokenizer for a #{ ... } interpolation block: the body is
  // tokenized with tokenBase and control returns to `currentTokenizer` at "}".
  var buildInterpolationTokenizer = function(currentTokenizer){
    return function(stream, state){
      if (stream.peek() === "}"){
        stream.next();
        state.tokenizer = currentTokenizer;
        return "operator";
      }else{
        return tokenBase(stream, state);
      }
    };
  };
  // Push a new scope one indentUnit deeper, at most once per line.
  var indent = function(state){
    if (state.indentCount == 0){
      state.indentCount++;
      var lastScopeOffset = state.scopes[0].offset;
      var currentOffset = lastScopeOffset + config.indentUnit;

      state.scopes.unshift({ offset:currentOffset });
    }
  };

  // Pop the innermost scope, but never the top-level one.
  var dedent = function(state){
    if (state.scopes.length == 1) { return; }

    state.scopes.shift();
  };
  var tokenBase = function(stream, state) {
    var ch = stream.peek();

    // Single line Comment
    if (stream.match('//')) {
      stream.skipToEnd();
      return "comment";
    }

    // Multiline Comment
    if (stream.match('/*')){
      state.tokenizer = multilineComment;
      return state.tokenizer(stream, state);
    }

    // Interpolation
    if (stream.match('#{')){
      state.tokenizer = buildInterpolationTokenizer(tokenBase);
      return "operator";
    }

    if (ch === "."){
      stream.next();

      // Match class selectors
      if (stream.match(/^[\w-]+/)){
        indent(state);
        return "atom";
      }else if (stream.peek() === "#"){
        indent(state);
        return "atom";
      }else{
        return "operator";
      }
    }

    if (ch === "#"){
      stream.next();

      // Hex numbers
      if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/)){
        return "number";
      }

      // ID selectors
      if (stream.match(/^[\w-]+/)){
        indent(state);
        return "atom";
      }

      if (stream.peek() === "#"){
        indent(state);
        return "atom";
      }
    }
    // Numbers
    if (stream.match(/^-?[0-9\.]+/)){
      return "number";
    }

    // Units
    if (stream.match(/^(px|em|in)\b/)){
      return "unit";
    }

    // Keyword values: true, false, null, auto
    if (stream.match(keywordsRegexp)){
      return "keyword";
    }

    // url(...) values
    if (stream.match(/^url/) && stream.peek() === "("){
      state.tokenizer = urlTokens;
      return "atom";
    }

    // Variables: "$name:" is a definition, "$name" a reference
    if (ch === "$"){
      stream.next();
      stream.eatWhile(/[\w-]/);

      if (stream.peek() === ":"){
        stream.next();
        return "variable-2";
      }else{
        return "variable-3";
      }
    }

    // Flags such as !important or !default
    if (ch === "!"){
      stream.next();

      if (stream.match(/^[\w]+/)){
        return "keyword";
      }

      return "operator";
    }
    if (ch === "="){
      stream.next();

      // Match shortcut mixin definition
      if (stream.match(/^[\w-]+/)){
        indent(state);
        return "meta";
      }else {
        return "operator";
      }
    }

    if (ch === "+"){
      stream.next();

      // Match shortcut mixin inclusion
      if (stream.match(/^[\w-]+/)){
        return "variable-3";
      }else {
        return "operator";
      }
    }

    // Directives that open an indented block
    if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)){
      indent(state);
      return "meta";
    }

    // Other Directives
    if (ch === "@"){
      stream.next();
      stream.eatWhile(/[\w-]/);
      return "meta";
    }

    // Strings
    if (ch === '"' || ch === "'"){
      stream.next();
      state.tokenizer = buildStringTokenizer(ch);
      return "string";
    }

    // Pseudo element selectors
    if (ch == ':' && stream.match(pseudoElementsRegexp)){
      return "keyword";
    }

    // atoms
    if (stream.eatWhile(/[\w-&]/)){
      // matches a property definition
      if (stream.peek() === ":" && !stream.match(pseudoElementsRegexp, false))
        return "property";
      else
        return "atom";
    }

    if (stream.match(opRegexp)){
      return "operator";
    }

    // If we haven't returned by now, we move 1 character
    // and return an error
    stream.next();
    return null;
  };
  // Runs the current tokenizer and maintains the indentation scopes:
  // "@return" dedents, "atom" tokens indent, and scopes indented past the
  // current token are dropped.
  var tokenLexer = function(stream, state) {
    if (stream.sol()){
      state.indentCount = 0;
    }

    var style = state.tokenizer(stream, state);
    var current = stream.current();

    if (current === "@return"){
      dedent(state);
    }

    if (style === "atom"){
      indent(state);
    }

    if (style !== null){
      var startOfToken = stream.pos - current.length;
      var withCurrentIndent = startOfToken + (config.indentUnit * state.indentCount);

      var newScopes = [];

      for (var i = 0; i < state.scopes.length; i++){
        var scope = state.scopes[i];

        if (scope.offset <= withCurrentIndent){
          newScopes.push(scope);
        }
      }

      state.scopes = newScopes;
    }

    return style;
  };
  return {
    startState: function() {
      return {
        tokenizer: tokenBase,
        scopes: [{offset: 0, type: 'sass'}],
        definedVars: [],
        definedMixins: []
      };
    },

    token: function(stream, state) {
      var style = tokenLexer(stream, state);

      state.lastToken = { style: style, content: stream.current() };

      return style;
    },

    indent: function(state) {
      return state.scopes[0].offset;
    }
  };
});
CodeMirror.defineMIME("text/x-sass", "sass");

});
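
A minimal usage sketch, not part of sass.js itself: assuming codemirror.js and this mode file are both loaded on the page, the mode can be selected by name or by the MIME type registered above. The element id and the option values below are illustrative, not taken from the file.

var editor = CodeMirror.fromTextArea(document.getElementById("sass-input"), {
  mode: "text/x-sass",   // or simply "sass"
  indentUnit: 2,         // read by the mode via config.indentUnit when tracking scopes
  lineNumbers: true
});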