Update ACE Editor to version 1.2.0
Previously, we were using an ACE build published somewhere between 1.1.8 and 1.1.9. This caused multiple issues and was especially a problem for the upcoming pair programming feature. Furthermore, updating ACE has been a long-standing priority, see https://github.com/openHPI/codeocean/issues/250.

We are not yet updating to the latest version, but only to the next minor release. It already contains breaking changes, and we currently want to keep the number of changes as small as possible; updating ACE further remains a future task. The new ACE version 1.2.0 is taken from this tag: https://github.com/ajaxorg/ace-builds/releases/tag/v1.2.0. We use the src build (neither minified nor the noconflict variant), since that is what was used before.

In addition, we need to adjust our migration for storing editor events. Since the table is not yet used in production, we also update the enum.
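For reference, a minimal usage sketch (not part of this commit; the container id and theme below are assumptions, not taken from CodeOcean) showing how the updated src build is consumed through ACE's standard API:

// Hypothetical example: attach ACE 1.2.0 to a page element and enable the LaTeX mode
// shipped in the vendored build. The element id "editor" and the theme are assumptions.
var editor = ace.edit("editor");                   // bind ACE to <div id="editor">
editor.setTheme("ace/theme/textmate");             // any bundled theme file works the same way
editor.getSession().setMode("ace/mode/latex");     // uses the Mode exported by mode-latex.js
editor.getSession().on("change", function(delta) {
    console.log(delta);                            // change deltas, e.g. for recording editor events
});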
vendor/assets/javascripts/ace/mode-latex.js (vendored, Executable file → Normal file, 224 changed lines)
@@ -1 +1,223 @@
define("ace/mode/latex_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("./text_highlight_rules").TextHighlightRules,s=function(){this.$rules={start:[{token:"comment",regex:"%.*$"},{token:["keyword","lparen","variable.parameter","rparen","lparen","storage.type","rparen"],regex:"(\\\\(?:documentclass|usepackage|input))(?:(\\[)([^\\]]*)(\\]))?({)([^}]*)(})"},{token:["keyword","lparen","variable.parameter","rparen"],regex:"(\\\\(?:label|v?ref|cite(?:[^{]*)))(?:({)([^}]*)(}))?"},{token:["storage.type","lparen","variable.parameter","rparen"],regex:"(\\\\(?:begin|end))({)(\\w*)(})"},{token:"storage.type",regex:"\\\\[a-zA-Z]+"},{token:"lparen",regex:"[[({]"},{token:"rparen",regex:"[\\])}]"},{token:"constant.character.escape",regex:"\\\\[^a-zA-Z]?"},{token:"string",regex:"\\${1,2}",next:"equation"}],equation:[{token:"comment",regex:"%.*$"},{token:"string",regex:"\\${1,2}",next:"start"},{token:"constant.character.escape",regex:"\\\\(?:[^a-zA-Z]|[a-zA-Z]+)"},{token:"error",regex:"^\\s*$",next:"start"},{defaultToken:"string"}]}};r.inherits(s,i),t.LatexHighlightRules=s}),define("ace/mode/folding/latex",["require","exports","module","ace/lib/oop","ace/mode/folding/fold_mode","ace/range","ace/token_iterator"],function(e,t,n){"use strict";var r=e("../../lib/oop"),i=e("./fold_mode").FoldMode,s=e("../../range").Range,o=e("../../token_iterator").TokenIterator,u=t.FoldMode=function(){};r.inherits(u,i),function(){this.foldingStartMarker=/^\s*\\(begin)|(section|subsection|paragraph)\b|{\s*$/,this.foldingStopMarker=/^\s*\\(end)\b|^\s*}/,this.getFoldWidgetRange=function(e,t,n){var r=e.doc.getLine(n),i=this.foldingStartMarker.exec(r);if(i)return i[1]?this.latexBlock(e,n,i[0].length-1):i[2]?this.latexSection(e,n,i[0].length-1):this.openingBracketBlock(e,"{",n,i.index);var i=this.foldingStopMarker.exec(r);if(i)return i[1]?this.latexBlock(e,n,i[0].length-1):this.closingBracketBlock(e,"}",n,i.index+i[0].length)},this.latexBlock=function(e,t,n){var r={"\\begin":1,"\\end":-1},i=new o(e,t,n),u=i.getCurrentToken();if(!u||u.type!="storage.type"&&u.type!="constant.character.escape")return;var a=u.value,f=r[a],l=function(){var e=i.stepForward(),t=e.type=="lparen"?i.stepForward().value:"";return f===-1&&(i.stepBackward(),t&&i.stepBackward()),t},c=[l()],h=f===-1?i.getCurrentTokenColumn():e.getLine(t).length,p=t;i.step=f===-1?i.stepBackward:i.stepForward;while(u=i.step()){if(!u||u.type!="storage.type"&&u.type!="constant.character.escape")continue;var d=r[u.value];if(!d)continue;var v=l();if(d===f)c.unshift(v);else if(c.shift()!==v||!c.length)break}if(c.length)return;var t=i.getCurrentTokenRow();return f===-1?new s(t,e.getLine(t).length,p,h):(i.stepBackward(),new s(p,h,t,i.getCurrentTokenColumn()))},this.latexSection=function(e,t,n){var r=["\\subsection","\\section","\\begin","\\end","\\paragraph"],i=new o(e,t,n),u=i.getCurrentToken();if(!u||u.type!="storage.type")return;var a=r.indexOf(u.value),f=0,l=t;while(u=i.stepForward()){if(u.type!=="storage.type")continue;var c=r.indexOf(u.value);if(c>=2){f||(l=i.getCurrentTokenRow()-1),f+=c==2?1:-1;if(f<0)break}else if(c>=a)break}f||(l=i.getCurrentTokenRow()-1);while(l>t&&!/\S/.test(e.getLine(l)))l--;return new s(t,e.getLine(t).length,l,e.getLine(l).length)}}.call(u.prototype)}),define("ace/mode/latex",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/latex_highlight_rules","ace/mode/folding/latex","ace/range"],function(e,t,n){"use strict";var 
r=e("../lib/oop"),i=e("./text").Mode,s=e("./latex_highlight_rules").LatexHighlightRules,o=e("./folding/latex").FoldMode,u=e("../range").Range,a=function(){this.HighlightRules=s,this.foldingRules=new o};r.inherits(a,i),function(){this.type="text",this.lineCommentStart="%",this.$id="ace/mode/latex"}.call(a.prototype),t.Mode=a})
define("ace/mode/latex_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"], function(require, exports, module) {
|
||||
"use strict";
|
||||
|
||||
var oop = require("../lib/oop");
|
||||
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;
|
||||
|
||||
var LatexHighlightRules = function() {
|
||||
|
||||
this.$rules = {
|
||||
"start" : [{
|
||||
token : "comment",
|
||||
regex : "%.*$"
|
||||
}, {
|
||||
token : ["keyword", "lparen", "variable.parameter", "rparen", "lparen", "storage.type", "rparen"],
|
||||
regex : "(\\\\(?:documentclass|usepackage|input))(?:(\\[)([^\\]]*)(\\]))?({)([^}]*)(})"
|
||||
}, {
|
||||
token : ["keyword","lparen", "variable.parameter", "rparen"],
|
||||
regex : "(\\\\(?:label|v?ref|cite(?:[^{]*)))(?:({)([^}]*)(}))?"
|
||||
}, {
|
||||
token : ["storage.type", "lparen", "variable.parameter", "rparen"],
|
||||
regex : "(\\\\(?:begin|end))({)(\\w*)(})"
|
||||
}, {
|
||||
token : "storage.type",
|
||||
regex : "\\\\[a-zA-Z]+"
|
||||
}, {
|
||||
token : "lparen",
|
||||
regex : "[[({]"
|
||||
}, {
|
||||
token : "rparen",
|
||||
regex : "[\\])}]"
|
||||
}, {
|
||||
token : "constant.character.escape",
|
||||
regex : "\\\\[^a-zA-Z]?"
|
||||
}, {
|
||||
token : "string",
|
||||
regex : "\\${1,2}",
|
||||
next : "equation"
|
||||
}],
|
||||
"equation" : [{
|
||||
token : "comment",
|
||||
regex : "%.*$"
|
||||
}, {
|
||||
token : "string",
|
||||
regex : "\\${1,2}",
|
||||
next : "start"
|
||||
}, {
|
||||
token : "constant.character.escape",
|
||||
regex : "\\\\(?:[^a-zA-Z]|[a-zA-Z]+)"
|
||||
}, {
|
||||
token : "error",
|
||||
regex : "^\\s*$",
|
||||
next : "start"
|
||||
}, {
|
||||
defaultToken : "string"
|
||||
}]
|
||||
|
||||
};
|
||||
};
|
||||
oop.inherits(LatexHighlightRules, TextHighlightRules);
|
||||
|
||||
exports.LatexHighlightRules = LatexHighlightRules;
|
||||
|
||||
});
|
||||
|
||||
define("ace/mode/folding/latex",["require","exports","module","ace/lib/oop","ace/mode/folding/fold_mode","ace/range","ace/token_iterator"], function(require, exports, module) {
|
||||
"use strict";
|
||||
|
||||
var oop = require("../../lib/oop");
|
||||
var BaseFoldMode = require("./fold_mode").FoldMode;
|
||||
var Range = require("../../range").Range;
|
||||
var TokenIterator = require("../../token_iterator").TokenIterator;
|
||||
|
||||
var FoldMode = exports.FoldMode = function() {};
|
||||
|
||||
oop.inherits(FoldMode, BaseFoldMode);
|
||||
|
||||
(function() {
|
||||
|
||||
this.foldingStartMarker = /^\s*\\(begin)|(section|subsection|paragraph)\b|{\s*$/;
|
||||
this.foldingStopMarker = /^\s*\\(end)\b|^\s*}/;
|
||||
|
||||
this.getFoldWidgetRange = function(session, foldStyle, row) {
|
||||
var line = session.doc.getLine(row);
|
||||
var match = this.foldingStartMarker.exec(line);
|
||||
if (match) {
|
||||
if (match[1])
|
||||
return this.latexBlock(session, row, match[0].length - 1);
|
||||
if (match[2])
|
||||
return this.latexSection(session, row, match[0].length - 1);
|
||||
|
||||
return this.openingBracketBlock(session, "{", row, match.index);
|
||||
}
|
||||
|
||||
var match = this.foldingStopMarker.exec(line);
|
||||
if (match) {
|
||||
if (match[1])
|
||||
return this.latexBlock(session, row, match[0].length - 1);
|
||||
|
||||
return this.closingBracketBlock(session, "}", row, match.index + match[0].length);
|
||||
}
|
||||
};
|
||||
|
||||
this.latexBlock = function(session, row, column) {
|
||||
var keywords = {
|
||||
"\\begin": 1,
|
||||
"\\end": -1
|
||||
};
|
||||
|
||||
var stream = new TokenIterator(session, row, column);
|
||||
var token = stream.getCurrentToken();
|
||||
if (!token || !(token.type == "storage.type" || token.type == "constant.character.escape"))
|
||||
return;
|
||||
|
||||
var val = token.value;
|
||||
var dir = keywords[val];
|
||||
|
||||
var getType = function() {
|
||||
var token = stream.stepForward();
|
||||
var type = token.type == "lparen" ?stream.stepForward().value : "";
|
||||
if (dir === -1) {
|
||||
stream.stepBackward();
|
||||
if (type)
|
||||
stream.stepBackward();
|
||||
}
|
||||
return type;
|
||||
};
|
||||
var stack = [getType()];
|
||||
var startColumn = dir === -1 ? stream.getCurrentTokenColumn() : session.getLine(row).length;
|
||||
var startRow = row;
|
||||
|
||||
stream.step = dir === -1 ? stream.stepBackward : stream.stepForward;
|
||||
while(token = stream.step()) {
|
||||
if (!token || !(token.type == "storage.type" || token.type == "constant.character.escape"))
|
||||
continue;
|
||||
var level = keywords[token.value];
|
||||
if (!level)
|
||||
continue;
|
||||
var type = getType();
|
||||
if (level === dir)
|
||||
stack.unshift(type);
|
||||
else if (stack.shift() !== type || !stack.length)
|
||||
break;
|
||||
}
|
||||
|
||||
if (stack.length)
|
||||
return;
|
||||
|
||||
var row = stream.getCurrentTokenRow();
|
||||
if (dir === -1)
|
||||
return new Range(row, session.getLine(row).length, startRow, startColumn);
|
||||
stream.stepBackward();
|
||||
return new Range(startRow, startColumn, row, stream.getCurrentTokenColumn());
|
||||
};
|
||||
|
||||
this.latexSection = function(session, row, column) {
|
||||
var keywords = ["\\subsection", "\\section", "\\begin", "\\end", "\\paragraph"];
|
||||
|
||||
var stream = new TokenIterator(session, row, column);
|
||||
var token = stream.getCurrentToken();
|
||||
if (!token || token.type != "storage.type")
|
||||
return;
|
||||
|
||||
var startLevel = keywords.indexOf(token.value);
|
||||
var stackDepth = 0
|
||||
var endRow = row;
|
||||
|
||||
while(token = stream.stepForward()) {
|
||||
if (token.type !== "storage.type")
|
||||
continue;
|
||||
var level = keywords.indexOf(token.value);
|
||||
|
||||
if (level >= 2) {
|
||||
if (!stackDepth)
|
||||
endRow = stream.getCurrentTokenRow() - 1;
|
||||
stackDepth += level == 2 ? 1 : - 1;
|
||||
if (stackDepth < 0)
|
||||
break
|
||||
} else if (level >= startLevel)
|
||||
break;
|
||||
}
|
||||
|
||||
if (!stackDepth)
|
||||
endRow = stream.getCurrentTokenRow() - 1;
|
||||
|
||||
while (endRow > row && !/\S/.test(session.getLine(endRow)))
|
||||
endRow--;
|
||||
|
||||
return new Range(
|
||||
row, session.getLine(row).length,
|
||||
endRow, session.getLine(endRow).length
|
||||
);
|
||||
};
|
||||
|
||||
}).call(FoldMode.prototype);
|
||||
|
||||
});
|
||||
|
||||
define("ace/mode/latex",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/latex_highlight_rules","ace/mode/folding/latex","ace/range"], function(require, exports, module) {
|
||||
"use strict";
|
||||
|
||||
var oop = require("../lib/oop");
|
||||
var TextMode = require("./text").Mode;
|
||||
var LatexHighlightRules = require("./latex_highlight_rules").LatexHighlightRules;
|
||||
var LatexFoldMode = require("./folding/latex").FoldMode;
|
||||
var Range = require("../range").Range;
|
||||
|
||||
var Mode = function() {
|
||||
this.HighlightRules = LatexHighlightRules;
|
||||
this.foldingRules = new LatexFoldMode();
|
||||
};
|
||||
oop.inherits(Mode, TextMode);
|
||||
|
||||
(function() {
|
||||
this.type = "text";
|
||||
|
||||
this.lineCommentStart = "%";
|
||||
|
||||
this.$id = "ace/mode/latex";
|
||||
}).call(Mode.prototype);
|
||||
|
||||
exports.Mode = Mode;
|
||||
|
||||
});
|
||||
|