Update CodeMirror to version 5.49.0 (#8381)
* Update CodeMirror to version 5.49.0
* Update CodeMirror versions in librejs and VERSIONS
committed by Lauris BH
parent 6fa14ac3c8
commit 1e9b330525
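
For context, a minimal sketch (not part of this commit) of how the updated python mode can be exercised from CodeMirror 5, so the 5.49.0 changes in the diff below (f-string highlighting, underscore digit separators in numeric literals) become visible. The element id "code-editor" and the sample value are illustrative only.

// Assumes lib/codemirror.js and the bundled mode/python/python.js are loaded on the page.
var editor = CodeMirror.fromTextArea(document.getElementById("code-editor"), {
  mode: {name: "python", version: 3}, // "version" is read as parserConf.version by the mode
  lineNumbers: true
});
// Both constructs below get dedicated tokens with the updated mode.
editor.setValue('total = 1_000_000\nprint(f"count: {total + 1}")');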
@@ -1,5 +1,5 @@
 // CodeMirror, copyright (c) by Marijn Haverbeke and others
-// Distributed under an MIT license: http://codemirror.net/LICENSE
+// Distributed under an MIT license: https://codemirror.net/LICENSE
 
 (function(mod) {
   if (typeof exports == "object" && typeof module == "object") // CommonJS
@@ -41,10 +41,11 @@
   CodeMirror.defineMode("python", function(conf, parserConf) {
     var ERRORCLASS = "error";
 
-    var singleDelimiters = parserConf.singleDelimiters || /^[\(\)\[\]\{\}@,:`=;\.]/;
-    var doubleOperators = parserConf.doubleOperators || /^([!<>]==|<>|<<|>>|\/\/|\*\*)/;
-    var doubleDelimiters = parserConf.doubleDelimiters || /^(\+=|\-=|\*=|%=|\/=|&=|\|=|\^=)/;
-    var tripleDelimiters = parserConf.tripleDelimiters || /^(\/\/=|>>=|<<=|\*\*=)/;
+    var delimiters = parserConf.delimiters || parserConf.singleDelimiters || /^[\(\)\[\]\{\}@,:`=;\.\\]/;
+    //               (Backwards-compatiblity with old, cumbersome config system)
+    var operators = [parserConf.singleOperators, parserConf.doubleOperators, parserConf.doubleDelimiters, parserConf.tripleDelimiters,
+                     parserConf.operators || /^([-+*/%\/&|^]=?|[<>=]+|\/\/=?|\*\*=?|!=|[~!@]|\.\.\.)/]
+    for (var i = 0; i < operators.length; i++) if (!operators[i]) operators.splice(i--, 1)
 
     var hangingIndent = parserConf.hangingIndent || conf.indentUnit;
 
@@ -55,37 +56,36 @@
     if (parserConf.extra_builtins != undefined)
       myBuiltins = myBuiltins.concat(parserConf.extra_builtins);
 
-    var py3 = parserConf.version && parseInt(parserConf.version, 10) == 3
+    var py3 = !(parserConf.version && Number(parserConf.version) < 3)
     if (py3) {
       // since http://legacy.python.org/dev/peps/pep-0465/ @ is also an operator
       var singleOperators = parserConf.singleOperators || /^[\+\-\*\/%&|\^~<>!@]/;
       var identifiers = parserConf.identifiers|| /^[_A-Za-z\u00A1-\uFFFF][_A-Za-z0-9\u00A1-\uFFFF]*/;
       myKeywords = myKeywords.concat(["nonlocal", "False", "True", "None", "async", "await"]);
       myBuiltins = myBuiltins.concat(["ascii", "bytes", "exec", "print"]);
-      var stringPrefixes = new RegExp("^(([rbuf]|(br))?('{3}|\"{3}|['\"]))", "i");
+      var stringPrefixes = new RegExp("^(([rbuf]|(br)|(fr))?('{3}|\"{3}|['\"]))", "i");
     } else {
       var singleOperators = parserConf.singleOperators || /^[\+\-\*\/%&|\^~<>!]/;
       var identifiers = parserConf.identifiers|| /^[_A-Za-z][_A-Za-z0-9]*/;
       myKeywords = myKeywords.concat(["exec", "print"]);
       myBuiltins = myBuiltins.concat(["apply", "basestring", "buffer", "cmp", "coerce", "execfile",
                                       "file", "intern", "long", "raw_input", "reduce", "reload",
                                       "unichr", "unicode", "xrange", "False", "True", "None"]);
-      var stringPrefixes = new RegExp("^(([rub]|(ur)|(br))?('{3}|\"{3}|['\"]))", "i");
+      var stringPrefixes = new RegExp("^(([rubf]|(ur)|(br))?('{3}|\"{3}|['\"]))", "i");
     }
     var keywords = wordRegexp(myKeywords);
     var builtins = wordRegexp(myBuiltins);
 
     // tokenizers
     function tokenBase(stream, state) {
-      if (stream.sol()) state.indent = stream.indentation()
+      var sol = stream.sol() && state.lastToken != "\\"
+      if (sol) state.indent = stream.indentation()
       // Handle scope changes
-      if (stream.sol() && top(state).type == "py") {
+      if (sol && top(state).type == "py") {
         var scopeOffset = top(state).offset;
         if (stream.eatSpace()) {
           var lineOffset = stream.indentation();
           if (lineOffset > scopeOffset)
             pushPyScope(state);
-          else if (lineOffset < scopeOffset && dedent(stream, state))
+          else if (lineOffset < scopeOffset && dedent(stream, state) && stream.peek() != "#")
             state.errorToken = true;
           return null;
         } else {
@@ -101,20 +101,15 @@
     function tokenBaseInner(stream, state) {
       if (stream.eatSpace()) return null;
 
-      var ch = stream.peek();
-
       // Handle Comments
-      if (ch == "#") {
-        stream.skipToEnd();
-        return "comment";
-      }
+      if (stream.match(/^#.*/)) return "comment";
 
       // Handle Number Literals
       if (stream.match(/^[0-9\.]/, false)) {
         var floatLiteral = false;
         // Floats
-        if (stream.match(/^\d*\.\d+(e[\+\-]?\d+)?/i)) { floatLiteral = true; }
-        if (stream.match(/^\d+\.\d*/)) { floatLiteral = true; }
+        if (stream.match(/^[\d_]*\.\d+(e[\+\-]?\d+)?/i)) { floatLiteral = true; }
+        if (stream.match(/^[\d_]+\.\d*/)) { floatLiteral = true; }
         if (stream.match(/^\.\d+/)) { floatLiteral = true; }
         if (floatLiteral) {
           // Float literals may be "imaginary"
@@ -124,13 +119,13 @@
         // Integers
         var intLiteral = false;
         // Hex
-        if (stream.match(/^0x[0-9a-f]+/i)) intLiteral = true;
+        if (stream.match(/^0x[0-9a-f_]+/i)) intLiteral = true;
         // Binary
-        if (stream.match(/^0b[01]+/i)) intLiteral = true;
+        if (stream.match(/^0b[01_]+/i)) intLiteral = true;
         // Octal
-        if (stream.match(/^0o[0-7]+/i)) intLiteral = true;
+        if (stream.match(/^0o[0-7_]+/i)) intLiteral = true;
         // Decimal
-        if (stream.match(/^[1-9]\d*(e[\+\-]?\d+)?/)) {
+        if (stream.match(/^[1-9][\d_]*(e[\+\-]?[\d_]+)?/)) {
           // Decimal literals may be "imaginary"
           stream.eat(/J/i);
           // TODO - Can you have imaginary longs?
@@ -147,19 +142,20 @@
 
       // Handle Strings
       if (stream.match(stringPrefixes)) {
-        state.tokenize = tokenStringFactory(stream.current());
-        return state.tokenize(stream, state);
+        var isFmtString = stream.current().toLowerCase().indexOf('f') !== -1;
+        if (!isFmtString) {
+          state.tokenize = tokenStringFactory(stream.current(), state.tokenize);
+          return state.tokenize(stream, state);
+        } else {
+          state.tokenize = formatStringFactory(stream.current(), state.tokenize);
+          return state.tokenize(stream, state);
+        }
       }
 
-      // Handle operators and Delimiters
-      if (stream.match(tripleDelimiters) || stream.match(doubleDelimiters))
-        return "punctuation";
+      for (var i = 0; i < operators.length; i++)
+        if (stream.match(operators[i])) return "operator"
 
-      if (stream.match(doubleOperators) || stream.match(singleOperators))
-        return "operator";
-
-      if (stream.match(singleDelimiters))
-        return "punctuation";
+      if (stream.match(delimiters)) return "punctuation";
 
       if (state.lastToken == "." && stream.match(identifiers))
         return "property";
@@ -184,8 +180,69 @@
       return ERRORCLASS;
     }
 
-    function tokenStringFactory(delimiter) {
-      while ("rub".indexOf(delimiter.charAt(0).toLowerCase()) >= 0)
+    function formatStringFactory(delimiter, tokenOuter) {
+      while ("rubf".indexOf(delimiter.charAt(0).toLowerCase()) >= 0)
         delimiter = delimiter.substr(1);
 
       var singleline = delimiter.length == 1;
+      var OUTCLASS = "string";
+
+      function tokenNestedExpr(depth) {
+        return function(stream, state) {
+          var inner = tokenBaseInner(stream, state)
+          if (inner == "punctuation") {
+            if (stream.current() == "{") {
+              state.tokenize = tokenNestedExpr(depth + 1)
+            } else if (stream.current() == "}") {
+              if (depth > 1) state.tokenize = tokenNestedExpr(depth - 1)
+              else state.tokenize = tokenString
+            }
+          }
+          return inner
+        }
+      }
+
+      function tokenString(stream, state) {
+        while (!stream.eol()) {
+          stream.eatWhile(/[^'"\{\}\\]/);
+          if (stream.eat("\\")) {
+            stream.next();
+            if (singleline && stream.eol())
+              return OUTCLASS;
+          } else if (stream.match(delimiter)) {
+            state.tokenize = tokenOuter;
+            return OUTCLASS;
+          } else if (stream.match('{{')) {
+            // ignore {{ in f-str
+            return OUTCLASS;
+          } else if (stream.match('{', false)) {
+            // switch to nested mode
+            state.tokenize = tokenNestedExpr(0)
+            if (stream.current()) return OUTCLASS;
+            else return state.tokenize(stream, state)
+          } else if (stream.match('}}')) {
+            return OUTCLASS;
+          } else if (stream.match('}')) {
+            // single } in f-string is an error
+            return ERRORCLASS;
+          } else {
+            stream.eat(/['"]/);
+          }
+        }
+        if (singleline) {
+          if (parserConf.singleLineStringErrors)
+            return ERRORCLASS;
+          else
+            state.tokenize = tokenOuter;
+        }
+        return OUTCLASS;
+      }
+      tokenString.isString = true;
+      return tokenString;
+    }
+
+    function tokenStringFactory(delimiter, tokenOuter) {
+      while ("rubf".indexOf(delimiter.charAt(0).toLowerCase()) >= 0)
+        delimiter = delimiter.substr(1);
+
+      var singleline = delimiter.length == 1;
@@ -199,7 +256,7 @@
             if (singleline && stream.eol())
               return OUTCLASS;
           } else if (stream.match(delimiter)) {
-            state.tokenize = tokenBase;
+            state.tokenize = tokenOuter;
             return OUTCLASS;
           } else {
             stream.eat(/['"]/);
@@ -209,7 +266,7 @@
           if (parserConf.singleLineStringErrors)
             return ERRORCLASS;
           else
-            state.tokenize = tokenBase;
+            state.tokenize = tokenOuter;
         }
         return OUTCLASS;
       }
@@ -264,14 +321,16 @@
       if (current == ":" && !state.lambda && top(state).type == "py")
         pushPyScope(state);
 
-      var delimiter_index = current.length == 1 ? "[({".indexOf(current) : -1;
-      if (delimiter_index != -1)
-        pushBracketScope(stream, state, "])}".slice(delimiter_index, delimiter_index+1));
+      if (current.length == 1 && !/string|comment/.test(style)) {
+        var delimiter_index = "[({".indexOf(current);
+        if (delimiter_index != -1)
+          pushBracketScope(stream, state, "])}".slice(delimiter_index, delimiter_index+1));
 
-      delimiter_index = "])}".indexOf(current);
-      if (delimiter_index != -1) {
-        if (top(state).type == current) state.indent = state.scopes.pop().offset - hangingIndent
-        else return ERRORCLASS;
+        delimiter_index = "])}".indexOf(current);
+        if (delimiter_index != -1) {
+          if (top(state).type == current) state.indent = state.scopes.pop().offset - hangingIndent
+          else return ERRORCLASS;
+        }
       }
       if (state.dedent > 0 && stream.eol() && top(state).type == "py") {
         if (state.scopes.length > 1) state.scopes.pop();
@@ -332,8 +391,8 @@
 
   CodeMirror.defineMIME("text/x-cython", {
     name: "python",
-    extra_keywords: words("by cdef cimport cpdef ctypedef enum except"+
-                          "extern gil include nogil property public"+
+    extra_keywords: words("by cdef cimport cpdef ctypedef enum except "+
+                          "extern gil include nogil property public "+
                           "readonly struct union DEF IF ELIF ELSE")
   });
 