mirror of
https://github.com/go-gitea/gitea
synced 2024-11-01 07:44:25 +00:00
af7ffaa279
* Server-side syntax hilighting for all code This PR does a few things: * Remove all traces of highlight.js * Use chroma library to provide fast syntax hilighting directly on the server * Provide syntax hilighting for diffs * Re-style both unified and split diffs views * Add custom syntax hilighting styling for both regular and arc-green Fixes #7729 Fixes #10157 Fixes #11825 Fixes #7728 Fixes #3872 Fixes #3682 And perhaps gets closer to #9553 * fix line marker * fix repo search * Fix single line select * properly load settings * npm uninstall highlight.js * review suggestion * code review * forgot to call function * fix test * Apply suggestions from code review suggestions from @silverwind thanks Co-authored-by: silverwind <me@silverwind.io> * code review * copy/paste error * Use const for highlight size limit * Update web_src/less/_repository.less Co-authored-by: Lauris BH <lauris@nix.lv> * update size limit to 1MB and other styling tweaks * fix highlighting for certain diff sections * fix test * add worker back as suggested Co-authored-by: silverwind <me@silverwind.io> Co-authored-by: Lauris BH <lauris@nix.lv>
77 lines
1.7 KiB
Go
Vendored
77 lines
1.7 KiB
Go
Vendored
package chroma
|
|
|
|
import "strings"
|
|
|
|
// An Iterator across tokens.
//
// EOF will be returned at the end of the Token stream.
//
// If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
type Iterator func() Token
|
|
|
|
// Tokens consumes all tokens from the iterator and returns them as a slice.
|
|
func (i Iterator) Tokens() []Token {
|
|
var out []Token
|
|
for t := i(); t != EOF; t = i() {
|
|
out = append(out, t)
|
|
}
|
|
return out
|
|
}
|
|
|
|
// Concaterator concatenates tokens from a series of iterators.
|
|
func Concaterator(iterators ...Iterator) Iterator {
|
|
return func() Token {
|
|
for len(iterators) > 0 {
|
|
t := iterators[0]()
|
|
if t != EOF {
|
|
return t
|
|
}
|
|
iterators = iterators[1:]
|
|
}
|
|
return EOF
|
|
}
|
|
}
|
|
|
|
// Literator converts a sequence of literal Tokens into an Iterator.
|
|
func Literator(tokens ...Token) Iterator {
|
|
return func() Token {
|
|
if len(tokens) == 0 {
|
|
return EOF
|
|
}
|
|
token := tokens[0]
|
|
tokens = tokens[1:]
|
|
return token
|
|
}
|
|
}
|
|
|
|
// SplitTokensIntoLines splits tokens containing newlines in two.
|
|
func SplitTokensIntoLines(tokens []Token) (out [][]Token) {
|
|
var line []Token // nolint: prealloc
|
|
for _, token := range tokens {
|
|
for strings.Contains(token.Value, "\n") {
|
|
parts := strings.SplitAfterN(token.Value, "\n", 2)
|
|
// Token becomes the tail.
|
|
token.Value = parts[1]
|
|
|
|
// Append the head to the line and flush the line.
|
|
clone := token.Clone()
|
|
clone.Value = parts[0]
|
|
line = append(line, clone)
|
|
out = append(out, line)
|
|
line = nil
|
|
}
|
|
line = append(line, token)
|
|
}
|
|
if len(line) > 0 {
|
|
out = append(out, line)
|
|
}
|
|
// Strip empty trailing token line.
|
|
if len(out) > 0 {
|
|
last := out[len(out)-1]
|
|
if len(last) == 1 && last[0].Value == "" {
|
|
out = out[:len(out)-1]
|
|
}
|
|
}
|
|
return
|
|
}
|