2015-12-17 12:13:12 +09:00
|
|
|
// Copyright 2015 The Gogs Authors. All rights reserved.
|
2020-07-01 06:34:03 +09:00
|
|
|
// Copyright 2020 The Gitea Authors. All rights reserved.
|
2015-12-17 12:13:12 +09:00
|
|
|
// Use of this source code is governed by a MIT-style
|
|
|
|
// license that can be found in the LICENSE file.
|
|
|
|
|
2016-02-01 01:19:02 +09:00
|
|
|
package highlight
|
2015-12-17 12:13:12 +09:00
|
|
|
|
|
|
|
import (
|
2020-07-01 06:34:03 +09:00
|
|
|
"bufio"
|
|
|
|
"bytes"
|
2021-06-17 23:55:16 +09:00
|
|
|
"fmt"
|
2020-09-03 05:19:42 +09:00
|
|
|
gohtml "html"
|
2020-07-01 06:34:03 +09:00
|
|
|
"path/filepath"
|
2015-12-17 12:13:12 +09:00
|
|
|
"strings"
|
2020-07-01 06:34:03 +09:00
|
|
|
"sync"
|
2015-12-18 12:31:34 +09:00
|
|
|
|
2020-11-14 06:13:41 +09:00
|
|
|
"code.gitea.io/gitea/modules/analyze"
|
2020-07-01 06:34:03 +09:00
|
|
|
"code.gitea.io/gitea/modules/log"
|
2016-11-11 01:24:48 +09:00
|
|
|
"code.gitea.io/gitea/modules/setting"
|
2021-11-17 21:34:35 +09:00
|
|
|
|
2021-05-13 18:31:23 +09:00
|
|
|
"github.com/alecthomas/chroma"
|
2020-07-01 06:34:03 +09:00
|
|
|
"github.com/alecthomas/chroma/formatters/html"
|
|
|
|
"github.com/alecthomas/chroma/lexers"
|
|
|
|
"github.com/alecthomas/chroma/styles"
|
2021-06-17 23:55:16 +09:00
|
|
|
lru "github.com/hashicorp/golang-lru"
|
2015-12-17 12:13:12 +09:00
|
|
|
)
|
|
|
|
|
2020-07-01 06:34:03 +09:00
|
|
|
// sizeLimit is the maximum input size (in bytes) that will be syntax
// highlighted; larger inputs are returned as plain/escaped text for
// performance reasons (see Code and File below).
const sizeLimit = 1000000
|
|
|
|
|
2015-12-17 12:13:12 +09:00
|
|
|
var (
	// For custom user mapping: file extension -> lexer name, loaded by
	// NewContext from the [highlight.mapping] config section.
	highlightMapping = map[string]string{}

	// once guards the one-time initialization performed by NewContext.
	once sync.Once

	// cache is an LRU cache from file name to the chroma lexer chosen
	// for it (populated by Code).
	cache *lru.TwoQueueCache
)
|
|
|
|
|
|
|
|
// NewContext loads custom highlight map from local config
|
|
|
|
func NewContext() {
|
|
|
|
once.Do(func() {
|
|
|
|
keys := setting.Cfg.Section("highlight.mapping").Keys()
|
|
|
|
for i := range keys {
|
|
|
|
highlightMapping[keys[i].Name()] = keys[i].Value()
|
|
|
|
}
|
2021-06-17 23:55:16 +09:00
|
|
|
|
|
|
|
// The size 512 is simply a conservative rule of thumb
|
2021-06-25 03:37:07 +09:00
|
|
|
c, err := lru.New2Q(512)
|
2021-06-17 23:55:16 +09:00
|
|
|
if err != nil {
|
|
|
|
panic(fmt.Sprintf("failed to initialize LRU cache for highlighter: %s", err))
|
|
|
|
}
|
|
|
|
cache = c
|
2020-07-01 06:34:03 +09:00
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
// Code returns a HTML version of code string with chroma syntax highlighting classes
|
2021-11-18 05:37:00 +09:00
|
|
|
func Code(fileName, language, code string) string {
|
2020-07-01 06:34:03 +09:00
|
|
|
NewContext()
|
2015-12-17 12:13:12 +09:00
|
|
|
|
2020-07-11 14:43:12 +09:00
|
|
|
// diff view newline will be passed as empty, change to literal \n so it can be copied
|
|
|
|
// preserve literal newline in blame view
|
|
|
|
if code == "" || code == "\n" {
|
2020-07-09 06:02:38 +09:00
|
|
|
return "\n"
|
|
|
|
}
|
2020-07-11 14:43:12 +09:00
|
|
|
|
2020-07-01 06:34:03 +09:00
|
|
|
if len(code) > sizeLimit {
|
|
|
|
return code
|
|
|
|
}
|
|
|
|
|
2021-05-13 18:31:23 +09:00
|
|
|
var lexer chroma.Lexer
|
2021-11-18 05:37:00 +09:00
|
|
|
|
|
|
|
if len(language) > 0 {
|
|
|
|
lexer = lexers.Get(language)
|
|
|
|
|
|
|
|
if lexer == nil {
|
|
|
|
// Attempt stripping off the '?'
|
|
|
|
if idx := strings.IndexByte(language, '?'); idx > 0 {
|
|
|
|
lexer = lexers.Get(language[:idx])
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if lexer == nil {
|
|
|
|
if val, ok := highlightMapping[filepath.Ext(fileName)]; ok {
|
2022-01-21 02:46:10 +09:00
|
|
|
// use mapped value to find lexer
|
2021-11-18 05:37:00 +09:00
|
|
|
lexer = lexers.Get(val)
|
|
|
|
}
|
2015-12-17 12:13:12 +09:00
|
|
|
}
|
2015-12-18 12:31:34 +09:00
|
|
|
|
2021-06-17 23:55:16 +09:00
|
|
|
if lexer == nil {
|
|
|
|
if l, ok := cache.Get(fileName); ok {
|
|
|
|
lexer = l.(chroma.Lexer)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-07-01 06:34:03 +09:00
|
|
|
if lexer == nil {
|
2021-05-13 18:31:23 +09:00
|
|
|
lexer = lexers.Match(fileName)
|
|
|
|
if lexer == nil {
|
|
|
|
lexer = lexers.Fallback
|
|
|
|
}
|
2021-06-17 23:55:16 +09:00
|
|
|
cache.Add(fileName, lexer)
|
2017-06-10 09:39:16 +09:00
|
|
|
}
|
2021-09-24 22:29:32 +09:00
|
|
|
return CodeFromLexer(lexer, code)
|
|
|
|
}
|
|
|
|
|
|
|
|
// CodeFromLexer returns a HTML version of code string with chroma syntax highlighting classes
|
|
|
|
func CodeFromLexer(lexer chroma.Lexer, code string) string {
|
|
|
|
formatter := html.New(html.WithClasses(true),
|
|
|
|
html.WithLineNumbers(false),
|
|
|
|
html.PreventSurroundingPre(true),
|
|
|
|
)
|
|
|
|
|
|
|
|
htmlbuf := bytes.Buffer{}
|
|
|
|
htmlw := bufio.NewWriter(&htmlbuf)
|
2015-12-17 12:13:12 +09:00
|
|
|
|
2020-07-11 14:43:12 +09:00
|
|
|
iterator, err := lexer.Tokenise(nil, string(code))
|
2020-07-01 06:34:03 +09:00
|
|
|
if err != nil {
|
|
|
|
log.Error("Can't tokenize code: %v", err)
|
|
|
|
return code
|
2015-12-18 12:31:34 +09:00
|
|
|
}
|
2020-07-01 06:34:03 +09:00
|
|
|
// style not used for live site but need to pass something
|
|
|
|
err = formatter.Format(htmlw, styles.GitHub, iterator)
|
|
|
|
if err != nil {
|
|
|
|
log.Error("Can't format code: %v", err)
|
|
|
|
return code
|
|
|
|
}
|
|
|
|
|
|
|
|
htmlw.Flush()
|
2020-07-11 14:43:12 +09:00
|
|
|
// Chroma will add newlines for certain lexers in order to highlight them properly
|
|
|
|
// Once highlighted, strip them here so they don't cause copy/paste trouble in HTML output
|
|
|
|
return strings.TrimSuffix(htmlbuf.String(), "\n")
|
2015-12-18 12:31:34 +09:00
|
|
|
}
|
|
|
|
|
2021-08-24 06:23:39 +09:00
|
|
|
// File returns a slice of chroma syntax highlighted lines of code
|
2021-11-18 05:37:00 +09:00
|
|
|
func File(numLines int, fileName, language string, code []byte) []string {
|
2020-07-01 06:34:03 +09:00
|
|
|
NewContext()
|
|
|
|
|
|
|
|
if len(code) > sizeLimit {
|
|
|
|
return plainText(string(code), numLines)
|
|
|
|
}
|
|
|
|
formatter := html.New(html.WithClasses(true),
|
|
|
|
html.WithLineNumbers(false),
|
|
|
|
html.PreventSurroundingPre(true),
|
|
|
|
)
|
|
|
|
|
|
|
|
if formatter == nil {
|
|
|
|
log.Error("Couldn't create chroma formatter")
|
|
|
|
return plainText(string(code), numLines)
|
|
|
|
}
|
|
|
|
|
|
|
|
htmlbuf := bytes.Buffer{}
|
|
|
|
htmlw := bufio.NewWriter(&htmlbuf)
|
|
|
|
|
2021-05-13 18:31:23 +09:00
|
|
|
var lexer chroma.Lexer
|
2021-11-18 05:37:00 +09:00
|
|
|
|
|
|
|
// provided language overrides everything
|
|
|
|
if len(language) > 0 {
|
|
|
|
lexer = lexers.Get(language)
|
|
|
|
}
|
|
|
|
|
|
|
|
if lexer == nil {
|
|
|
|
if val, ok := highlightMapping[filepath.Ext(fileName)]; ok {
|
|
|
|
lexer = lexers.Get(val)
|
|
|
|
}
|
2020-07-01 06:34:03 +09:00
|
|
|
}
|
|
|
|
|
|
|
|
if lexer == nil {
|
2021-05-13 18:31:23 +09:00
|
|
|
language := analyze.GetCodeLanguage(fileName, code)
|
|
|
|
|
|
|
|
lexer = lexers.Get(language)
|
2020-07-01 06:34:03 +09:00
|
|
|
if lexer == nil {
|
2021-05-13 18:31:23 +09:00
|
|
|
lexer = lexers.Match(fileName)
|
|
|
|
if lexer == nil {
|
|
|
|
lexer = lexers.Fallback
|
|
|
|
}
|
2020-07-01 06:34:03 +09:00
|
|
|
}
|
2015-12-17 12:13:12 +09:00
|
|
|
}
|
|
|
|
|
2020-07-01 06:34:03 +09:00
|
|
|
iterator, err := lexer.Tokenise(nil, string(code))
|
|
|
|
if err != nil {
|
|
|
|
log.Error("Can't tokenize code: %v", err)
|
|
|
|
return plainText(string(code), numLines)
|
2015-12-17 12:13:12 +09:00
|
|
|
}
|
|
|
|
|
2020-07-01 06:34:03 +09:00
|
|
|
err = formatter.Format(htmlw, styles.GitHub, iterator)
|
|
|
|
if err != nil {
|
|
|
|
log.Error("Can't format code: %v", err)
|
|
|
|
return plainText(string(code), numLines)
|
2015-12-17 12:13:12 +09:00
|
|
|
}
|
|
|
|
|
2020-07-01 06:34:03 +09:00
|
|
|
htmlw.Flush()
|
2021-08-14 08:16:56 +09:00
|
|
|
finalNewLine := false
|
|
|
|
if len(code) > 0 {
|
|
|
|
finalNewLine = code[len(code)-1] == '\n'
|
|
|
|
}
|
|
|
|
|
2021-08-24 06:23:39 +09:00
|
|
|
m := make([]string, 0, numLines)
|
|
|
|
for _, v := range strings.SplitN(htmlbuf.String(), "\n", numLines) {
|
2020-07-09 06:02:38 +09:00
|
|
|
content := string(v)
|
2022-01-21 02:46:10 +09:00
|
|
|
// need to keep lines that are only \n so copy/paste works properly in browser
|
2020-07-09 06:02:38 +09:00
|
|
|
if content == "" {
|
|
|
|
content = "\n"
|
2021-08-14 08:16:56 +09:00
|
|
|
} else if content == `</span><span class="w">` {
|
|
|
|
content += "\n</span>"
|
2022-06-10 22:45:28 +09:00
|
|
|
} else if content == `</span></span><span class="line"><span class="cl">` {
|
|
|
|
content += "\n"
|
2020-07-09 06:02:38 +09:00
|
|
|
}
|
2021-08-14 08:16:56 +09:00
|
|
|
content = strings.TrimSuffix(content, `<span class="w">`)
|
|
|
|
content = strings.TrimPrefix(content, `</span>`)
|
2021-08-24 06:23:39 +09:00
|
|
|
m = append(m, content)
|
2015-12-18 12:31:34 +09:00
|
|
|
}
|
2021-08-14 08:16:56 +09:00
|
|
|
if finalNewLine {
|
2021-08-24 06:23:39 +09:00
|
|
|
m = append(m, "<span class=\"w\">\n</span>")
|
2021-08-14 08:16:56 +09:00
|
|
|
}
|
|
|
|
|
2020-07-01 06:34:03 +09:00
|
|
|
return m
|
|
|
|
}
|
2015-12-18 12:31:34 +09:00
|
|
|
|
2020-07-01 06:34:03 +09:00
|
|
|
// plainText returns the unhighlighted lines of code, HTML-escaped, capped
// at numLines entries (the last entry keeps any remaining newlines, per
// strings.SplitN semantics).
func plainText(code string, numLines int) []string {
	m := make([]string, 0, numLines)
	// code is already a string; SplitN yields strings, so no conversions.
	for _, content := range strings.SplitN(code, "\n", numLines) {
		// need to keep lines that are only \n so copy/paste works properly in browser
		if content == "" {
			content = "\n"
		}
		m = append(m, gohtml.EscapeString(content))
	}
	return m
}
|