mirror of https://github.com/taigrr/wtf (synced 2025-01-18 04:03:14 -08:00)

Update dependencies to latest versions
vendor/github.com/alecthomas/chroma/README.md (generated, vendored): 27 lines changed

@@ -29,7 +29,32 @@ translators for Pygments lexers and styles.
 
 ## Supported languages
 
-ABNF, ANTLR, APL, ActionScript, ActionScript 3, Ada, Angular2, ApacheConf, AppleScript, Awk, BNF, Ballerina, Base Makefile, Bash, Batchfile, BlitzBasic, Brainfuck, C, C#, C++, CFEngine3, CMake, COBOL, CSS, Cap'n Proto, Ceylon, ChaiScript, Cheetah, Clojure, CoffeeScript, Common Lisp, Coq, Crystal, Cython, DTD, Dart, Diff, Django/Jinja, Docker, EBNF, Elixir, Elm, EmacsLisp, Erlang, FSharp, Factor, Fish, Forth, Fortran, GAS, GDScript, GLSL, Genshi, Genshi HTML, Genshi Text, Gnuplot, Go, Go HTML Template, Go Text Template, Groovy, HTML, HTTP, Handlebars, Haskell, Haxe, Hexdump, Hy, INI, Idris, Io, JSON, JSX, Java, JavaScript, Julia, Kotlin, LLVM, Lighttpd configuration file, Lua, Mako, Mason, Mathematica, MiniZinc, Modula-2, MorrowindScript, MySQL, Myghty, NASM, Newspeak, Nginx configuration file, Nim, Nix, OCaml, Objective-C, Octave, Org Mode, PHP, PL/pgSQL, POVRay, PacmanConf, Perl, Pig, PkgConfig, PostScript, PostgreSQL SQL dialect, PowerShell, Prolog, Protocol Buffer, Puppet, Python, Python 3, QBasic, R, Racket, Ragel, Rexx, Ruby, Rust, SCSS, SPARQL, SQL, Sass, Scala, Scheme, Scilab, Smalltalk, Smarty, Snobol, Solidity, SquidConf, Swift, TASM, TOML, Tcl, Tcsh, TeX, Termcap, Terminfo, Terraform, Thrift, Transact-SQL, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData, VHDL, VimL, WDTE, XML, Xorg, YAML, cfstatement, markdown, reStructuredText, reg, systemverilog, verilog
+Prefix | Language
+:----: | --------
+A | ABNF, ActionScript, ActionScript 3, Ada, Angular2, ANTLR, ApacheConf, APL, AppleScript, Awk
+B | Ballerina, Base Makefile, Bash, Batchfile, BlitzBasic, BNF, Brainfuck
+C | C, C#, C++, Cassandra CQL, CFEngine3, cfstatement/ColdFusion, CMake, COBOL, CSS, Cap'n Proto, Ceylon, ChaiScript, Cheetah, Clojure, CoffeeScript, Common Lisp, Coq, Crystal, Cython
+D | Dart, Diff, Django/Jinja, Docker, DTD
+E | EBNF, Elixir, Elm, EmacsLisp, Erlang
+F | Factor, Fish, Forth, Fortran, FSharp
+G | GAS, GDScript, GLSL, Genshi, Genshi HTML, Genshi Text, Gnuplot, Go, Go HTML Template, Go Text Template, Groovy
+H | Handlebars, Haskell, Haxe, Hexdump, HTML, HTTP, Hy
+I | Idris, INI, Io
+J | Java, JavaScript, JSON, Jsx, Julia, Jungle
+K | Kotlin
+L | Lighttpd configuration file, LLVM, Lua
+M | Mako, Markdown, Mason, Mathematica, MiniZinc, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL
+N | NASM, Newspeak, Nginx configuration file, Nim, Nix
+O | Objective-C, OCaml, Octave, OpenSCAD, Org Mode
+P | PacmanConf, Perl, PHP, Pig, PkgConfig, Plaintext, PL/pgSQL, PostgreSQL SQL dialect, PostScript, POVRay, PowerShell, Prolog, Protocol Buffer, Puppet, Python, Python 3
+Q | QBasic
+R | R, Racket, Ragel, reg, reStructuredText, Rexx, Ruby, Rust
+S | Sass, Scala, Scheme, Scilab, SCSS, Smalltalk, Smarty, Snobol, Solidity, SPARQL, SQL, SquidConf, Swift, systemd, Systemverilog
+T | TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData
+V | verilog, VHDL, VimL
+W | WDTE
+X | XML, Xorg
+Y | YAML
 
 _I will attempt to keep this section up to date, but an authoritative list can be
 displayed with `chroma --list`._

vendor/github.com/alecthomas/chroma/coalesce.go (generated, vendored): 10 lines changed

@@ -6,17 +6,17 @@ func Coalesce(lexer Lexer) Lexer { return &coalescer{lexer} }
 type coalescer struct{ Lexer }
 
 func (d *coalescer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
-	var prev *Token
+	var prev Token
 	it, err := d.Lexer.Tokenise(options, text)
 	if err != nil {
 		return nil, err
 	}
-	return func() *Token {
-		for token := it(); token != nil; token = it() {
+	return func() Token {
+		for token := it(); token != EOF; token = it() {
 			if len(token.Value) == 0 {
 				continue
 			}
-			if prev == nil {
+			if prev == EOF {
 				prev = token
 			} else {
 				if prev.Type == token.Type && len(prev.Value) < 8192 {
@@ -29,7 +29,7 @@ func (d *coalescer) Tokenise(options *TokeniseOptions, text string) (Iterator, e
 			}
 		}
 		out := prev
-		prev = nil
+		prev = EOF
 		return out
 	}, nil
 }

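The change running through this whole update is chroma's switch from `*Token` with a `nil` end-of-stream sentinel to a value `Token` whose zero value, `EOF`, terminates the stream. A minimal standalone sketch of the consumption pattern (the token type values here are made up for illustration):

    package main

    import "fmt"

    // Token mirrors the value type chroma switched to; the zero value
    // doubles as the end-of-stream sentinel, replacing *Token/nil.
    type Token struct {
        Type  int
        Value string
    }

    // EOF is the zero Token, comparable with == because both fields are.
    var EOF Token

    // Iterator yields successive tokens, then EOF forever after.
    type Iterator func() Token

    func main() {
        tokens := []Token{{1, "hello"}, {1, " "}, {2, "world"}}
        it := Iterator(func() Token {
            if len(tokens) == 0 {
                return EOF
            }
            t := tokens[0]
            tokens = tokens[1:]
            return t
        })
        // Drain until the sentinel, exactly as the updated coalescer does.
        for t := it(); t != EOF; t = it() {
            fmt.Printf("%d: %q\n", t.Type, t.Value)
        }
    }
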
vendor/github.com/alecthomas/chroma/delegate.go (generated, vendored): 34 lines changed

@@ -31,7 +31,7 @@ func (d *delegatingLexer) Config() *Config {
 // An insertion is the character range where language tokens should be inserted.
 type insertion struct {
 	start, end int
-	tokens     []*Token
+	tokens     []Token
 }
 
 func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
@@ -44,15 +44,15 @@ func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Itera
 	insertions := []*insertion{}
 	var insert *insertion
 	offset := 0
-	var last *Token
+	var last Token
 	for _, t := range tokens {
 		if t.Type == Other {
-			if last != nil && insert != nil && last.Type != Other {
+			if last != EOF && insert != nil && last.Type != Other {
 				insert.end = offset
 			}
 			others.WriteString(t.Value)
 		} else {
-			if last == nil || last.Type == Other {
+			if last == EOF || last.Type == Other {
 				insert = &insertion{start: offset}
 				insertions = append(insertions, insert)
 			}
@@ -73,12 +73,12 @@ func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Itera
 	}
 
 	// Interleave the two sets of tokens.
-	out := []*Token{}
+	var out []Token
 	offset = 0 // Offset into text.
 	tokenIndex := 0
-	nextToken := func() *Token {
+	nextToken := func() Token {
 		if tokenIndex >= len(rootTokens) {
-			return nil
+			return EOF
 		}
 		t := rootTokens[tokenIndex]
 		tokenIndex++
@@ -95,18 +95,18 @@ func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Itera
 	}
 	t := nextToken()
 	i := nextInsertion()
-	for t != nil || i != nil {
+	for t != EOF || i != nil {
 		// fmt.Printf("%d->%d:%q %d->%d:%q\n", offset, offset+len(t.Value), t.Value, i.start, i.end, Stringify(i.tokens...))
-		if t == nil || (i != nil && i.start < offset+len(t.Value)) {
-			var l *Token
+		if t == EOF || (i != nil && i.start < offset+len(t.Value)) {
+			var l Token
 			l, t = splitToken(t, i.start-offset)
-			if l != nil {
+			if l != EOF {
 				out = append(out, l)
 				offset += len(l.Value)
 			}
 			out = append(out, i.tokens...)
 			offset += i.end - i.start
-			if t == nil {
+			if t == EOF {
 				t = nextToken()
 			}
 			i = nextInsertion()
@@ -119,15 +119,15 @@ func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Itera
 	return Literator(out...), nil
 }
 
-func splitToken(t *Token, offset int) (l *Token, r *Token) {
-	if t == nil {
-		return nil, nil
+func splitToken(t Token, offset int) (l Token, r Token) {
+	if t == EOF {
+		return EOF, EOF
 	}
 	if offset == 0 {
-		return nil, t
+		return EOF, t
 	}
 	if offset == len(t.Value) {
-		return t, nil
+		return t, EOF
 	}
 	l = t.Clone()
 	r = t.Clone()

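With value tokens, splitToken signals "no token on this side" with EOF rather than nil. Continuing the sketch above, a re-implementation of its boundary cases (not the vendored code, which clips the halves via Clone):

    // split divides t at offset; EOF marks an empty side.
    func split(t Token, offset int) (l, r Token) {
        if t == EOF {
            return EOF, EOF
        }
        if offset == 0 {
            return EOF, t // nothing to the left
        }
        if offset == len(t.Value) {
            return t, EOF // nothing to the right
        }
        l, r = t, t // value semantics: plain copies, no pointer clones
        l.Value, r.Value = t.Value[:offset], t.Value[offset:]
        return l, r
    }
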
vendor/github.com/alecthomas/chroma/formatters/api.go (generated, vendored): 2 lines changed

@@ -11,7 +11,7 @@ import (
 var (
 	// NoOp formatter.
 	NoOp = Register("noop", chroma.FormatterFunc(func(w io.Writer, s *chroma.Style, iterator chroma.Iterator) error {
-		for t := iterator(); t != nil; t = iterator() {
+		for t := iterator(); t != chroma.EOF; t = iterator() {
			if _, err := io.WriteString(w, t.Value); err != nil {
				return err
			}

vendor/github.com/alecthomas/chroma/formatters/html/html.go (generated, vendored): 64 lines changed

@@ -25,6 +25,9 @@ func WithClasses() Option { return func(f *Formatter) { f.Classes = true } }
 // TabWidth sets the number of characters for a tab. Defaults to 8.
 func TabWidth(width int) Option { return func(f *Formatter) { f.tabWidth = width } }
 
+// PreventSurroundingPre prevents the surrounding pre tags around the generated code
+func PreventSurroundingPre() Option { return func(f *Formatter) { f.preventSurroundingPre = true } }
+
 // WithLineNumbers formats output with line numbers.
 func WithLineNumbers() Option {
 	return func(f *Formatter) {
@@ -70,14 +73,15 @@ func New(options ...Option) *Formatter {
 
 // Formatter that generates HTML.
 type Formatter struct {
-	standalone         bool
-	prefix             string
-	Classes            bool // Exported field to detect when classes are being used
-	tabWidth           int
-	lineNumbers        bool
-	lineNumbersInTable bool
-	highlightRanges    highlightRanges
-	baseLineNumber     int
+	standalone            bool
+	prefix                string
+	Classes               bool // Exported field to detect when classes are being used
+	preventSurroundingPre bool
+	tabWidth              int
+	lineNumbers           bool
+	lineNumbersInTable    bool
+	highlightRanges       highlightRanges
+	baseLineNumber        int
 }
 
 type highlightRanges [][2]int
@@ -125,7 +129,7 @@ func (f *Formatter) restyle(style *chroma.Style) (*chroma.Style, error) {
 // We deliberately don't use html/template here because it is two orders of magnitude slower (benchmarked).
 //
 // OTOH we need to be super careful about correct escaping...
-func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []*chroma.Token) (err error) { // nolint: gocyclo
+func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.Token) (err error) { // nolint: gocyclo
 	style, err = f.restyle(style)
 	if err != nil {
 		return err
@@ -149,7 +153,7 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []*chroma
 
 	wrapInTable := f.lineNumbers && f.lineNumbersInTable
 
-	lines := splitTokensIntoLines(tokens)
+	lines := chroma.SplitTokensIntoLines(tokens)
 	lineDigits := len(fmt.Sprintf("%d", len(lines)))
 	highlightIndex := 0
 
@@ -158,7 +162,9 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []*chroma
 		fmt.Fprintf(w, "<div%s>\n", f.styleAttr(css, chroma.Background))
 		fmt.Fprintf(w, "<table%s><tr>", f.styleAttr(css, chroma.LineTable))
 		fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD))
-		fmt.Fprintf(w, "<pre%s>", f.styleAttr(css, chroma.Background))
+		if !f.preventSurroundingPre {
+			fmt.Fprintf(w, "<pre%s>", f.styleAttr(css, chroma.Background))
+		}
 		for index := range lines {
 			line := f.baseLineNumber + index
 			highlight, next := f.shouldHighlight(highlightIndex, line)
@@ -175,11 +181,16 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []*chroma
 				fmt.Fprintf(w, "</span>")
 			}
 		}
-		fmt.Fprint(w, "</pre></td>\n")
+		if !f.preventSurroundingPre {
+			fmt.Fprint(w, "</pre>")
+		}
+		fmt.Fprint(w, "</td>\n")
 		fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD))
 	}
 
-	fmt.Fprintf(w, "<pre%s>", f.styleAttr(css, chroma.Background))
+	if !f.preventSurroundingPre {
+		fmt.Fprintf(w, "<pre%s>", f.styleAttr(css, chroma.Background))
+	}
 	highlightIndex = 0
 	for index, tokens := range lines {
 		// 1-based line number.
@@ -209,7 +220,9 @@ func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []*chroma
 		}
 	}
 
-	fmt.Fprint(w, "</pre>")
+	if !f.preventSurroundingPre {
+		fmt.Fprint(w, "</pre>")
+	}
 
 	if wrapInTable {
 		fmt.Fprint(w, "</td></tr></table>\n")
@@ -377,26 +390,3 @@ func compressStyle(s string) string {
 	}
 	return strings.Join(out, ";")
 }
-
-func splitTokensIntoLines(tokens []*chroma.Token) (out [][]*chroma.Token) {
-	line := []*chroma.Token{}
-	for _, token := range tokens {
-		for strings.Contains(token.Value, "\n") {
-			parts := strings.SplitAfterN(token.Value, "\n", 2)
-			// Token becomes the tail.
-			token.Value = parts[1]
-
-			// Append the head to the line and flush the line.
-			clone := token.Clone()
-			clone.Value = parts[0]
-			line = append(line, clone)
-			out = append(out, line)
-			line = nil
-		}
-		line = append(line, token)
-	}
-	if len(line) > 0 {
-		out = append(out, line)
-	}
-	return
-}

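The new PreventSurroundingPre option suppresses the wrapping <pre> tags so the caller can supply its own container element. A usage sketch against the vendored APIs (styles.Fallback is just a convenient default style):

    package main

    import (
        "os"

        "github.com/alecthomas/chroma/formatters/html"
        "github.com/alecthomas/chroma/lexers"
        "github.com/alecthomas/chroma/styles"
    )

    func main() {
        // Emit highlighted spans only; the surrounding <pre> is left to us.
        formatter := html.New(html.PreventSurroundingPre())
        it, err := lexers.Get("go").Tokenise(nil, "package main\n")
        if err != nil {
            panic(err)
        }
        if err := formatter.Format(os.Stdout, styles.Fallback, it); err != nil {
            panic(err)
        }
    }
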
vendor/github.com/alecthomas/chroma/formatters/json.go (generated, vendored): 2 lines changed

@@ -12,7 +12,7 @@ import (
 var JSON = Register("json", chroma.FormatterFunc(func(w io.Writer, s *chroma.Style, it chroma.Iterator) error {
 	fmt.Fprintln(w, "[")
 	i := 0
-	for t := it(); t != nil; t = it() {
+	for t := it(); t != chroma.EOF; t = it() {
 		if i > 0 {
 			fmt.Fprintln(w, ",")
 		}

vendor/github.com/alecthomas/chroma/formatters/tokens.go (generated, vendored): 2 lines changed

@@ -9,7 +9,7 @@ import (
 
 // Tokens formatter outputs the raw token structures.
 var Tokens = Register("tokens", chroma.FormatterFunc(func(w io.Writer, s *chroma.Style, it chroma.Iterator) error {
-	for t := it(); t != nil; t = it() {
+	for t := it(); t != chroma.EOF; t = it() {
 		if _, err := fmt.Fprintln(w, t.GoString()); err != nil {
 			return err
 		}

vendor/github.com/alecthomas/chroma/formatters/tty_indexed.go (generated, vendored): 2 lines changed

@@ -216,7 +216,7 @@ func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma
 		}
 	}()
 	theme := styleToEscapeSequence(c.table, style)
-	for token := it(); token != nil; token = it() {
+	for token := it(); token != chroma.EOF; token = it() {
 		// TODO: Cache token lookups?
 		clr, ok := theme[token.Type]
 		if !ok {

vendor/github.com/alecthomas/chroma/formatters/tty_truecolour.go (generated, vendored): 2 lines changed

@@ -11,7 +11,7 @@ import (
 var TTY16m = Register("terminal16m", chroma.FormatterFunc(trueColourFormatter))
 
 func trueColourFormatter(w io.Writer, style *chroma.Style, it chroma.Iterator) error {
-	for token := it(); token != nil; token = it() {
+	for token := it(); token != chroma.EOF; token = it() {
 		entry := style.Get(token.Type)
 		if !entry.IsZero() {
 			out := ""

vendor/github.com/alecthomas/chroma/go.mod (generated, vendored, new file): 14 lines added

@@ -0,0 +1,14 @@
+module github.com/alecthomas/chroma
+
+require (
+	github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38
+	github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 // indirect
+	github.com/alecthomas/kong v0.1.15
+	github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 // indirect
+	github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964
+	github.com/dlclark/regexp2 v1.1.6
+	github.com/mattn/go-colorable v0.0.9
+	github.com/mattn/go-isatty v0.0.4
+	github.com/sergi/go-diff v1.0.0 // indirect
+	golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35 // indirect
+)

vendor/github.com/alecthomas/chroma/go.sum (generated, vendored, new file): 26 lines added

@@ -0,0 +1,26 @@
+github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
+github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
+github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
+github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
+github.com/alecthomas/kong v0.1.15 h1:IWBg+KrLvoHBicD50OzMI8fKjrtAa1okMR9g38HVM/s=
+github.com/alecthomas/kong v0.1.15/go.mod h1:0m2VYms8rH0qbCqVB2gvGHk74bqLIq0HXjCs5bNbNQU=
+github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 h1:p9Sln00KOTlrYkxI1zYWl1QLnEqAqEARBEYa8FQnQcY=
+github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
+github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
+github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dlclark/regexp2 v1.1.6 h1:CqB4MjHw0MFCDj+PHHjiESmHX+N7t0tJzKvC6M97BRg=
+github.com/dlclark/regexp2 v1.1.6/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
+github.com/mattn/go-colorable v0.0.9 h1:UVL0vNpWh04HeJXV0KLcaT7r06gOH2l4OW6ddYRUIY4=
+github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
+github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs=
+github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
+github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
+github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35 h1:YAFjXN64LMvktoUZH9zgY4lGc/msGN7HQfoSuKCgaDU=
+golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=

vendor/github.com/alecthomas/chroma/iterator.go (generated, vendored): 52 lines changed

@@ -1,16 +1,18 @@
 package chroma
 
+import "strings"
+
 // An Iterator across tokens.
 //
 // nil will be returned at the end of the Token stream.
 //
 // If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
-type Iterator func() *Token
+type Iterator func() Token
 
 // Tokens consumes all tokens from the iterator and returns them as a slice.
-func (i Iterator) Tokens() []*Token {
-	out := []*Token{}
-	for t := i(); t != nil; t = i() {
+func (i Iterator) Tokens() []Token {
+	var out []Token
+	for t := i(); t != EOF; t = i() {
 		out = append(out, t)
 	}
 	return out
@@ -18,26 +20,56 @@ func (i Iterator) Tokens() []*Token {
 
 // Concaterator concatenates tokens from a series of iterators.
 func Concaterator(iterators ...Iterator) Iterator {
-	return func() *Token {
+	return func() Token {
 		for len(iterators) > 0 {
 			t := iterators[0]()
-			if t != nil {
+			if t != EOF {
 				return t
 			}
 			iterators = iterators[1:]
 		}
-		return nil
+		return EOF
 	}
 }
 
 // Literator converts a sequence of literal Tokens into an Iterator.
-func Literator(tokens ...*Token) Iterator {
-	return func() (out *Token) {
+func Literator(tokens ...Token) Iterator {
+	return func() Token {
 		if len(tokens) == 0 {
-			return nil
+			return EOF
 		}
 		token := tokens[0]
 		tokens = tokens[1:]
 		return token
 	}
 }
+
+func SplitTokensIntoLines(tokens []Token) (out [][]Token) {
+	var line []Token
+	for _, token := range tokens {
+		for strings.Contains(token.Value, "\n") {
+			parts := strings.SplitAfterN(token.Value, "\n", 2)
+			// Token becomes the tail.
+			token.Value = parts[1]
+
+			// Append the head to the line and flush the line.
+			clone := token.Clone()
+			clone.Value = parts[0]
+			line = append(line, clone)
+			out = append(out, line)
+			line = nil
+		}
+		line = append(line, token)
+	}
+	if len(line) > 0 {
+		out = append(out, line)
+	}
+	// Strip empty trailing token line.
+	if len(out) > 0 {
+		last := out[len(out)-1]
+		if len(last) == 1 && last[0].Value == "" {
+			out = out[:len(out)-1]
+		}
+	}
+	return
+}

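The line splitter moves out of the HTML formatter and becomes the exported chroma.SplitTokensIntoLines, gaining a rule that drops an empty trailing line, so any formatter can reuse it. A usage sketch:

    package main

    import (
        "fmt"

        "github.com/alecthomas/chroma"
        "github.com/alecthomas/chroma/lexers"
    )

    func main() {
        it, err := lexers.Get("go").Tokenise(nil, "a := 1\nb := 2\n")
        if err != nil {
            panic(err)
        }
        // Iterator.Tokens drains the iterator; SplitTokensIntoLines regroups
        // the flat slice at newline boundaries.
        for i, line := range chroma.SplitTokensIntoLines(it.Tokens()) {
            fmt.Printf("line %d: %q\n", i+1, chroma.Stringify(line...))
        }
    }
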
vendor/github.com/alecthomas/chroma/lexer.go (generated, vendored): 8 lines changed

@@ -66,12 +66,12 @@ type Token struct {
 func (t *Token) String() string   { return t.Value }
 func (t *Token) GoString() string { return fmt.Sprintf("&Token{%s, %q}", t.Type, t.Value) }
 
-func (t *Token) Clone() *Token {
-	clone := &Token{}
-	*clone = *t
-	return clone
+func (t *Token) Clone() Token {
+	return *t
 }
 
+var EOF Token
+
 type TokeniseOptions struct {
 	// State to start tokenisation in. Defaults to "root".
 	State string

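Clone collapses to a plain dereference because copying a value Token already copies everything (its fields are an int-backed type and a string), and the package-level EOF is simply the zero Token. That is what makes the == comparisons throughout this commit legal. A two-line check, assuming the chroma import:

    var t chroma.Token           // zero value
    fmt.Println(t == chroma.EOF) // true: any zero Token is the sentinel
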
vendor/github.com/alecthomas/chroma/lexers/b/ballerina.go (generated, vendored): 4 lines changed

@@ -22,8 +22,8 @@ var Ballerina = internal.Register(MustNewLexer(
 			{`(break|catch|continue|done|else|finally|foreach|forever|fork|if|lock|match|return|throw|transaction|try|while)\b`, Keyword, nil},
 			{`((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil},
 			{`@[^\W\d][\w.]*`, NameDecorator, nil},
-			{`(annotation|bind|but|endpoint|error|function|object|private|public|returns|service|type|var|with|worker)\b`, KeywordDeclaration, nil},
-			{`(boolean|byte|decimal|float|int|json|map|nil|record|string|table|xml)\b`, KeywordType, nil},
+			{`(annotation|bind|but|endpoint|error|function|object|private|public|returns|service|type|var|with|worker)\b`, KeywordDeclaration, nil},
+			{`(boolean|byte|decimal|float|int|json|map|nil|record|string|table|xml)\b`, KeywordType, nil},
 			{`(true|false|null)\b`, KeywordConstant, nil},
 			{`import(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")},
 			{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},

vendor/github.com/alecthomas/chroma/lexers/b/bash.go (generated, vendored): 2 lines changed

@@ -36,7 +36,7 @@ var Bash = internal.Register(MustNewLexer(
 			{`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil},
 			{"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil},
 			{`\A#!.+\n`, CommentPreproc, nil},
-			{`#.*\n`, CommentSingle, nil},
+			{`#.*\S`, CommentSingle, nil},
 			{`\\[\w\W]`, LiteralStringEscape, nil},
 			{`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil},
 			{`[\[\]{}()=]`, Operator, nil},

vendor/github.com/alecthomas/chroma/lexers/e/elixir.go (generated, vendored): 6 lines changed

@@ -36,9 +36,9 @@ var Elixir = internal.Register(MustNewLexer(
 			{`\\\\|\<\<|\>\>|\=\>|\(|\)|\:|\;|\,|\[|\]`, Punctuation, nil},
 			{`&\d`, NameEntity, nil},
 			{`\<|\>|\+|\-|\*|\/|\!|\^|\&`, Operator, nil},
-			{`0b[01]+`, LiteralNumberBin, nil},
-			{`0o[0-7]+`, LiteralNumberOct, nil},
-			{`0x[\da-fA-F]+`, LiteralNumberHex, nil},
+			{`0b[01](_?[01])*`, LiteralNumberBin, nil},
+			{`0o[0-7](_?[0-7])*`, LiteralNumberOct, nil},
+			{`0x[\da-fA-F](_?[\dA-Fa-f])*`, LiteralNumberHex, nil},
 			{`\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?`, LiteralNumberFloat, nil},
 			{`\d(_?\d)*`, LiteralNumberInteger, nil},
 			{`"""\s*`, LiteralStringHeredoc, Push("heredoc_double")},

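The new Elixir integer patterns admit underscore digit separators (e.g. 1_000_000 or 0xFF_EC) while still rejecting leading or doubled underscores. A quick illustrative check of the hex pattern using Go's stdlib regexp (chroma itself compiles these with regexp2, but the pattern is compatible):

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        hex := regexp.MustCompile(`^0x[\da-fA-F](_?[\dA-Fa-f])*$`)
        for _, s := range []string{"0xFF_EC", "0x1", "0x_1", "0xFF__EC"} {
            fmt.Println(s, hex.MatchString(s)) // true, true, false, false
        }
    }
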
vendor/github.com/alecthomas/chroma/lexers/g/go.go (generated, vendored): 1 line changed

@@ -42,6 +42,7 @@ var Go = internal.Register(MustNewLexer(
 			{"(`)([^`]*)(`)", ByGroups(LiteralString, Using(TypeRemappingLexer(GoTextTemplate, TypeMapping{{Other, LiteralString, nil}})), LiteralString), nil},
 			{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
 			{`(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\||<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])`, Operator, nil},
+			{`([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(NameFunction, UsingSelf("root"), Punctuation), nil},
 			{`[|^<>=!()\[\]{}.,;:]`, Punctuation, nil},
 			{`[^\W\d]\w*`, NameOther, nil},
 		},

vendor/github.com/alecthomas/chroma/lexers/h/http.go (generated, vendored): 17 lines changed

@@ -34,7 +34,7 @@ var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
 )))
 
 func httpContentBlock(groups []string, lexer Lexer) Iterator {
-	tokens := []*Token{
+	tokens := []Token{
 		{Generic, groups[0]},
 	}
 	return Literator(tokens...)
@@ -42,7 +42,7 @@ func httpContentBlock(groups []string, lexer Lexer) Iterator {
 }
 
 func httpHeaderBlock(groups []string, lexer Lexer) Iterator {
-	tokens := []*Token{
+	tokens := []Token{
 		{Name, groups[1]},
 		{Text, groups[2]},
 		{Operator, groups[3]},
@@ -54,7 +54,7 @@ func httpHeaderBlock(groups []string, lexer Lexer) Iterator {
 }
 
 func httpContinuousHeaderBlock(groups []string, lexer Lexer) Iterator {
-	tokens := []*Token{
+	tokens := []Token{
 		{Text, groups[1]},
 		{Literal, groups[2]},
 		{Text, groups[3]},
@@ -76,8 +76,8 @@ func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (
 		return nil, err
 	}
 
-	return func() *Token {
-		for token := it(); token != nil; token = it() {
+	return func() Token {
+		for token := it(); token != EOF; token = it() {
 			switch {
 			case token.Type == Name && strings.ToLower(token.Value) == "content-type":
 				{
@@ -85,6 +85,7 @@ func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (
 				}
 			case token.Type == Literal && isContentType:
 				{
+					isContentType = false
 					contentType = strings.TrimSpace(token.Value)
 					pos := strings.Index(contentType, ";")
 					if pos > 0 {
@@ -111,7 +112,7 @@ func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (
 					if err != nil {
 						panic(err)
 					}
-					return nil
+					return EOF
 				}
 			}
 
@@ -121,11 +122,11 @@ func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (
 			}
 
 			if subIterator != nil {
-				for token := subIterator(); token != nil; token = subIterator() {
+				for token := subIterator(); token != EOF; token = subIterator() {
 					return token
 				}
 			}
-			return nil
+			return EOF
 
 		}, nil
 }

vendor/github.com/alecthomas/chroma/lexers/o/org.go (generated, vendored): 26 lines changed

@@ -24,9 +24,9 @@ var Org = internal.Register(MustNewLexer(
 			{`^(\*)( TODO)( .*)$`, ByGroups(GenericHeading, Error, GenericStrong), nil},
 			{`^(\*\*+)( TODO)( .*)$`, ByGroups(GenericSubheading, Error, Text), nil},
 			{`^(\*)( .+?)( :[a-zA-Z0-9_@:]+:)$`, ByGroups(GenericHeading, GenericStrong, GenericEmph), nil}, // Level 1 heading with tags
-			{`^(\*)( .+)$`, ByGroups(GenericHeading, GenericStrong), nil}, // // Level 1 heading with NO tags
-			{`^(\*\*+)( .+?)( :[a-zA-Z0-9_@:]+:)$`, ByGroups(GenericSubheading, Text, GenericEmph), nil}, // Level 2+ heading with tags
-			{`^(\*\*+)( .+)$`, ByGroups(GenericSubheading, Text), nil}, // Level 2+ heading with NO tags
+			{`^(\*)( .+)$`, ByGroups(GenericHeading, GenericStrong), nil}, // // Level 1 heading with NO tags
+			{`^(\*\*+)( .+?)( :[a-zA-Z0-9_@:]+:)$`, ByGroups(GenericSubheading, Text, GenericEmph), nil}, // Level 2+ heading with tags
+			{`^(\*\*+)( .+)$`, ByGroups(GenericSubheading, Text), nil}, // Level 2+ heading with NO tags
 			// Checkbox lists
 			{`^( *)([+-] )(\[[ X]\])( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil},
 			{`^( +)(\* )(\[[ X]\])( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil},
@@ -78,19 +78,19 @@ var Org = internal.Register(MustNewLexer(
 			Include("inline"),
 		},
 		"inline": {
-			{`(\s)*(\*[^ \n*][^*]+?[^ \n*]\*)((?=\W|\n|$))`, ByGroups(Text, GenericStrong, Text), nil}, // Bold
-			{`(\s)*(/[^/]+?/)((?=\W|\n|$))`, ByGroups(Text, GenericEmph, Text), nil}, // Italic
-			{`(\s)*(=[^\n=]+?=)((?=\W|\n|$))`, ByGroups(Text, NameClass, Text), nil}, // Verbatim
-			{`(\s)*(~[^\n~]+?~)((?=\W|\n|$))`, ByGroups(Text, NameClass, Text), nil}, // Code
-			{`(\s)*(\+[^+]+?\+)((?=\W|\n|$))`, ByGroups(Text, GenericDeleted, Text), nil}, // Strikethrough
-			{`(\s)*(_[^_]+?_)((?=\W|\n|$))`, ByGroups(Text, GenericUnderline, Text), nil}, // Underline
-			{`(<)([^<>]+?)(>)`, ByGroups(Text, String, Text), nil}, // <datestamp>
-			{`[{]{3}[^}]+[}]{3}`, NameBuiltin, nil}, // {{{macro(foo,1)}}}
+			{`(\s)*(\*[^ \n*][^*]+?[^ \n*]\*)((?=\W|\n|$))`, ByGroups(Text, GenericStrong, Text), nil}, // Bold
+			{`(\s)*(/[^/]+?/)((?=\W|\n|$))`, ByGroups(Text, GenericEmph, Text), nil}, // Italic
+			{`(\s)*(=[^\n=]+?=)((?=\W|\n|$))`, ByGroups(Text, NameClass, Text), nil}, // Verbatim
+			{`(\s)*(~[^\n~]+?~)((?=\W|\n|$))`, ByGroups(Text, NameClass, Text), nil}, // Code
+			{`(\s)*(\+[^+]+?\+)((?=\W|\n|$))`, ByGroups(Text, GenericDeleted, Text), nil}, // Strikethrough
+			{`(\s)*(_[^_]+?_)((?=\W|\n|$))`, ByGroups(Text, GenericUnderline, Text), nil}, // Underline
+			{`(<)([^<>]+?)(>)`, ByGroups(Text, String, Text), nil}, // <datestamp>
+			{`[{]{3}[^}]+[}]{3}`, NameBuiltin, nil}, // {{{macro(foo,1)}}}
 			{`([^[])(\[fn:)([^]]+?)(\])([^]])`, ByGroups(Text, NameBuiltinPseudo, LiteralString, NameBuiltinPseudo, Text), nil}, // [fn:1]
 			// Links
 			{`(\[\[)([^][]+?)(\]\[)([^][]+)(\]\])`, ByGroups(Text, NameAttribute, Text, NameTag, Text), nil}, // [[link][descr]]
-			{`(\[\[)([^][]+?)(\]\])`, ByGroups(Text, NameAttribute, Text), nil}, // [[link]]
-			{`(<<)([^<>]+?)(>>)`, ByGroups(Text, NameAttribute, Text), nil}, // <<targetlink>>
+			{`(\[\[)([^][]+?)(\]\])`, ByGroups(Text, NameAttribute, Text), nil}, // [[link]]
+			{`(<<)([^<>]+?)(>>)`, ByGroups(Text, NameAttribute, Text), nil}, // <<targetlink>>
 			// Tables
 			{`^( *)(\|[ -].*?[ -]\|)$`, ByGroups(Text, String), nil},
 			// Blank lines, newlines

vendor/github.com/alecthomas/chroma/lexers/r/rst.go (generated, vendored): 4 lines changed

@@ -61,7 +61,7 @@ var Restructuredtext = internal.Register(MustNewLexer(
 
 func rstCodeBlock(groups []string, lexer Lexer) Iterator {
 	iterators := []Iterator{}
-	tokens := []*Token{
+	tokens := []Token{
 		{Punctuation, groups[1]},
 		{Text, groups[2]},
 		{OperatorWord, groups[3]},
@@ -73,7 +73,7 @@ func rstCodeBlock(groups []string, lexer Lexer) Iterator {
 	code := strings.Join(groups[8:], "")
 	lexer = internal.Get(groups[6])
 	if lexer == nil {
-		tokens = append(tokens, &Token{String, code})
+		tokens = append(tokens, Token{String, code})
 		iterators = append(iterators, Literator(tokens...))
 	} else {
 		sub, err := lexer.Tokenise(nil, code)

vendor/github.com/alecthomas/chroma/lexers/s/sass.go (generated, vendored): 14 lines changed

@@ -15,13 +15,13 @@ var Sass = internal.Register(MustNewLexer(
 		CaseInsensitive: true,
 	},
 	Rules{
 		// "root": {
 		// },
 		"root": {
 			{`[ \t]*\n`, Text, nil},
-			// { `[ \t]*`, ?? <function _indentation at 0x10fcaf1e0> ??, nil },
 		},
 		"content": {
-			// { `//[^\n]*`, ?? <function _starts_block.<locals>.callback at 0x10fcaf378> ??, Push("root") },
-			// { `/\*[^\n]*`, ?? <function _starts_block.<locals>.callback at 0x10fcaf400> ??, Push("root") },
+			// { `[ \t]*`, ?? <function _indentation at 0x106932e18> ??, nil },
+			// { `//[^\n]*`, ?? <function _starts_block.<locals>.callback at 0x106936048> ??, Push("root") },
+			// { `/\*[^\n]*`, ?? <function _starts_block.<locals>.callback at 0x1069360d0> ??, Push("root") },
 			{`@import`, Keyword, Push("import")},
 			{`@for`, Keyword, Push("for")},
 			{`@(debug|warn|if|while)`, Keyword, Push("value")},
@@ -112,9 +112,9 @@ var Sass = internal.Register(MustNewLexer(
 			{`"`, LiteralStringDouble, Pop(1)},
 		},
 		"string-single": {
-			{`(\\.|#(?=[^\n{])|[^\n'#])+`, LiteralStringDouble, nil},
+			{`(\\.|#(?=[^\n{])|[^\n'#])+`, LiteralStringSingle, nil},
 			{`#\{`, LiteralStringInterpol, Push("interpolation")},
-			{`'`, LiteralStringDouble, Pop(1)},
+			{`'`, LiteralStringSingle, Pop(1)},
 		},
 		"string-url": {
 			{`(\\#|#(?=[^\n{])|[^\n#)])+`, LiteralStringOther, nil},

vendor/github.com/alecthomas/chroma/lexers/s/systemd.go (generated, vendored, new file): 28 lines added

@@ -0,0 +1,28 @@
+package s
+
+import (
+	. "github.com/alecthomas/chroma" // nolint
+	"github.com/alecthomas/chroma/lexers/internal"
+)
+
+var SYSTEMD = internal.Register(MustNewLexer(
+	&Config{
+		Name:      "SYSTEMD",
+		Aliases:   []string{"systemd"},
+		Filenames: []string{"*.service"},
+		MimeTypes: []string{"text/plain"},
+	},
+	Rules{
+		"root": {
+			{`\s+`, Text, nil},
+			{`[;#].*`, Comment, nil},
+			{`\[.*?\]$`, Keyword, nil},
+			{`(.*?)(=)(.*)(\\\n)`, ByGroups(NameAttribute, Operator, LiteralString, Text), Push("continuation")},
+			{`(.*?)(=)(.*)`, ByGroups(NameAttribute, Operator, LiteralString), nil},
+		},
+		"continuation": {
+			{`(.*?)(\\\n)`, ByGroups(LiteralString, Text), nil},
+			{`(.*)`, LiteralString, Pop(1)},
+		},
+	},
+))

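The new systemd lexer registers under the name SYSTEMD with the alias systemd, claims *.service files, and handles backslash line continuations through its continuation state. A lookup-and-tokenise sketch:

    package main

    import (
        "fmt"

        "github.com/alecthomas/chroma/lexers"
    )

    func main() {
        lexer := lexers.Get("systemd") // resolved via the registered alias
        it, err := lexer.Tokenise(nil, "[Unit]\nDescription=Example \\\n continued\n")
        if err != nil {
            panic(err)
        }
        for _, t := range it.Tokens() {
            fmt.Printf("%s %q\n", t.Type, t.Value)
        }
    }
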
vendor/github.com/alecthomas/chroma/lexers/t/tradingview.go (generated, vendored, new file): 36 lines added

@@ -0,0 +1,36 @@
+package t
+
+import (
+	. "github.com/alecthomas/chroma" // nolint
+	"github.com/alecthomas/chroma/lexers/internal"
+)
+
+// TradingView lexer.
+var TradingView = internal.Register(MustNewLexer(
+	&Config{
+		Name:      "TradingView",
+		Aliases:   []string{"tradingview", "tv"},
+		Filenames: []string{"*.tv"},
+		MimeTypes: []string{"text/x-tradingview"},
+		DotAll:    true,
+	},
+	Rules{
+		"root": {
+			{`[^\S\n]+|\n|[()]`, Text, nil},
+			{`(//.*?)(\n)`, ByGroups(CommentSingle, Text), nil},
+			{`>=|<=|==|!=|>|<|\?|-|\+|\*|\/|%|\[|\]`, Operator, nil},
+			{`[:,.]`, Punctuation, nil},
+			{`=`, KeywordPseudo, nil},
+			{`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil},
+			{`'\\.'|'[^\\]'`, LiteralString, nil},
+			{`[0-9](\.[0-9]*)?([eE][+-][0-9]+)?`, LiteralNumber, nil},
+			{`(abs|acos|alertcondition|alma|asin|atan|atr|avg|barcolor|barssince|bgcolor|cci|ceil|change|cog|correlation|cos|crossover|crossunder|cum|dev|ema|exp|falling|fill|fixnan|floor|heikinashi|highest|highestbars|hline|iff|input|kagi|linebreak|linreg|log|log10|lowest|lowestbars|macd|max|min|mom|nz|percentile_linear_interpolation|percentile_nearest_rank|percentrank|pivothigh|pivotlow|plot|plotarrow|plotbar|plotcandle|plotchar|plotshape|pointfigure|pow|renko|rising|rma|roc|round|rsi|sar|security|sign|sin|sma|sqrt|stdev|stoch|study|sum|swma|tan|tostring|tsi|valuewhen|variance|vwma|wma|strategy\.(cancel|cancel_all|close|close_all|entry|exit|order)|strategy\.risk\.(allow_entry_in|max_cons_loss_days|max_drawdown|max_intraday_filled_orders|max_intraday_loss|max_position_size))\b`, NameFunction, nil},
+			{`\b(cross|dayofmonth|dayofweek|hour|minute|month|na|offset|second|tickerid|time|tr|vwap|weekofyear|year)(\()`, ByGroups(NameFunction, Text), nil}, // functions that can also be variable
+			{`(accdist|aqua|area|areabr|black|blue|bool|circles|close|columns|currency\.(AUD|CAD|CHF|EUR|GBP|HKD|JPY|NOK|NONE|NZD|SEK|SGD|TRY|USD|ZAR)|dashed|dotted|float|friday|fuchsia|gray|green|high|histogram|hl2|hlc3|integer|interval|isdaily|isdwm|isintraday|ismonthly|isweekly|lime|line|linebr|location\.(abovebar|belowbar|bottom|top)|low|maroon|monday|n|navy|ohlc4|olive|open|orange|period|purple|red|resolution|saturday|scale\.(left|none|right)|session|session\.(extended|regular)|silver|size\.(auto|huge|large|normal|small|tiny)|solid|source|string|sunday|symbol|syminfo\.(mintick|pointvalue|prefix|root|session)|teal|thursday|ticker|tuesday|volume|wednesday|white|yellow|strategy\.(cash|position_size|closedtrades|direction\.(all|long|short)|equity|eventrades|fixed|grossloss|grossprofit|initial_capital|long|losstrades|max_contracts_held_all|max_contracts_held_long|max_contracts_held_short|max_drawdown|netprofit|oca\.(cancel|none|reduce)|openprofit|opentrades|percent_of_equity|position_avg_price|position_entry_name|short|wintrades)|shape\.(arrowdown|arrowup|circle|cross|diamond|flag|labeldown|labelup|square|triangledown|triangleup|xcross)|barstate\.is(first|history|last|new|realtime)|barmerge\.(gaps_on|gaps_off|lookahead_on|lookahead_off)|strategy\.commission\.(cash_per_contract|cash_per_order|percent))\b`, NameVariable, nil},
+			{`(cross|dayofmonth|dayofweek|hour|minute|month|na|second|tickerid|time|tr|vwap|weekofyear|year)(\b[^\(])`, ByGroups(NameVariable, Text), nil}, // variables that can also be function
+			{`(true|false)\b`, KeywordConstant, nil},
+			{`(and|or|not|if|else|for|to)\b`, OperatorWord, nil},
+			{`@?[_a-zA-Z]\w*`, Text, nil},
+		},
+	},
+))

vendor/github.com/alecthomas/chroma/lexers/v/vb.go (generated, vendored, new file): 73 lines added

@@ -0,0 +1,73 @@
+package v
+
+import (
+	. "github.com/alecthomas/chroma" // nolint
+	"github.com/alecthomas/chroma/lexers/internal"
+)
+
+const vbName = `[_\w][\w]*`
+
+// VB.Net lexer.
+var VBNet = internal.Register(MustNewLexer(
+	&Config{
+		Name:            "VB.net",
+		Aliases:         []string{"vb.net", "vbnet"},
+		Filenames:       []string{"*.vb", "*.bas"},
+		MimeTypes:       []string{"text/x-vbnet", "text/x-vba"},
+		CaseInsensitive: true,
+	},
+	Rules{
+		"root": {
+			{`^\s*<.*?>`, NameAttribute, nil},
+			{`\s+`, Text, nil},
+			{`\n`, Text, nil},
+			{`rem\b.*?\n`, Comment, nil},
+			{`'.*?\n`, Comment, nil},
+			{`#If\s.*?\sThen|#ElseIf\s.*?\sThen|#Else|#End\s+If|#Const|#ExternalSource.*?\n|#End\s+ExternalSource|#Region.*?\n|#End\s+Region|#ExternalChecksum`, CommentPreproc, nil},
+			{`[(){}!#,.:]`, Punctuation, nil},
+			{`Option\s+(Strict|Explicit|Compare)\s+(On|Off|Binary|Text)`, KeywordDeclaration, nil},
+			{Words(`(?<!\.)`, `\b`, `AddHandler`, `Alias`, `ByRef`, `ByVal`, `Call`, `Case`, `Catch`, `CBool`, `CByte`, `CChar`, `CDate`, `CDec`, `CDbl`, `CInt`, `CLng`, `CObj`, `Continue`, `CSByte`, `CShort`, `CSng`, `CStr`, `CType`, `CUInt`, `CULng`, `CUShort`, `Declare`, `Default`, `Delegate`, `DirectCast`, `Do`, `Each`, `Else`, `ElseIf`, `EndIf`, `Erase`, `Error`, `Event`, `Exit`, `False`, `Finally`, `For`, `Friend`, `Get`, `Global`, `GoSub`, `GoTo`, `Handles`, `If`, `Implements`, `Inherits`, `Interface`, `Let`, `Lib`, `Loop`, `Me`, `MustInherit`, `MustOverride`, `MyBase`, `MyClass`, `Narrowing`, `New`, `Next`, `Not`, `Nothing`, `NotInheritable`, `NotOverridable`, `Of`, `On`, `Operator`, `Option`, `Optional`, `Overloads`, `Overridable`, `Overrides`, `ParamArray`, `Partial`, `Private`, `Protected`, `Public`, `RaiseEvent`, `ReadOnly`, `ReDim`, `RemoveHandler`, `Resume`, `Return`, `Select`, `Set`, `Shadows`, `Shared`, `Single`, `Static`, `Step`, `Stop`, `SyncLock`, `Then`, `Throw`, `To`, `True`, `Try`, `TryCast`, `Wend`, `Using`, `When`, `While`, `Widening`, `With`, `WithEvents`, `WriteOnly`), Keyword, nil},
+			{`(?<!\.)End\b`, Keyword, Push("end")},
+			{`(?<!\.)(Dim|Const)\b`, Keyword, Push("dim")},
+			{`(?<!\.)(Function|Sub|Property)(\s+)`, ByGroups(Keyword, Text), Push("funcname")},
+			{`(?<!\.)(Class|Structure|Enum)(\s+)`, ByGroups(Keyword, Text), Push("classname")},
+			{`(?<!\.)(Module|Namespace|Imports)(\s+)`, ByGroups(Keyword, Text), Push("namespace")},
+			{`(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|Object|SByte|Short|Single|String|Variant|UInteger|ULong|UShort)\b`, KeywordType, nil},
+			{`(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|Or|OrElse|TypeOf|Xor)\b`, OperatorWord, nil},
+			{`&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|<=|>=|<>|[-&*/\\^+=<>\[\]]`, Operator, nil},
+			{`"`, LiteralString, Push("string")},
+			{`_\n`, Text, nil},
+			{vbName, Name, nil},
+			{`#.*?#`, LiteralDate, nil},
+			{`(\d+\.\d*|\d*\.\d+)(F[+-]?[0-9]+)?`, LiteralNumberFloat, nil},
+			{`\d+([SILDFR]|US|UI|UL)?`, LiteralNumberInteger, nil},
+			{`&H[0-9a-f]+([SILDFR]|US|UI|UL)?`, LiteralNumberInteger, nil},
+			{`&O[0-7]+([SILDFR]|US|UI|UL)?`, LiteralNumberInteger, nil},
+		},
+		"string": {
+			{`""`, LiteralString, nil},
+			{`"C?`, LiteralString, Pop(1)},
+			{`[^"]+`, LiteralString, nil},
+		},
+		"dim": {
+			{vbName, NameVariable, Pop(1)},
+			Default(Pop(1)),
+		},
+		"funcname": {
+			{vbName, NameFunction, Pop(1)},
+		},
+		"classname": {
+			{vbName, NameClass, Pop(1)},
+		},
+		"namespace": {
+			{vbName, NameNamespace, nil},
+			{`\.`, NameNamespace, nil},
+			Default(Pop(1)),
+		},
+		"end": {
+			{`\s+`, Text, nil},
+			{`(Function|Sub|Property|Class|Structure|Enum|Module|Namespace)\b`, Keyword, Pop(1)},
+			Default(Pop(1)),
+		},
+	},
+))

vendor/github.com/alecthomas/chroma/lexers/y/yaml.go (generated, vendored): 2 lines changed

@@ -20,7 +20,7 @@ var YAML = internal.Register(MustNewLexer(
 			{`&[^\s]+`, CommentPreproc, nil},
 			{`\*[^\s]+`, CommentPreproc, nil},
 			{`^%include\s+[^\n\r]+`, CommentPreproc, nil},
-			{`([>|])(\s+)((?:(?:.*?$)(?:[\n\r]*?\2)?)*)`, ByGroups(StringDoc, StringDoc, StringDoc), nil},
+			{`([>|+-]\s+)(\s+)((?:(?:.*?$)(?:[\n\r]*?)?)*)`, ByGroups(StringDoc, StringDoc, StringDoc), nil},
 			Include("value"),
 			{`[?:,\[\]]`, Punctuation, nil},
 			{`.`, Text, nil},

vendor/github.com/alecthomas/chroma/mutators.go (generated, vendored): 2 lines changed

@@ -122,7 +122,7 @@ func Default(mutators ...Mutator) Rule {
 }
 
 // Stringify returns the raw string for a set of tokens.
-func Stringify(tokens ...*Token) string {
+func Stringify(tokens ...Token) string {
 	out := []string{}
 	for _, t := range tokens {
 		out = append(out, t.Value)

vendor/github.com/alecthomas/chroma/regexp.go (generated, vendored): 18 lines changed

@@ -140,13 +140,13 @@ func Words(prefix, suffix string, words ...string) string {
 }
 
 // Tokenise text using lexer, returning tokens as a slice.
-func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]*Token, error) {
-	out := []*Token{}
+func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, error) {
+	var out []Token
 	it, err := lexer.Tokenise(options, text)
 	if err != nil {
 		return nil, err
 	}
-	for t := it(); t != nil; t = it() {
+	for t := it(); t != EOF; t = it() {
 		out = append(out, t)
 	}
 	return out, nil
@@ -246,13 +246,13 @@ func (l *LexerState) Get(key interface{}) interface{} {
 	return l.MutatorContext[key]
 }
 
-func (l *LexerState) Iterator() *Token {
+func (l *LexerState) Iterator() Token {
 	for l.Pos < len(l.Text) && len(l.Stack) > 0 {
 		// Exhaust the iterator stack, if any.
 		for len(l.iteratorStack) > 0 {
 			n := len(l.iteratorStack) - 1
 			t := l.iteratorStack[n]()
-			if t == nil {
+			if t == EOF {
 				l.iteratorStack = l.iteratorStack[:n]
 				continue
 			}
@@ -271,7 +271,7 @@ func (l *LexerState) Iterator() *Token {
 		// No match.
 		if groups == nil {
 			l.Pos++
-			return &Token{Error, string(l.Text[l.Pos-1 : l.Pos])}
+			return Token{Error, string(l.Text[l.Pos-1 : l.Pos])}
 		}
 		l.Rule = ruleIndex
 		l.Groups = groups
@@ -290,7 +290,7 @@ func (l *LexerState) Iterator() *Token {
 	for len(l.iteratorStack) > 0 {
 		n := len(l.iteratorStack) - 1
 		t := l.iteratorStack[n]()
-		if t == nil {
+		if t == EOF {
 			l.iteratorStack = l.iteratorStack[:n]
 			continue
 		}
@@ -301,9 +301,9 @@ func (l *LexerState) Iterator() *Token {
 	if l.Pos != len(l.Text) && len(l.Stack) == 0 {
 		value := string(l.Text[l.Pos:])
 		l.Pos = len(l.Text)
-		return &Token{Type: Error, Value: value}
+		return Token{Type: Error, Value: value}
 	}
-	return nil
+	return EOF
 }
 
 type RegexLexer struct {

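chroma.Tokenise is the one-shot wrapper over Lexer.Tokenise and now returns []Token rather than []*Token; note that LexerState.Iterator still emits Error tokens for unmatched input rather than failing, so the token stream always round-trips the source text. A sketch, assuming the chroma and lexers imports:

    tokens, err := chroma.Tokenise(lexers.Get("yaml"), nil, "key: value\n")
    if err != nil {
        panic(err)
    }
    fmt.Println(len(tokens), chroma.Stringify(tokens...)) // concatenated values reproduce the input
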
vendor/github.com/alecthomas/chroma/remap.go (generated, vendored): 14 lines changed

@@ -2,11 +2,11 @@ package chroma
 
 type remappingLexer struct {
 	lexer  Lexer
-	mapper func(*Token) []*Token
+	mapper func(Token) []Token
 }
 
 // RemappingLexer remaps a token to a set of, potentially empty, tokens.
-func RemappingLexer(lexer Lexer, mapper func(*Token) []*Token) Lexer {
+func RemappingLexer(lexer Lexer, mapper func(Token) []Token) Lexer {
 	return &remappingLexer{lexer, mapper}
 }
 
@@ -19,8 +19,8 @@ func (r *remappingLexer) Tokenise(options *TokeniseOptions, text string) (Iterat
 	if err != nil {
 		return nil, err
 	}
-	buffer := []*Token{}
-	return func() *Token {
+	var buffer []Token
+	return func() Token {
 		for {
 			if len(buffer) > 0 {
 				t := buffer[0]
@@ -28,7 +28,7 @@ func (r *remappingLexer) Tokenise(options *TokeniseOptions, text string) (Iterat
 				return t
 			}
 			t := it()
-			if t == nil {
+			if t == EOF {
 				return t
 			}
 			buffer = r.mapper(t)
@@ -67,7 +67,7 @@ func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer {
 		}
 
 	}
-	return RemappingLexer(lexer, func(t *Token) []*Token {
+	return RemappingLexer(lexer, func(t Token) []Token {
 		if k, ok := lut[t.Type]; ok {
 			if tt, ok := k[t.Value]; ok {
 				t.Type = tt
@@ -75,6 +75,6 @@ func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer {
 				t.Type = tt
 			}
 		}
-		return []*Token{t}
+		return []Token{t}
 	})
 }

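RemappingLexer's mapper now takes and returns value tokens, so the mapper receives a copy and mutating t.Type before returning it is safe. TypeRemappingLexer (which the Go lexer above uses for text/template content inside backtick strings) builds such a mapper from a table; a sketch, where baseLexer stands in for any chroma.Lexer you already have:

    // Remap Other tokens to LiteralString: the same TypeMapping shape the
    // Go lexer uses for backtick template strings.
    wrapped := chroma.TypeRemappingLexer(baseLexer, chroma.TypeMapping{
        {chroma.Other, chroma.LiteralString, nil},
    })
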
vendor/github.com/alecthomas/chroma/tokentype_string.go (generated, vendored): 177 lines changed

@@ -2,9 +2,9 @@
 
 package chroma
 
-import "fmt"
+import "strconv"
 
-const _TokenType_name = "NoneOtherErrorLineTableTDLineTableLineHighlightLineNumbersTableLineNumbersBackgroundKeywordKeywordConstantKeywordDeclarationKeywordNamespaceKeywordPseudoKeywordReservedKeywordTypeNameNameAttributeNameBuiltinNameBuiltinPseudoNameClassNameConstantNameDecoratorNameEntityNameExceptionNameFunctionNameFunctionMagicNameKeywordNameLabelNameNamespaceNameOperatorNameOtherNamePseudoNamePropertyNameTagNameVariableNameVariableAnonymousNameVariableClassNameVariableGlobalNameVariableInstanceNameVariableMagicLiteralLiteralDateLiteralOtherLiteralStringLiteralStringAffixLiteralStringAtomLiteralStringBacktickLiteralStringBooleanLiteralStringCharLiteralStringDelimiterLiteralStringDocLiteralStringDoubleLiteralStringEscapeLiteralStringHeredocLiteralStringInterpolLiteralStringNameLiteralStringOtherLiteralStringRegexLiteralStringSingleLiteralStringSymbolLiteralNumberLiteralNumberBinLiteralNumberFloatLiteralNumberHexLiteralNumberIntegerLiteralNumberIntegerLongLiteralNumberOctOperatorOperatorWordPunctuationCommentCommentHashbangCommentMultilineCommentSingleCommentSpecialCommentPreprocCommentPreprocFileGenericGenericDeletedGenericEmphGenericErrorGenericHeadingGenericInsertedGenericOutputGenericPromptGenericStrongGenericSubheadingGenericTracebackGenericUnderlineTextTextWhitespaceTextSymbolTextPunctuation"
+const _TokenType_name = "NoneOtherErrorLineTableTDLineTableLineHighlightLineNumbersTableLineNumbersBackgroundEOFTypeKeywordKeywordConstantKeywordDeclarationKeywordNamespaceKeywordPseudoKeywordReservedKeywordTypeNameNameAttributeNameBuiltinNameBuiltinPseudoNameClassNameConstantNameDecoratorNameEntityNameExceptionNameFunctionNameFunctionMagicNameKeywordNameLabelNameNamespaceNameOperatorNameOtherNamePseudoNamePropertyNameTagNameVariableNameVariableAnonymousNameVariableClassNameVariableGlobalNameVariableInstanceNameVariableMagicLiteralLiteralDateLiteralOtherLiteralStringLiteralStringAffixLiteralStringAtomLiteralStringBacktickLiteralStringBooleanLiteralStringCharLiteralStringDelimiterLiteralStringDocLiteralStringDoubleLiteralStringEscapeLiteralStringHeredocLiteralStringInterpolLiteralStringNameLiteralStringOtherLiteralStringRegexLiteralStringSingleLiteralStringSymbolLiteralNumberLiteralNumberBinLiteralNumberFloatLiteralNumberHexLiteralNumberIntegerLiteralNumberIntegerLongLiteralNumberOctOperatorOperatorWordPunctuationCommentCommentHashbangCommentMultilineCommentSingleCommentSpecialCommentPreprocCommentPreprocFileGenericGenericDeletedGenericEmphGenericErrorGenericHeadingGenericInsertedGenericOutputGenericPromptGenericStrongGenericSubheadingGenericTracebackGenericUnderlineTextTextWhitespaceTextSymbolTextPunctuation"
 
 var _TokenType_map = map[TokenType]string{
 	-9: _TokenType_name[0:4],
@@ -16,96 +16,97 @@ var _TokenType_map = map[TokenType]string{
 	-3: _TokenType_name[47:63],
 	-2: _TokenType_name[63:74],
 	-1: _TokenType_name[74:84],
-	1000: _TokenType_name[84:91],
-	1001: _TokenType_name[91:106],
-	1002: _TokenType_name[106:124],
-	1003: _TokenType_name[124:140],
-	1004: _TokenType_name[140:153],
-	1005: _TokenType_name[153:168],
-	1006: _TokenType_name[168:179],
-	2000: _TokenType_name[179:183],
-	2001: _TokenType_name[183:196],
-	2002: _TokenType_name[196:207],
-	2003: _TokenType_name[207:224],
-	2004: _TokenType_name[224:233],
-	2005: _TokenType_name[233:245],
-	2006: _TokenType_name[245:258],
-	2007: _TokenType_name[258:268],
-	2008: _TokenType_name[268:281],
-	2009: _TokenType_name[281:293],
-	2010: _TokenType_name[293:310],
-	2011: _TokenType_name[310:321],
-	2012: _TokenType_name[321:330],
-	2013: _TokenType_name[330:343],
-	2014: _TokenType_name[343:355],
-	2015: _TokenType_name[355:364],
-	2016: _TokenType_name[364:374],
-	2017: _TokenType_name[374:386],
-	2018: _TokenType_name[386:393],
-	2019: _TokenType_name[393:405],
-	2020: _TokenType_name[405:426],
-	2021: _TokenType_name[426:443],
-	2022: _TokenType_name[443:461],
-	2023: _TokenType_name[461:481],
-	2024: _TokenType_name[481:498],
-	3000: _TokenType_name[498:505],
-	3001: _TokenType_name[505:516],
-	3002: _TokenType_name[516:528],
-	3100: _TokenType_name[528:541],
-	3101: _TokenType_name[541:559],
-	3102: _TokenType_name[559:576],
-	3103: _TokenType_name[576:597],
-	3104: _TokenType_name[597:617],
-	3105: _TokenType_name[617:634],
-	3106: _TokenType_name[634:656],
-	3107: _TokenType_name[656:672],
-	3108: _TokenType_name[672:691],
-	3109: _TokenType_name[691:710],
-	3110: _TokenType_name[710:730],
-	3111: _TokenType_name[730:751],
-	3112: _TokenType_name[751:768],
-	3113: _TokenType_name[768:786],
-	3114: _TokenType_name[786:804],
-	3115: _TokenType_name[804:823],
-	3116: _TokenType_name[823:842],
-	3200: _TokenType_name[842:855],
-	3201: _TokenType_name[855:871],
-	3202: _TokenType_name[871:889],
-	3203: _TokenType_name[889:905],
-	3204: _TokenType_name[905:925],
-	3205: _TokenType_name[925:949],
-	3206: _TokenType_name[949:965],
-	4000: _TokenType_name[965:973],
-	4001: _TokenType_name[973:985],
-	5000: _TokenType_name[985:996],
-	6000: _TokenType_name[996:1003],
-	6001: _TokenType_name[1003:1018],
-	6002: _TokenType_name[1018:1034],
-	6003: _TokenType_name[1034:1047],
-	6004: _TokenType_name[1047:1061],
-	6100: _TokenType_name[1061:1075],
-	6101: _TokenType_name[1075:1093],
-	7000: _TokenType_name[1093:1100],
-	7001: _TokenType_name[1100:1114],
-	7002: _TokenType_name[1114:1125],
-	7003: _TokenType_name[1125:1137],
-	7004: _TokenType_name[1137:1151],
-	7005: _TokenType_name[1151:1166],
-	7006: _TokenType_name[1166:1179],
-	7007: _TokenType_name[1179:1192],
-	7008: _TokenType_name[1192:1205],
-	7009: _TokenType_name[1205:1222],
-	7010: _TokenType_name[1222:1238],
-	7011: _TokenType_name[1238:1254],
-	8000: _TokenType_name[1254:1258],
-	8001: _TokenType_name[1258:1272],
-	8002: _TokenType_name[1272:1282],
-	8003: _TokenType_name[1282:1297],
+	0: _TokenType_name[84:91],
+	1000: _TokenType_name[91:98],
+	1001: _TokenType_name[98:113],
+	1002: _TokenType_name[113:131],
+	1003: _TokenType_name[131:147],
+	1004: _TokenType_name[147:160],
+	1005: _TokenType_name[160:175],
+	1006: _TokenType_name[175:186],
+	2000: _TokenType_name[186:190],
+	2001: _TokenType_name[190:203],
+	2002: _TokenType_name[203:214],
+	2003: _TokenType_name[214:231],
+	2004: _TokenType_name[231:240],
+	2005: _TokenType_name[240:252],
+	2006: _TokenType_name[252:265],
+	2007: _TokenType_name[265:275],
+	2008: _TokenType_name[275:288],
+	2009: _TokenType_name[288:300],
+	2010: _TokenType_name[300:317],
+	2011: _TokenType_name[317:328],
+	2012: _TokenType_name[328:337],
+	2013: _TokenType_name[337:350],
+	2014: _TokenType_name[350:362],
+	2015: _TokenType_name[362:371],
+	2016: _TokenType_name[371:381],
+	2017: _TokenType_name[381:393],
+	2018: _TokenType_name[393:400],
+	2019: _TokenType_name[400:412],
+	2020: _TokenType_name[412:433],
+	2021: _TokenType_name[433:450],
+	2022: _TokenType_name[450:468],
+	2023: _TokenType_name[468:488],
+	2024: _TokenType_name[488:505],
+	3000: _TokenType_name[505:512],
+	3001: _TokenType_name[512:523],
+	3002: _TokenType_name[523:535],
+	3100: _TokenType_name[535:548],
+	3101: _TokenType_name[548:566],
+	3102: _TokenType_name[566:583],
+	3103: _TokenType_name[583:604],
+	3104: _TokenType_name[604:624],
+	3105: _TokenType_name[624:641],
+	3106: _TokenType_name[641:663],
+	3107: _TokenType_name[663:679],
+	3108: _TokenType_name[679:698],
+	3109: _TokenType_name[698:717],
+	3110: _TokenType_name[717:737],
+	3111: _TokenType_name[737:758],
+	3112: _TokenType_name[758:775],
+	3113: _TokenType_name[775:793],
+	3114: _TokenType_name[793:811],
+	3115: _TokenType_name[811:830],
+	3116: _TokenType_name[830:849],
+	3200: _TokenType_name[849:862],
+	3201: _TokenType_name[862:878],
+	3202: _TokenType_name[878:896],
+	3203: _TokenType_name[896:912],
+	3204: _TokenType_name[912:932],
+	3205: _TokenType_name[932:956],
+	3206: _TokenType_name[956:972],
+	4000: _TokenType_name[972:980],
+	4001: _TokenType_name[980:992],
+	5000: _TokenType_name[992:1003],
+	6000: _TokenType_name[1003:1010],
+	6001: _TokenType_name[1010:1025],
+	6002: _TokenType_name[1025:1041],
+	6003: _TokenType_name[1041:1054],
+	6004: _TokenType_name[1054:1068],
+	6100: _TokenType_name[1068:1082],
+	6101: _TokenType_name[1082:1100],
+	7000: _TokenType_name[1100:1107],
+	7001: _TokenType_name[1107:1121],
+	7002: _TokenType_name[1121:1132],
+	7003: _TokenType_name[1132:1144],
+	7004: _TokenType_name[1144:1158],
+	7005: _TokenType_name[1158:1173],
+	7006: _TokenType_name[1173:1186],
+	7007: _TokenType_name[1186:1199],
+	7008: _TokenType_name[1199:1212],
+	7009: _TokenType_name[1212:1229],
+	7010: _TokenType_name[1229:1245],
+	7011: _TokenType_name[1245:1261],
+	8000: _TokenType_name[1261:1265],
+	8001: _TokenType_name[1265:1279],
+	8002: _TokenType_name[1279:1289],
+	8003: _TokenType_name[1289:1304],
 }
 
 func (i TokenType) String() string {
 	if str, ok := _TokenType_map[i]; ok {
 		return str
 	}
-	return fmt.Sprintf("TokenType(%d)", i)
+	return "TokenType(" + strconv.FormatInt(int64(i), 10) + ")"
 }

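The regenerated stringer table gains an entry at index 0 for the new EOFType and swaps fmt for the lighter strconv in the fallback path. A spot check, assuming the chroma and fmt imports:

    fmt.Println(chroma.TokenType(0))    // "EOFType", via the new index-0 entry
    fmt.Println(chroma.TokenType(9999)) // "TokenType(9999)", via the strconv fallback
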
vendor/github.com/alecthomas/chroma/types.go (generated, vendored): 6 lines changed

@@ -12,7 +12,7 @@ import (
 // It is also an Emitter, emitting a single token of itself
 type TokenType int
 
-func (t *TokenType) MarshalJSON() ([]byte, error) { return json.Marshal(t.String()) }
+func (t TokenType) MarshalJSON() ([]byte, error) { return json.Marshal(t.String()) }
 func (t *TokenType) UnmarshalJSON(data []byte) error {
 	key := ""
 	err := json.Unmarshal(data, &key)
@@ -54,6 +54,8 @@
 	Other
 	// No highlighting.
 	None
+	// Used as an EOF marker / nil token
+	EOFType TokenType = 0
 )
 
 // Keywords.
@@ -341,5 +343,5 @@ func (t TokenType) InSubCategory(other TokenType) bool {
 }
 
 func (t TokenType) Emit(groups []string, lexer Lexer) Iterator {
-	return Literator(&Token{Type: t, Value: groups[0]})
+	return Literator(Token{Type: t, Value: groups[0]})
 }

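Switching MarshalJSON to a value receiver matters beyond style: with a pointer receiver, only *TokenType satisfied json.Marshaler, so marshalling a plain TokenType value (for example inside a Token) fell back to emitting the raw integer. A before/after sketch, assuming the chroma, encoding/json and fmt imports:

    b, _ := json.Marshal(chroma.Keyword)
    fmt.Println(string(b)) // now "Keyword"; with the old pointer receiver it printed 1000
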