mirror of https://github.com/taigrr/wtf synced 2026-04-01 03:08:50 -07:00

Update dependencies to latest versions

Chris Cummer
2019-01-11 16:44:42 -08:00
parent ea27f40164
commit 48cb7ba773
358 changed files with 29553 additions and 8982 deletions


@@ -31,7 +31,7 @@ func (d *delegatingLexer) Config() *Config {
 // An insertion is the character range where language tokens should be inserted.
 type insertion struct {
 	start, end int
-	tokens     []*Token
+	tokens     []Token
 }
 
 func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
@@ -44,15 +44,15 @@ func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Itera
 	insertions := []*insertion{}
 	var insert *insertion
 	offset := 0
-	var last *Token
+	var last Token
 	for _, t := range tokens {
 		if t.Type == Other {
-			if last != nil && insert != nil && last.Type != Other {
+			if last != EOF && insert != nil && last.Type != Other {
 				insert.end = offset
 			}
 			others.WriteString(t.Value)
 		} else {
-			if last == nil || last.Type == Other {
+			if last == EOF || last.Type == Other {
 				insert = &insertion{start: offset}
 				insertions = append(insertions, insert)
 			}
@@ -73,12 +73,12 @@ func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Itera
 	}
 
 	// Interleave the two sets of tokens.
-	out := []*Token{}
+	var out []Token
 	offset = 0 // Offset into text.
 	tokenIndex := 0
-	nextToken := func() *Token {
+	nextToken := func() Token {
 		if tokenIndex >= len(rootTokens) {
-			return nil
+			return EOF
 		}
 		t := rootTokens[tokenIndex]
 		tokenIndex++
@@ -95,18 +95,18 @@ func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Itera
 	}
 	t := nextToken()
 	i := nextInsertion()
-	for t != nil || i != nil {
+	for t != EOF || i != nil {
 		// fmt.Printf("%d->%d:%q %d->%d:%q\n", offset, offset+len(t.Value), t.Value, i.start, i.end, Stringify(i.tokens...))
-		if t == nil || (i != nil && i.start < offset+len(t.Value)) {
-			var l *Token
+		if t == EOF || (i != nil && i.start < offset+len(t.Value)) {
+			var l Token
 			l, t = splitToken(t, i.start-offset)
-			if l != nil {
+			if l != EOF {
 				out = append(out, l)
 				offset += len(l.Value)
 			}
 			out = append(out, i.tokens...)
 			offset += i.end - i.start
-			if t == nil {
+			if t == EOF {
 				t = nextToken()
 			}
 			i = nextInsertion()
@@ -119,15 +119,15 @@ func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Itera
 	return Literator(out...), nil
 }
 
-func splitToken(t *Token, offset int) (l *Token, r *Token) {
-	if t == nil {
-		return nil, nil
+func splitToken(t Token, offset int) (l Token, r Token) {
+	if t == EOF {
+		return EOF, EOF
 	}
 	if offset == 0 {
-		return nil, t
+		return EOF, t
 	}
 	if offset == len(t.Value) {
-		return t, nil
+		return t, EOF
 	}
 	l = t.Clone()
 	r = t.Clone()
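
For readers skimming the vendored change: every hunk above follows the same pattern. The chroma delegating lexer moves from *Token pointers, where nil marked "no token", to comparable Token values checked against an EOF sentinel. The sketch below illustrates that sentinel pattern on a simplified splitToken; the TokenType, the field names, and the zero-value EOF definition here are assumptions made for illustration, not the library's exact declarations.

package main

import "fmt"

// Minimal stand-ins for the types touched by the hunks above (assumed shapes).
type TokenType int

const Other TokenType = 0

type Token struct {
	Type  TokenType
	Value string
}

// EOF is a comparable sentinel value: pointer checks like `t == nil` on *Token
// become value checks like `t == EOF` on Token, as in the diff.
var EOF Token

// splitToken mirrors the post-change signature: values in, values out,
// with EOF standing in wherever nil used to be returned.
func splitToken(t Token, offset int) (l Token, r Token) {
	if t == EOF {
		return EOF, EOF
	}
	if offset == 0 {
		return EOF, t
	}
	if offset == len(t.Value) {
		return t, EOF
	}
	// With value semantics a Clone call is unnecessary here:
	// copying the struct is enough.
	l, r = t, t
	l.Value = t.Value[:offset]
	r.Value = t.Value[offset:]
	return l, r
}

func main() {
	l, r := splitToken(Token{Type: Other, Value: "hello"}, 2)
	fmt.Printf("%q %q\n", l.Value, r.Value) // "he" "llo"

	_, rest := splitToken(EOF, 0)
	fmt.Println(rest == EOF) // true: the sentinel propagates instead of a nil pointer
}

The practical upside of the migration visible in the diff is fewer pointer dereferences and no nil checks scattered through the interleaving loop; the trade-off is that every "no token" comparison must consistently use the EOF sentinel.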