diff --git a/Gopkg.lock b/Gopkg.lock index 8d56782e..b9d83a4d 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -2,198 +2,306 @@ [[projects]] + digest = "1:5c3894b2aa4d6bead0ceeea6831b305d62879c871780e7b76296ded1b004bc57" name = "cloud.google.com/go" packages = ["compute/metadata"] - revision = "aad3f485ee528456e0768f20397b4d9dd941e755" - version = "v0.25.0" + pruneopts = "UT" + revision = "64a2037ec6be8a4b0c1d1f706ed35b428b989239" + version = "v0.26.0" [[projects]] branch = "master" + digest = "1:1164f5eccc8905c03a2c688141c81f06522ed5fd2eb65cb43debeb730a018276" name = "github.com/adlio/trello" packages = ["."] + pruneopts = "UT" revision = "8a458717123e328d9103a3bf075e64bc1ec961f8" +[[projects]] + digest = "1:9253f97cfbbe049b631877c80badecc69620711b3e335f6cf97a7809681da388" + name = "github.com/alecthomas/chroma" + packages = [ + ".", + "formatters", + "formatters/html", + "lexers", + "lexers/a", + "lexers/b", + "lexers/c", + "lexers/circular", + "lexers/d", + "lexers/e", + "lexers/f", + "lexers/g", + "lexers/h", + "lexers/i", + "lexers/internal", + "lexers/j", + "lexers/k", + "lexers/l", + "lexers/m", + "lexers/n", + "lexers/o", + "lexers/p", + "lexers/q", + "lexers/r", + "lexers/s", + "lexers/t", + "lexers/v", + "lexers/w", + "lexers/x", + "lexers/y", + "styles", + ] + pruneopts = "UT" + revision = "5d7fef2ae60b501bbf28d476c3f273b8017d8261" + version = "v0.5.0" + [[projects]] branch = "master" + digest = "1:b5d1ba5dc01e8129b7df5cf95380a66b0e0f122850189b7ec255953b15522f16" name = "github.com/andygrunwald/go-gerrit" packages = ["."] - revision = "5ea603106b4869cc3f38322ee17d5fc1360ee1e3" + pruneopts = "UT" + revision = "197fe0d2e796b3d985b9cd91be8afd4415692f39" [[projects]] branch = "master" + digest = "1:6d7646b01dbbacd8c50be0cbddbcb633619f819e51f47a4114c8d4518ae7ac02" name = "github.com/briandowns/openweathermap" packages = ["."] - revision = "6a9abf92e34f4de62ac671caee3143f10b98892d" + pruneopts = "UT" + revision = "5f41b7c9d92de5d74bf32f4486375c7547bc8a3c" [[projects]] branch = "master" + digest = "1:710109527a119c813d4fac773712dd92f449ed7c2f7b81f49822f94d290c8f72" + name = "github.com/danwakefield/fnmatch" + packages = ["."] + pruneopts = "UT" + revision = "cbb64ac3d964b81592e64f957ad53df015803288" + +[[projects]] + branch = "master" + digest = "1:e3e85f98942fe3b51b8f9316d8d1dc1925353b26639d456030368ae278bb0475" name = "github.com/darkSasori/todoist" packages = ["."] + pruneopts = "UT" revision = "ec6b38b374ab9c60cc9716d2083ae66eb9383d03" [[projects]] + digest = "1:ffe9824d294da03b391f44e1ae8281281b4afc1bdaa9588c9097785e3af10cec" name = "github.com/davecgh/go-spew" packages = ["spew"] - revision = "346938d642f2ec3594ed81d874461961cd0faa76" - version = "v1.1.0" + pruneopts = "UT" + revision = "8991bc29aa16c548c550c7ff78260e27b9ab7c73" + version = "v1.1.1" + +[[projects]] + digest = "1:72dc2b6056e7097f829260e4a2ff08d32fec6017df1982a66e110ab4128486f8" + name = "github.com/dlclark/regexp2" + packages = [ + ".", + "syntax", + ] + pruneopts = "UT" + revision = "487489b64fb796de2e55f4e8a4ad1e145f80e957" + version = "v1.1.6" [[projects]] branch = "master" + digest = "1:40d0056c1b1f503c366ba441df92a82b5a2654d6f3747b1689a611eb5c9ce0a2" name = "github.com/gdamore/encoding" packages = ["."] + pruneopts = "UT" revision = "b23993cbb6353f0e6aa98d0ee318a34728f628b9" [[projects]] + branch = "master" + digest = "1:00b9cce210566117aff926677c005aeaea6c85374e67bdcb72783af237c48f97" name = "github.com/gdamore/tcell" packages = [ ".", - "terminfo" + "terminfo", ] + pruneopts = "UT" revision = 
"de7e78efa4a71b3f36c7154989c529dbdf9ae623" - version = "v1.1.0" [[projects]] + digest = "1:15042ad3498153684d09f393bbaec6b216c8eec6d61f63dff711de7d64ed8861" name = "github.com/golang/protobuf" packages = ["proto"] + pruneopts = "UT" revision = "b4deda0973fb4c70b50d226b1af49f3da59f5265" version = "v1.1.0" [[projects]] branch = "master" + digest = "1:4be3c01ef56542da4f1268c52a8b88bae5309ec3dade7755e250bc933e13ac70" name = "github.com/google/go-github" packages = ["github"] - revision = "77a604e05ff9e668b242334d116bc6d1d0a12d66" + pruneopts = "UT" + revision = "d7732128a00e8e95e8fe896017da18ee20b2180d" [[projects]] branch = "master" + digest = "1:a63cff6b5d8b95638bfe300385d93b2a6d9d687734b863da8e09dc834510a690" name = "github.com/google/go-querystring" packages = ["query"] + pruneopts = "UT" revision = "53e6ce116135b80d037921a7fdd5138cf32d7a8a" [[projects]] + digest = "1:a2cff208d4759f6ba1b1cd228587b0a1869f95f22542ec9cd17fff64430113c7" name = "github.com/jessevdk/go-flags" packages = ["."] + pruneopts = "UT" revision = "c6ca198ec95c841fdb89fc0de7496fed11ab854e" version = "v1.4.0" [[projects]] + digest = "1:c65a16ac77d0b1aefc7009cabb6ac5ad05def02025f5be85f450c03f52cc6f86" name = "github.com/lucasb-eyer/go-colorful" packages = ["."] + pruneopts = "UT" revision = "345fbb3dbcdb252d9985ee899a84963c0fa24c82" version = "v1.0" [[projects]] + digest = "1:cdb899c199f907ac9fb50495ec71212c95cb5b0e0a8ee0800da0238036091033" name = "github.com/mattn/go-runewidth" packages = ["."] - revision = "9e777a8366cce605130a531d2cd6363d07ad7317" - version = "v0.0.2" + pruneopts = "UT" + revision = "ce7b0b5c7b45a81508558cd1dba6bb1e4ddb51bb" + version = "v0.0.3" [[projects]] branch = "master" + digest = "1:3a16300d913e1050d4a7ff296106a11b72da05c5dd3475f2ae1feb0830866461" name = "github.com/olebedev/config" packages = ["."] + pruneopts = "UT" revision = "ed90d2035b8114c30b9cb65e7d52e10a7148f8c6" [[projects]] + digest = "1:40e195917a951a8bf867cd05de2a46aaf1806c50cf92eebf4c16f78cd196f747" name = "github.com/pkg/errors" packages = ["."] + pruneopts = "UT" revision = "645ef00459ed84a119197bfb8d8205042c6df63d" version = "v0.8.0" [[projects]] + digest = "1:cfa0d7741863a0e1d30e0ccdd4b48a96a471cdb47892303de8b92c3713af3e77" name = "github.com/pkg/profile" packages = ["."] + pruneopts = "UT" revision = "5b67d428864e92711fcbd2f8629456121a56d91f" version = "v1.2.1" [[projects]] + digest = "1:0028cb19b2e4c3112225cd871870f2d9cf49b9b4276531f03438a88e94be86fe" name = "github.com/pmezard/go-difflib" packages = ["difflib"] + pruneopts = "UT" revision = "792786c7400a136282c1664665ae0a8db921c6c2" version = "v1.0.0" [[projects]] + digest = "1:0665976cc6d8c2e60dafca32820b6b7eca70f37ded0bcccc103388e1c0d05c53" name = "github.com/radovskyb/watcher" packages = ["."] + pruneopts = "UT" revision = "6145e1439b9de93806925353403f91d2abbad8a5" version = "v1.0.2" [[projects]] branch = "master" + digest = "1:9deb696e9739b45e91369ffb3876d4e0d2bc45e0d35062fb3c5aa44fa77eff6c" name = "github.com/rivo/tview" packages = ["."] - revision = "6614b16d9037759065eac63d8fb3d8408e63e222" + pruneopts = "UT" + revision = "21f50f5bc400083b4eb23304887d9cd0fc00d075" [[projects]] + digest = "1:18752d0b95816a1b777505a97f71c7467a8445b8ffb55631a7bf779f6ba4fa83" name = "github.com/stretchr/testify" packages = ["assert"] + pruneopts = "UT" revision = "f35b8ab0b5a2cef36673838d662e249dd9c94686" version = "v1.2.2" [[projects]] + branch = "master" + digest = "1:ca067f3d378064d50a239b1f0d4dcd2dab6356f7643993bdf02c91d6e621c028" name = "github.com/xanzy/go-gitlab" packages = ["."] - 
revision = "4f616cbce6a5f335461881701f772d8b8e16c2f6" - version = "v0.10.8" + pruneopts = "UT" + revision = "f3bc634ab936f7e4ee5e21334ccfdfeb5601d477" [[projects]] branch = "master" + digest = "1:3b19ea715e070a74397f3cfe950f0b555e66a3c23461e553777f52112a81b0e6" name = "github.com/yfronto/newrelic" packages = ["."] + pruneopts = "UT" revision = "7c9c2852e8f9e69a80bff4f4f1fe4cdd15eeba19" [[projects]] branch = "master" - name = "github.com/zyedidia/highlight" - packages = ["."] - revision = "201131ce5cf5ba174570dcd878d1f2698db5af9f" - -[[projects]] - branch = "master" + digest = "1:d6b719875cf8091fbab38527d81d34e71f4521b9ee9ccfbd4a32cff2ac5af96e" name = "golang.org/x/net" packages = [ "context", - "context/ctxhttp" + "context/ctxhttp", ] - revision = "a0f8a16cb08c06df97cbdf9c47f4731ba548c33c" + pruneopts = "UT" + revision = "aaf60122140d3fcf75376d319f0554393160eb50" [[projects]] branch = "master" + digest = "1:bea0314c10bd362ab623af4880d853b5bad3b63d0ab9945c47e461b8d04203ed" name = "golang.org/x/oauth2" packages = [ ".", "google", "internal", "jws", - "jwt" + "jwt", ] + pruneopts = "UT" revision = "3d292e4d0cdc3a0113e6d207bb137145ef1de42f" [[projects]] + digest = "1:37672ad5821719e2df8509c2edd4ba5ae192463237c73c3a2d24ef8b2bc9e36f" name = "golang.org/x/text" packages = [ "encoding", "encoding/internal/identifier", "internal/gen", "transform", - "unicode/cldr" + "unicode/cldr", ] + pruneopts = "UT" revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0" version = "v0.3.0" [[projects]] branch = "master" + digest = "1:c8d95fbc783cab4e8986b1bf3dbe5312db7521e4342ac755280cfa78ce0ee792" name = "google.golang.org/api" packages = [ "calendar/v3", "gensupport", "googleapi", "googleapi/internal/uritemplates", - "sheets/v4" + "sheets/v4", ] - revision = "31ca0e01cd791f07750cb23fc99327721f753290" + pruneopts = "UT" + revision = "e21acd801f91da814261b938941d193bb036441a" [[projects]] + digest = "1:c8907869850adaa8bd7631887948d0684f3787d0912f1c01ab72581a6c34432e" name = "google.golang.org/appengine" packages = [ ".", @@ -205,20 +313,46 @@ "internal/modules", "internal/remote_api", "internal/urlfetch", - "urlfetch" + "urlfetch", ] + pruneopts = "UT" revision = "b1f26356af11148e710935ed1ac8a7f5702c7612" version = "v1.1.0" [[projects]] + digest = "1:342378ac4dcb378a5448dd723f0784ae519383532f5e70ade24132c4c8693202" name = "gopkg.in/yaml.v2" packages = ["."] + pruneopts = "UT" revision = "5420a8b6744d3b0345ab293f6fcba19c978f1183" version = "v2.2.1" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "b36355370ccdc4bf6659b1412b29f9da474a922e52a2884f80b7205791e0d304" + input-imports = [ + "github.com/adlio/trello", + "github.com/alecthomas/chroma/formatters", + "github.com/alecthomas/chroma/lexers", + "github.com/alecthomas/chroma/styles", + "github.com/andygrunwald/go-gerrit", + "github.com/briandowns/openweathermap", + "github.com/darkSasori/todoist", + "github.com/gdamore/tcell", + "github.com/google/go-github/github", + "github.com/jessevdk/go-flags", + "github.com/olebedev/config", + "github.com/pkg/profile", + "github.com/radovskyb/watcher", + "github.com/rivo/tview", + "github.com/stretchr/testify/assert", + "github.com/xanzy/go-gitlab", + "github.com/yfronto/newrelic", + "golang.org/x/oauth2", + "golang.org/x/oauth2/google", + "google.golang.org/api/calendar/v3", + "google.golang.org/api/sheets/v4", + "gopkg.in/yaml.v2", + ] solver-name = "gps-cdcl" solver-version = 1 diff --git a/vendor/github.com/alecthomas/chroma/.gitignore b/vendor/github.com/alecthomas/chroma/.gitignore new file 
mode 100644 index 00000000..94dc6b97 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/.gitignore @@ -0,0 +1,18 @@ +# Binaries for programs and plugins +*.exe +*.dll +*.so +*.dylib +/cmd/chroma/chroma + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 +.glide/ + +_models/ + diff --git a/vendor/github.com/alecthomas/chroma/.goreleaser.yml b/vendor/github.com/alecthomas/chroma/.goreleaser.yml new file mode 100644 index 00000000..e90c5113 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/.goreleaser.yml @@ -0,0 +1,31 @@ +project_name: chroma +release: + github: + owner: alecthomas + name: chroma +brew: + install: bin.install "chroma" +builds: +- goos: + - linux + - darwin + - windows + goarch: + - amd64 + - "386" + goarm: + - "6" + main: ./cmd/chroma/main.go + ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}} + binary: chroma +archive: + format: tar.gz + name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{ + .Arm }}{{ end }}' + files: + - COPYING + - README* +snapshot: + name_template: SNAPSHOT-{{ .Commit }} +checksum: + name_template: '{{ .ProjectName }}-{{ .Version }}-checksums.txt' diff --git a/vendor/github.com/alecthomas/chroma/.travis.yml b/vendor/github.com/alecthomas/chroma/.travis.yml new file mode 100644 index 00000000..3e27eb3e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/.travis.yml @@ -0,0 +1,5 @@ +sudo: false +language: go +after_success: + go get github.com/goreleaser/goreleaser && goreleaser + diff --git a/vendor/github.com/alecthomas/chroma/COPYING b/vendor/github.com/alecthomas/chroma/COPYING new file mode 100644 index 00000000..92dc39f7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/COPYING @@ -0,0 +1,19 @@ +Copyright (C) 2017 Alec Thomas + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/alecthomas/chroma/README.md b/vendor/github.com/alecthomas/chroma/README.md new file mode 100644 index 00000000..681f10ab --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/README.md @@ -0,0 +1,210 @@ +# Chroma - A general purpose syntax highlighter in pure Go [![](https://godoc.org/github.com/alecthomas/chroma?status.svg)](http://godoc.org/github.com/alecthomas/chroma) [![Build Status](https://travis-ci.org/alecthomas/chroma.png)](https://travis-ci.org/alecthomas/chroma) [![Gitter chat](https://badges.gitter.im/alecthomas.png)](https://gitter.im/alecthomas/Lobby) + +> **NOTE:** As Chroma has just been released, its API is still in flux. That said, the high-level interface should not change significantly. + +Chroma takes source code and other structured text and converts it into syntax +highlighted HTML, ANSI-coloured text, etc. + +Chroma is based heavily on [Pygments](http://pygments.org/), and includes +translators for Pygments lexers and styles. + +## Table of Contents + + + +1. [Supported languages](#supported-languages) +1. [Using the library](#using-the-library) + 1. [Quick start](#quick-start) + 1. [Identifying the language](#identifying-the-language) + 1. [Formatting the output](#formatting-the-output) + 1. [The HTML formatter](#the-html-formatter) +1. [More detail](#more-detail) + 1. [Lexers](#lexers) + 1. [Formatters](#formatters) + 1. [Styles](#styles) +1. [Command-line interface](#command-line-interface) +1. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments) + + + +## Supported languages + +ABNF, ANTLR, APL, ActionScript, ActionScript 3, Ada, Angular2, ApacheConf, AppleScript, Awk, BNF, Ballerina, Base Makefile, Bash, Batchfile, BlitzBasic, Brainfuck, C, C#, C++, CFEngine3, CMake, COBOL, CSS, Cap'n Proto, Ceylon, ChaiScript, Cheetah, Clojure, CoffeeScript, Common Lisp, Coq, Crystal, Cython, DTD, Dart, Diff, Django/Jinja, Docker, EBNF, Elixir, Elm, EmacsLisp, Erlang, FSharp, Factor, Fish, Forth, Fortran, GAS, GDScript, GLSL, Genshi, Genshi HTML, Genshi Text, Gnuplot, Go, Go HTML Template, Go Text Template, Groovy, HTML, HTTP, Handlebars, Haskell, Haxe, Hexdump, Hy, INI, Idris, Io, JSON, JSX, Java, JavaScript, Julia, Kotlin, LLVM, Lighttpd configuration file, Lua, Mako, Mason, Mathematica, MiniZinc, Modula-2, MorrowindScript, MySQL, Myghty, NASM, Newspeak, Nginx configuration file, Nim, Nix, OCaml, Objective-C, Octave, Org Mode, PHP, PL/pgSQL, POVRay, PacmanConf, Perl, Pig, PkgConfig, PostScript, PostgreSQL SQL dialect, PowerShell, Prolog, Protocol Buffer, Puppet, Python, Python 3, QBasic, R, Racket, Ragel, Rexx, Ruby, Rust, SCSS, SPARQL, SQL, Sass, Scala, Scheme, Scilab, Smalltalk, Smarty, Snobol, Solidity, SquidConf, Swift, TASM, TOML, Tcl, Tcsh, TeX, Termcap, Terminfo, Terraform, Thrift, Transact-SQL, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData, VHDL, VimL, WDTE, XML, Xorg, YAML, cfstatement, markdown, reStructuredText, reg, systemverilog, verilog + +_I will attempt to keep this section up to date, but an authoritative list can be +displayed with `chroma --list`._ + +## Using the library + +Chroma, like Pygments, has the concepts of +[lexers](https://github.com/alecthomas/chroma/tree/master/lexers), +[formatters](https://github.com/alecthomas/chroma/tree/master/formatters) and +[styles](https://github.com/alecthomas/chroma/tree/master/styles). 
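To make the relationship between these three concepts concrete, here is a minimal, self-contained sketch that wires them together using only the registry helpers the README describes below (`lexers.Get`/`Fallback`, `styles.Get`/`Fallback`, `formatters.Get`/`Fallback`, `chroma.Coalesce`, `Tokenise`, `Format`). The choice of the `monokai` style and the registered `html` formatter mirrors the quick-start example and is only illustrative, not a recommendation from this changeset.

```go
package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/formatters"
	"github.com/alecthomas/chroma/lexers"
	"github.com/alecthomas/chroma/styles"
)

func main() {
	source := "package main\n\nfunc main() {}\n"

	// Look up a lexer by its Chroma syntax ID, falling back if unknown.
	lexer := lexers.Get("go")
	if lexer == nil {
		lexer = lexers.Fallback
	}
	// Coalesce runs of identical token types into single tokens.
	lexer = chroma.Coalesce(lexer)

	// Pick a style (theme) and a formatter, again with fallbacks.
	style := styles.Get("monokai")
	if style == nil {
		style = styles.Fallback
	}
	formatter := formatters.Get("html")
	if formatter == nil {
		formatter = formatters.Fallback
	}

	// Tokenise the source, then format the token iterator with the style.
	iterator, err := lexer.Tokenise(nil, source)
	if err != nil {
		log.Fatal(err)
	}
	if err := formatter.Format(os.Stdout, style, iterator); err != nil {
		log.Fatal(err)
	}
}
```

The sections that follow walk through each of these steps individually.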
+ +Lexers convert source text into a stream of tokens, styles specify how token +types are mapped to colours, and formatters convert tokens and styles into +formatted output. + +A package exists for each of these, containing a global `Registry` variable +with all of the registered implementations. There are also helper functions +for using the registry in each package, such as looking up lexers by name or +matching filenames, etc. + +In all cases, if a lexer, formatter or style can not be determined, `nil` will +be returned. In this situation you may want to default to the `Fallback` +value in each respective package, which provides sane defaults. + +### Quick start + +A convenience function exists that can be used to simply format some source +text, without any effort: + +```go +err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai") +``` + +### Identifying the language + +To highlight code, you'll first have to identify what language the code is +written in. There are three primary ways to do that: + +1. Detect the language from its filename. + + ```go + lexer := lexers.Match("foo.go") + ``` + +3. Explicitly specify the language by its Chroma syntax ID (a full list is available from `lexers.Names()`). + + ```go + lexer := lexers.Get("go") + ``` + +3. Detect the language from its content. + + ```go + lexer := lexers.Analyse("package main\n\nfunc main()\n{\n}\n") + ``` + +In all cases, `nil` will be returned if the language can not be identified. + +```go +if lexer == nil { + lexer = lexers.Fallback +} +``` + +At this point, it should be noted that some lexers can be extremely chatty. To +mitigate this, you can use the coalescing lexer to coalesce runs of identical +token types into a single token: + +```go +lexer = chroma.Coalesce(lexer) +``` + +### Formatting the output + +Once a language is identified you will need to pick a formatter and a style (theme). + +```go +style := styles.Get("swapoff") +if style == nil { + style = styles.Fallback +} +formatter := formatters.Get("html") +if formatter == nil { + formatter = formatters.Fallback +} +``` + +Then obtain an iterator over the tokens: + +```go +contents, err := ioutil.ReadAll(r) +iterator, err := lexer.Tokenise(nil, string(contents)) +``` + +And finally, format the tokens from the iterator: + +```go +err := formatter.Format(w, style, iterator) +``` + +### The HTML formatter + +By default the `html` registered formatter generates standalone HTML with +embedded CSS. More flexibility is available through the `formatters/html` package. + +Firstly, the output generated by the formatter can be customised with the +following constructor options: + +- `Standalone()` - generate standalone HTML with embedded CSS. +- `WithClasses()` - use classes rather than inlined style attributes. +- `ClassPrefix(prefix)` - prefix each generated CSS class. +- `TabWidth(width)` - Set the rendered tab width, in characters. +- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`). +- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`). +- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans. + +If `WithClasses()` is used, the corresponding CSS can be obtained from the formatter with: + +```go +formatter := html.New(html.WithClasses()) +err := formatter.WriteCSS(w, style) +``` + +## More detail + +### Lexers + +See the [Pygments documentation](http://pygments.org/docs/lexerdevelopment/) +for details on implementing lexers. 
Most concepts apply directly to Chroma, +but see existing lexer implementations for real examples. + +In many cases lexers can be automatically converted directly from Pygments by +using the included Python 3 script `pygments2chroma.py`. I use something like +the following: + +``` +python3 ~/Projects/chroma/_tools/pygments2chroma.py \ + pygments.lexers.jvm.KotlinLexer \ + > ~/Projects/chroma/lexers/kotlin.go \ + && gofmt -s -w ~/Projects/chroma/lexers/*.go +``` + +See notes in [pygments-lexers.go](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt) +for a list of lexers, and notes on some of the issues importing them. + +### Formatters + +Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour, and true-colour. + +A `noop` formatter is included that outputs the token text only, and a `tokens` +formatter outputs raw tokens. The latter is useful for debugging lexers. + +### Styles + +Chroma styles use the [same syntax](http://pygments.org/docs/styles/) as Pygments. + +All Pygments styles have been converted to Chroma using the `_tools/style.py` script. + +For a quick overview of the available styles and how they look, check out the [Chroma Style Gallery](https://xyproto.github.io/splash/docs/). + +## Command-line interface + +A command-line interface to Chroma is included. It can be installed with: + +``` +go get -u github.com/alecthomas/chroma/cmd/chroma +``` + +## What's missing compared to Pygments? + +- Quite a few lexers, for various reasons (pull-requests welcome): + - Pygments lexers for complex languages often include custom code to + handle certain aspects, such as Perl6's ability to nest code inside + regular expressions. These require time and effort to convert. + - I mostly only converted languages I had heard of, to reduce the porting cost. +- Some more esoteric features of Pygments are omitted for simplicity. +- Though the Chroma API supports content detection, very few languages support them. + I have plans to implement a statistical analyser at some point, but not enough time. diff --git a/vendor/github.com/alecthomas/chroma/coalesce.go b/vendor/github.com/alecthomas/chroma/coalesce.go new file mode 100644 index 00000000..5af0a7c6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/coalesce.go @@ -0,0 +1,35 @@ +package chroma + +// Coalesce is a Lexer interceptor that collapses runs of common types into a single token. +func Coalesce(lexer Lexer) Lexer { return &coalescer{lexer} } + +type coalescer struct{ Lexer } + +func (d *coalescer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { + var prev *Token + it, err := d.Lexer.Tokenise(options, text) + if err != nil { + return nil, err + } + return func() *Token { + for token := it(); token != nil; token = it() { + if len(token.Value) == 0 { + continue + } + if prev == nil { + prev = token + } else { + if prev.Type == token.Type && len(prev.Value) < 8192 { + prev.Value += token.Value + } else { + out := prev + prev = token + return out + } + } + } + out := prev + prev = nil + return out + }, nil +} diff --git a/vendor/github.com/alecthomas/chroma/colour.go b/vendor/github.com/alecthomas/chroma/colour.go new file mode 100644 index 00000000..b2417ff8 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/colour.go @@ -0,0 +1,155 @@ +package chroma + +import ( + "fmt" + "math" + "strconv" + "strings" +) + +// ANSI2RGB maps ANSI colour names, as supported by Chroma, to hex RGB values. 
+var ANSI2RGB = map[string]string{ + "#ansiblack": "000000", + "#ansidarkred": "7f0000", + "#ansidarkgreen": "007f00", + "#ansibrown": "7f7fe0", + "#ansidarkblue": "00007f", + "#ansipurple": "7f007f", + "#ansiteal": "007f7f", + "#ansilightgray": "e5e5e5", + // Normal + "#ansidarkgray": "555555", + "#ansired": "ff0000", + "#ansigreen": "00ff00", + "#ansiyellow": "ffff00", + "#ansiblue": "0000ff", + "#ansifuchsia": "ff00ff", + "#ansiturquoise": "00ffff", + "#ansiwhite": "ffffff", + + // Aliases without the "ansi" prefix, because...why? + "#black": "000000", + "#darkred": "7f0000", + "#darkgreen": "007f00", + "#brown": "7f7fe0", + "#darkblue": "00007f", + "#purple": "7f007f", + "#teal": "007f7f", + "#lightgray": "e5e5e5", + // Normal + "#darkgray": "555555", + "#red": "ff0000", + "#green": "00ff00", + "#yellow": "ffff00", + "#blue": "0000ff", + "#fuchsia": "ff00ff", + "#turquoise": "00ffff", + "#white": "ffffff", +} + +// Colour represents an RGB colour. +type Colour int32 + +// NewColour creates a Colour directly from RGB values. +func NewColour(r, g, b uint8) Colour { + return ParseColour(fmt.Sprintf("%02x%02x%02x", r, g, b)) +} + +// Distance between this colour and another. +// +// This uses the approach described here (https://www.compuphase.com/cmetric.htm). +// This is not as accurate as LAB, et. al. but is *vastly* simpler and sufficient for our needs. +func (c Colour) Distance(e2 Colour) float64 { + ar, ag, ab := int64(c.Red()), int64(c.Green()), int64(c.Blue()) + br, bg, bb := int64(e2.Red()), int64(e2.Green()), int64(e2.Blue()) + rmean := (ar + br) / 2 + r := ar - br + g := ag - bg + b := ab - bb + return math.Sqrt(float64((((512 + rmean) * r * r) >> 8) + 4*g*g + (((767 - rmean) * b * b) >> 8))) +} + +// Brighten returns a copy of this colour with its brightness adjusted. +// +// If factor is negative, the colour is darkened. +// +// Uses approach described here (http://www.pvladov.com/2012/09/make-color-lighter-or-darker.html). +func (c Colour) Brighten(factor float64) Colour { + r := float64(c.Red()) + g := float64(c.Green()) + b := float64(c.Blue()) + + if factor < 0 { + factor++ + r *= factor + g *= factor + b *= factor + } else { + r = (255-r)*factor + r + g = (255-g)*factor + g + b = (255-b)*factor + b + } + return NewColour(uint8(r), uint8(g), uint8(b)) +} + +// Brightness of the colour (roughly) in the range 0.0 to 1.0 +func (c Colour) Brightness() float64 { + return (float64(c.Red()) + float64(c.Green()) + float64(c.Blue())) / 255.0 / 3.0 +} + +// ParseColour in the forms #rgb, #rrggbb, #ansi, or #. +// Will return an "unset" colour if invalid. +func ParseColour(colour string) Colour { + colour = normaliseColour(colour) + n, err := strconv.ParseUint(colour, 16, 32) + if err != nil { + return 0 + } + return Colour(n + 1) +} + +// MustParseColour is like ParseColour except it panics if the colour is invalid. +// +// Will panic if colour is in an invalid format. +func MustParseColour(colour string) Colour { + parsed := ParseColour(colour) + if !parsed.IsSet() { + panic(fmt.Errorf("invalid colour %q", colour)) + } + return parsed +} + +func (c Colour) IsSet() bool { return c != 0 } + +func (c Colour) String() string { return fmt.Sprintf("#%06x", int(c-1)) } +func (c Colour) GoString() string { return fmt.Sprintf("Colour(0x%06x)", int(c-1)) } + +// Red component of colour. +func (c Colour) Red() uint8 { return uint8(((c - 1) >> 16) & 0xff) } + +// Green component of colour. +func (c Colour) Green() uint8 { return uint8(((c - 1) >> 8) & 0xff) } + +// Blue component of colour. 
+func (c Colour) Blue() uint8 { return uint8((c - 1) & 0xff) } + +// Colours is an orderable set of colours. +type Colours []Colour + +func (c Colours) Len() int { return len(c) } +func (c Colours) Swap(i, j int) { c[i], c[j] = c[j], c[i] } +func (c Colours) Less(i, j int) bool { return c[i] < c[j] } + +// Convert colours to #rrggbb. +func normaliseColour(colour string) string { + if ansi, ok := ANSI2RGB[colour]; ok { + return ansi + } + if strings.HasPrefix(colour, "#") { + colour = colour[1:] + if len(colour) == 3 { + return colour[0:1] + colour[0:1] + colour[1:2] + colour[1:2] + colour[2:3] + colour[2:3] + } + } + return colour +} diff --git a/vendor/github.com/alecthomas/chroma/delegate.go b/vendor/github.com/alecthomas/chroma/delegate.go new file mode 100644 index 00000000..06f55dbc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/delegate.go @@ -0,0 +1,137 @@ +package chroma + +import ( + "bytes" +) + +type delegatingLexer struct { + root Lexer + language Lexer +} + +// DelegatingLexer combines two lexers to handle the common case of a language embedded inside another, such as PHP +// inside HTML or PHP inside plain text. +// +// It takes two lexer as arguments: a root lexer and a language lexer. First everything is scanned using the language +// lexer, which must return "Other" for unrecognised tokens. Then all "Other" tokens are lexed using the root lexer. +// Finally, these two sets of tokens are merged. +// +// The lexers from the template lexer package use this base lexer. +func DelegatingLexer(root Lexer, language Lexer) Lexer { + return &delegatingLexer{ + root: root, + language: language, + } +} + +func (d *delegatingLexer) Config() *Config { + return d.language.Config() +} + +// An insertion is the character range where language tokens should be inserted. +type insertion struct { + start, end int + tokens []*Token +} + +func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { + tokens, err := Tokenise(Coalesce(d.language), options, text) + if err != nil { + return nil, err + } + // Compute insertions and gather "Other" tokens. + others := &bytes.Buffer{} + insertions := []*insertion{} + var insert *insertion + offset := 0 + var last *Token + for _, t := range tokens { + if t.Type == Other { + if last != nil && insert != nil && last.Type != Other { + insert.end = offset + } + others.WriteString(t.Value) + } else { + if last == nil || last.Type == Other { + insert = &insertion{start: offset} + insertions = append(insertions, insert) + } + insert.tokens = append(insert.tokens, t) + } + last = t + offset += len(t.Value) + } + + if len(insertions) == 0 { + return d.root.Tokenise(options, text) + } + + // Lex the other tokens. + rootTokens, err := Tokenise(Coalesce(d.root), options, others.String()) + if err != nil { + return nil, err + } + + // Interleave the two sets of tokens. + out := []*Token{} + offset = 0 // Offset into text. 
+ tokenIndex := 0 + nextToken := func() *Token { + if tokenIndex >= len(rootTokens) { + return nil + } + t := rootTokens[tokenIndex] + tokenIndex++ + return t + } + insertionIndex := 0 + nextInsertion := func() *insertion { + if insertionIndex >= len(insertions) { + return nil + } + i := insertions[insertionIndex] + insertionIndex++ + return i + } + t := nextToken() + i := nextInsertion() + for t != nil || i != nil { + // fmt.Printf("%d->%d:%q %d->%d:%q\n", offset, offset+len(t.Value), t.Value, i.start, i.end, Stringify(i.tokens...)) + if t == nil || (i != nil && i.start < offset+len(t.Value)) { + var l *Token + l, t = splitToken(t, i.start-offset) + if l != nil { + out = append(out, l) + offset += len(l.Value) + } + out = append(out, i.tokens...) + offset += i.end - i.start + if t == nil { + t = nextToken() + } + i = nextInsertion() + } else { + out = append(out, t) + offset += len(t.Value) + t = nextToken() + } + } + return Literator(out...), nil +} + +func splitToken(t *Token, offset int) (l *Token, r *Token) { + if t == nil { + return nil, nil + } + if offset == 0 { + return nil, t + } + if offset == len(t.Value) { + return t, nil + } + l = t.Clone() + r = t.Clone() + l.Value = l.Value[:offset] + r.Value = r.Value[offset:] + return +} diff --git a/vendor/github.com/alecthomas/chroma/doc.go b/vendor/github.com/alecthomas/chroma/doc.go new file mode 100644 index 00000000..4dde77c8 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/doc.go @@ -0,0 +1,7 @@ +// Package chroma takes source code and other structured text and converts it into syntax highlighted HTML, ANSI- +// coloured text, etc. +// +// Chroma is based heavily on Pygments, and includes translators for Pygments lexers and styles. +// +// For more information, go here: https://github.com/alecthomas/chroma +package chroma diff --git a/vendor/github.com/alecthomas/chroma/formatter.go b/vendor/github.com/alecthomas/chroma/formatter.go new file mode 100644 index 00000000..56670241 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/formatter.go @@ -0,0 +1,43 @@ +package chroma + +import ( + "io" +) + +// A Formatter for Chroma lexers. +type Formatter interface { + // Format returns a formatting function for tokens. + // + // If the iterator panics, the Formatter should recover. + Format(w io.Writer, style *Style, iterator Iterator) error +} + +// A FormatterFunc is a Formatter implemented as a function. +// +// Guards against iterator panics. +type FormatterFunc func(w io.Writer, style *Style, iterator Iterator) error + +func (f FormatterFunc) Format(w io.Writer, s *Style, it Iterator) (err error) { + defer func() { + if perr := recover(); perr != nil { + err = perr.(error) + } + }() + return f(w, s, it) +} + +type recoveringFormatter struct { + Formatter +} + +func (r recoveringFormatter) Format(w io.Writer, s *Style, it Iterator) (err error) { + defer func() { + if perr := recover(); perr != nil { + err = perr.(error) + } + }() + return r.Formatter.Format(w, s, it) +} + +// RecoveringFormatter wraps a formatter with panic recovery. 
+func RecoveringFormatter(formatter Formatter) Formatter { return recoveringFormatter{formatter} } diff --git a/vendor/github.com/alecthomas/chroma/formatters/api.go b/vendor/github.com/alecthomas/chroma/formatters/api.go new file mode 100644 index 00000000..d5fe28ad --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/formatters/api.go @@ -0,0 +1,55 @@ +package formatters + +import ( + "io" + "sort" + + "github.com/alecthomas/chroma" + "github.com/alecthomas/chroma/formatters/html" +) + +var ( + // NoOp formatter. + NoOp = Register("noop", chroma.FormatterFunc(func(w io.Writer, s *chroma.Style, iterator chroma.Iterator) error { + for t := iterator(); t != nil; t = iterator() { + if _, err := io.WriteString(w, t.Value); err != nil { + return err + } + } + return nil + })) + // Default HTML formatter outputs self-contained HTML. + htmlFull = Register("html", html.New(html.Standalone(), html.WithClasses())) +) + +// Fallback formatter. +var Fallback = NoOp + +// Registry of Formatters. +var Registry = map[string]chroma.Formatter{} + +// Names of registered formatters. +func Names() []string { + out := []string{} + for name := range Registry { + out = append(out, name) + } + sort.Strings(out) + return out +} + +// Get formatter by name. +// +// If the given formatter is not found, the Fallback formatter will be returned. +func Get(name string) chroma.Formatter { + if f, ok := Registry[name]; ok { + return f + } + return Fallback +} + +// Register a named formatter. +func Register(name string, formatter chroma.Formatter) chroma.Formatter { + Registry[name] = formatter + return formatter +} diff --git a/vendor/github.com/alecthomas/chroma/formatters/html/html.go b/vendor/github.com/alecthomas/chroma/formatters/html/html.go new file mode 100644 index 00000000..806721cc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/formatters/html/html.go @@ -0,0 +1,402 @@ +package html + +import ( + "fmt" + "html" + "io" + "sort" + "strings" + + "github.com/alecthomas/chroma" +) + +// Option sets an option of the HTML formatter. +type Option func(f *Formatter) + +// Standalone configures the HTML formatter for generating a standalone HTML document. +func Standalone() Option { return func(f *Formatter) { f.standalone = true } } + +// ClassPrefix sets the CSS class prefix. +func ClassPrefix(prefix string) Option { return func(f *Formatter) { f.prefix = prefix } } + +// WithClasses emits HTML using CSS classes, rather than inline styles. +func WithClasses() Option { return func(f *Formatter) { f.Classes = true } } + +// TabWidth sets the number of characters for a tab. Defaults to 8. +func TabWidth(width int) Option { return func(f *Formatter) { f.tabWidth = width } } + +// WithLineNumbers formats output with line numbers. +func WithLineNumbers() Option { + return func(f *Formatter) { + f.lineNumbers = true + } +} + +// LineNumbersInTable will, when combined with WithLineNumbers, separate the line numbers +// and code in table td's, which make them copy-and-paste friendly. +func LineNumbersInTable() Option { + return func(f *Formatter) { + f.lineNumbersInTable = true + } +} + +// HighlightLines higlights the given line ranges with the Highlight style. +// +// A range is the beginning and ending of a range as 1-based line numbers, inclusive. +func HighlightLines(ranges [][2]int) Option { + return func(f *Formatter) { + f.highlightRanges = ranges + sort.Sort(f.highlightRanges) + } +} + +// BaseLineNumber sets the initial number to start line numbering at. Defaults to 1. 
+func BaseLineNumber(n int) Option { + return func(f *Formatter) { + f.baseLineNumber = n + } +} + +// New HTML formatter. +func New(options ...Option) *Formatter { + f := &Formatter{ + baseLineNumber: 1, + } + for _, option := range options { + option(f) + } + return f +} + +// Formatter that generates HTML. +type Formatter struct { + standalone bool + prefix string + Classes bool // Exported field to detect when classes are being used + tabWidth int + lineNumbers bool + lineNumbersInTable bool + highlightRanges highlightRanges + baseLineNumber int +} + +type highlightRanges [][2]int + +func (h highlightRanges) Len() int { return len(h) } +func (h highlightRanges) Swap(i, j int) { h[i], h[j] = h[j], h[i] } +func (h highlightRanges) Less(i, j int) bool { return h[i][0] < h[j][0] } + +func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Iterator) (err error) { + defer func() { + if perr := recover(); perr != nil { + err = perr.(error) + } + }() + return f.writeHTML(w, style, iterator.Tokens()) +} + +func brightenOrDarken(colour chroma.Colour, factor float64) chroma.Colour { + if colour.Brightness() < 0.5 { + return colour.Brighten(factor) + } + return colour.Brighten(-factor) +} + +// Ensure that style entries exist for highlighting, etc. +func (f *Formatter) restyle(style *chroma.Style) (*chroma.Style, error) { + builder := style.Builder() + bg := builder.Get(chroma.Background) + // If we don't have a line highlight colour, make one that is 10% brighter/darker than the background. + if !style.Has(chroma.LineHighlight) { + highlight := chroma.StyleEntry{Background: bg.Background} + highlight.Background = brightenOrDarken(highlight.Background, 0.1) + builder.AddEntry(chroma.LineHighlight, highlight) + } + // If we don't have line numbers, use the text colour but 20% brighter/darker + if !style.Has(chroma.LineNumbers) { + text := chroma.StyleEntry{Colour: bg.Colour} + text.Colour = brightenOrDarken(text.Colour, 0.5) + builder.AddEntry(chroma.LineNumbers, text) + builder.AddEntry(chroma.LineNumbersTable, text) + } + return builder.Build() +} + +// We deliberately don't use html/template here because it is two orders of magnitude slower (benchmarked). +// +// OTOH we need to be super careful about correct escaping... 
+func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []*chroma.Token) (err error) { // nolint: gocyclo + style, err = f.restyle(style) + if err != nil { + return err + } + css := f.styleToCSS(style) + if !f.Classes { + for t, style := range css { + css[t] = compressStyle(style) + } + } + if f.standalone { + fmt.Fprint(w, "\n") + if f.Classes { + fmt.Fprint(w, "") + } + fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.Background)) + } + + wrapInTable := f.lineNumbers && f.lineNumbersInTable + + lines := splitTokensIntoLines(tokens) + lineDigits := len(fmt.Sprintf("%d", len(lines))) + highlightIndex := 0 + + if wrapInTable { + // List line numbers in its own + fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.Background)) + fmt.Fprintf(w, "", f.styleAttr(css, chroma.LineTable)) + fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.LineTableTD)) + fmt.Fprintf(w, "", f.styleAttr(css, chroma.Background)) + for index := range lines { + line := f.baseLineNumber + index + highlight, next := f.shouldHighlight(highlightIndex, line) + if next { + highlightIndex++ + } + if highlight { + fmt.Fprintf(w, "", f.styleAttr(css, chroma.LineHighlight)) + } + + fmt.Fprintf(w, "%*d\n", f.styleAttr(css, chroma.LineNumbersTable), lineDigits, line) + + if highlight { + fmt.Fprintf(w, "") + } + } + fmt.Fprint(w, "\n") + fmt.Fprintf(w, "\n", f.styleAttr(css, chroma.LineTableTD)) + } + + fmt.Fprintf(w, "", f.styleAttr(css, chroma.Background)) + highlightIndex = 0 + for index, tokens := range lines { + // 1-based line number. + line := f.baseLineNumber + index + highlight, next := f.shouldHighlight(highlightIndex, line) + if next { + highlightIndex++ + } + if highlight { + fmt.Fprintf(w, "", f.styleAttr(css, chroma.LineHighlight)) + } + + if f.lineNumbers && !wrapInTable { + fmt.Fprintf(w, "%*d", f.styleAttr(css, chroma.LineNumbers), lineDigits, line) + } + + for _, token := range tokens { + html := html.EscapeString(token.String()) + attr := f.styleAttr(css, token.Type) + if attr != "" { + html = fmt.Sprintf("%s", attr, html) + } + fmt.Fprint(w, html) + } + if highlight { + fmt.Fprintf(w, "") + } + } + + fmt.Fprint(w, "") + + if wrapInTable { + fmt.Fprint(w, "\n") + fmt.Fprint(w, "\n") + } + + if f.standalone { + fmt.Fprint(w, "\n\n") + fmt.Fprint(w, "\n") + } + + return nil +} + +func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) { + next := false + for highlightIndex < len(f.highlightRanges) && line > f.highlightRanges[highlightIndex][1] { + highlightIndex++ + next = true + } + if highlightIndex < len(f.highlightRanges) { + hrange := f.highlightRanges[highlightIndex] + if line >= hrange[0] && line <= hrange[1] { + return true, next + } + } + return false, next +} + +func (f *Formatter) class(t chroma.TokenType) string { + for t != 0 { + if cls, ok := chroma.StandardTypes[t]; ok { + if cls != "" { + return f.prefix + cls + } + return "" + } + t = t.Parent() + } + if cls := chroma.StandardTypes[t]; cls != "" { + return f.prefix + cls + } + return "" +} + +func (f *Formatter) styleAttr(styles map[chroma.TokenType]string, tt chroma.TokenType) string { + if f.Classes { + cls := f.class(tt) + if cls == "" { + return "" + } + return string(fmt.Sprintf(` class="%s"`, cls)) + } + if _, ok := styles[tt]; !ok { + tt = tt.SubCategory() + if _, ok := styles[tt]; !ok { + tt = tt.Category() + if _, ok := styles[tt]; !ok { + return "" + } + } + } + return fmt.Sprintf(` style="%s"`, styles[tt]) +} + +func (f *Formatter) tabWidthStyle() string { + if f.tabWidth != 0 && f.tabWidth != 8 { + return 
fmt.Sprintf("; -moz-tab-size: %[1]d; -o-tab-size: %[1]d; tab-size: %[1]d", f.tabWidth) + } + return "" +} + +// WriteCSS writes CSS style definitions (without any surrounding HTML). +func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error { + css := f.styleToCSS(style) + // Special-case background as it is mapped to the outer ".chroma" class. + if _, err := fmt.Fprintf(w, "/* %s */ .%schroma { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil { + return err + } + // Special-case code column of table to expand width. + if f.lineNumbers && f.lineNumbersInTable { + if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s:last-child { width: 100%%; }", + chroma.LineTableTD, f.prefix, f.class(chroma.LineTableTD)); err != nil { + return err + } + } + tts := []int{} + for tt := range css { + tts = append(tts, int(tt)) + } + sort.Ints(tts) + for _, ti := range tts { + tt := chroma.TokenType(ti) + if tt == chroma.Background { + continue + } + styles := css[tt] + if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, f.class(tt), styles); err != nil { + return err + } + } + return nil +} + +func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string { + classes := map[chroma.TokenType]string{} + bg := style.Get(chroma.Background) + // Convert the style. + for t := range chroma.StandardTypes { + entry := style.Get(t) + if t != chroma.Background { + entry = entry.Sub(bg) + } + if entry.IsZero() { + continue + } + classes[t] = StyleEntryToCSS(entry) + } + classes[chroma.Background] += f.tabWidthStyle() + lineNumbersStyle := "margin-right: 0.4em; padding: 0 0.4em 0 0.4em;" + // All rules begin with default rules followed by user provided rules + classes[chroma.LineNumbers] = lineNumbersStyle + classes[chroma.LineNumbers] + classes[chroma.LineNumbersTable] = lineNumbersStyle + classes[chroma.LineNumbersTable] + classes[chroma.LineHighlight] = "display: block; width: 100%;" + classes[chroma.LineHighlight] + classes[chroma.LineTable] = "border-spacing: 0; padding: 0; margin: 0; border: 0; width: auto; overflow: auto; display: block;" + classes[chroma.LineTable] + classes[chroma.LineTableTD] = "vertical-align: top; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTableTD] + return classes +} + +// StyleEntryToCSS converts a chroma.StyleEntry to CSS attributes. +func StyleEntryToCSS(e chroma.StyleEntry) string { + styles := []string{} + if e.Colour.IsSet() { + styles = append(styles, "color: "+e.Colour.String()) + } + if e.Background.IsSet() { + styles = append(styles, "background-color: "+e.Background.String()) + } + if e.Bold == chroma.Yes { + styles = append(styles, "font-weight: bold") + } + if e.Italic == chroma.Yes { + styles = append(styles, "font-style: italic") + } + if e.Underline == chroma.Yes { + styles = append(styles, "text-decoration: underline") + } + return strings.Join(styles, "; ") +} + +// Compress CSS attributes - remove spaces, transform 6-digit colours to 3. 
+func compressStyle(s string) string { + parts := strings.Split(s, ";") + out := []string{} + for _, p := range parts { + p = strings.Join(strings.Fields(p), " ") + p = strings.Replace(p, ": ", ":", 1) + if strings.Contains(p, "#") { + c := p[len(p)-6:] + if c[0] == c[1] && c[2] == c[3] && c[4] == c[5] { + p = p[:len(p)-6] + c[0:1] + c[2:3] + c[4:5] + } + } + out = append(out, p) + } + return strings.Join(out, ";") +} + +func splitTokensIntoLines(tokens []*chroma.Token) (out [][]*chroma.Token) { + line := []*chroma.Token{} + for _, token := range tokens { + for strings.Contains(token.Value, "\n") { + parts := strings.SplitAfterN(token.Value, "\n", 2) + // Token becomes the tail. + token.Value = parts[1] + + // Append the head to the line and flush the line. + clone := token.Clone() + clone.Value = parts[0] + line = append(line, clone) + out = append(out, line) + line = nil + } + line = append(line, token) + } + if len(line) > 0 { + out = append(out, line) + } + return +} diff --git a/vendor/github.com/alecthomas/chroma/formatters/json.go b/vendor/github.com/alecthomas/chroma/formatters/json.go new file mode 100644 index 00000000..4ba1dd46 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/formatters/json.go @@ -0,0 +1,31 @@ +package formatters + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/alecthomas/chroma" +) + +// JSON formatter outputs the raw token structures as JSON. +var JSON = Register("json", chroma.FormatterFunc(func(w io.Writer, s *chroma.Style, it chroma.Iterator) error { + fmt.Fprintln(w, "[") + i := 0 + for t := it(); t != nil; t = it() { + if i > 0 { + fmt.Fprintln(w, ",") + } + i++ + bytes, err := json.Marshal(t) + if err != nil { + return err + } + if _, err := fmt.Fprint(w, " "+string(bytes)); err != nil { + return err + } + } + fmt.Fprintln(w) + fmt.Fprintln(w, "]") + return nil +})) diff --git a/vendor/github.com/alecthomas/chroma/formatters/tokens.go b/vendor/github.com/alecthomas/chroma/formatters/tokens.go new file mode 100644 index 00000000..614987e2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/formatters/tokens.go @@ -0,0 +1,18 @@ +package formatters + +import ( + "fmt" + "io" + + "github.com/alecthomas/chroma" +) + +// Tokens formatter outputs the raw token structures. 
+var Tokens = Register("tokens", chroma.FormatterFunc(func(w io.Writer, s *chroma.Style, it chroma.Iterator) error { + for t := it(); t != nil; t = it() { + if _, err := fmt.Fprintln(w, t.GoString()); err != nil { + return err + } + } + return nil +})) diff --git a/vendor/github.com/alecthomas/chroma/formatters/tty_indexed.go b/vendor/github.com/alecthomas/chroma/formatters/tty_indexed.go new file mode 100644 index 00000000..a068cc59 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/formatters/tty_indexed.go @@ -0,0 +1,250 @@ +package formatters + +import ( + "fmt" + "io" + "math" + + "github.com/alecthomas/chroma" +) + +type ttyTable struct { + foreground map[chroma.Colour]string + background map[chroma.Colour]string +} + +var c = chroma.MustParseColour + +var ttyTables = map[int]*ttyTable{ + 8: { + foreground: map[chroma.Colour]string{ + c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m", + c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m", + c("#555555"): "\033[90m", c("#ff0000"): "\033[91m", c("#00ff00"): "\033[92m", c("#ffff00"): "\033[93m", + c("#0000ff"): "\033[94m", c("#ff00ff"): "\033[95m", c("#00ffff"): "\033[96m", c("#ffffff"): "\033[97m", + }, + background: map[chroma.Colour]string{ + c("#000000"): "\033[40m", c("#7f0000"): "\033[41m", c("#007f00"): "\033[42m", c("#7f7fe0"): "\033[43m", + c("#00007f"): "\033[44m", c("#7f007f"): "\033[45m", c("#007f7f"): "\033[46m", c("#e5e5e5"): "\033[47m", + c("#555555"): "\033[100m", c("#ff0000"): "\033[101m", c("#00ff00"): "\033[102m", c("#ffff00"): "\033[103m", + c("#0000ff"): "\033[104m", c("#ff00ff"): "\033[105m", c("#00ffff"): "\033[106m", c("#ffffff"): "\033[107m", + }, + }, + 256: { + foreground: map[chroma.Colour]string{ + c("#000000"): "\033[38;5;0m", c("#800000"): "\033[38;5;1m", c("#008000"): "\033[38;5;2m", c("#808000"): "\033[38;5;3m", + c("#000080"): "\033[38;5;4m", c("#800080"): "\033[38;5;5m", c("#008080"): "\033[38;5;6m", c("#c0c0c0"): "\033[38;5;7m", + c("#808080"): "\033[38;5;8m", c("#ff0000"): "\033[38;5;9m", c("#00ff00"): "\033[38;5;10m", c("#ffff00"): "\033[38;5;11m", + c("#0000ff"): "\033[38;5;12m", c("#ff00ff"): "\033[38;5;13m", c("#00ffff"): "\033[38;5;14m", c("#ffffff"): "\033[38;5;15m", + c("#000000"): "\033[38;5;16m", c("#00005f"): "\033[38;5;17m", c("#000087"): "\033[38;5;18m", c("#0000af"): "\033[38;5;19m", + c("#0000d7"): "\033[38;5;20m", c("#0000ff"): "\033[38;5;21m", c("#005f00"): "\033[38;5;22m", c("#005f5f"): "\033[38;5;23m", + c("#005f87"): "\033[38;5;24m", c("#005faf"): "\033[38;5;25m", c("#005fd7"): "\033[38;5;26m", c("#005fff"): "\033[38;5;27m", + c("#008700"): "\033[38;5;28m", c("#00875f"): "\033[38;5;29m", c("#008787"): "\033[38;5;30m", c("#0087af"): "\033[38;5;31m", + c("#0087d7"): "\033[38;5;32m", c("#0087ff"): "\033[38;5;33m", c("#00af00"): "\033[38;5;34m", c("#00af5f"): "\033[38;5;35m", + c("#00af87"): "\033[38;5;36m", c("#00afaf"): "\033[38;5;37m", c("#00afd7"): "\033[38;5;38m", c("#00afff"): "\033[38;5;39m", + c("#00d700"): "\033[38;5;40m", c("#00d75f"): "\033[38;5;41m", c("#00d787"): "\033[38;5;42m", c("#00d7af"): "\033[38;5;43m", + c("#00d7d7"): "\033[38;5;44m", c("#00d7ff"): "\033[38;5;45m", c("#00ff00"): "\033[38;5;46m", c("#00ff5f"): "\033[38;5;47m", + c("#00ff87"): "\033[38;5;48m", c("#00ffaf"): "\033[38;5;49m", c("#00ffd7"): "\033[38;5;50m", c("#00ffff"): "\033[38;5;51m", + c("#5f0000"): "\033[38;5;52m", c("#5f005f"): "\033[38;5;53m", c("#5f0087"): "\033[38;5;54m", c("#5f00af"): 
"\033[38;5;55m", + c("#5f00d7"): "\033[38;5;56m", c("#5f00ff"): "\033[38;5;57m", c("#5f5f00"): "\033[38;5;58m", c("#5f5f5f"): "\033[38;5;59m", + c("#5f5f87"): "\033[38;5;60m", c("#5f5faf"): "\033[38;5;61m", c("#5f5fd7"): "\033[38;5;62m", c("#5f5fff"): "\033[38;5;63m", + c("#5f8700"): "\033[38;5;64m", c("#5f875f"): "\033[38;5;65m", c("#5f8787"): "\033[38;5;66m", c("#5f87af"): "\033[38;5;67m", + c("#5f87d7"): "\033[38;5;68m", c("#5f87ff"): "\033[38;5;69m", c("#5faf00"): "\033[38;5;70m", c("#5faf5f"): "\033[38;5;71m", + c("#5faf87"): "\033[38;5;72m", c("#5fafaf"): "\033[38;5;73m", c("#5fafd7"): "\033[38;5;74m", c("#5fafff"): "\033[38;5;75m", + c("#5fd700"): "\033[38;5;76m", c("#5fd75f"): "\033[38;5;77m", c("#5fd787"): "\033[38;5;78m", c("#5fd7af"): "\033[38;5;79m", + c("#5fd7d7"): "\033[38;5;80m", c("#5fd7ff"): "\033[38;5;81m", c("#5fff00"): "\033[38;5;82m", c("#5fff5f"): "\033[38;5;83m", + c("#5fff87"): "\033[38;5;84m", c("#5fffaf"): "\033[38;5;85m", c("#5fffd7"): "\033[38;5;86m", c("#5fffff"): "\033[38;5;87m", + c("#870000"): "\033[38;5;88m", c("#87005f"): "\033[38;5;89m", c("#870087"): "\033[38;5;90m", c("#8700af"): "\033[38;5;91m", + c("#8700d7"): "\033[38;5;92m", c("#8700ff"): "\033[38;5;93m", c("#875f00"): "\033[38;5;94m", c("#875f5f"): "\033[38;5;95m", + c("#875f87"): "\033[38;5;96m", c("#875faf"): "\033[38;5;97m", c("#875fd7"): "\033[38;5;98m", c("#875fff"): "\033[38;5;99m", + c("#878700"): "\033[38;5;100m", c("#87875f"): "\033[38;5;101m", c("#878787"): "\033[38;5;102m", c("#8787af"): "\033[38;5;103m", + c("#8787d7"): "\033[38;5;104m", c("#8787ff"): "\033[38;5;105m", c("#87af00"): "\033[38;5;106m", c("#87af5f"): "\033[38;5;107m", + c("#87af87"): "\033[38;5;108m", c("#87afaf"): "\033[38;5;109m", c("#87afd7"): "\033[38;5;110m", c("#87afff"): "\033[38;5;111m", + c("#87d700"): "\033[38;5;112m", c("#87d75f"): "\033[38;5;113m", c("#87d787"): "\033[38;5;114m", c("#87d7af"): "\033[38;5;115m", + c("#87d7d7"): "\033[38;5;116m", c("#87d7ff"): "\033[38;5;117m", c("#87ff00"): "\033[38;5;118m", c("#87ff5f"): "\033[38;5;119m", + c("#87ff87"): "\033[38;5;120m", c("#87ffaf"): "\033[38;5;121m", c("#87ffd7"): "\033[38;5;122m", c("#87ffff"): "\033[38;5;123m", + c("#af0000"): "\033[38;5;124m", c("#af005f"): "\033[38;5;125m", c("#af0087"): "\033[38;5;126m", c("#af00af"): "\033[38;5;127m", + c("#af00d7"): "\033[38;5;128m", c("#af00ff"): "\033[38;5;129m", c("#af5f00"): "\033[38;5;130m", c("#af5f5f"): "\033[38;5;131m", + c("#af5f87"): "\033[38;5;132m", c("#af5faf"): "\033[38;5;133m", c("#af5fd7"): "\033[38;5;134m", c("#af5fff"): "\033[38;5;135m", + c("#af8700"): "\033[38;5;136m", c("#af875f"): "\033[38;5;137m", c("#af8787"): "\033[38;5;138m", c("#af87af"): "\033[38;5;139m", + c("#af87d7"): "\033[38;5;140m", c("#af87ff"): "\033[38;5;141m", c("#afaf00"): "\033[38;5;142m", c("#afaf5f"): "\033[38;5;143m", + c("#afaf87"): "\033[38;5;144m", c("#afafaf"): "\033[38;5;145m", c("#afafd7"): "\033[38;5;146m", c("#afafff"): "\033[38;5;147m", + c("#afd700"): "\033[38;5;148m", c("#afd75f"): "\033[38;5;149m", c("#afd787"): "\033[38;5;150m", c("#afd7af"): "\033[38;5;151m", + c("#afd7d7"): "\033[38;5;152m", c("#afd7ff"): "\033[38;5;153m", c("#afff00"): "\033[38;5;154m", c("#afff5f"): "\033[38;5;155m", + c("#afff87"): "\033[38;5;156m", c("#afffaf"): "\033[38;5;157m", c("#afffd7"): "\033[38;5;158m", c("#afffff"): "\033[38;5;159m", + c("#d70000"): "\033[38;5;160m", c("#d7005f"): "\033[38;5;161m", c("#d70087"): "\033[38;5;162m", c("#d700af"): "\033[38;5;163m", + c("#d700d7"): "\033[38;5;164m", c("#d700ff"): "\033[38;5;165m", 
c("#d75f00"): "\033[38;5;166m", c("#d75f5f"): "\033[38;5;167m", + c("#d75f87"): "\033[38;5;168m", c("#d75faf"): "\033[38;5;169m", c("#d75fd7"): "\033[38;5;170m", c("#d75fff"): "\033[38;5;171m", + c("#d78700"): "\033[38;5;172m", c("#d7875f"): "\033[38;5;173m", c("#d78787"): "\033[38;5;174m", c("#d787af"): "\033[38;5;175m", + c("#d787d7"): "\033[38;5;176m", c("#d787ff"): "\033[38;5;177m", c("#d7af00"): "\033[38;5;178m", c("#d7af5f"): "\033[38;5;179m", + c("#d7af87"): "\033[38;5;180m", c("#d7afaf"): "\033[38;5;181m", c("#d7afd7"): "\033[38;5;182m", c("#d7afff"): "\033[38;5;183m", + c("#d7d700"): "\033[38;5;184m", c("#d7d75f"): "\033[38;5;185m", c("#d7d787"): "\033[38;5;186m", c("#d7d7af"): "\033[38;5;187m", + c("#d7d7d7"): "\033[38;5;188m", c("#d7d7ff"): "\033[38;5;189m", c("#d7ff00"): "\033[38;5;190m", c("#d7ff5f"): "\033[38;5;191m", + c("#d7ff87"): "\033[38;5;192m", c("#d7ffaf"): "\033[38;5;193m", c("#d7ffd7"): "\033[38;5;194m", c("#d7ffff"): "\033[38;5;195m", + c("#ff0000"): "\033[38;5;196m", c("#ff005f"): "\033[38;5;197m", c("#ff0087"): "\033[38;5;198m", c("#ff00af"): "\033[38;5;199m", + c("#ff00d7"): "\033[38;5;200m", c("#ff00ff"): "\033[38;5;201m", c("#ff5f00"): "\033[38;5;202m", c("#ff5f5f"): "\033[38;5;203m", + c("#ff5f87"): "\033[38;5;204m", c("#ff5faf"): "\033[38;5;205m", c("#ff5fd7"): "\033[38;5;206m", c("#ff5fff"): "\033[38;5;207m", + c("#ff8700"): "\033[38;5;208m", c("#ff875f"): "\033[38;5;209m", c("#ff8787"): "\033[38;5;210m", c("#ff87af"): "\033[38;5;211m", + c("#ff87d7"): "\033[38;5;212m", c("#ff87ff"): "\033[38;5;213m", c("#ffaf00"): "\033[38;5;214m", c("#ffaf5f"): "\033[38;5;215m", + c("#ffaf87"): "\033[38;5;216m", c("#ffafaf"): "\033[38;5;217m", c("#ffafd7"): "\033[38;5;218m", c("#ffafff"): "\033[38;5;219m", + c("#ffd700"): "\033[38;5;220m", c("#ffd75f"): "\033[38;5;221m", c("#ffd787"): "\033[38;5;222m", c("#ffd7af"): "\033[38;5;223m", + c("#ffd7d7"): "\033[38;5;224m", c("#ffd7ff"): "\033[38;5;225m", c("#ffff00"): "\033[38;5;226m", c("#ffff5f"): "\033[38;5;227m", + c("#ffff87"): "\033[38;5;228m", c("#ffffaf"): "\033[38;5;229m", c("#ffffd7"): "\033[38;5;230m", c("#ffffff"): "\033[38;5;231m", + c("#080808"): "\033[38;5;232m", c("#121212"): "\033[38;5;233m", c("#1c1c1c"): "\033[38;5;234m", c("#262626"): "\033[38;5;235m", + c("#303030"): "\033[38;5;236m", c("#3a3a3a"): "\033[38;5;237m", c("#444444"): "\033[38;5;238m", c("#4e4e4e"): "\033[38;5;239m", + c("#585858"): "\033[38;5;240m", c("#626262"): "\033[38;5;241m", c("#6c6c6c"): "\033[38;5;242m", c("#767676"): "\033[38;5;243m", + c("#808080"): "\033[38;5;244m", c("#8a8a8a"): "\033[38;5;245m", c("#949494"): "\033[38;5;246m", c("#9e9e9e"): "\033[38;5;247m", + c("#a8a8a8"): "\033[38;5;248m", c("#b2b2b2"): "\033[38;5;249m", c("#bcbcbc"): "\033[38;5;250m", c("#c6c6c6"): "\033[38;5;251m", + c("#d0d0d0"): "\033[38;5;252m", c("#dadada"): "\033[38;5;253m", c("#e4e4e4"): "\033[38;5;254m", c("#eeeeee"): "\033[38;5;255m", + }, + background: map[chroma.Colour]string{ + c("#000000"): "\033[48;5;0m", c("#800000"): "\033[48;5;1m", c("#008000"): "\033[48;5;2m", c("#808000"): "\033[48;5;3m", + c("#000080"): "\033[48;5;4m", c("#800080"): "\033[48;5;5m", c("#008080"): "\033[48;5;6m", c("#c0c0c0"): "\033[48;5;7m", + c("#808080"): "\033[48;5;8m", c("#ff0000"): "\033[48;5;9m", c("#00ff00"): "\033[48;5;10m", c("#ffff00"): "\033[48;5;11m", + c("#0000ff"): "\033[48;5;12m", c("#ff00ff"): "\033[48;5;13m", c("#00ffff"): "\033[48;5;14m", c("#ffffff"): "\033[48;5;15m", + c("#000000"): "\033[48;5;16m", c("#00005f"): "\033[48;5;17m", c("#000087"): 
"\033[48;5;18m", c("#0000af"): "\033[48;5;19m", + c("#0000d7"): "\033[48;5;20m", c("#0000ff"): "\033[48;5;21m", c("#005f00"): "\033[48;5;22m", c("#005f5f"): "\033[48;5;23m", + c("#005f87"): "\033[48;5;24m", c("#005faf"): "\033[48;5;25m", c("#005fd7"): "\033[48;5;26m", c("#005fff"): "\033[48;5;27m", + c("#008700"): "\033[48;5;28m", c("#00875f"): "\033[48;5;29m", c("#008787"): "\033[48;5;30m", c("#0087af"): "\033[48;5;31m", + c("#0087d7"): "\033[48;5;32m", c("#0087ff"): "\033[48;5;33m", c("#00af00"): "\033[48;5;34m", c("#00af5f"): "\033[48;5;35m", + c("#00af87"): "\033[48;5;36m", c("#00afaf"): "\033[48;5;37m", c("#00afd7"): "\033[48;5;38m", c("#00afff"): "\033[48;5;39m", + c("#00d700"): "\033[48;5;40m", c("#00d75f"): "\033[48;5;41m", c("#00d787"): "\033[48;5;42m", c("#00d7af"): "\033[48;5;43m", + c("#00d7d7"): "\033[48;5;44m", c("#00d7ff"): "\033[48;5;45m", c("#00ff00"): "\033[48;5;46m", c("#00ff5f"): "\033[48;5;47m", + c("#00ff87"): "\033[48;5;48m", c("#00ffaf"): "\033[48;5;49m", c("#00ffd7"): "\033[48;5;50m", c("#00ffff"): "\033[48;5;51m", + c("#5f0000"): "\033[48;5;52m", c("#5f005f"): "\033[48;5;53m", c("#5f0087"): "\033[48;5;54m", c("#5f00af"): "\033[48;5;55m", + c("#5f00d7"): "\033[48;5;56m", c("#5f00ff"): "\033[48;5;57m", c("#5f5f00"): "\033[48;5;58m", c("#5f5f5f"): "\033[48;5;59m", + c("#5f5f87"): "\033[48;5;60m", c("#5f5faf"): "\033[48;5;61m", c("#5f5fd7"): "\033[48;5;62m", c("#5f5fff"): "\033[48;5;63m", + c("#5f8700"): "\033[48;5;64m", c("#5f875f"): "\033[48;5;65m", c("#5f8787"): "\033[48;5;66m", c("#5f87af"): "\033[48;5;67m", + c("#5f87d7"): "\033[48;5;68m", c("#5f87ff"): "\033[48;5;69m", c("#5faf00"): "\033[48;5;70m", c("#5faf5f"): "\033[48;5;71m", + c("#5faf87"): "\033[48;5;72m", c("#5fafaf"): "\033[48;5;73m", c("#5fafd7"): "\033[48;5;74m", c("#5fafff"): "\033[48;5;75m", + c("#5fd700"): "\033[48;5;76m", c("#5fd75f"): "\033[48;5;77m", c("#5fd787"): "\033[48;5;78m", c("#5fd7af"): "\033[48;5;79m", + c("#5fd7d7"): "\033[48;5;80m", c("#5fd7ff"): "\033[48;5;81m", c("#5fff00"): "\033[48;5;82m", c("#5fff5f"): "\033[48;5;83m", + c("#5fff87"): "\033[48;5;84m", c("#5fffaf"): "\033[48;5;85m", c("#5fffd7"): "\033[48;5;86m", c("#5fffff"): "\033[48;5;87m", + c("#870000"): "\033[48;5;88m", c("#87005f"): "\033[48;5;89m", c("#870087"): "\033[48;5;90m", c("#8700af"): "\033[48;5;91m", + c("#8700d7"): "\033[48;5;92m", c("#8700ff"): "\033[48;5;93m", c("#875f00"): "\033[48;5;94m", c("#875f5f"): "\033[48;5;95m", + c("#875f87"): "\033[48;5;96m", c("#875faf"): "\033[48;5;97m", c("#875fd7"): "\033[48;5;98m", c("#875fff"): "\033[48;5;99m", + c("#878700"): "\033[48;5;100m", c("#87875f"): "\033[48;5;101m", c("#878787"): "\033[48;5;102m", c("#8787af"): "\033[48;5;103m", + c("#8787d7"): "\033[48;5;104m", c("#8787ff"): "\033[48;5;105m", c("#87af00"): "\033[48;5;106m", c("#87af5f"): "\033[48;5;107m", + c("#87af87"): "\033[48;5;108m", c("#87afaf"): "\033[48;5;109m", c("#87afd7"): "\033[48;5;110m", c("#87afff"): "\033[48;5;111m", + c("#87d700"): "\033[48;5;112m", c("#87d75f"): "\033[48;5;113m", c("#87d787"): "\033[48;5;114m", c("#87d7af"): "\033[48;5;115m", + c("#87d7d7"): "\033[48;5;116m", c("#87d7ff"): "\033[48;5;117m", c("#87ff00"): "\033[48;5;118m", c("#87ff5f"): "\033[48;5;119m", + c("#87ff87"): "\033[48;5;120m", c("#87ffaf"): "\033[48;5;121m", c("#87ffd7"): "\033[48;5;122m", c("#87ffff"): "\033[48;5;123m", + c("#af0000"): "\033[48;5;124m", c("#af005f"): "\033[48;5;125m", c("#af0087"): "\033[48;5;126m", c("#af00af"): "\033[48;5;127m", + c("#af00d7"): "\033[48;5;128m", c("#af00ff"): "\033[48;5;129m", 
c("#af5f00"): "\033[48;5;130m", c("#af5f5f"): "\033[48;5;131m", + c("#af5f87"): "\033[48;5;132m", c("#af5faf"): "\033[48;5;133m", c("#af5fd7"): "\033[48;5;134m", c("#af5fff"): "\033[48;5;135m", + c("#af8700"): "\033[48;5;136m", c("#af875f"): "\033[48;5;137m", c("#af8787"): "\033[48;5;138m", c("#af87af"): "\033[48;5;139m", + c("#af87d7"): "\033[48;5;140m", c("#af87ff"): "\033[48;5;141m", c("#afaf00"): "\033[48;5;142m", c("#afaf5f"): "\033[48;5;143m", + c("#afaf87"): "\033[48;5;144m", c("#afafaf"): "\033[48;5;145m", c("#afafd7"): "\033[48;5;146m", c("#afafff"): "\033[48;5;147m", + c("#afd700"): "\033[48;5;148m", c("#afd75f"): "\033[48;5;149m", c("#afd787"): "\033[48;5;150m", c("#afd7af"): "\033[48;5;151m", + c("#afd7d7"): "\033[48;5;152m", c("#afd7ff"): "\033[48;5;153m", c("#afff00"): "\033[48;5;154m", c("#afff5f"): "\033[48;5;155m", + c("#afff87"): "\033[48;5;156m", c("#afffaf"): "\033[48;5;157m", c("#afffd7"): "\033[48;5;158m", c("#afffff"): "\033[48;5;159m", + c("#d70000"): "\033[48;5;160m", c("#d7005f"): "\033[48;5;161m", c("#d70087"): "\033[48;5;162m", c("#d700af"): "\033[48;5;163m", + c("#d700d7"): "\033[48;5;164m", c("#d700ff"): "\033[48;5;165m", c("#d75f00"): "\033[48;5;166m", c("#d75f5f"): "\033[48;5;167m", + c("#d75f87"): "\033[48;5;168m", c("#d75faf"): "\033[48;5;169m", c("#d75fd7"): "\033[48;5;170m", c("#d75fff"): "\033[48;5;171m", + c("#d78700"): "\033[48;5;172m", c("#d7875f"): "\033[48;5;173m", c("#d78787"): "\033[48;5;174m", c("#d787af"): "\033[48;5;175m", + c("#d787d7"): "\033[48;5;176m", c("#d787ff"): "\033[48;5;177m", c("#d7af00"): "\033[48;5;178m", c("#d7af5f"): "\033[48;5;179m", + c("#d7af87"): "\033[48;5;180m", c("#d7afaf"): "\033[48;5;181m", c("#d7afd7"): "\033[48;5;182m", c("#d7afff"): "\033[48;5;183m", + c("#d7d700"): "\033[48;5;184m", c("#d7d75f"): "\033[48;5;185m", c("#d7d787"): "\033[48;5;186m", c("#d7d7af"): "\033[48;5;187m", + c("#d7d7d7"): "\033[48;5;188m", c("#d7d7ff"): "\033[48;5;189m", c("#d7ff00"): "\033[48;5;190m", c("#d7ff5f"): "\033[48;5;191m", + c("#d7ff87"): "\033[48;5;192m", c("#d7ffaf"): "\033[48;5;193m", c("#d7ffd7"): "\033[48;5;194m", c("#d7ffff"): "\033[48;5;195m", + c("#ff0000"): "\033[48;5;196m", c("#ff005f"): "\033[48;5;197m", c("#ff0087"): "\033[48;5;198m", c("#ff00af"): "\033[48;5;199m", + c("#ff00d7"): "\033[48;5;200m", c("#ff00ff"): "\033[48;5;201m", c("#ff5f00"): "\033[48;5;202m", c("#ff5f5f"): "\033[48;5;203m", + c("#ff5f87"): "\033[48;5;204m", c("#ff5faf"): "\033[48;5;205m", c("#ff5fd7"): "\033[48;5;206m", c("#ff5fff"): "\033[48;5;207m", + c("#ff8700"): "\033[48;5;208m", c("#ff875f"): "\033[48;5;209m", c("#ff8787"): "\033[48;5;210m", c("#ff87af"): "\033[48;5;211m", + c("#ff87d7"): "\033[48;5;212m", c("#ff87ff"): "\033[48;5;213m", c("#ffaf00"): "\033[48;5;214m", c("#ffaf5f"): "\033[48;5;215m", + c("#ffaf87"): "\033[48;5;216m", c("#ffafaf"): "\033[48;5;217m", c("#ffafd7"): "\033[48;5;218m", c("#ffafff"): "\033[48;5;219m", + c("#ffd700"): "\033[48;5;220m", c("#ffd75f"): "\033[48;5;221m", c("#ffd787"): "\033[48;5;222m", c("#ffd7af"): "\033[48;5;223m", + c("#ffd7d7"): "\033[48;5;224m", c("#ffd7ff"): "\033[48;5;225m", c("#ffff00"): "\033[48;5;226m", c("#ffff5f"): "\033[48;5;227m", + c("#ffff87"): "\033[48;5;228m", c("#ffffaf"): "\033[48;5;229m", c("#ffffd7"): "\033[48;5;230m", c("#ffffff"): "\033[48;5;231m", + c("#080808"): "\033[48;5;232m", c("#121212"): "\033[48;5;233m", c("#1c1c1c"): "\033[48;5;234m", c("#262626"): "\033[48;5;235m", + c("#303030"): "\033[48;5;236m", c("#3a3a3a"): "\033[48;5;237m", c("#444444"): "\033[48;5;238m", 
c("#4e4e4e"): "\033[48;5;239m", + c("#585858"): "\033[48;5;240m", c("#626262"): "\033[48;5;241m", c("#6c6c6c"): "\033[48;5;242m", c("#767676"): "\033[48;5;243m", + c("#808080"): "\033[48;5;244m", c("#8a8a8a"): "\033[48;5;245m", c("#949494"): "\033[48;5;246m", c("#9e9e9e"): "\033[48;5;247m", + c("#a8a8a8"): "\033[48;5;248m", c("#b2b2b2"): "\033[48;5;249m", c("#bcbcbc"): "\033[48;5;250m", c("#c6c6c6"): "\033[48;5;251m", + c("#d0d0d0"): "\033[48;5;252m", c("#dadada"): "\033[48;5;253m", c("#e4e4e4"): "\033[48;5;254m", c("#eeeeee"): "\033[48;5;255m", + }, + }, +} + +func entryToEscapeSequence(table *ttyTable, entry chroma.StyleEntry) string { + out := "" + if entry.Bold == chroma.Yes { + out += "\033[1m" + } + if entry.Underline == chroma.Yes { + out += "\033[4m" + } + if entry.Colour.IsSet() { + out += table.foreground[findClosest(table, entry.Colour)] + } + if entry.Background.IsSet() { + out += table.background[findClosest(table, entry.Background)] + } + return out +} + +func findClosest(table *ttyTable, seeking chroma.Colour) chroma.Colour { + closestColour := chroma.Colour(0) + closest := float64(math.MaxFloat64) + for colour := range table.foreground { + distance := colour.Distance(seeking) + if distance < closest { + closest = distance + closestColour = colour + } + } + return closestColour +} + +func styleToEscapeSequence(table *ttyTable, style *chroma.Style) map[chroma.TokenType]string { + out := map[chroma.TokenType]string{} + for _, ttype := range style.Types() { + entry := style.Get(ttype) + out[ttype] = entryToEscapeSequence(table, entry) + } + return out +} + +type indexedTTYFormatter struct { + table *ttyTable +} + +func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) (err error) { + defer func() { + if perr := recover(); perr != nil { + err = perr.(error) + } + }() + theme := styleToEscapeSequence(c.table, style) + for token := it(); token != nil; token = it() { + // TODO: Cache token lookups? + clr, ok := theme[token.Type] + if !ok { + clr, ok = theme[token.Type.SubCategory()] + if !ok { + clr = theme[token.Type.Category()] + // if !ok { + // clr = theme[chroma.InheritStyle] + // } + } + } + if clr != "" { + fmt.Fprint(w, clr) + } + fmt.Fprint(w, token.Value) + if clr != "" { + fmt.Fprintf(w, "\033[0m") + } + } + return nil +} + +// TTY8 is an 8-colour terminal formatter. +// +// The Lab colour space is used to map RGB values to the most appropriate index colour. +var TTY8 = Register("terminal", &indexedTTYFormatter{ttyTables[8]}) + +// TTY256 is a 256-colour terminal formatter. +// +// The Lab colour space is used to map RGB values to the most appropriate index colour. +var TTY256 = Register("terminal256", &indexedTTYFormatter{ttyTables[256]}) diff --git a/vendor/github.com/alecthomas/chroma/formatters/tty_truecolour.go b/vendor/github.com/alecthomas/chroma/formatters/tty_truecolour.go new file mode 100644 index 00000000..e9c264a7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/formatters/tty_truecolour.go @@ -0,0 +1,38 @@ +package formatters + +import ( + "fmt" + "io" + + "github.com/alecthomas/chroma" +) + +// TTY16m is a true-colour terminal formatter. 
+var TTY16m = Register("terminal16m", chroma.FormatterFunc(trueColourFormatter)) + +func trueColourFormatter(w io.Writer, style *chroma.Style, it chroma.Iterator) error { + for token := it(); token != nil; token = it() { + entry := style.Get(token.Type) + if !entry.IsZero() { + out := "" + if entry.Bold == chroma.Yes { + out += "\033[1m" + } + if entry.Underline == chroma.Yes { + out += "\033[4m" + } + if entry.Colour.IsSet() { + out += fmt.Sprintf("\033[38;2;%d;%d;%dm", entry.Colour.Red(), entry.Colour.Green(), entry.Colour.Blue()) + } + if entry.Background.IsSet() { + out += fmt.Sprintf("\033[48;2;%d;%d;%dm", entry.Background.Red(), entry.Background.Green(), entry.Background.Blue()) + } + fmt.Fprint(w, out) + } + fmt.Fprint(w, token.Value) + if !entry.IsZero() { + fmt.Fprint(w, "\033[0m") + } + } + return nil +} diff --git a/vendor/github.com/alecthomas/chroma/iterator.go b/vendor/github.com/alecthomas/chroma/iterator.go new file mode 100644 index 00000000..43861c8a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/iterator.go @@ -0,0 +1,43 @@ +package chroma + +// An Iterator across tokens. +// +// nil will be returned at the end of the Token stream. +// +// If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover. +type Iterator func() *Token + +// Tokens consumes all tokens from the iterator and returns them as a slice. +func (i Iterator) Tokens() []*Token { + out := []*Token{} + for t := i(); t != nil; t = i() { + out = append(out, t) + } + return out +} + +// Concaterator concatenates tokens from a series of iterators. +func Concaterator(iterators ...Iterator) Iterator { + return func() *Token { + for len(iterators) > 0 { + t := iterators[0]() + if t != nil { + return t + } + iterators = iterators[1:] + } + return nil + } +} + +// Literator converts a sequence of literal Tokens into an Iterator. +func Literator(tokens ...*Token) Iterator { + return func() (out *Token) { + if len(tokens) == 0 { + return nil + } + token := tokens[0] + tokens = tokens[1:] + return token + } +} diff --git a/vendor/github.com/alecthomas/chroma/lexer.go b/vendor/github.com/alecthomas/chroma/lexer.go new file mode 100644 index 00000000..6a13f082 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexer.go @@ -0,0 +1,117 @@ +package chroma + +import ( + "fmt" +) + +var ( + defaultOptions = &TokeniseOptions{ + State: "root", + } +) + +// Config for a lexer. +type Config struct { + // Name of the lexer. + Name string + + // Shortcuts for the lexer + Aliases []string + + // File name globs + Filenames []string + + // Secondary file name globs + AliasFilenames []string + + // MIME types + MimeTypes []string + + // Regex matching is case-insensitive. + CaseInsensitive bool + + // Regex matches all characters. + DotAll bool + + // Regex does not match across lines ($ matches EOL). + // + // Defaults to multiline. + NotMultiline bool + + // Don't strip leading and trailing newlines from the input. + // DontStripNL bool + + // Strip all leading and trailing whitespace from the input + // StripAll bool + + // Make sure that the input ends with a newline. This + // is required for some lexers that consume input linewise. + EnsureNL bool + + // If given and greater than 0, expand tabs in the input. + // TabSize int + + // Priority of lexer. + // + // If this is 0 it will be treated as a default of 1. + Priority float32 +} + +// Token output to formatter. 
+type Token struct { + Type TokenType `json:"type"` + Value string `json:"value"` +} + +func (t *Token) String() string { return t.Value } +func (t *Token) GoString() string { return fmt.Sprintf("&Token{%s, %q}", t.Type, t.Value) } + +func (t *Token) Clone() *Token { + clone := &Token{} + *clone = *t + return clone +} + +type TokeniseOptions struct { + // State to start tokenisation in. Defaults to "root". + State string + // Nested tokenisation. + Nested bool +} + +// A Lexer for tokenising source code. +type Lexer interface { + // Config describing the features of the Lexer. + Config() *Config + // Tokenise returns an Iterator over tokens in text. + Tokenise(options *TokeniseOptions, text string) (Iterator, error) +} + +// Lexers is a slice of lexers sortable by name. +type Lexers []Lexer + +func (l Lexers) Len() int { return len(l) } +func (l Lexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] } +func (l Lexers) Less(i, j int) bool { return l[i].Config().Name < l[j].Config().Name } + +// PrioritisedLexers is a slice of lexers sortable by priority. +type PrioritisedLexers []Lexer + +func (l PrioritisedLexers) Len() int { return len(l) } +func (l PrioritisedLexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] } +func (l PrioritisedLexers) Less(i, j int) bool { + ip := l[i].Config().Priority + if ip == 0 { + ip = 1 + } + jp := l[j].Config().Priority + if jp == 0 { + jp = 1 + } + return ip > jp +} + +// Analyser determines how appropriate this lexer is for the given text. +type Analyser interface { + AnalyseText(text string) float32 +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/abnf.go b/vendor/github.com/alecthomas/chroma/lexers/a/abnf.go new file mode 100644 index 00000000..ff29aed2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/abnf.go @@ -0,0 +1,38 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Abnf lexer. +var Abnf = internal.Register(MustNewLexer( + &Config{ + Name: "ABNF", + Aliases: []string{"abnf"}, + Filenames: []string{"*.abnf"}, + MimeTypes: []string{"text/x-abnf"}, + }, + Rules{ + "root": { + {`;.*$`, CommentSingle, nil}, + {`(%[si])?"[^"]*"`, Literal, nil}, + {`%b[01]+\-[01]+\b`, Literal, nil}, + {`%b[01]+(\.[01]+)*\b`, Literal, nil}, + {`%d[0-9]+\-[0-9]+\b`, Literal, nil}, + {`%d[0-9]+(\.[0-9]+)*\b`, Literal, nil}, + {`%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b`, Literal, nil}, + {`%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b`, Literal, nil}, + {`\b[0-9]+\*[0-9]+`, Operator, nil}, + {`\b[0-9]+\*`, Operator, nil}, + {`\b[0-9]+`, Operator, nil}, + {`\*`, Operator, nil}, + {Words(``, `\b`, `ALPHA`, `BIT`, `CHAR`, `CR`, `CRLF`, `CTL`, `DIGIT`, `DQUOTE`, `HEXDIG`, `HTAB`, `LF`, `LWSP`, `OCTET`, `SP`, `VCHAR`, `WSP`), Keyword, nil}, + {`[a-zA-Z][a-zA-Z0-9-]+\b`, NameClass, nil}, + {`(=/|=|/)`, Operator, nil}, + {`[\[\]()]`, Punctuation, nil}, + {`\s+`, Text, nil}, + {`.`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/actionscript.go b/vendor/github.com/alecthomas/chroma/lexers/a/actionscript.go new file mode 100644 index 00000000..43d38521 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/actionscript.go @@ -0,0 +1,39 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Actionscript lexer. 
+var Actionscript = internal.Register(MustNewLexer( + &Config{ + Name: "ActionScript", + Aliases: []string{"as", "actionscript"}, + Filenames: []string{"*.as"}, + MimeTypes: []string{"application/x-actionscript", "text/x-actionscript", "text/actionscript"}, + NotMultiline: true, + DotAll: true, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`/(\\\\|\\/|[^/\n])*/[gim]*`, LiteralStringRegex, nil}, + {`[~^*!%&<>|+=:;,/?\\-]+`, Operator, nil}, + {`[{}\[\]();.]+`, Punctuation, nil}, + {Words(``, `\b`, `case`, `default`, `for`, `each`, `in`, `while`, `do`, `break`, `return`, `continue`, `if`, `else`, `throw`, `try`, `catch`, `var`, `with`, `new`, `typeof`, `arguments`, `instanceof`, `this`, `switch`), Keyword, nil}, + {Words(``, `\b`, `class`, `public`, `final`, `internal`, `native`, `override`, `private`, `protected`, `static`, `import`, `extends`, `implements`, `interface`, `intrinsic`, `return`, `super`, `dynamic`, `function`, `const`, `get`, `namespace`, `package`, `set`), KeywordDeclaration, nil}, + {`(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b`, KeywordConstant, nil}, + {Words(``, `\b`, `Accessibility`, `AccessibilityProperties`, `ActionScriptVersion`, `ActivityEvent`, `AntiAliasType`, `ApplicationDomain`, `AsBroadcaster`, `Array`, `AsyncErrorEvent`, `AVM1Movie`, `BevelFilter`, `Bitmap`, `BitmapData`, `BitmapDataChannel`, `BitmapFilter`, `BitmapFilterQuality`, `BitmapFilterType`, `BlendMode`, `BlurFilter`, `Boolean`, `ByteArray`, `Camera`, `Capabilities`, `CapsStyle`, `Class`, `Color`, `ColorMatrixFilter`, `ColorTransform`, `ContextMenu`, `ContextMenuBuiltInItems`, `ContextMenuEvent`, `ContextMenuItem`, `ConvultionFilter`, `CSMSettings`, `DataEvent`, `Date`, `DefinitionError`, `DeleteObjectSample`, `Dictionary`, `DisplacmentMapFilter`, `DisplayObject`, `DisplacmentMapFilterMode`, `DisplayObjectContainer`, `DropShadowFilter`, `Endian`, `EOFError`, `Error`, `ErrorEvent`, `EvalError`, `Event`, `EventDispatcher`, `EventPhase`, `ExternalInterface`, `FileFilter`, `FileReference`, `FileReferenceList`, `FocusDirection`, `FocusEvent`, `Font`, `FontStyle`, `FontType`, `FrameLabel`, `FullScreenEvent`, `Function`, `GlowFilter`, `GradientBevelFilter`, `GradientGlowFilter`, `GradientType`, `Graphics`, `GridFitType`, `HTTPStatusEvent`, `IBitmapDrawable`, `ID3Info`, `IDataInput`, `IDataOutput`, `IDynamicPropertyOutputIDynamicPropertyWriter`, `IEventDispatcher`, `IExternalizable`, `IllegalOperationError`, `IME`, `IMEConversionMode`, `IMEEvent`, `int`, `InteractiveObject`, `InterpolationMethod`, `InvalidSWFError`, `InvokeEvent`, `IOError`, `IOErrorEvent`, `JointStyle`, `Key`, `Keyboard`, `KeyboardEvent`, `KeyLocation`, `LineScaleMode`, `Loader`, `LoaderContext`, `LoaderInfo`, `LoadVars`, `LocalConnection`, `Locale`, `Math`, `Matrix`, `MemoryError`, `Microphone`, `MorphShape`, `Mouse`, `MouseEvent`, `MovieClip`, `MovieClipLoader`, `Namespace`, `NetConnection`, `NetStatusEvent`, `NetStream`, `NewObjectSample`, `Number`, `Object`, `ObjectEncoding`, `PixelSnapping`, `Point`, `PrintJob`, `PrintJobOptions`, `PrintJobOrientation`, `ProgressEvent`, `Proxy`, `QName`, `RangeError`, `Rectangle`, `ReferenceError`, `RegExp`, `Responder`, `Sample`, `Scene`, `ScriptTimeoutError`, `Security`, `SecurityDomain`, `SecurityError`, `SecurityErrorEvent`, `SecurityPanel`, `Selection`, `Shape`, `SharedObject`, `SharedObjectFlushStatus`, `SimpleButton`, `Socket`, `Sound`, `SoundChannel`, `SoundLoaderContext`, `SoundMixer`, `SoundTransform`, 
`SpreadMethod`, `Sprite`, `StackFrame`, `StackOverflowError`, `Stage`, `StageAlign`, `StageDisplayState`, `StageQuality`, `StageScaleMode`, `StaticText`, `StatusEvent`, `String`, `StyleSheet`, `SWFVersion`, `SyncEvent`, `SyntaxError`, `System`, `TextColorType`, `TextField`, `TextFieldAutoSize`, `TextFieldType`, `TextFormat`, `TextFormatAlign`, `TextLineMetrics`, `TextRenderer`, `TextSnapshot`, `Timer`, `TimerEvent`, `Transform`, `TypeError`, `uint`, `URIError`, `URLLoader`, `URLLoaderDataFormat`, `URLRequest`, `URLRequestHeader`, `URLRequestMethod`, `URLStream`, `URLVariabeles`, `VerifyError`, `Video`, `XML`, `XMLDocument`, `XMLList`, `XMLNode`, `XMLNodeType`, `XMLSocket`, `XMLUI`), NameBuiltin, nil}, + {Words(``, `\b`, `decodeURI`, `decodeURIComponent`, `encodeURI`, `escape`, `eval`, `isFinite`, `isNaN`, `isXMLName`, `clearInterval`, `fscommand`, `getTimer`, `getURL`, `getVersion`, `parseFloat`, `parseInt`, `setInterval`, `trace`, `updateAfterEvent`, `unescape`), NameFunction, nil}, + {`[$a-zA-Z_]\w*`, NameOther, nil}, + {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-f]+`, LiteralNumberHex, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/actionscript3.go b/vendor/github.com/alecthomas/chroma/lexers/a/actionscript3.go new file mode 100644 index 00000000..3404bd55 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/actionscript3.go @@ -0,0 +1,56 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Actionscript 3 lexer. +var Actionscript3 = internal.Register(MustNewLexer( + &Config{ + Name: "ActionScript 3", + Aliases: []string{"as3", "actionscript3"}, + Filenames: []string{"*.as"}, + MimeTypes: []string{"application/x-actionscript3", "text/x-actionscript3", "text/actionscript3"}, + DotAll: true, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`(function\s+)([$a-zA-Z_]\w*)(\s*)(\()`, ByGroups(KeywordDeclaration, NameFunction, Text, Operator), Push("funcparams")}, + {`(var|const)(\s+)([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?)`, ByGroups(KeywordDeclaration, Text, Name, Text, Punctuation, Text, KeywordType), nil}, + {`(import|package)(\s+)((?:[$a-zA-Z_]\w*|\.)+)(\s*)`, ByGroups(Keyword, Text, NameNamespace, Text), nil}, + {`(new)(\s+)([$a-zA-Z_]\w*(?:\.<\w+>)?)(\s*)(\()`, ByGroups(Keyword, Text, KeywordType, Text, Operator), nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`/(\\\\|\\/|[^\n])*/[gisx]*`, LiteralStringRegex, nil}, + {`(\.)([$a-zA-Z_]\w*)`, ByGroups(Operator, NameAttribute), nil}, + {`(case|default|for|each|in|while|do|break|return|continue|if|else|throw|try|catch|with|new|typeof|arguments|instanceof|this|switch|import|include|as|is)\b`, Keyword, nil}, + {`(class|public|final|internal|native|override|private|protected|static|import|extends|implements|interface|intrinsic|return|super|dynamic|function|const|get|namespace|package|set)\b`, KeywordDeclaration, nil}, + {`(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b`, KeywordConstant, nil}, + {`(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|unescape)\b`, NameFunction, nil}, + {`[$a-zA-Z_]\w*`, Name, nil}, + 
{`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-f]+`, LiteralNumberHex, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`[~^*!%&<>|+=:;,/?\\{}\[\]().-]+`, Operator, nil}, + }, + "funcparams": { + {`\s+`, Text, nil}, + {`(\s*)(\.\.\.)?([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?|\*)(\s*)`, ByGroups(Text, Punctuation, Name, Text, Operator, Text, KeywordType, Text), Push("defval")}, + {`\)`, Operator, Push("type")}, + }, + "type": { + {`(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?|\*)`, ByGroups(Text, Operator, Text, KeywordType), Pop(2)}, + {`\s+`, Text, Pop(2)}, + Default(Pop(2)), + }, + "defval": { + {`(=)(\s*)([^(),]+)(\s*)(,?)`, ByGroups(Operator, Text, UsingSelf("root"), Text, Operator), Pop(1)}, + {`,`, Operator, Pop(1)}, + Default(Pop(1)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/ada.go b/vendor/github.com/alecthomas/chroma/lexers/a/ada.go new file mode 100644 index 00000000..d9b34e3c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/ada.go @@ -0,0 +1,114 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Ada lexer. +var Ada = internal.Register(MustNewLexer( + &Config{ + Name: "Ada", + Aliases: []string{"ada", "ada95", "ada2005"}, + Filenames: []string{"*.adb", "*.ads", "*.ada"}, + MimeTypes: []string{"text/x-ada"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`[^\S\n]+`, Text, nil}, + {`--.*?\n`, CommentSingle, nil}, + {`[^\S\n]+`, Text, nil}, + {`function|procedure|entry`, KeywordDeclaration, Push("subprogram")}, + {`(subtype|type)(\s+)(\w+)`, ByGroups(KeywordDeclaration, Text, KeywordType), Push("type_def")}, + {`task|protected`, KeywordDeclaration, nil}, + {`(subtype)(\s+)`, ByGroups(KeywordDeclaration, Text), nil}, + {`(end)(\s+)`, ByGroups(KeywordReserved, Text), Push("end")}, + {`(pragma)(\s+)(\w+)`, ByGroups(KeywordReserved, Text, CommentPreproc), nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {Words(``, `\b`, `Address`, `Byte`, `Boolean`, `Character`, `Controlled`, `Count`, `Cursor`, `Duration`, `File_Mode`, `File_Type`, `Float`, `Generator`, `Integer`, `Long_Float`, `Long_Integer`, `Long_Long_Float`, `Long_Long_Integer`, `Natural`, `Positive`, `Reference_Type`, `Short_Float`, `Short_Integer`, `Short_Short_Float`, `Short_Short_Integer`, `String`, `Wide_Character`, `Wide_String`), KeywordType, nil}, + {`(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b`, OperatorWord, nil}, + {`generic|private`, KeywordDeclaration, nil}, + {`package`, KeywordDeclaration, Push("package")}, + {`array\b`, KeywordReserved, Push("array_def")}, + {`(with|use)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`(\w+)(\s*)(:)(\s*)(constant)`, ByGroups(NameConstant, Text, Punctuation, Text, KeywordReserved), nil}, + {`<<\w+>>`, NameLabel, nil}, + {`(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)`, ByGroups(NameLabel, Text, Punctuation, Text, KeywordReserved), nil}, + {Words(`\b`, `\b`, `abort`, `abs`, `abstract`, `accept`, `access`, `aliased`, `all`, `array`, `at`, `begin`, `body`, `case`, `constant`, `declare`, `delay`, `delta`, `digits`, `do`, `else`, `elsif`, `end`, `entry`, `exception`, `exit`, `interface`, `for`, `goto`, `if`, `is`, `limited`, `loop`, `new`, `null`, `of`, `or`, `others`, `out`, `overriding`, `pragma`, `protected`, `raise`, `range`, `record`, `renames`, `requeue`, `return`, `reverse`, `select`, 
`separate`, `subtype`, `synchronized`, `task`, `tagged`, `terminate`, `then`, `type`, `until`, `when`, `while`, `xor`), KeywordReserved, nil}, + {`"[^"]*"`, LiteralString, nil}, + Include("attribute"), + Include("numbers"), + {`'[^']'`, LiteralStringChar, nil}, + {`(\w+)(\s*|[(,])`, ByGroups(Name, UsingSelf("root")), nil}, + {`(<>|=>|:=|[()|:;,.'])`, Punctuation, nil}, + {`[*<>+=/&-]`, Operator, nil}, + {`\n+`, Text, nil}, + }, + "numbers": { + {`[0-9_]+#[0-9a-f]+#`, LiteralNumberHex, nil}, + {`[0-9_]+\.[0-9_]*`, LiteralNumberFloat, nil}, + {`[0-9_]+`, LiteralNumberInteger, nil}, + }, + "attribute": { + {`(')(\w+)`, ByGroups(Punctuation, NameAttribute), nil}, + }, + "subprogram": { + {`\(`, Punctuation, Push("#pop", "formal_part")}, + {`;`, Punctuation, Pop(1)}, + {`is\b`, KeywordReserved, Pop(1)}, + {`"[^"]+"|\w+`, NameFunction, nil}, + Include("root"), + }, + "end": { + {`(if|case|record|loop|select)`, KeywordReserved, nil}, + {`"[^"]+"|[\w.]+`, NameFunction, nil}, + {`\s+`, Text, nil}, + {`;`, Punctuation, Pop(1)}, + }, + "type_def": { + {`;`, Punctuation, Pop(1)}, + {`\(`, Punctuation, Push("formal_part")}, + {`with|and|use`, KeywordReserved, nil}, + {`array\b`, KeywordReserved, Push("#pop", "array_def")}, + {`record\b`, KeywordReserved, Push("record_def")}, + {`(null record)(;)`, ByGroups(KeywordReserved, Punctuation), Pop(1)}, + Include("root"), + }, + "array_def": { + {`;`, Punctuation, Pop(1)}, + {`(\w+)(\s+)(range)`, ByGroups(KeywordType, Text, KeywordReserved), nil}, + Include("root"), + }, + "record_def": { + {`end record`, KeywordReserved, Pop(1)}, + Include("root"), + }, + "import": { + {`[\w.]+`, NameNamespace, Pop(1)}, + Default(Pop(1)), + }, + "formal_part": { + {`\)`, Punctuation, Pop(1)}, + {`\w+`, NameVariable, nil}, + {`,|:[^=]`, Punctuation, nil}, + {`(in|not|null|out|access)\b`, KeywordReserved, nil}, + Include("root"), + }, + "package": { + {`body`, KeywordDeclaration, nil}, + {`is\s+new|renames`, KeywordReserved, nil}, + {`is`, KeywordReserved, Pop(1)}, + {`;`, Punctuation, Pop(1)}, + {`\(`, Punctuation, Push("package_instantiation")}, + {`([\w.]+)`, NameClass, nil}, + Include("root"), + }, + "package_instantiation": { + {`("[^"]+"|\w+)(\s+)(=>)`, ByGroups(NameVariable, Text, Punctuation), nil}, + {`[\w.\'"]`, Text, nil}, + {`\)`, Punctuation, Pop(1)}, + Include("root"), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/angular2.go b/vendor/github.com/alecthomas/chroma/lexers/a/angular2.go new file mode 100644 index 00000000..5258c928 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/angular2.go @@ -0,0 +1,42 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Angular2 lexer. 
+var Angular2 = internal.Register(MustNewLexer( + &Config{ + Name: "Angular2", + Aliases: []string{"ng2"}, + Filenames: []string{}, + MimeTypes: []string{}, + }, + Rules{ + "root": { + {`[^{([*#]+`, Other, nil}, + {`(\{\{)(\s*)`, ByGroups(CommentPreproc, Text), Push("ngExpression")}, + {`([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation, Text, Operator, Text), Push("attr")}, + {`([([]+)([\w:.-]+)([\])]+)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation, Text), nil}, + {`([*#])([\w:.-]+)(\s*)(=)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation, Operator), Push("attr")}, + {`([*#])([\w:.-]+)(\s*)`, ByGroups(Punctuation, NameAttribute, Punctuation), nil}, + }, + "ngExpression": { + {`\s+(\|\s+)?`, Text, nil}, + {`\}\}`, CommentPreproc, Pop(1)}, + {`:?(true|false)`, LiteralStringBoolean, nil}, + {`:?"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, + {`[a-zA-Z][\w-]*(\(.*\))?`, NameVariable, nil}, + {`\.[\w-]+(\(.*\))?`, NameVariable, nil}, + {`(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)`, ByGroups(Operator, Text, LiteralString, Text, Operator, Text, LiteralString, Text), nil}, + }, + "attr": { + {`".*?"`, LiteralString, Pop(1)}, + {`'.*?'`, LiteralString, Pop(1)}, + {`[^\s>]+`, LiteralString, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/antlr.go b/vendor/github.com/alecthomas/chroma/lexers/a/antlr.go new file mode 100644 index 00000000..d7649d4a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/antlr.go @@ -0,0 +1,101 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// ANTLR lexer. 
+var ANTLR = internal.Register(MustNewLexer( + &Config{ + Name: "ANTLR", + Aliases: []string{"antlr"}, + Filenames: []string{}, + MimeTypes: []string{}, + }, + Rules{ + "whitespace": { + {`\s+`, TextWhitespace, nil}, + }, + "comments": { + {`//.*$`, Comment, nil}, + {`/\*(.|\n)*?\*/`, Comment, nil}, + }, + "root": { + Include("whitespace"), + Include("comments"), + {`(lexer|parser|tree)?(\s*)(grammar\b)(\s*)([A-Za-z]\w*)(;)`, ByGroups(Keyword, TextWhitespace, Keyword, TextWhitespace, NameClass, Punctuation), nil}, + {`options\b`, Keyword, Push("options")}, + {`tokens\b`, Keyword, Push("tokens")}, + {`(scope)(\s*)([A-Za-z]\w*)(\s*)(\{)`, ByGroups(Keyword, TextWhitespace, NameVariable, TextWhitespace, Punctuation), Push("action")}, + {`(catch|finally)\b`, Keyword, Push("exception")}, + {`(@[A-Za-z]\w*)(\s*)(::)?(\s*)([A-Za-z]\w*)(\s*)(\{)`, ByGroups(NameLabel, TextWhitespace, Punctuation, TextWhitespace, NameLabel, TextWhitespace, Punctuation), Push("action")}, + {`((?:protected|private|public|fragment)\b)?(\s*)([A-Za-z]\w*)(!)?`, ByGroups(Keyword, TextWhitespace, NameLabel, Punctuation), Push("rule-alts", "rule-prelims")}, + }, + "exception": { + {`\n`, TextWhitespace, Pop(1)}, + {`\s`, TextWhitespace, nil}, + Include("comments"), + {`\[`, Punctuation, Push("nested-arg-action")}, + {`\{`, Punctuation, Push("action")}, + }, + "rule-prelims": { + Include("whitespace"), + Include("comments"), + {`returns\b`, Keyword, nil}, + {`\[`, Punctuation, Push("nested-arg-action")}, + {`\{`, Punctuation, Push("action")}, + {`(throws)(\s+)([A-Za-z]\w*)`, ByGroups(Keyword, TextWhitespace, NameLabel), nil}, + {`(,)(\s*)([A-Za-z]\w*)`, ByGroups(Punctuation, TextWhitespace, NameLabel), nil}, + {`options\b`, Keyword, Push("options")}, + {`(scope)(\s+)(\{)`, ByGroups(Keyword, TextWhitespace, Punctuation), Push("action")}, + {`(scope)(\s+)([A-Za-z]\w*)(\s*)(;)`, ByGroups(Keyword, TextWhitespace, NameLabel, TextWhitespace, Punctuation), nil}, + {`(@[A-Za-z]\w*)(\s*)(\{)`, ByGroups(NameLabel, TextWhitespace, Punctuation), Push("action")}, + {`:`, Punctuation, Pop(1)}, + }, + "rule-alts": { + Include("whitespace"), + Include("comments"), + {`options\b`, Keyword, Push("options")}, + {`:`, Punctuation, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralString, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`<<([^>]|>[^>])>>`, LiteralString, nil}, + {`\$?[A-Z_]\w*`, NameConstant, nil}, + {`\$?[a-z_]\w*`, NameVariable, nil}, + {`(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)`, Operator, nil}, + {`,`, Punctuation, nil}, + {`\[`, Punctuation, Push("nested-arg-action")}, + {`\{`, Punctuation, Push("action")}, + {`;`, Punctuation, Pop(1)}, + }, + "tokens": { + Include("whitespace"), + Include("comments"), + {`\{`, Punctuation, nil}, + {`([A-Z]\w*)(\s*)(=)?(\s*)(\'(?:\\\\|\\\'|[^\']*)\')?(\s*)(;)`, ByGroups(NameLabel, TextWhitespace, Punctuation, TextWhitespace, LiteralString, TextWhitespace, Punctuation), nil}, + {`\}`, Punctuation, Pop(1)}, + }, + "options": { + Include("whitespace"), + Include("comments"), + {`\{`, Punctuation, nil}, + {`([A-Za-z]\w*)(\s*)(=)(\s*)([A-Za-z]\w*|\'(?:\\\\|\\\'|[^\']*)\'|[0-9]+|\*)(\s*)(;)`, ByGroups(NameVariable, TextWhitespace, Punctuation, TextWhitespace, Text, TextWhitespace, Punctuation), nil}, + {`\}`, Punctuation, Pop(1)}, + }, + "action": { + {`([^${}\'"/\\]+|"(\\\\|\\"|[^"])*"|'(\\\\|\\'|[^'])*'|//.*$\n?|/\*(.|\n)*?\*/|/(?!\*)(\\\\|\\/|[^/])*/|\\(?!%)|/)+`, Other, nil}, + {`(\\)(%)`, ByGroups(Punctuation, Other), nil}, + {`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, 
Punctuation, NameProperty), nil}, + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + }, + "nested-arg-action": { + {`([^$\[\]\'"/]+|"(\\\\|\\"|[^"])*"|'(\\\\|\\'|[^'])*'|//.*$\n?|/\*(.|\n)*?\*/|/(?!\*)(\\\\|\\/|[^/])*/|/)+`, Other, nil}, + {`\[`, Punctuation, Push()}, + {`\]`, Punctuation, Pop(1)}, + {`(\$[a-zA-Z]+)(\.?)(text|value)?`, ByGroups(NameVariable, Punctuation, NameProperty), nil}, + {`(\\\\|\\\]|\\\[|[^\[\]])+`, Other, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/apache.go b/vendor/github.com/alecthomas/chroma/lexers/a/apache.go new file mode 100644 index 00000000..6c56a1db --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/apache.go @@ -0,0 +1,38 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Apacheconf lexer. +var Apacheconf = internal.Register(MustNewLexer( + &Config{ + Name: "ApacheConf", + Aliases: []string{"apacheconf", "aconf", "apache"}, + Filenames: []string{".htaccess", "apache.conf", "apache2.conf"}, + MimeTypes: []string{"text/x-apacheconf"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`(#.*?)$`, Comment, nil}, + {`(<[^\s>]+)(?:(\s+)(.*?))?(>)`, ByGroups(NameTag, Text, LiteralString, NameTag), nil}, + {`([a-z]\w*)(\s+)`, ByGroups(NameBuiltin, Text), Push("value")}, + {`\.+`, Text, nil}, + }, + "value": { + {`\\\n`, Text, nil}, + {`$`, Text, Pop(1)}, + {`\\`, Text, nil}, + {`[^\S\n]+`, Text, nil}, + {`\d+\.\d+\.\d+\.\d+(?:/\d+)?`, LiteralNumber, nil}, + {`\d+`, LiteralNumber, nil}, + {`/([a-z0-9][\w./-]+)`, LiteralStringOther, nil}, + {`(on|off|none|any|all|double|email|dns|min|minimal|os|productonly|full|emerg|alert|crit|error|warn|notice|info|debug|registry|script|inetd|standalone|user|group)\b`, Keyword, nil}, + {`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil}, + {`[^\s"\\]+`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/apl.go b/vendor/github.com/alecthomas/chroma/lexers/a/apl.go new file mode 100644 index 00000000..820e13b6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/apl.go @@ -0,0 +1,36 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Apl lexer. 
+var Apl = internal.Register(MustNewLexer( + &Config{ + Name: "APL", + Aliases: []string{"apl"}, + Filenames: []string{"*.apl"}, + MimeTypes: []string{}, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`[⍝#].*$`, CommentSingle, nil}, + {`\'((\'\')|[^\'])*\'`, LiteralStringSingle, nil}, + {`"(("")|[^"])*"`, LiteralStringDouble, nil}, + {`[⋄◇()]`, Punctuation, nil}, + {`[\[\];]`, LiteralStringRegex, nil}, + {`⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*`, NameFunction, nil}, + {`[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*`, NameVariable, nil}, + {`¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?`, LiteralNumber, nil}, + {`[\.\\/⌿⍀¨⍣⍨⍠⍤∘]`, NameAttribute, nil}, + {`[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]`, Operator, nil}, + {`⍬`, NameConstant, nil}, + {`[⎕⍞]`, NameVariableGlobal, nil}, + {`[←→]`, KeywordDeclaration, nil}, + {`[⍺⍵⍶⍹∇:]`, NameBuiltinPseudo, nil}, + {`[{}]`, KeywordType, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/applescript.go b/vendor/github.com/alecthomas/chroma/lexers/a/applescript.go new file mode 100644 index 00000000..84ddf1f7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/applescript.go @@ -0,0 +1,55 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Applescript lexer. +var Applescript = internal.Register(MustNewLexer( + &Config{ + Name: "AppleScript", + Aliases: []string{"applescript"}, + Filenames: []string{"*.applescript"}, + MimeTypes: []string{}, + DotAll: true, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`¬\n`, LiteralStringEscape, nil}, + {`'s\s+`, Text, nil}, + {`(--|#).*?$`, Comment, nil}, + {`\(\*`, CommentMultiline, Push("comment")}, + {`[(){}!,.:]`, Punctuation, nil}, + {`(«)([^»]+)(»)`, ByGroups(Text, NameBuiltin, Text), nil}, + {`\b((?:considering|ignoring)\s*)(application responses|case|diacriticals|hyphens|numeric strings|punctuation|white space)`, ByGroups(Keyword, NameBuiltin), nil}, + {`(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)`, Operator, nil}, + {`\b(and|or|is equal|equals|(is )?equal to|is not|isn't|isn't equal( to)?|is not equal( to)?|doesn't equal|does not equal|(is )?greater than|comes after|is not less than or equal( to)?|isn't less than or equal( to)?|(is )?less than|comes before|is not greater than or equal( to)?|isn't greater than or equal( to)?|(is )?greater than or equal( to)?|is not less than|isn't less than|does not come before|doesn't come before|(is )?less than or equal( to)?|is not greater than|isn't greater than|does not come after|doesn't come after|starts? with|begins? with|ends? 
with|contains?|does not contain|doesn't contain|is in|is contained by|is not in|is not contained by|isn't contained by|div|mod|not|(a )?(ref( to)?|reference to)|is|does)\b`, OperatorWord, nil}, + {`^(\s*(?:on|end)\s+)(zoomed|write to file|will zoom|will show|will select tab view item|will resize( sub views)?|will resign active|will quit|will pop up|will open|will move|will miniaturize|will hide|will finish launching|will display outline cell|will display item cell|will display cell|will display browser cell|will dismiss|will close|will become active|was miniaturized|was hidden|update toolbar item|update parameters|update menu item|shown|should zoom|should selection change|should select tab view item|should select row|should select item|should select column|should quit( after last window closed)?|should open( untitled)?|should expand item|should end editing|should collapse item|should close|should begin editing|selection changing|selection changed|selected tab view item|scroll wheel|rows changed|right mouse up|right mouse dragged|right mouse down|resized( sub views)?|resigned main|resigned key|resigned active|read from file|prepare table drop|prepare table drag|prepare outline drop|prepare outline drag|prepare drop|plugin loaded|parameters updated|panel ended|opened|open untitled|number of rows|number of items|number of browser rows|moved|mouse up|mouse moved|mouse exited|mouse entered|mouse dragged|mouse down|miniaturized|load data representation|launched|keyboard up|keyboard down|items changed|item value changed|item value|item expandable|idle|exposed|end editing|drop|drag( (entered|exited|updated))?|double clicked|document nib name|dialog ended|deminiaturized|data representation|conclude drop|column resized|column moved|column clicked|closed|clicked toolbar item|clicked|choose menu item|child of item|changed|change item value|change cell value|cell value changed|cell value|bounds changed|begin editing|became main|became key|awake from nib|alert ended|activated|action|accept table drop|accept outline drop)`, ByGroups(Keyword, NameFunction), nil}, + {`^(\s*)(in|on|script|to)(\s+)`, ByGroups(Text, Keyword, Text), nil}, + {`\b(as )(alias |application |boolean |class |constant |date |file |integer |list |number |POSIX file |real |record |reference |RGB color |script |text |unit types|(?:Unicode )?text|string)\b`, ByGroups(Keyword, NameClass), nil}, + {`\b(AppleScript|current application|false|linefeed|missing value|pi|quote|result|return|space|tab|text item delimiters|true|version)\b`, NameConstant, nil}, + {`\b(ASCII (character|number)|activate|beep|choose URL|choose application|choose color|choose file( name)?|choose folder|choose from list|choose remote application|clipboard info|close( access)?|copy|count|current date|delay|delete|display (alert|dialog)|do shell script|duplicate|exists|get eof|get volume settings|info for|launch|list (disks|folder)|load script|log|make|mount volume|new|offset|open( (for access|location))?|path to|print|quit|random number|read|round|run( script)?|say|scripting components|set (eof|the clipboard to|volume)|store script|summarize|system attribute|system info|the clipboard|time to GMT|write|quoted form)\b`, NameBuiltin, nil}, + {`\b(considering|else|error|exit|from|if|ignoring|in|repeat|tell|then|times|to|try|until|using terms from|while|whith|with timeout( of)?|with transaction|by|continue|end|its?|me|my|return|of|as)\b`, Keyword, nil}, + {`\b(global|local|prop(erty)?|set|get)\b`, Keyword, nil}, + {`\b(but|put|returning|the)\b`, NameBuiltin, nil}, + 
{`\b(attachment|attribute run|character|day|month|paragraph|word|year)s?\b`, NameBuiltin, nil}, + {`\b(about|above|against|apart from|around|aside from|at|below|beneath|beside|between|for|given|instead of|on|onto|out of|over|since)\b`, NameBuiltin, nil}, + {`\b(accepts arrow key|action method|active|alignment|allowed identifiers|allows branch selection|allows column reordering|allows column resizing|allows column selection|allows customization|allows editing text attributes|allows empty selection|allows mixed state|allows multiple selection|allows reordering|allows undo|alpha( value)?|alternate image|alternate increment value|alternate title|animation delay|associated file name|associated object|auto completes|auto display|auto enables items|auto repeat|auto resizes( outline column)?|auto save expanded items|auto save name|auto save table columns|auto saves configuration|auto scroll|auto sizes all columns to fit|auto sizes cells|background color|bezel state|bezel style|bezeled|border rect|border type|bordered|bounds( rotation)?|box type|button returned|button type|can choose directories|can choose files|can draw|can hide|cell( (background color|size|type))?|characters|class|click count|clicked( data)? column|clicked data item|clicked( data)? row|closeable|collating|color( (mode|panel))|command key down|configuration|content(s| (size|view( margins)?))?|context|continuous|control key down|control size|control tint|control view|controller visible|coordinate system|copies( on scroll)?|corner view|current cell|current column|current( field)? editor|current( menu)? item|current row|current tab view item|data source|default identifiers|delta (x|y|z)|destination window|directory|display mode|displayed cell|document( (edited|rect|view))?|double value|dragged column|dragged distance|dragged items|draws( cell)? background|draws grid|dynamically scrolls|echos bullets|edge|editable|edited( data)? column|edited data item|edited( data)? row|enabled|enclosing scroll view|ending page|error handling|event number|event type|excluded from windows menu|executable path|expanded|fax number|field editor|file kind|file name|file type|first responder|first visible column|flipped|floating|font( panel)?|formatter|frameworks path|frontmost|gave up|grid color|has data items|has horizontal ruler|has horizontal scroller|has parent data item|has resize indicator|has shadow|has sub menu|has vertical ruler|has vertical scroller|header cell|header view|hidden|hides when deactivated|highlights by|horizontal line scroll|horizontal page scroll|horizontal ruler view|horizontally resizable|icon image|id|identifier|ignores multiple clicks|image( (alignment|dims when disabled|frame style|scaling))?|imports graphics|increment value|indentation per level|indeterminate|index|integer value|intercell spacing|item height|key( (code|equivalent( modifier)?|window))?|knob thickness|label|last( visible)? column|leading offset|leaf|level|line scroll|loaded|localized sort|location|loop mode|main( (bunde|menu|window))?|marker follows cell|matrix mode|maximum( content)? size|maximum visible columns|menu( form representation)?|miniaturizable|miniaturized|minimized image|minimized title|minimum column width|minimum( content)? 
size|modal|modified|mouse down state|movie( (controller|file|rect))?|muted|name|needs display|next state|next text|number of tick marks|only tick mark values|opaque|open panel|option key down|outline table column|page scroll|pages across|pages down|palette label|pane splitter|parent data item|parent window|pasteboard|path( (names|separator))?|playing|plays every frame|plays selection only|position|preferred edge|preferred type|pressure|previous text|prompt|properties|prototype cell|pulls down|rate|released when closed|repeated|requested print time|required file type|resizable|resized column|resource path|returns records|reuses columns|rich text|roll over|row height|rulers visible|save panel|scripts path|scrollable|selectable( identifiers)?|selected cell|selected( data)? columns?|selected data items?|selected( data)? rows?|selected item identifier|selection by rect|send action on arrow key|sends action when done editing|separates columns|separator item|sequence number|services menu|shared frameworks path|shared support path|sheet|shift key down|shows alpha|shows state by|size( mode)?|smart insert delete enabled|sort case sensitivity|sort column|sort order|sort type|sorted( data rows)?|sound|source( mask)?|spell checking enabled|starting page|state|string value|sub menu|super menu|super view|tab key traverses cells|tab state|tab type|tab view|table view|tag|target( printer)?|text color|text container insert|text container origin|text returned|tick mark position|time stamp|title(d| (cell|font|height|position|rect))?|tool tip|toolbar|trailing offset|transparent|treat packages as directories|truncated labels|types|unmodified characters|update views|use sort indicator|user defaults|uses data source|uses ruler|uses threaded animation|uses title from previous column|value wraps|version|vertical( (line scroll|page scroll|ruler view))?|vertically resizable|view|visible( document rect)?|volume|width|window|windows menu|wraps|zoomable|zoomed)\b`, NameAttribute, nil}, + {`\b(action cell|alert reply|application|box|browser( cell)?|bundle|button( cell)?|cell|clip view|color well|color-panel|combo box( item)?|control|data( (cell|column|item|row|source))?|default entry|dialog reply|document|drag info|drawer|event|font(-panel)?|formatter|image( (cell|view))?|matrix|menu( item)?|item|movie( view)?|open-panel|outline view|panel|pasteboard|plugin|popup button|progress indicator|responder|save-panel|scroll view|secure text field( cell)?|slider|sound|split view|stepper|tab view( item)?|table( (column|header cell|header view|view))|text( (field( cell)?|view))?|toolbar( item)?|user-defaults|view|window)s?\b`, NameBuiltin, nil}, + {`\b(animate|append|call method|center|close drawer|close panel|display|display alert|display dialog|display panel|go|hide|highlight|increment|item for|load image|load movie|load nib|load panel|load sound|localized string|lock focus|log|open drawer|path for|pause|perform action|play|register|resume|scroll|select( all)?|show|size to fit|start|step back|step forward|stop|synchronize|unlock focus|update)\b`, NameBuiltin, nil}, + {`\b((in )?back of|(in )?front of|[0-9]+(st|nd|rd|th)|first|second|third|fourth|fifth|sixth|seventh|eighth|ninth|tenth|after|back|before|behind|every|front|index|last|middle|some|that|through|thru|where|whose)\b`, NameBuiltin, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`\b([a-zA-Z]\w*)\b`, NameVariable, nil}, + {`[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?`, LiteralNumberFloat, nil}, + {`[-+]?\d+`, LiteralNumberInteger, nil}, + }, + "comment": { + 
{`\(\*`, CommentMultiline, Push()}, + {`\*\)`, CommentMultiline, Pop(1)}, + {`[^*(]+`, CommentMultiline, nil}, + {`[*(]`, CommentMultiline, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/a/awk.go b/vendor/github.com/alecthomas/chroma/lexers/a/awk.go new file mode 100644 index 00000000..d9198f12 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/a/awk.go @@ -0,0 +1,48 @@ +package a + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Awk lexer. +var Awk = internal.Register(MustNewLexer( + &Config{ + Name: "Awk", + Aliases: []string{"awk", "gawk", "mawk", "nawk"}, + Filenames: []string{"*.awk"}, + MimeTypes: []string{"application/x-awk"}, + }, + Rules{ + "commentsandwhitespace": { + {`\s+`, Text, nil}, + {`#.*$`, CommentSingle, nil}, + }, + "slashstartsregex": { + Include("commentsandwhitespace"), + {`/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/\B`, LiteralStringRegex, Pop(1)}, + {`(?=/)`, Text, Push("#pop", "badregex")}, + Default(Pop(1)), + }, + "badregex": { + {`\n`, Text, Pop(1)}, + }, + "root": { + {`^(?=\s|/)`, Text, Push("slashstartsregex")}, + Include("commentsandwhitespace"), + {`\+\+|--|\|\||&&|in\b|\$|!?~|(\*\*|[-<>+*%\^/!=|])=?`, Operator, Push("slashstartsregex")}, + {`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, + {`[})\].]`, Punctuation, nil}, + {`(break|continue|do|while|exit|for|if|else|return)\b`, Keyword, Push("slashstartsregex")}, + {`function\b`, KeywordDeclaration, Push("slashstartsregex")}, + {`(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next|nextfile|print|printf|strftime|systime|delete|system)\b`, KeywordReserved, nil}, + {`(ARGC|ARGIND|ARGV|BEGIN|CONVFMT|ENVIRON|END|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|RSTART|RT|SUBSEP)\b`, NameBuiltin, nil}, + {`[$a-zA-Z_]\w*`, NameOther, nil}, + {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/ballerina.go b/vendor/github.com/alecthomas/chroma/lexers/b/ballerina.go new file mode 100644 index 00000000..9b2665a7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/ballerina.go @@ -0,0 +1,46 @@ +package b + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Ballerina lexer. 
+var Ballerina = internal.Register(MustNewLexer( + &Config{ + Name: "Ballerina", + Aliases: []string{"ballerina"}, + Filenames: []string{"*.bal"}, + MimeTypes: []string{"text/x-ballerina"}, + DotAll: true, + }, + Rules{ + "root": { + {`[^\S\n]+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`(break|catch|continue|done|else|finally|foreach|forever|fork|if|lock|match|return|throw|transaction|try|while)\b`, Keyword, nil}, + {`((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, + {`@[^\W\d][\w.]*`, NameDecorator, nil}, + {`(annotation|bind|but|endpoint|error|function|object|private|public|returns|service|type|var|with|worker)\b`, KeywordDeclaration, nil}, + {`(boolean|byte|decimal|float|int|json|map|nil|record|string|table|xml)\b`, KeywordType, nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`import(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil}, + {`(\.)((?:[^\W\d]|\$)[\w$]*)`, ByGroups(Operator, NameAttribute), nil}, + {`^\s*([^\W\d]|\$)[\w$]*:`, NameLabel, nil}, + {`([^\W\d]|\$)[\w$]*`, Name, nil}, + {`([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFdD]?`, LiteralNumberFloat, nil}, + {`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil}, + {`0[bB][01][01_]*[lL]?`, LiteralNumberBin, nil}, + {`0[0-7_]+[lL]?`, LiteralNumberOct, nil}, + {`0|[1-9][0-9_]*[lL]?`, LiteralNumberInteger, nil}, + {`[~^*!%&\[\](){}<>|+=:;,./?-]`, Operator, nil}, + {`\n`, Text, nil}, + }, + "import": { + {`[\w.]+`, NameNamespace, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/bash.go b/vendor/github.com/alecthomas/chroma/lexers/b/bash.go new file mode 100644 index 00000000..ff4e4a80 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/bash.go @@ -0,0 +1,95 @@ +package b + +import ( + "regexp" + + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var bashAnalyserRe = regexp.MustCompile(`(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)`) + +// Bash lexer. 
+var Bash = internal.Register(MustNewLexer( + &Config{ + Name: "Bash", + Aliases: []string{"bash", "sh", "ksh", "zsh", "shell"}, + Filenames: []string{"*.sh", "*.ksh", "*.bash", "*.ebuild", "*.eclass", "*.exheres-0", "*.exlib", "*.zsh", "*.zshrc", ".bashrc", "bashrc", ".bash_*", "bash_*", "zshrc", ".zshrc", "PKGBUILD"}, + MimeTypes: []string{"application/x-sh", "application/x-shellscript"}, + }, + Rules{ + "root": { + Include("basic"), + {"`", LiteralStringBacktick, Push("backticks")}, + Include("data"), + Include("interp"), + }, + "interp": { + {`\$\(\(`, Keyword, Push("math")}, + {`\$\(`, Keyword, Push("paren")}, + {`\$\{#?`, LiteralStringInterpol, Push("curly")}, + {`\$[a-zA-Z_]\w*`, NameVariable, nil}, + {`\$(?:\d+|[#$?!_*@-])`, NameVariable, nil}, + {`\$`, Text, nil}, + }, + "basic": { + {`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil}, + {"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil}, + {`\A#!.+\n`, CommentPreproc, nil}, + {`#.*\n`, CommentSingle, nil}, + {`\\[\w\W]`, LiteralStringEscape, nil}, + {`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil}, + {`[\[\]{}()=]`, Operator, nil}, + {`<<<`, Operator, nil}, + {`<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2`, LiteralString, nil}, + {`&&|\|\|`, Operator, nil}, + }, + "data": { + {`(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'`, LiteralStringSingle, nil}, + {`(?s)'.*?'`, LiteralStringSingle, nil}, + {`;`, Punctuation, nil}, + {`&`, Punctuation, nil}, + {`\|`, Punctuation, nil}, + {`\s+`, Text, nil}, + {`\d+\b`, LiteralNumber, nil}, + {"[^=\\s\\[\\]{}()$\"\\'`\\\\<&|;]+", Text, nil}, + {`<`, Text, nil}, + }, + "string": { + {`"`, LiteralStringDouble, Pop(1)}, + {`(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+`, LiteralStringDouble, nil}, + Include("interp"), + }, + "curly": { + {`\}`, LiteralStringInterpol, Pop(1)}, + {`:-`, Keyword, nil}, + {`\w+`, NameVariable, nil}, + {"[^}:\"\\'`$\\\\]+", Punctuation, nil}, + {`:`, Punctuation, nil}, + Include("root"), + }, + "paren": { + {`\)`, Keyword, Pop(1)}, + Include("root"), + }, + "math": { + {`\)\)`, Keyword, Pop(1)}, + {`[-+*/%^|&]|\*\*|\|\|`, Operator, nil}, + {`\d+#\d+`, LiteralNumber, nil}, + {`\d+#(?! )`, LiteralNumber, nil}, + {`\d+`, LiteralNumber, nil}, + Include("root"), + }, + "backticks": { + {"`", LiteralStringBacktick, Pop(1)}, + Include("root"), + }, + }, +).SetAnalyser(func(text string) float32 { + if bashAnalyserRe.FindString(text) != "" { + return 1.0 + } + return 0.0 +})) diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/batch.go b/vendor/github.com/alecthomas/chroma/lexers/b/batch.go new file mode 100644 index 00000000..dc6ce524 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/batch.go @@ -0,0 +1,194 @@ +package b + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Batchfile lexer. 
+var Batchfile = internal.Register(MustNewLexer( + &Config{ + Name: "Batchfile", + Aliases: []string{"bat", "batch", "dosbatch", "winbatch"}, + Filenames: []string{"*.bat", "*.cmd"}, + MimeTypes: []string{"application/x-dos-batch"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`\)((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*)`, CommentSingle, nil}, + {`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow")}, + {`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil}, + Include("redirect"), + {`[\n\x1a]+`, Text, nil}, + {`\(`, Punctuation, Push("root/compound")}, + {`@+`, Punctuation, nil}, + {`((?:for|if|rem)(?:(?=(?:\^[\n\x1a]?)?/)|(?:(?!\^)|(?<=m))(?:(?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+)?(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?)`, ByGroups(Keyword, UsingSelf("text")), Push("follow")}, + {`(goto(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|])*(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|])*)`, ByGroups(Keyword, UsingSelf("text")), Push("follow")}, + {Words(``, `(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])`, `assoc`, `break`, `cd`, `chdir`, `cls`, `color`, `copy`, `date`, `del`, `dir`, `dpath`, `echo`, `endlocal`, `erase`, `exit`, `ftype`, `keys`, `md`, `mkdir`, `mklink`, `move`, `path`, `pause`, `popd`, `prompt`, `pushd`, `rd`, `ren`, `rename`, `rmdir`, `setlocal`, `shift`, `start`, `time`, `title`, `type`, `ver`, `verify`, `vol`), Keyword, Push("follow")}, + {`(call)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("call")}, + {`call(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])`, Keyword, nil}, + {`(for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/f(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/f", "for")}, + {`(for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/l(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/l", "for")}, + {`for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^)`, Keyword, Push("for2", "for")}, + {`(goto(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:?)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("label")}, + {`(if(?:(?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:/i(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:not(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, 
UsingSelf("text"), Keyword, UsingSelf("text")), Push("(?", "if")}, + {`rem(((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)?.*|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*))`, CommentSingle, Push("follow")}, + {`(set(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:\^[\n\x1a]?)?[^\S\n])*)(/a)`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("arithmetic")}, + {`(set(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:/p)?)((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|^=]|\^[\n\x1a]?[^"=])+)?)((?:(?:\^[\n\x1a]?)?=)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), UsingSelf("variable"), Punctuation), Push("follow")}, + Default(Push("follow")), + }, + "follow": { + {`((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:)([\t\v\f\r ,;=\xa0]*)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*))(.*)`, ByGroups(Text, Punctuation, Text, NameLabel, CommentSingle), nil}, + Include("redirect"), + {`(?=[\n\x1a])`, Text, Pop(1)}, + {`\|\|?|&&?`, Punctuation, Pop(1)}, + Include("text"), + }, + "arithmetic": { + {`0[0-7]+`, LiteralNumberOct, nil}, + {`0x[\da-f]+`, LiteralNumberHex, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`[(),]+`, Punctuation, nil}, + {`([=+\-*/!~]|%|\^\^)+`, Operator, nil}, + {`((?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(\^[\n\x1a]?)?[^()=+\-*/!~%^"\n\x1a&<>|\t\v\f\r ,;=\xa0]|\^[\n\x1a\t\v\f\r ,;=\xa0]?[\w\W])+`, UsingSelf("variable"), nil}, + {`(?=[\x00|&])`, Text, Pop(1)}, + Include("follow"), + }, + "call": { + {`(:?)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*))`, ByGroups(Punctuation, NameLabel), Pop(1)}, + }, + "label": { + {`((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*)?)((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|\^[\n\x1a]?[\w\W]|[^"%^\n\x1a&<>|])*)`, ByGroups(NameLabel, CommentSingle), Pop(1)}, + }, + "redirect": { + {`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])\d)?)(>>?&|<&)([\n\x1a\t\v\f\r ,;=\xa0]*)(\d)`, ByGroups(LiteralNumberInteger, Punctuation, Text, LiteralNumberInteger), nil}, + {`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])(?>?|<)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r 
,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(LiteralNumberInteger, Punctuation, UsingSelf("text")), nil}, + }, + "root/compound": { + {`\)`, Punctuation, Pop(1)}, + {`(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))`, Text, Push("follow/compound")}, + {`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil}, + Include("redirect/compound"), + {`[\n\x1a]+`, Text, nil}, + {`\(`, Punctuation, Push("root/compound")}, + {`@+`, Punctuation, nil}, + {`((?:for|if|rem)(?:(?=(?:\^[\n\x1a]?)?/)|(?:(?!\^)|(?<=m))(?:(?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0)])+)?(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?)`, ByGroups(Keyword, UsingSelf("text")), Push("follow/compound")}, + {`(goto(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|)])*(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|)])*)`, ByGroups(Keyword, UsingSelf("text")), Push("follow/compound")}, + {Words(``, `(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))`, `assoc`, `break`, `cd`, `chdir`, `cls`, `color`, `copy`, `date`, `del`, `dir`, `dpath`, `echo`, `endlocal`, `erase`, `exit`, `ftype`, `keys`, `md`, `mkdir`, `mklink`, `move`, `path`, `pause`, `popd`, `prompt`, `pushd`, `rd`, `ren`, `rename`, `rmdir`, `setlocal`, `shift`, `start`, `time`, `title`, `type`, `ver`, `verify`, `vol`), Keyword, Push("follow/compound")}, + {`(call)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("call/compound")}, + {`call(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))`, Keyword, nil}, + {`(for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/f(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/f", "for")}, + {`(for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/l(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("for/l", "for")}, + {`for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^)`, Keyword, Push("for2", "for")}, + {`(goto(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:?)`, ByGroups(Keyword, UsingSelf("text"), Punctuation), Push("label/compound")}, + 
{`(if(?:(?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:/i(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:not(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), Keyword, UsingSelf("text")), Push("(?", "if")}, + {`rem(((?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)?.*|(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))(?:(?:[^\n\x1a^)]|\^[\n\x1a]?[^)])*))`, CommentSingle, Push("follow/compound")}, + {`(set(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:\^[\n\x1a]?)?[^\S\n])*)(/a)`, ByGroups(Keyword, UsingSelf("text"), Keyword), Push("arithmetic/compound")}, + {`(set(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:/p)?)((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|^=)]|\^[\n\x1a]?[^"=])+)?)((?:(?:\^[\n\x1a]?)?=)?)`, ByGroups(Keyword, UsingSelf("text"), Keyword, UsingSelf("text"), UsingSelf("variable"), Punctuation), Push("follow/compound")}, + Default(Push("follow/compound")), + }, + "follow/compound": { + {`(?=\))`, Text, Pop(1)}, + {`((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:)([\t\v\f\r ,;=\xa0]*)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*))(.*)`, ByGroups(Text, Punctuation, Text, NameLabel, CommentSingle), nil}, + Include("redirect/compound"), + {`(?=[\n\x1a])`, Text, Pop(1)}, + {`\|\|?|&&?`, Punctuation, Pop(1)}, + Include("text"), + }, + "arithmetic/compound": { + {`(?=\))`, Text, Pop(1)}, + {`0[0-7]+`, LiteralNumberOct, nil}, + {`0x[\da-f]+`, LiteralNumberHex, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`[(),]+`, Punctuation, nil}, + {`([=+\-*/!~]|%|\^\^)+`, Operator, nil}, + {`((?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(\^[\n\x1a]?)?[^()=+\-*/!~%^"\n\x1a&<>|\t\v\f\r ,;=\xa0]|\^[\n\x1a\t\v\f\r ,;=\xa0]?[^)])+`, UsingSelf("variable"), nil}, + {`(?=[\x00|&])`, Text, Pop(1)}, + Include("follow"), + }, + "call/compound": { + {`(?=\))`, Text, Pop(1)}, + {`(:?)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*))`, ByGroups(Punctuation, NameLabel), Pop(1)}, + }, + "label/compound": { + {`(?=\))`, Text, Pop(1)}, + {`((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*)?)((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|\^[\n\x1a]?[^)]|[^"%^\n\x1a&<>|)])*)`, ByGroups(NameLabel, 
CommentSingle), Pop(1)}, + }, + "redirect/compound": { + {`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])\d)?)(>>?&|<&)([\n\x1a\t\v\f\r ,;=\xa0]*)(\d)`, ByGroups(LiteralNumberInteger, Punctuation, Text, LiteralNumberInteger), nil}, + {`((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])(?>?|<)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0)])+))+))`, ByGroups(LiteralNumberInteger, Punctuation, UsingSelf("text")), nil}, + }, + "variable-or-escape": { + {`(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))`, NameVariable, nil}, + {`%%|\^[\n\x1a]?(\^!|[\w\W])`, LiteralStringEscape, nil}, + }, + "string": { + {`"`, LiteralStringDouble, Pop(1)}, + {`(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))`, NameVariable, nil}, + {`\^!|%%`, LiteralStringEscape, nil}, + {`[^"%^\n\x1a]+|[%^]`, LiteralStringDouble, nil}, + Default(Pop(1)), + }, + "sqstring": { + Include("variable-or-escape"), + {`[^%]+|%`, LiteralStringSingle, nil}, + }, + "bqstring": { + Include("variable-or-escape"), + {`[^%]+|%`, LiteralStringBacktick, nil}, + }, + "text": { + {`"`, LiteralStringDouble, Push("string")}, + Include("variable-or-escape"), + {`[^"%^\n\x1a&<>|\t\v\f\r ,;=\xa0\d)]+|.`, Text, nil}, + }, + "variable": { + {`"`, LiteralStringDouble, Push("string")}, + Include("variable-or-escape"), + {`[^"%^\n\x1a]+|.`, NameVariable, nil}, + }, + "for": { + {`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(in)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(\()`, ByGroups(UsingSelf("text"), Keyword, UsingSelf("text"), Punctuation), Pop(1)}, + Include("follow"), + }, + "for2": { + {`\)`, Punctuation, nil}, + {`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(do(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))`, ByGroups(UsingSelf("text"), Keyword), Pop(1)}, + {`[\n\x1a]+`, Text, nil}, + Include("follow"), + }, + "for/f": { + {`(")((?:(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"])*?")([\n\x1a\t\v\f\r ,;=\xa0]*)(\))`, ByGroups(LiteralStringDouble, UsingSelf("string"), Text, Punctuation), nil}, + {`"`, LiteralStringDouble, Push("#pop", "for2", "string")}, + {`('(?:%%|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[\w\W])*?')([\n\x1a\t\v\f\r ,;=\xa0]*)(\))`, ByGroups(UsingSelf("sqstring"), Text, Punctuation), 
nil}, + {"(`(?:%%|(?:(?:%(?:\\*|(?:~[a-z]*(?:\\$[^:]+:)?)?\\d|[^%:\\n\\x1a]+(?::(?:~(?:-?\\d+)?(?:,(?:-?\\d+)?)?|(?:[^%\\n\\x1a^]|\\^[^%\\n\\x1a])[^=\\n\\x1a]*=(?:[^%\\n\\x1a^]|\\^[^%\\n\\x1a])*)?)?%))|(?:\\^?![^!:\\n\\x1a]+(?::(?:~(?:-?\\d+)?(?:,(?:-?\\d+)?)?|(?:[^!\\n\\x1a^]|\\^[^!\\n\\x1a])[^=\\n\\x1a]*=(?:[^!\\n\\x1a^]|\\^[^!\\n\\x1a])*)?)?\\^?!))|[\\w\\W])*?`)([\\n\\x1a\\t\\v\\f\\r ,;=\\xa0]*)(\\))", ByGroups(UsingSelf("bqstring"), Text, Punctuation), nil}, + Include("for2"), + }, + "for/l": { + {`-?\d+`, LiteralNumberInteger, nil}, + Include("for2"), + }, + "if": { + {`((?:cmdextversion|errorlevel)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(\d+)`, ByGroups(Keyword, UsingSelf("text"), LiteralNumberInteger), Pop(1)}, + {`(defined(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(Keyword, UsingSelf("text"), UsingSelf("variable")), Pop(1)}, + {`(exist(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(Keyword, UsingSelf("text")), Pop(1)}, + {`((?:-?(?:0[0-7]+|0x[\da-f]+|\d+)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:equ|geq|gtr|leq|lss|neq))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))`, ByGroups(UsingSelf("arithmetic"), OperatorWord, UsingSelf("arithmetic")), Pop(1)}, + {`(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)`, UsingSelf("text"), Push("#pop", "if2")}, + }, + "if2": { + {`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(==)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(UsingSelf("text"), Operator, UsingSelf("text")), Pop(1)}, + {`((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:equ|geq|gtr|leq|lss|neq))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r 
,;=\xa0])+)(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))`, ByGroups(UsingSelf("text"), OperatorWord, UsingSelf("text")), Pop(1)}, + }, + "(?": { + {`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil}, + {`\(`, Punctuation, Push("#pop", "else?", "root/compound")}, + Default(Pop(1)), + }, + "else?": { + {`(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)`, UsingSelf("text"), nil}, + {`else(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])`, Keyword, Pop(1)}, + Default(Pop(1)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/blitz.go b/vendor/github.com/alecthomas/chroma/lexers/b/blitz.go new file mode 100644 index 00000000..5d5ffc85 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/blitz.go @@ -0,0 +1,48 @@ +package b + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Blitzbasic lexer. +var Blitzbasic = internal.Register(MustNewLexer( + &Config{ + Name: "BlitzBasic", + Aliases: []string{"blitzbasic", "b3d", "bplus"}, + Filenames: []string{"*.bb", "*.decls"}, + MimeTypes: []string{"text/x-bb"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`[ \t]+`, Text, nil}, + {`;.*?\n`, CommentSingle, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`[0-9]+\.[0-9]*(?!\.)`, LiteralNumberFloat, nil}, + {`\.[0-9]+(?!\.)`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`\$[0-9a-f]+`, LiteralNumberHex, nil}, + {`\%[10]+`, LiteralNumberBin, nil}, + {Words(`\b`, `\b`, `Shl`, `Shr`, `Sar`, `Mod`, `Or`, `And`, `Not`, `Abs`, `Sgn`, `Handle`, `Int`, `Float`, `Str`, `First`, `Last`, `Before`, `After`), Operator, nil}, + {`([+\-*/~=<>^])`, Operator, nil}, + {`[(),:\[\]\\]`, Punctuation, nil}, + {`\.([ \t]*)([a-z]\w*)`, NameLabel, nil}, + {`\b(New)\b([ \t]+)([a-z]\w*)`, ByGroups(KeywordReserved, Text, NameClass), nil}, + {`\b(Gosub|Goto)\b([ \t]+)([a-z]\w*)`, ByGroups(KeywordReserved, Text, NameLabel), nil}, + {`\b(Object)\b([ \t]*)([.])([ \t]*)([a-z]\w*)\b`, ByGroups(Operator, Text, Punctuation, Text, NameClass), nil}, + {`\b([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?\b([ \t]*)(\()`, ByGroups(NameFunction, Text, KeywordType, Text, Punctuation, Text, NameClass, Text, Punctuation), nil}, + {`\b(Function)\b([ \t]+)([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?`, ByGroups(KeywordReserved, Text, NameFunction, Text, KeywordType, Text, Punctuation, Text, NameClass), nil}, + {`\b(Type)([ \t]+)([a-z]\w*)`, ByGroups(KeywordReserved, Text, NameClass), nil}, + {`\b(Pi|True|False|Null)\b`, KeywordConstant, nil}, + {`\b(Local|Global|Const|Field|Dim)\b`, KeywordDeclaration, nil}, + {Words(`\b`, `\b`, `End`, `Return`, `Exit`, `Chr`, `Len`, `Asc`, `New`, `Delete`, `Insert`, `Include`, `Function`, `Type`, `If`, `Then`, `Else`, `ElseIf`, `EndIf`, `For`, `To`, `Next`, `Step`, `Each`, `While`, `Wend`, `Repeat`, `Until`, `Forever`, `Select`, `Case`, `Default`, `Goto`, `Gosub`, `Data`, `Read`, `Restore`), KeywordReserved, nil}, + {`([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?`, ByGroups(NameVariable, Text, KeywordType, Text, Punctuation, Text, NameClass), nil}, + 
}, + "string": { + {`""`, LiteralStringDouble, nil}, + {`"C?`, LiteralStringDouble, Pop(1)}, + {`[^"]+`, LiteralStringDouble, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/bnf.go b/vendor/github.com/alecthomas/chroma/lexers/b/bnf.go new file mode 100644 index 00000000..5123a45a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/bnf.go @@ -0,0 +1,24 @@ +package b + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Bnf lexer. +var Bnf = internal.Register(MustNewLexer( + &Config{ + Name: "BNF", + Aliases: []string{"bnf"}, + Filenames: []string{"*.bnf"}, + MimeTypes: []string{"text/x-bnf"}, + }, + Rules{ + "root": { + {`(<)([ -;=?-~]+)(>)`, ByGroups(Punctuation, NameClass, Punctuation), nil}, + {`::=`, Operator, nil}, + {`[^<>:]+`, Text, nil}, + {`.`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/b/brainfuck.go b/vendor/github.com/alecthomas/chroma/lexers/b/brainfuck.go new file mode 100644 index 00000000..6fac5f5e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/b/brainfuck.go @@ -0,0 +1,34 @@ +package b + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Brainfuck lexer. +var Brainfuck = internal.Register(MustNewLexer( + &Config{ + Name: "Brainfuck", + Aliases: []string{"brainfuck", "bf"}, + Filenames: []string{"*.bf", "*.b"}, + MimeTypes: []string{"application/x-brainfuck"}, + }, + Rules{ + "common": { + {`[.,]+`, NameTag, nil}, + {`[+-]+`, NameBuiltin, nil}, + {`[<>]+`, NameVariable, nil}, + {`[^.,+\-<>\[\]]+`, Comment, nil}, + }, + "root": { + {`\[`, Keyword, Push("loop")}, + {`\]`, Error, nil}, + Include("common"), + }, + "loop": { + {`\[`, Keyword, Push()}, + {`\]`, Keyword, Pop(1)}, + Include("common"), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/c.go b/vendor/github.com/alecthomas/chroma/lexers/c/c.go new file mode 100644 index 00000000..df2c0faa --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/c.go @@ -0,0 +1,91 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// C lexer. 
+var C = internal.Register(MustNewLexer( + &Config{ + Name: "C", + Aliases: []string{"c"}, + Filenames: []string{"*.c", "*.h", "*.idc"}, + MimeTypes: []string{"text/x-chdr", "text/x-csrc"}, + }, + Rules{ + "whitespace": { + {`^#if\s+0`, CommentPreproc, Push("if0")}, + {`^#`, CommentPreproc, Push("macro")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, + {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, + }, + "statements": { + {`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, + {`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, + {`0[0-7]+[LlUu]*`, LiteralNumberOct, nil}, + {`\d+[LlUu]*`, LiteralNumberInteger, nil}, + {`\*/`, Error, nil}, + {`[~!%^&*+=|?:<>/-]`, Operator, nil}, + {`[()\[\],.]`, Punctuation, nil}, + {Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil}, + {`(bool|int|long|float|short|double|char|unsigned|signed|void)\b`, KeywordType, nil}, + {Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil}, + {`(__m(128i|128d|128|64))\b`, KeywordReserved, nil}, + {Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil}, + {`(true|false|NULL)\b`, NameBuiltin, nil}, + {`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "root": { + Include("whitespace"), + {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, + {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil}, + Default(Push("statement")), + }, + "statement": { + Include("whitespace"), + Include("statements"), + {`[{}]`, Punctuation, nil}, + {`;`, Punctuation, Pop(1)}, + }, + "function": { + Include("whitespace"), + Include("statements"), + {`;`, Punctuation, nil}, + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil}, + {`[^\\"\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + }, + "macro": { + {`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil}, + {`[^/\n]+`, CommentPreproc, nil}, + {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {`/`, 
CommentPreproc, nil}, + {`(?<=\\)\n`, CommentPreproc, nil}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "if0": { + {`^\s*#if.*?(?|+=:;,./?-]`, Operator, nil}, + {`\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?`, LiteralNumberFloat, nil}, + {`\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?`, LiteralNumberFloat, nil}, + {`[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?`, LiteralNumberFloat, nil}, + {`[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?`, LiteralNumberFloat, nil}, + {`#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+`, LiteralNumberHex, nil}, + {`#[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`\$([01]{4})(_[01]{4})+`, LiteralNumberBin, nil}, + {`\$[01]+`, LiteralNumberBin, nil}, + {`\d{1,3}(_\d{3})+[kMGTP]?`, LiteralNumberInteger, nil}, + {`[0-9]+[kMGTP]?`, LiteralNumberInteger, nil}, + {`\n`, Text, nil}, + }, + "class": { + {`[A-Za-z_]\w*`, NameClass, Pop(1)}, + }, + "import": { + {`[a-z][\w.]*`, NameNamespace, Pop(1)}, + }, + "comment": { + {`[^*/]`, CommentMultiline, nil}, + {`/\*`, CommentMultiline, Push()}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[*/]`, CommentMultiline, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cfengine3.go b/vendor/github.com/alecthomas/chroma/lexers/c/cfengine3.go new file mode 100644 index 00000000..f96252fa --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cfengine3.go @@ -0,0 +1,56 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Cfengine3 lexer. +var Cfengine3 = internal.Register(MustNewLexer( + &Config{ + Name: "CFEngine3", + Aliases: []string{"cfengine3", "cf3"}, + Filenames: []string{"*.cf"}, + MimeTypes: []string{}, + }, + Rules{ + "root": { + {`#.*?\n`, Comment, nil}, + {`(body)(\s+)(\S+)(\s+)(control)`, ByGroups(Keyword, Text, Keyword, Text, Keyword), nil}, + {`(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()`, ByGroups(Keyword, Text, Keyword, Text, NameFunction, Punctuation), Push("arglist")}, + {`(body|bundle)(\s+)(\S+)(\s+)(\w+)`, ByGroups(Keyword, Text, Keyword, Text, NameFunction), nil}, + {`(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)`, ByGroups(Punctuation, NameVariable, Punctuation, Text, KeywordType, Text, Operator, Text), nil}, + {`(\S+)(\s*)(=>)(\s*)`, ByGroups(KeywordReserved, Text, Operator, Text), nil}, + {`"`, LiteralString, Push("string")}, + {`(\w+)(\()`, ByGroups(NameFunction, Punctuation), nil}, + {`([\w.!&|()]+)(::)`, ByGroups(NameClass, Punctuation), nil}, + {`(\w+)(:)`, ByGroups(KeywordDeclaration, Punctuation), nil}, + {`@[{(][^)}]+[})]`, NameVariable, nil}, + {`[(){},;]`, Punctuation, nil}, + {`=>`, Operator, nil}, + {`->`, Operator, nil}, + {`\d+\.\d+`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`\w+`, NameFunction, nil}, + {`\s+`, Text, nil}, + }, + "string": { + {`\$[{(]`, LiteralStringInterpol, Push("interpol")}, + {`\\.`, LiteralStringEscape, nil}, + {`"`, LiteralString, Pop(1)}, + {`\n`, LiteralString, nil}, + {`.`, LiteralString, nil}, + }, + "interpol": { + {`\$[{(]`, LiteralStringInterpol, Push()}, + {`[})]`, LiteralStringInterpol, Pop(1)}, + {`[^${()}]+`, LiteralStringInterpol, nil}, + }, + "arglist": { + {`\)`, Punctuation, Pop(1)}, + {`,`, Punctuation, nil}, + {`\w+`, NameVariable, nil}, + {`\s+`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/chaiscript.go b/vendor/github.com/alecthomas/chroma/lexers/c/chaiscript.go new file mode 100644 index 00000000..d2aa50db --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/lexers/c/chaiscript.go @@ -0,0 +1,63 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Chaiscript lexer. +var Chaiscript = internal.Register(MustNewLexer( + &Config{ + Name: "ChaiScript", + Aliases: []string{"chai", "chaiscript"}, + Filenames: []string{"*.chai"}, + MimeTypes: []string{"text/x-chaiscript", "application/x-chaiscript"}, + DotAll: true, + }, + Rules{ + "commentsandwhitespace": { + {`\s+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`^\#.*?\n`, CommentSingle, nil}, + }, + "slashstartsregex": { + Include("commentsandwhitespace"), + {`/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)`, LiteralStringRegex, Pop(1)}, + {`(?=/)`, Text, Push("#pop", "badregex")}, + Default(Pop(1)), + }, + "badregex": { + {`\n`, Text, Pop(1)}, + }, + "root": { + Include("commentsandwhitespace"), + {`\n`, Text, nil}, + {`[^\S\n]+`, Text, nil}, + {`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")}, + {`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, + {`[})\].]`, Punctuation, nil}, + {`[=+\-*/]`, Operator, nil}, + {`(for|in|while|do|break|return|continue|if|else|throw|try|catch)\b`, Keyword, Push("slashstartsregex")}, + {`(var)\b`, KeywordDeclaration, Push("slashstartsregex")}, + {`(attr|def|fun)\b`, KeywordReserved, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {`(eval|throw)\b`, NameBuiltin, nil}, + {"`\\S+`", NameBuiltin, nil}, + {`[$a-zA-Z_]\w*`, NameOther, nil}, + {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`"`, LiteralStringDouble, Push("dqstring")}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + }, + "dqstring": { + {`\$\{[^"}]+?\}`, LiteralStringInterpol, nil}, + {`\$`, LiteralStringDouble, nil}, + {`\\\\`, LiteralStringDouble, nil}, + {`\\"`, LiteralStringDouble, nil}, + {`[^\\"$]+`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cheetah.go b/vendor/github.com/alecthomas/chroma/lexers/c/cheetah.go new file mode 100644 index 00000000..b2cb9c40 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cheetah.go @@ -0,0 +1,37 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" + . "github.com/alecthomas/chroma/lexers/p" // nolint +) + +// Cheetah lexer. +var Cheetah = internal.Register(MustNewLexer( + &Config{ + Name: "Cheetah", + Aliases: []string{"cheetah", "spitfire"}, + Filenames: []string{"*.tmpl", "*.spt"}, + MimeTypes: []string{"application/x-cheetah", "application/x-spitfire"}, + }, + Rules{ + "root": { + {`(##[^\n]*)$`, ByGroups(Comment), nil}, + {`#[*](.|\n)*?[*]#`, Comment, nil}, + {`#end[^#\n]*(?:#|$)`, CommentPreproc, nil}, + {`#slurp$`, CommentPreproc, nil}, + {`(#[a-zA-Z]+)([^#\n]*)(#|$)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, + {`(\$)([a-zA-Z_][\w.]*\w)`, ByGroups(CommentPreproc, Using(Python)), nil}, + {`(\$\{!?)(.*?)(\})(?s)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, + {`(?sx) + (.+?) 
# anything, followed by: + (?: + (?=\#[#a-zA-Z]*) | # an eval comment + (?=\$[a-zA-Z_{]) | # a substitution + \Z # end of string + ) + `, Other, nil}, + {`\s+`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cl.go b/vendor/github.com/alecthomas/chroma/lexers/c/cl.go new file mode 100644 index 00000000..6e048dea --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cl.go @@ -0,0 +1,74 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Common Lisp lexer. +var CommonLisp = internal.Register(MustNewLexer( + &Config{ + Name: "Common Lisp", + Aliases: []string{"common-lisp", "cl", "lisp"}, + Filenames: []string{"*.cl", "*.lisp"}, + MimeTypes: []string{"text/x-common-lisp"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + Default(Push("body")), + }, + "multiline-comment": { + {`#\|`, CommentMultiline, Push()}, + {`\|#`, CommentMultiline, Pop(1)}, + {`[^|#]+`, CommentMultiline, nil}, + {`[|#]`, CommentMultiline, nil}, + }, + "commented-form": { + {`\(`, CommentPreproc, Push()}, + {`\)`, CommentPreproc, Pop(1)}, + {`[^()]+`, CommentPreproc, nil}, + }, + "body": { + {`\s+`, Text, nil}, + {`;.*$`, CommentSingle, nil}, + {`#\|`, CommentMultiline, Push("multiline-comment")}, + {`#\d*Y.*$`, CommentSpecial, nil}, + {`"(\\.|\\\n|[^"\\])*"`, LiteralString, nil}, + {`:(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil}, + {`::(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil}, + {`:#(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil}, + {`'(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil}, + {`'`, Operator, nil}, + {"`", Operator, nil}, + {"[-+]?\\d+\\.?(?=[ \"()\\'\\n,;`])", LiteralNumberInteger, nil}, + {"[-+]?\\d+/\\d+(?=[ \"()\\'\\n,;`])", LiteralNumber, nil}, + {"[-+]?(\\d*\\.\\d+([defls][-+]?\\d+)?|\\d+(\\.\\d*)?[defls][-+]?\\d+)(?=[ \"()\\'\\n,;`])", LiteralNumberFloat, nil}, + {"#\\\\.(?=[ \"()\\'\\n,;`])", LiteralStringChar, nil}, + {`#\\(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringChar, nil}, + {`#\(`, Operator, Push("body")}, + {`#\d*\*[01]*`, LiteralOther, nil}, + {`#:(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, LiteralStringSymbol, nil}, + {`#[.,]`, Operator, nil}, + {`#\'`, NameFunction, nil}, + {`#b[+-]?[01]+(/[01]+)?`, LiteralNumberBin, nil}, + {`#o[+-]?[0-7]+(/[0-7]+)?`, LiteralNumberOct, nil}, + {`#x[+-]?[0-9a-f]+(/[0-9a-f]+)?`, LiteralNumberHex, nil}, + {`#\d+r[+-]?[0-9a-z]+(/[0-9a-z]+)?`, LiteralNumber, nil}, + {`(#c)(\()`, ByGroups(LiteralNumber, Punctuation), Push("body")}, + {`(#\d+a)(\()`, ByGroups(LiteralOther, Punctuation), Push("body")}, + {`(#s)(\()`, ByGroups(LiteralOther, Punctuation), Push("body")}, + {`#p?"(\\.|[^"])*"`, LiteralOther, nil}, + {`#\d+=`, Operator, nil}, + {`#\d+#`, Operator, nil}, + {"#+nil(?=[ \"()\\'\\n,;`])\\s*\\(", CommentPreproc, Push("commented-form")}, + {`#[+-]`, Operator, nil}, + {`(,@|,|\.)`, Operator, nil}, + {"(t|nil)(?=[ \"()\\'\\n,;`])", NameConstant, nil}, + {`\*(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)\*`, NameVariableGlobal, nil}, + {`(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)`, 
NameVariable, nil}, + {`\(`, Punctuation, Push("body")}, + {`\)`, Punctuation, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/clojure.go b/vendor/github.com/alecthomas/chroma/lexers/c/clojure.go new file mode 100644 index 00000000..e63752a5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/clojure.go @@ -0,0 +1,38 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Clojure lexer. +var Clojure = internal.Register(MustNewLexer( + &Config{ + Name: "Clojure", + Aliases: []string{"clojure", "clj"}, + Filenames: []string{"*.clj"}, + MimeTypes: []string{"text/x-clojure", "application/x-clojure"}, + }, + Rules{ + "root": { + {`;.*$`, CommentSingle, nil}, + {`[,\s]+`, Text, nil}, + {`-?\d+\.\d+`, LiteralNumberFloat, nil}, + {`-?\d+`, LiteralNumberInteger, nil}, + {`0x-?[abcdef\d]+`, LiteralNumberHex, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, + {`\\(.|[a-z]+)`, LiteralStringChar, nil}, + {`::?#?(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, + {"~@|[`\\'#^~&@]", Operator, nil}, + {Words(``, ` `, `.`, `def`, `do`, `fn`, `if`, `let`, `new`, `quote`, `var`, `loop`), Keyword, nil}, + {Words(``, ` `, `def-`, `defn`, `defn-`, `defmacro`, `defmulti`, `defmethod`, `defstruct`, `defonce`, `declare`, `definline`, `definterface`, `defprotocol`, `defrecord`, `deftype`, `defproject`, `ns`), KeywordDeclaration, nil}, + {Words(``, ` `, `*`, `+`, `-`, `->`, `/`, `<`, `<=`, `=`, `==`, `>`, `>=`, `..`, `accessor`, `agent`, `agent-errors`, `aget`, `alength`, `all-ns`, `alter`, `and`, `append-child`, `apply`, `array-map`, `aset`, `aset-boolean`, `aset-byte`, `aset-char`, `aset-double`, `aset-float`, `aset-int`, `aset-long`, `aset-short`, `assert`, `assoc`, `await`, `await-for`, `bean`, `binding`, `bit-and`, `bit-not`, `bit-or`, `bit-shift-left`, `bit-shift-right`, `bit-xor`, `boolean`, `branch?`, `butlast`, `byte`, `cast`, `char`, `children`, `class`, `clear-agent-errors`, `comment`, `commute`, `comp`, `comparator`, `complement`, `concat`, `conj`, `cons`, `constantly`, `cond`, `if-not`, `construct-proxy`, `contains?`, `count`, `create-ns`, `create-struct`, `cycle`, `dec`, `deref`, `difference`, `disj`, `dissoc`, `distinct`, `doall`, `doc`, `dorun`, `doseq`, `dosync`, `dotimes`, `doto`, `double`, `down`, `drop`, `drop-while`, `edit`, `end?`, `ensure`, `eval`, `every?`, `false?`, `ffirst`, `file-seq`, `filter`, `find`, `find-doc`, `find-ns`, `find-var`, `first`, `float`, `flush`, `for`, `fnseq`, `frest`, `gensym`, `get-proxy-class`, `get`, `hash-map`, `hash-set`, `identical?`, `identity`, `if-let`, `import`, `in-ns`, `inc`, `index`, `insert-child`, `insert-left`, `insert-right`, `inspect-table`, `inspect-tree`, `instance?`, `int`, `interleave`, `intersection`, `into`, `into-array`, `iterate`, `join`, `key`, `keys`, `keyword`, `keyword?`, `last`, `lazy-cat`, `lazy-cons`, `left`, `lefts`, `line-seq`, `list*`, `list`, `load`, `load-file`, `locking`, `long`, `loop`, `macroexpand`, `macroexpand-1`, `make-array`, `make-node`, `map`, `map-invert`, `map?`, `mapcat`, `max`, `max-key`, `memfn`, `merge`, `merge-with`, `meta`, `min`, `min-key`, `name`, `namespace`, `neg?`, `new`, `newline`, `next`, `nil?`, `node`, `not`, `not-any?`, `not-every?`, `not=`, `ns-imports`, `ns-interns`, `ns-map`, `ns-name`, `ns-publics`, `ns-refers`, `ns-resolve`, `ns-unmap`, `nth`, `nthrest`, `or`, `parse`, `partial`, `path`, `peek`, `pop`, `pos?`, `pr`, 
`pr-str`, `print`, `print-str`, `println`, `println-str`, `prn`, `prn-str`, `project`, `proxy`, `proxy-mappings`, `quot`, `rand`, `rand-int`, `range`, `re-find`, `re-groups`, `re-matcher`, `re-matches`, `re-pattern`, `re-seq`, `read`, `read-line`, `reduce`, `ref`, `ref-set`, `refer`, `rem`, `remove`, `remove-method`, `remove-ns`, `rename`, `rename-keys`, `repeat`, `replace`, `replicate`, `resolve`, `rest`, `resultset-seq`, `reverse`, `rfirst`, `right`, `rights`, `root`, `rrest`, `rseq`, `second`, `select`, `select-keys`, `send`, `send-off`, `seq`, `seq-zip`, `seq?`, `set`, `short`, `slurp`, `some`, `sort`, `sort-by`, `sorted-map`, `sorted-map-by`, `sorted-set`, `special-symbol?`, `split-at`, `split-with`, `str`, `string?`, `struct`, `struct-map`, `subs`, `subvec`, `symbol`, `symbol?`, `sync`, `take`, `take-nth`, `take-while`, `test`, `time`, `to-array`, `to-array-2d`, `tree-seq`, `true?`, `union`, `up`, `update-proxy`, `val`, `vals`, `var-get`, `var-set`, `var?`, `vector`, `vector-zip`, `vector?`, `when`, `when-first`, `when-let`, `when-not`, `with-local-vars`, `with-meta`, `with-open`, `with-out-str`, `xml-seq`, `xml-zip`, `zero?`, `zipmap`, `zipper`), NameBuiltin, nil}, + {`(?<=\()(?!#)[\w!$%*+<=>?/.#-]+`, NameFunction, nil}, + {`(?!#)[\w!$%*+<=>?/.#-]+`, NameVariable, nil}, + {`(\[|\])`, Punctuation, nil}, + {`(\{|\})`, Punctuation, nil}, + {`(\(|\))`, Punctuation, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cmake.go b/vendor/github.com/alecthomas/chroma/lexers/c/cmake.go new file mode 100644 index 00000000..163f17d9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cmake.go @@ -0,0 +1,44 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Cmake lexer. +var Cmake = internal.Register(MustNewLexer( + &Config{ + Name: "CMake", + Aliases: []string{"cmake"}, + Filenames: []string{"*.cmake", "CMakeLists.txt"}, + MimeTypes: []string{"text/x-cmake"}, + }, + Rules{ + "root": { + {`\b(\w+)([ \t]*)(\()`, ByGroups(NameBuiltin, Text, Punctuation), Push("args")}, + Include("keywords"), + Include("ws"), + }, + "args": { + {`\(`, Punctuation, Push()}, + {`\)`, Punctuation, Pop(1)}, + {`(\$\{)(.+?)(\})`, ByGroups(Operator, NameVariable, Operator), nil}, + {`(\$ENV\{)(.+?)(\})`, ByGroups(Operator, NameVariable, Operator), nil}, + {`(\$<)(.+?)(>)`, ByGroups(Operator, NameVariable, Operator), nil}, + {`(?s)".*?"`, LiteralStringDouble, nil}, + {`\\\S+`, LiteralString, nil}, + {`[^)$"# \t\n]+`, LiteralString, nil}, + {`\n`, Text, nil}, + Include("keywords"), + Include("ws"), + }, + "string": {}, + "keywords": { + {`\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|MSVC70|MSVC71|MSVC80|MSVC90)\b`, Keyword, nil}, + }, + "ws": { + {`[ \t]+`, Text, nil}, + {`#.*\n`, Comment, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cobol.go b/vendor/github.com/alecthomas/chroma/lexers/c/cobol.go new file mode 100644 index 00000000..e9ae0bb7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cobol.go @@ -0,0 +1,51 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Cobol lexer. 
+var Cobol = internal.Register(MustNewLexer( + &Config{ + Name: "COBOL", + Aliases: []string{"cobol"}, + Filenames: []string{"*.cob", "*.COB", "*.cpy", "*.CPY"}, + MimeTypes: []string{"text/x-cobol"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + Include("comment"), + Include("strings"), + Include("core"), + Include("nums"), + {`[a-z0-9]([\w\-]*[a-z0-9]+)?`, NameVariable, nil}, + {`[ \t]+`, Text, nil}, + }, + "comment": { + {`(^.{6}[*/].*\n|^.{6}|\*>.*\n)`, Comment, nil}, + }, + "core": { + {`(^|(?<=[^\w\-]))(ALL\s+)?((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)\s*($|(?=[^\w\-]))`, NameConstant, nil}, + {Words(`(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ACCEPT`, `ADD`, `ALLOCATE`, `CALL`, `CANCEL`, `CLOSE`, `COMPUTE`, `CONFIGURATION`, `CONTINUE`, `DATA`, `DELETE`, `DISPLAY`, `DIVIDE`, `DIVISION`, `ELSE`, `END`, `END-ACCEPT`, `END-ADD`, `END-CALL`, `END-COMPUTE`, `END-DELETE`, `END-DISPLAY`, `END-DIVIDE`, `END-EVALUATE`, `END-IF`, `END-MULTIPLY`, `END-OF-PAGE`, `END-PERFORM`, `END-READ`, `END-RETURN`, `END-REWRITE`, `END-SEARCH`, `END-START`, `END-STRING`, `END-SUBTRACT`, `END-UNSTRING`, `END-WRITE`, `ENVIRONMENT`, `EVALUATE`, `EXIT`, `FD`, `FILE`, `FILE-CONTROL`, `FOREVER`, `FREE`, `GENERATE`, `GO`, `GOBACK`, `IDENTIFICATION`, `IF`, `INITIALIZE`, `INITIATE`, `INPUT-OUTPUT`, `INSPECT`, `INVOKE`, `I-O-CONTROL`, `LINKAGE`, `LOCAL-STORAGE`, `MERGE`, `MOVE`, `MULTIPLY`, `OPEN`, `PERFORM`, `PROCEDURE`, `PROGRAM-ID`, `RAISE`, `READ`, `RELEASE`, `RESUME`, `RETURN`, `REWRITE`, `SCREEN`, `SD`, `SEARCH`, `SECTION`, `SET`, `SORT`, `START`, `STOP`, `STRING`, `SUBTRACT`, `SUPPRESS`, `TERMINATE`, `THEN`, `UNLOCK`, `UNSTRING`, `USE`, `VALIDATE`, `WORKING-STORAGE`, `WRITE`), KeywordReserved, nil}, + {Words(`(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ACCESS`, `ADDRESS`, `ADVANCING`, `AFTER`, `ALL`, `ALPHABET`, `ALPHABETIC`, `ALPHABETIC-LOWER`, `ALPHABETIC-UPPER`, `ALPHANUMERIC`, `ALPHANUMERIC-EDITED`, `ALSO`, `ALTER`, `ALTERNATEANY`, `ARE`, `AREA`, `AREAS`, `ARGUMENT-NUMBER`, `ARGUMENT-VALUE`, `AS`, `ASCENDING`, `ASSIGN`, `AT`, `AUTO`, `AUTO-SKIP`, `AUTOMATIC`, `AUTOTERMINATE`, `BACKGROUND-COLOR`, `BASED`, `BEEP`, `BEFORE`, `BELL`, `BLANK`, `BLINK`, `BLOCK`, `BOTTOM`, `BY`, `BYTE-LENGTH`, `CHAINING`, `CHARACTER`, `CHARACTERS`, `CLASS`, `CODE`, `CODE-SET`, `COL`, `COLLATING`, `COLS`, `COLUMN`, `COLUMNS`, `COMMA`, `COMMAND-LINE`, `COMMIT`, `COMMON`, `CONSTANT`, `CONTAINS`, `CONTENT`, `CONTROL`, `CONTROLS`, `CONVERTING`, `COPY`, `CORR`, `CORRESPONDING`, `COUNT`, `CRT`, `CURRENCY`, `CURSOR`, `CYCLE`, `DATE`, `DAY`, `DAY-OF-WEEK`, `DE`, `DEBUGGING`, `DECIMAL-POINT`, `DECLARATIVES`, `DEFAULT`, `DELIMITED`, `DELIMITER`, `DEPENDING`, `DESCENDING`, `DETAIL`, `DISK`, `DOWN`, `DUPLICATES`, `DYNAMIC`, `EBCDIC`, `ENTRY`, `ENVIRONMENT-NAME`, `ENVIRONMENT-VALUE`, `EOL`, `EOP`, `EOS`, `ERASE`, `ERROR`, `ESCAPE`, `EXCEPTION`, `EXCLUSIVE`, `EXTEND`, `EXTERNAL`, `FILE-ID`, `FILLER`, `FINAL`, `FIRST`, `FIXED`, `FLOAT-LONG`, `FLOAT-SHORT`, `FOOTING`, `FOR`, `FOREGROUND-COLOR`, `FORMAT`, `FROM`, `FULL`, `FUNCTION`, `FUNCTION-ID`, `GIVING`, `GLOBAL`, `GROUP`, `HEADING`, `HIGHLIGHT`, `I-O`, `ID`, `IGNORE`, `IGNORING`, `IN`, `INDEX`, `INDEXED`, `INDICATE`, `INITIAL`, `INITIALIZED`, `INPUT`, `INTO`, `INTRINSIC`, `INVALID`, `IS`, `JUST`, `JUSTIFIED`, `KEY`, `LABEL`, `LAST`, `LEADING`, `LEFT`, `LENGTH`, `LIMIT`, `LIMITS`, `LINAGE`, `LINAGE-COUNTER`, `LINE`, `LINES`, `LOCALE`, `LOCK`, `LOWLIGHT`, `MANUAL`, `MEMORY`, `MINUS`, `MODE`, `MULTIPLE`, `NATIONAL`, `NATIONAL-EDITED`, `NATIVE`, `NEGATIVE`, `NEXT`, `NO`, `NULL`, 
`NULLS`, `NUMBER`, `NUMBERS`, `NUMERIC`, `NUMERIC-EDITED`, `OBJECT-COMPUTER`, `OCCURS`, `OF`, `OFF`, `OMITTED`, `ON`, `ONLY`, `OPTIONAL`, `ORDER`, `ORGANIZATION`, `OTHER`, `OUTPUT`, `OVERFLOW`, `OVERLINE`, `PACKED-DECIMAL`, `PADDING`, `PAGE`, `PARAGRAPH`, `PLUS`, `POINTER`, `POSITION`, `POSITIVE`, `PRESENT`, `PREVIOUS`, `PRINTER`, `PRINTING`, `PROCEDURE-POINTER`, `PROCEDURES`, `PROCEED`, `PROGRAM`, `PROGRAM-POINTER`, `PROMPT`, `QUOTE`, `QUOTES`, `RANDOM`, `RD`, `RECORD`, `RECORDING`, `RECORDS`, `RECURSIVE`, `REDEFINES`, `REEL`, `REFERENCE`, `RELATIVE`, `REMAINDER`, `REMOVAL`, `RENAMES`, `REPLACING`, `REPORT`, `REPORTING`, `REPORTS`, `REPOSITORY`, `REQUIRED`, `RESERVE`, `RETURNING`, `REVERSE-VIDEO`, `REWIND`, `RIGHT`, `ROLLBACK`, `ROUNDED`, `RUN`, `SAME`, `SCROLL`, `SECURE`, `SEGMENT-LIMIT`, `SELECT`, `SENTENCE`, `SEPARATE`, `SEQUENCE`, `SEQUENTIAL`, `SHARING`, `SIGN`, `SIGNED`, `SIGNED-INT`, `SIGNED-LONG`, `SIGNED-SHORT`, `SIZE`, `SORT-MERGE`, `SOURCE`, `SOURCE-COMPUTER`, `SPECIAL-NAMES`, `STANDARD`, `STANDARD-1`, `STANDARD-2`, `STATUS`, `SUM`, `SYMBOLIC`, `SYNC`, `SYNCHRONIZED`, `TALLYING`, `TAPE`, `TEST`, `THROUGH`, `THRU`, `TIME`, `TIMES`, `TO`, `TOP`, `TRAILING`, `TRANSFORM`, `TYPE`, `UNDERLINE`, `UNIT`, `UNSIGNED`, `UNSIGNED-INT`, `UNSIGNED-LONG`, `UNSIGNED-SHORT`, `UNTIL`, `UP`, `UPDATE`, `UPON`, `USAGE`, `USING`, `VALUE`, `VALUES`, `VARYING`, `WAIT`, `WHEN`, `WITH`, `WORDS`, `YYYYDDD`, `YYYYMMDD`), KeywordPseudo, nil}, + {Words(`(^|(?<=[^\w\-]))`, `\s*($|(?=[^\w\-]))`, `ACTIVE-CLASS`, `ALIGNED`, `ANYCASE`, `ARITHMETIC`, `ATTRIBUTE`, `B-AND`, `B-NOT`, `B-OR`, `B-XOR`, `BIT`, `BOOLEAN`, `CD`, `CENTER`, `CF`, `CH`, `CHAIN`, `CLASS-ID`, `CLASSIFICATION`, `COMMUNICATION`, `CONDITION`, `DATA-POINTER`, `DESTINATION`, `DISABLE`, `EC`, `EGI`, `EMI`, `ENABLE`, `END-RECEIVE`, `ENTRY-CONVENTION`, `EO`, `ESI`, `EXCEPTION-OBJECT`, `EXPANDS`, `FACTORY`, `FLOAT-BINARY-16`, `FLOAT-BINARY-34`, `FLOAT-BINARY-7`, `FLOAT-DECIMAL-16`, `FLOAT-DECIMAL-34`, `FLOAT-EXTENDED`, `FORMAT`, `FUNCTION-POINTER`, `GET`, `GROUP-USAGE`, `IMPLEMENTS`, `INFINITY`, `INHERITS`, `INTERFACE`, `INTERFACE-ID`, `INVOKE`, `LC_ALL`, `LC_COLLATE`, `LC_CTYPE`, `LC_MESSAGES`, `LC_MONETARY`, `LC_NUMERIC`, `LC_TIME`, `LINE-COUNTER`, `MESSAGE`, `METHOD`, `METHOD-ID`, `NESTED`, `NONE`, `NORMAL`, `OBJECT`, `OBJECT-REFERENCE`, `OPTIONS`, `OVERRIDE`, `PAGE-COUNTER`, `PF`, `PH`, `PROPERTY`, `PROTOTYPE`, `PURGE`, `QUEUE`, `RAISE`, `RAISING`, `RECEIVE`, `RELATION`, `REPLACE`, `REPRESENTS-NOT-A-NUMBER`, `RESET`, `RESUME`, `RETRY`, `RF`, `RH`, `SECONDS`, `SEGMENT`, `SELF`, `SEND`, `SOURCES`, `STATEMENT`, `STEP`, `STRONG`, `SUB-QUEUE-1`, `SUB-QUEUE-2`, `SUB-QUEUE-3`, `SUPER`, `SYMBOL`, `SYSTEM-DEFAULT`, `TABLE`, `TERMINAL`, `TEXT`, `TYPEDEF`, `UCS-4`, `UNIVERSAL`, `USER-DEFAULT`, `UTF-16`, `UTF-8`, `VAL-STATUS`, `VALID`, `VALIDATE`, `VALIDATE-STATUS`), Error, nil}, + {`(^|(?<=[^\w\-]))(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|BINARY-C-LONG|BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|BINARY)\s*($|(?=[^\w\-]))`, KeywordType, nil}, + {`(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)`, Operator, nil}, + {`([(),;:&%.])`, Punctuation, nil}, + 
{`(^|(?<=[^\w\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*($|(?=[^\w\-]))`, NameFunction, nil}, + {`(^|(?<=[^\w\-]))(true|false)\s*($|(?=[^\w\-]))`, NameBuiltin, nil}, + {`(^|(?<=[^\w\-]))(equal|equals|ne|lt|le|gt|ge|greater|less|than|not|and|or)\s*($|(?=[^\w\-]))`, OperatorWord, nil}, + }, + "strings": { + {`"[^"\n]*("|\n)`, LiteralStringDouble, nil}, + {`'[^'\n]*('|\n)`, LiteralStringSingle, nil}, + }, + "nums": { + {`\d+(\s*|\.$|$)`, LiteralNumberInteger, nil}, + {`[+-]?\d*\.\d+(E[-+]?\d+)?`, LiteralNumberFloat, nil}, + {`[+-]?\d+\.\d*(E[-+]?\d+)?`, LiteralNumberFloat, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/coffee.go b/vendor/github.com/alecthomas/chroma/lexers/c/coffee.go new file mode 100644 index 00000000..e402b8f2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/coffee.go @@ -0,0 +1,91 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Coffeescript lexer. +var Coffeescript = internal.Register(MustNewLexer( + &Config{ + Name: "CoffeeScript", + Aliases: []string{"coffee-script", "coffeescript", "coffee"}, + Filenames: []string{"*.coffee"}, + MimeTypes: []string{"text/coffeescript"}, + NotMultiline: true, + DotAll: true, + }, + Rules{ + "commentsandwhitespace": { + {`\s+`, Text, nil}, + {`###[^#].*?###`, CommentMultiline, nil}, + {`#(?!##[^#]).*?\n`, CommentSingle, nil}, + }, + "multilineregex": { + {`[^/#]+`, LiteralStringRegex, nil}, + {`///([gim]+\b|\B)`, LiteralStringRegex, Pop(1)}, + {`#\{`, LiteralStringInterpol, Push("interpoling_string")}, + {`[/#]`, LiteralStringRegex, nil}, + }, + "slashstartsregex": { + Include("commentsandwhitespace"), + {`///`, LiteralStringRegex, Push("#pop", "multilineregex")}, + {`/(?! 
)(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)`, LiteralStringRegex, Pop(1)}, + {`/`, Operator, nil}, + Default(Pop(1)), + }, + "root": { + Include("commentsandwhitespace"), + {`^(?=\s|/)`, Text, Push("slashstartsregex")}, + {"\\+\\+|~|&&|\\band\\b|\\bor\\b|\\bis\\b|\\bisnt\\b|\\bnot\\b|\\?|:|\\|\\||\\\\(?=\\n)|(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&\\|\\^/])=?", Operator, Push("slashstartsregex")}, + {`(?:\([^()]*\))?\s*[=-]>`, NameFunction, Push("slashstartsregex")}, + {`[{(\[;,]`, Punctuation, Push("slashstartsregex")}, + {`[})\].]`, Punctuation, nil}, + {`(?=|<|>|==`, Operator, nil}, + {`mod\b`, Operator, nil}, + {`(eq|lt|gt|lte|gte|not|is|and|or)\b`, Operator, nil}, + {`\|\||&&`, Operator, nil}, + {`\?`, Operator, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`'.*?'`, LiteralStringSingle, nil}, + {`\d+`, LiteralNumber, nil}, + {`(if|else|len|var|xml|default|break|switch|component|property|function|do|try|catch|in|continue|for|return|while|required|any|array|binary|boolean|component|date|guid|numeric|query|string|struct|uuid|case)\b`, Keyword, nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`(application|session|client|cookie|super|this|variables|arguments)\b`, NameConstant, nil}, + {`([a-z_$][\w.]*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, + {`[a-z_$][\w.]*`, NameVariable, nil}, + {`[()\[\]{};:,.\\]`, Punctuation, nil}, + {`\s+`, Text, nil}, + }, + "string": { + {`""`, LiteralStringDouble, nil}, + {`#.+?#`, LiteralStringInterpol, nil}, + {`[^"#]+`, LiteralStringDouble, nil}, + {`#`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/coq.go b/vendor/github.com/alecthomas/chroma/lexers/c/coq.go new file mode 100644 index 00000000..e69a5c16 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/coq.go @@ -0,0 +1,63 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Coq lexer. 
+var Coq = internal.Register(MustNewLexer( + &Config{ + Name: "Coq", + Aliases: []string{"coq"}, + Filenames: []string{"*.v"}, + MimeTypes: []string{"text/x-coq"}, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`false|true|\(\)|\[\]`, NameBuiltinPseudo, nil}, + {`\(\*`, Comment, Push("comment")}, + {Words(`\b`, `\b`, `Section`, `Module`, `End`, `Require`, `Import`, `Export`, `Variable`, `Variables`, `Parameter`, `Parameters`, `Axiom`, `Hypothesis`, `Hypotheses`, `Notation`, `Local`, `Tactic`, `Reserved`, `Scope`, `Open`, `Close`, `Bind`, `Delimit`, `Definition`, `Let`, `Ltac`, `Fixpoint`, `CoFixpoint`, `Morphism`, `Relation`, `Implicit`, `Arguments`, `Set`, `Unset`, `Contextual`, `Strict`, `Prenex`, `Implicits`, `Inductive`, `CoInductive`, `Record`, `Structure`, `Canonical`, `Coercion`, `Theorem`, `Lemma`, `Corollary`, `Proposition`, `Fact`, `Remark`, `Example`, `Proof`, `Goal`, `Save`, `Qed`, `Defined`, `Hint`, `Resolve`, `Rewrite`, `View`, `Search`, `Show`, `Print`, `Printing`, `All`, `Graph`, `Projections`, `inside`, `outside`, `Check`, `Global`, `Instance`, `Class`, `Existing`, `Universe`, `Polymorphic`, `Monomorphic`, `Context`), KeywordNamespace, nil}, + {Words(`\b`, `\b`, `forall`, `exists`, `exists2`, `fun`, `fix`, `cofix`, `struct`, `match`, `end`, `in`, `return`, `let`, `if`, `is`, `then`, `else`, `for`, `of`, `nosimpl`, `with`, `as`), Keyword, nil}, + {Words(`\b`, `\b`, `Type`, `Prop`), KeywordType, nil}, + {Words(`\b`, `\b`, `pose`, `set`, `move`, `case`, `elim`, `apply`, `clear`, `hnf`, `intro`, `intros`, `generalize`, `rename`, `pattern`, `after`, `destruct`, `induction`, `using`, `refine`, `inversion`, `injection`, `rewrite`, `congr`, `unlock`, `compute`, `ring`, `field`, `replace`, `fold`, `unfold`, `change`, `cutrewrite`, `simpl`, `have`, `suff`, `wlog`, `suffices`, `without`, `loss`, `nat_norm`, `assert`, `cut`, `trivial`, `revert`, `bool_congr`, `nat_congr`, `symmetry`, `transitivity`, `auto`, `split`, `left`, `right`, `autorewrite`, `tauto`, `setoid_rewrite`, `intuition`, `eauto`, `eapply`, `econstructor`, `etransitivity`, `constructor`, `erewrite`, `red`, `cbv`, `lazy`, `vm_compute`, `native_compute`, `subst`), Keyword, nil}, + {Words(`\b`, `\b`, `by`, `done`, `exact`, `reflexivity`, `tauto`, `romega`, `omega`, `assumption`, `solve`, `contradiction`, `discriminate`, `congruence`), KeywordPseudo, nil}, + {Words(`\b`, `\b`, `do`, `last`, `first`, `try`, `idtac`, `repeat`), KeywordReserved, nil}, + {`\b([A-Z][\w\']*)`, Name, nil}, + {"(\u03bb|\u03a0|\\|\\}|\\{\\||\\\\/|/\\\\|=>|~|\\}|\\|]|\\||\\{<|\\{|`|_|]|\\[\\||\\[>|\\[<|\\[|\\?\\?|\\?|>\\}|>]|>|=|<->|<-|<|;;|;|:>|:=|::|:|\\.\\.|\\.|->|-\\.|-|,|\\+|\\*|\\)|\\(|&&|&|#|!=)", Operator, nil}, + {`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil}, + {`\b(unit|nat|bool|string|ascii|list)\b`, KeywordType, nil}, + {`[^\W\d][\w']*`, Name, nil}, + {`\d[\d_]*`, LiteralNumberInteger, nil}, + {`0[xX][\da-fA-F][\da-fA-F_]*`, LiteralNumberHex, nil}, + {`0[oO][0-7][0-7_]*`, LiteralNumberOct, nil}, + {`0[bB][01][01_]*`, LiteralNumberBin, nil}, + {`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)`, LiteralNumberFloat, nil}, + {`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'`, LiteralStringChar, nil}, + {`'.'`, LiteralStringChar, nil}, + {`'`, Keyword, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`[~?][a-z][\w\']*:`, Name, nil}, + }, + "comment": { + {`[^(*)]+`, Comment, nil}, + {`\(\*`, Comment, Push()}, + {`\*\)`, Comment, Pop(1)}, + {`[(*)]`, Comment, nil}, + }, + "string": { + {`[^"]+`, 
LiteralStringDouble, nil}, + {`""`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "dotted": { + {`\s+`, Text, nil}, + {`\.`, Punctuation, nil}, + {`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil}, + {`[A-Z][\w\']*`, NameClass, Pop(1)}, + {`[a-z][a-z0-9_\']*`, Name, Pop(1)}, + Default(Pop(1)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cpp.go b/vendor/github.com/alecthomas/chroma/lexers/c/cpp.go new file mode 100644 index 00000000..cb53ee5e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cpp.go @@ -0,0 +1,103 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// CPP lexer. +var CPP = internal.Register(MustNewLexer( + &Config{ + Name: "C++", + Aliases: []string{"cpp", "c++"}, + Filenames: []string{"*.cpp", "*.hpp", "*.c++", "*.h++", "*.cc", "*.hh", "*.cxx", "*.hxx", "*.C", "*.H", "*.cp", "*.CPP"}, + MimeTypes: []string{"text/x-c++hdr", "text/x-c++src"}, + EnsureNL: true, + }, + Rules{ + "statements": { + {Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`), Keyword, nil}, + {`char(16_t|32_t)\b`, KeywordType, nil}, + {`(class)\b`, ByGroups(Keyword, Text), Push("classname")}, + {`(R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")`, ByGroups(LiteralStringAffix, LiteralString, LiteralStringDelimiter, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, LiteralString), nil}, + {`(u8|u|U)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, + {`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, + {`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, + {`0[0-7]+[LlUu]*`, LiteralNumberOct, nil}, + {`\d+[LlUu]*`, LiteralNumberInteger, nil}, + {`\*/`, Error, nil}, + {`[~!%^&*+=|?:<>/-]`, Operator, nil}, + {`[()\[\],.]`, Punctuation, nil}, + {Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil}, + {`(bool|int|long|float|short|double|char|unsigned|signed|void)\b`, KeywordType, nil}, + {Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil}, + {`(__m(128i|128d|128|64))\b`, KeywordReserved, nil}, + {Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil}, + {`(true|false|NULL)\b`, NameBuiltin, nil}, + {`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "root": { + Include("whitespace"), + 
{`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, + {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil}, + Default(Push("statement")), + {Words(`__`, `\b`, `virtual_inheritance`, `uuidof`, `super`, `single_inheritance`, `multiple_inheritance`, `interface`, `event`), KeywordReserved, nil}, + {`__(offload|blockingoffload|outer)\b`, KeywordPseudo, nil}, + }, + "classname": { + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + {`\s*(?=>)`, Text, Pop(1)}, + }, + "whitespace": { + {`^#if\s+0`, CommentPreproc, Push("if0")}, + {`^#`, CommentPreproc, Push("macro")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, + {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, + }, + "statement": { + Include("whitespace"), + Include("statements"), + {`[{}]`, Punctuation, nil}, + {`;`, Punctuation, Pop(1)}, + }, + "function": { + Include("whitespace"), + Include("statements"), + {`;`, Punctuation, nil}, + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil}, + {`[^\\"\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + }, + "macro": { + {`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil}, + {`[^/\n]+`, CommentPreproc, nil}, + {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {`/`, CommentPreproc, nil}, + {`(?<=\\)\n`, CommentPreproc, nil}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "if0": { + {`^\s*#if.*?(?=~!@#%^&|`?-]+", Operator, nil}, + {`(?s)(java|javascript)(\s+)(AS)(\s+)('|\$\$)(.*?)(\5)`, + UsingByGroup( + internal.Get, + 1, 6, + NameBuiltin, TextWhitespace, Keyword, TextWhitespace, + LiteralStringHeredoc, LiteralStringHeredoc, LiteralStringHeredoc, + ), + nil, + }, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`0x[0-9a-f]+`, LiteralNumberHex, nil}, + {`[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}`, LiteralNumberHex, nil}, + {`\.[0-9]+(e[+-]?[0-9]+)?`, Error, nil}, + {`-?[0-9]+(\.[0-9])?(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`'`, LiteralStringSingle, Push("string")}, + {`"`, LiteralStringName, Push("quoted-ident")}, + {`\$\$`, LiteralStringHeredoc, Push("dollar-string")}, + {`[a-z_]\w*`, Name, nil}, + {`:(['"]?)[a-z]\w*\b\1`, NameVariable, nil}, + {`[;:()\[\]\{\},.]`, Punctuation, nil}, + }, + "multiline-comments": { + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[^/*]+`, CommentMultiline, nil}, + {`[/*]`, CommentMultiline, nil}, + }, + "string": { + {`[^']+`, LiteralStringSingle, nil}, + {`''`, LiteralStringSingle, nil}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "quoted-ident": { + {`[^"]+`, LiteralStringName, nil}, + {`""`, LiteralStringName, nil}, + {`"`, LiteralStringName, Pop(1)}, + }, + "dollar-string": { + {`[^\$]+`, 
LiteralStringHeredoc, nil}, + {`\$\$`, LiteralStringHeredoc, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/crystal.go b/vendor/github.com/alecthomas/chroma/lexers/c/crystal.go new file mode 100644 index 00000000..69e053c7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/crystal.go @@ -0,0 +1,262 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Crystal lexer. +var Crystal = internal.Register(MustNewLexer( + &Config{ + Name: "Crystal", + Aliases: []string{"cr", "crystal"}, + Filenames: []string{"*.cr"}, + MimeTypes: []string{"text/x-crystal"}, + DotAll: true, + }, + Rules{ + "root": { + {`#.*?$`, CommentSingle, nil}, + {Words(``, `\b`, `abstract`, `asm`, `as`, `begin`, `break`, `case`, `do`, `else`, `elsif`, `end`, `ensure`, `extend`, `ifdef`, `if`, `include`, `instance_sizeof`, `next`, `of`, `pointerof`, `private`, `protected`, `rescue`, `return`, `require`, `sizeof`, `super`, `then`, `typeof`, `unless`, `until`, `when`, `while`, `with`, `yield`), Keyword, nil}, + {Words(``, `\b`, `true`, `false`, `nil`), KeywordConstant, nil}, + {`(module|lib)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)`, ByGroups(Keyword, Text, NameNamespace), nil}, + {`(def|fun|macro)(\s+)((?:[a-zA-Z_]\w*::)*)`, ByGroups(Keyword, Text, NameNamespace), Push("funcname")}, + {"def(?=[*%&^`~+-/\\[<>=])", Keyword, Push("funcname")}, + {`(class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)`, ByGroups(Keyword, Text, NameNamespace), Push("classname")}, + {`(self|out|uninitialized)\b|(is_a|responds_to)\?`, KeywordPseudo, nil}, + {Words(``, `\b`, `debugger`, `record`, `pp`, `assert_responds_to`, `spawn`, `parallel`, `getter`, `setter`, `property`, `delegate`, `def_hash`, `def_equals`, `def_equals_and_hash`, `forward_missing_to`), NameBuiltinPseudo, nil}, + {`getter[!?]|property[!?]|__(DIR|FILE|LINE)__\b`, NameBuiltinPseudo, nil}, + {Words(`(?~!:])|(?<=(?:\s|;)when\s)|(?<=(?:\s|;)or\s)|(?<=(?:\s|;)and\s)|(?<=\.index\s)|(?<=\.scan\s)|(?<=\.sub\s)|(?<=\.sub!\s)|(?<=\.gsub\s)|(?<=\.gsub!\s)|(?<=\.match\s)|(?<=(?:\s|;)if\s)|(?<=(?:\s|;)elsif\s)|(?<=^when\s)|(?<=^index\s)|(?<=^scan\s)|(?<=^sub\s)|(?<=^gsub\s)|(?<=^sub!\s)|(?<=^gsub!\s)|(?<=^match\s)|(?<=^if\s)|(?<=^elsif\s))(\s*)(/)`, ByGroups(Text, LiteralStringRegex), Push("multiline-regex")}, + {`(?<=\(|,|\[)/`, LiteralStringRegex, Push("multiline-regex")}, + {`(\s+)(/)(?![\s=])`, ByGroups(Text, LiteralStringRegex), Push("multiline-regex")}, + {`(0o[0-7]+(?:_[0-7]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberOct, Text, Operator), nil}, + {`(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberHex, Text, Operator), nil}, + {`(0b[01]+(?:_[01]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberBin, Text, Operator), nil}, + {`((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?(?:_?f[0-9]+)?)(\s*)([/?])?`, ByGroups(LiteralNumberFloat, Text, Operator), nil}, + {`((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)(?:_?f[0-9]+)?)(\s*)([/?])?`, ByGroups(LiteralNumberFloat, Text, Operator), nil}, + {`((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?(?:_?f[0-9]+))(\s*)([/?])?`, ByGroups(LiteralNumberFloat, Text, Operator), nil}, + {`(0\b|[1-9][\d]*(?:_\d+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?`, ByGroups(LiteralNumberInteger, Text, Operator), nil}, + {`@@[a-zA-Z_]\w*`, NameVariableClass, nil}, + {`@[a-zA-Z_]\w*`, NameVariableInstance, nil}, + {`\$\w+`, NameVariableGlobal, nil}, + 
{"\\$[!@&`\\'+~=/\\\\,;.<>_*$?:\"^-]", NameVariableGlobal, nil}, + {`\$-[0adFiIlpvw]`, NameVariableGlobal, nil}, + {`::`, Operator, nil}, + Include("strings"), + {`\?(\\[MC]-)*(\\([\\befnrtv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)(?!\w)`, LiteralStringChar, nil}, + {`[A-Z][A-Z_]+\b`, NameConstant, nil}, + {`\{%`, LiteralStringInterpol, Push("in-macro-control")}, + {`\{\{`, LiteralStringInterpol, Push("in-macro-expr")}, + {`(@\[)(\s*)([A-Z]\w*)`, ByGroups(Operator, Text, NameDecorator), Push("in-attr")}, + {Words(`(\.|::)`, ``, `!=`, `!~`, `!`, `%`, `&&`, `&`, `**`, `*`, `+`, `-`, `/`, `<=>`, `<<`, `<=`, `<`, `===`, `==`, `=~`, `=`, `>=`, `>>`, `>`, `[]=`, `[]?`, `[]`, `^`, `||`, `|`, `~`), ByGroups(Operator, NameOperator), nil}, + {"(\\.|::)([a-zA-Z_]\\w*[!?]?|[*%&^`~+\\-/\\[<>=])", ByGroups(Operator, Name), nil}, + {`[a-zA-Z_]\w*(?:[!?](?!=))?`, Name, nil}, + {`(\[|\]\??|\*\*|<=>?|>=|<>?|=~|===|!~|&&?|\|\||\.{1,3})`, Operator, nil}, + {`[-+/*%=<>&!^|~]=?`, Operator, nil}, + {`[(){};,/?:\\]`, Punctuation, nil}, + {`\s+`, Text, nil}, + }, + "funcname": { + {"(?:([a-zA-Z_]\\w*)(\\.))?([a-zA-Z_]\\w*[!?]?|\\*\\*?|[-+]@?|[/%&|^`~]|\\[\\]=?|<<|>>|<=?>|>=?|===?)", ByGroups(NameClass, Operator, NameFunction), Pop(1)}, + Default(Pop(1)), + }, + "classname": { + {`[A-Z_]\w*`, NameClass, nil}, + {`(\()(\s*)([A-Z_]\w*)(\s*)(\))`, ByGroups(Punctuation, Text, NameClass, Text, Punctuation), nil}, + Default(Pop(1)), + }, + "in-intp": { + {`\{`, LiteralStringInterpol, Push()}, + {`\}`, LiteralStringInterpol, Pop(1)}, + Include("root"), + }, + "string-intp": { + {`#\{`, LiteralStringInterpol, Push("in-intp")}, + }, + "string-escaped": { + {`\\([\\befnstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})`, LiteralStringEscape, nil}, + }, + "string-intp-escaped": { + Include("string-intp"), + Include("string-escaped"), + }, + "interpolated-regex": { + Include("string-intp"), + {`[\\#]`, LiteralStringRegex, nil}, + {`[^\\#]+`, LiteralStringRegex, nil}, + }, + "interpolated-string": { + Include("string-intp"), + {`[\\#]`, LiteralStringOther, nil}, + {`[^\\#]+`, LiteralStringOther, nil}, + }, + "multiline-regex": { + Include("string-intp"), + {`\\\\`, LiteralStringRegex, nil}, + {`\\/`, LiteralStringRegex, nil}, + {`[\\#]`, LiteralStringRegex, nil}, + {`[^\\/#]+`, LiteralStringRegex, nil}, + {`/[imsx]*`, LiteralStringRegex, Pop(1)}, + }, + "end-part": { + {`.+`, CommentPreproc, Pop(1)}, + }, + "in-macro-control": { + {`\{%`, LiteralStringInterpol, Push()}, + {`%\}`, LiteralStringInterpol, Pop(1)}, + {`for\b|in\b`, Keyword, nil}, + Include("root"), + }, + "in-macro-expr": { + {`\{\{`, LiteralStringInterpol, Push()}, + {`\}\}`, LiteralStringInterpol, Pop(1)}, + Include("root"), + }, + "in-attr": { + {`\[`, Operator, Push()}, + {`\]`, Operator, Pop(1)}, + Include("root"), + }, + "strings": { + {`\:@{0,2}[a-zA-Z_]\w*[!?]?`, LiteralStringSymbol, nil}, + {Words(`\:@{0,2}`, ``, `!=`, `!~`, `!`, `%`, `&&`, `&`, `**`, `*`, `+`, `-`, `/`, `<=>`, `<<`, `<=`, `<`, `===`, `==`, `=~`, `=`, `>=`, `>>`, `>`, `[]=`, `[]?`, `[]`, `^`, `||`, `|`, `~`), LiteralStringSymbol, nil}, + {`:'(\\\\|\\'|[^'])*'`, LiteralStringSymbol, nil}, + {`'(\\\\|\\'|[^']|\\[^'\\]+)'`, LiteralStringChar, nil}, + {`:"`, LiteralStringSymbol, Push("simple-sym")}, + {`([a-zA-Z_]\w*)(:)(?!:)`, ByGroups(LiteralStringSymbol, Punctuation), nil}, + {`"`, LiteralStringDouble, Push("simple-string")}, + {"(?&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)`, ByGroups(Text, LiteralStringOther, None), nil}, + {`^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)`, ByGroups(Text, 
LiteralStringOther, None), nil}, + {`(%([\[{(<]))((?:\\\2|(?!\2).)*)(\2)`, String, nil}, + }, + "simple-string": { + Include("string-intp-escaped"), + {`[^\\"#]+`, LiteralStringDouble, nil}, + {`[\\#]`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "simple-sym": { + Include("string-escaped"), + {`[^\\"#]+`, LiteralStringSymbol, nil}, + {`[\\#]`, LiteralStringSymbol, nil}, + {`"`, LiteralStringSymbol, Pop(1)}, + }, + "simple-backtick": { + Include("string-intp-escaped"), + {"[^\\\\`#]+", LiteralStringBacktick, nil}, + {`[\\#]`, LiteralStringBacktick, nil}, + {"`", LiteralStringBacktick, Pop(1)}, + }, + "cb-intp-string": { + {`\\[\{]`, LiteralStringOther, nil}, + {`\{`, LiteralStringOther, Push()}, + {`\}`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#{}]`, LiteralStringOther, nil}, + {`[^\\#{}]+`, LiteralStringOther, nil}, + }, + "cb-string": { + {`\\[\\{}]`, LiteralStringOther, nil}, + {`\{`, LiteralStringOther, Push()}, + {`\}`, LiteralStringOther, Pop(1)}, + {`[\\#{}]`, LiteralStringOther, nil}, + {`[^\\#{}]+`, LiteralStringOther, nil}, + }, + "cb-regex": { + {`\\[\\{}]`, LiteralStringRegex, nil}, + {`\{`, LiteralStringRegex, Push()}, + {`\}[imsx]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#{}]`, LiteralStringRegex, nil}, + {`[^\\#{}]+`, LiteralStringRegex, nil}, + }, + "sb-intp-string": { + {`\\[\[]`, LiteralStringOther, nil}, + {`\[`, LiteralStringOther, Push()}, + {`\]`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#\[\]]`, LiteralStringOther, nil}, + {`[^\\#\[\]]+`, LiteralStringOther, nil}, + }, + "sb-string": { + {`\\[\\\[\]]`, LiteralStringOther, nil}, + {`\[`, LiteralStringOther, Push()}, + {`\]`, LiteralStringOther, Pop(1)}, + {`[\\#\[\]]`, LiteralStringOther, nil}, + {`[^\\#\[\]]+`, LiteralStringOther, nil}, + }, + "sb-regex": { + {`\\[\\\[\]]`, LiteralStringRegex, nil}, + {`\[`, LiteralStringRegex, Push()}, + {`\][imsx]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#\[\]]`, LiteralStringRegex, nil}, + {`[^\\#\[\]]+`, LiteralStringRegex, nil}, + }, + "pa-intp-string": { + {`\\[\(]`, LiteralStringOther, nil}, + {`\(`, LiteralStringOther, Push()}, + {`\)`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#()]`, LiteralStringOther, nil}, + {`[^\\#()]+`, LiteralStringOther, nil}, + }, + "pa-string": { + {`\\[\\()]`, LiteralStringOther, nil}, + {`\(`, LiteralStringOther, Push()}, + {`\)`, LiteralStringOther, Pop(1)}, + {`[\\#()]`, LiteralStringOther, nil}, + {`[^\\#()]+`, LiteralStringOther, nil}, + }, + "pa-regex": { + {`\\[\\()]`, LiteralStringRegex, nil}, + {`\(`, LiteralStringRegex, Push()}, + {`\)[imsx]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#()]`, LiteralStringRegex, nil}, + {`[^\\#()]+`, LiteralStringRegex, nil}, + }, + "ab-intp-string": { + {`\\[<]`, LiteralStringOther, nil}, + {`<`, LiteralStringOther, Push()}, + {`>`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#<>]`, LiteralStringOther, nil}, + {`[^\\#<>]+`, LiteralStringOther, nil}, + }, + "ab-string": { + {`\\[\\<>]`, LiteralStringOther, nil}, + {`<`, LiteralStringOther, Push()}, + {`>`, LiteralStringOther, Pop(1)}, + {`[\\#<>]`, LiteralStringOther, nil}, + {`[^\\#<>]+`, LiteralStringOther, nil}, + }, + "ab-regex": { + {`\\[\\<>]`, LiteralStringRegex, nil}, + {`<`, LiteralStringRegex, Push()}, + {`>[imsx]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#<>]`, LiteralStringRegex, nil}, + {`[^\\#<>]+`, 
LiteralStringRegex, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/csharp.go b/vendor/github.com/alecthomas/chroma/lexers/c/csharp.go new file mode 100644 index 00000000..d59fa4f4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/csharp.go @@ -0,0 +1,50 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// CSharp lexer. +var CSharp = internal.Register(MustNewLexer( + &Config{ + Name: "C#", + Aliases: []string{"csharp", "c#"}, + Filenames: []string{"*.cs"}, + MimeTypes: []string{"text/x-csharp"}, + DotAll: true, + }, + Rules{ + "root": { + {`^\s*\[.*?\]`, NameAttribute, nil}, + {`[^\S\n]+`, Text, nil}, + {`\\\n`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/[*].*?[*]/`, CommentMultiline, nil}, + {`\n`, Text, nil}, + {`[~!%^&*()+=|\[\]:;,.<>/?-]`, Punctuation, nil}, + {`[{}]`, Punctuation, nil}, + {`@"(""|[^"])*"`, LiteralString, nil}, + {`\$@?"(""|[^"])*"`, LiteralString, nil}, + {`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil}, + {`'\\.'|'[^\\]'`, LiteralStringChar, nil}, + {`[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, + {`#[ \t]*(if|endif|else|elif|define|undef|line|error|warning|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil}, + {`\b(extern)(\s+)(alias)\b`, ByGroups(Keyword, Text, Keyword), nil}, + {`(abstract|as|async|await|base|break|by|case|catch|checked|const|continue|default|delegate|do|else|enum|event|explicit|extern|false|finally|fixed|for|foreach|goto|if|implicit|in|interface|internal|is|let|lock|new|null|on|operator|out|override|params|private|protected|public|readonly|ref|return|sealed|sizeof|stackalloc|static|switch|this|throw|true|try|typeof|unchecked|unsafe|virtual|void|while|get|set|new|partial|yield|add|remove|value|alias|ascending|descending|from|group|into|orderby|select|thenby|where|join|equals)\b`, Keyword, nil}, + {`(global)(::)`, ByGroups(Keyword, Punctuation), nil}, + {`(bool|byte|char|decimal|double|dynamic|float|int|long|object|sbyte|short|string|uint|ulong|ushort|var)\b\??`, KeywordType, nil}, + {`(class|struct)(\s+)`, ByGroups(Keyword, Text), Push("class")}, + {`(namespace|using)(\s+)`, ByGroups(Keyword, Text), Push("namespace")}, + {`@?[_a-zA-Z]\w*`, Name, nil}, + }, + "class": { + {`@?[_a-zA-Z]\w*`, NameClass, Pop(1)}, + Default(Pop(1)), + }, + "namespace": { + {`(?=\()`, Text, Pop(1)}, + {`(@?[_a-zA-Z]\w*|\.)+`, NameNamespace, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/css.go b/vendor/github.com/alecthomas/chroma/lexers/c/css.go new file mode 100644 index 00000000..fedc809b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/css.go @@ -0,0 +1,104 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// CSS lexer. 
+var CSS = internal.Register(MustNewLexer( + &Config{ + Name: "CSS", + Aliases: []string{"css"}, + Filenames: []string{"*.css"}, + MimeTypes: []string{"text/css"}, + }, + Rules{ + "root": { + Include("basics"), + }, + "basics": { + {`\s+`, Text, nil}, + {`/\*(?:.|\n)*?\*/`, Comment, nil}, + {`\{`, Punctuation, Push("content")}, + {`(\:{1,2})([\w-]+)`, ByGroups(Punctuation, NameDecorator), nil}, + {`(\.)([\w-]+)`, ByGroups(Punctuation, NameClass), nil}, + {`(\#)([\w-]+)`, ByGroups(Punctuation, NameNamespace), nil}, + {`(@)([\w-]+)`, ByGroups(Punctuation, Keyword), Push("atrule")}, + {`[\w-]+`, NameTag, nil}, + {`[~^*!%&$\[\]()<>|+=@:;,./?-]`, Operator, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + }, + "atrule": { + {`\{`, Punctuation, Push("atcontent")}, + {`;`, Punctuation, Pop(1)}, + Include("basics"), + }, + "atcontent": { + Include("basics"), + {`\}`, Punctuation, Pop(2)}, + }, + "content": { + {`\s+`, Text, nil}, + {`\}`, Punctuation, Pop(1)}, + {`;`, Punctuation, nil}, + {`^@.*?$`, CommentPreproc, nil}, + {Words(``, ``, `-ms-`, `mso-`, `-moz-`, `-o-`, `-xv-`, `-atsc-`, `-wap-`, `-khtml-`, `-webkit-`, `prince-`, `-ah-`, `-hp-`, `-ro-`, `-rim-`, `-tc-`), KeywordPseudo, nil}, + {`(align-content|align-items|align-self|alignment-baseline|all|animation|animation-delay|animation-direction|animation-duration|animation-fill-mode|animation-iteration-count|animation-name|animation-play-state|animation-timing-function|appearance|azimuth|backface-visibility|background|background-attachment|background-blend-mode|background-clip|background-color|background-image|background-origin|background-position|background-repeat|background-size|baseline-shift|bookmark-label|bookmark-level|bookmark-state|border|border-bottom|border-bottom-color|border-bottom-left-radius|border-bottom-right-radius|border-bottom-style|border-bottom-width|border-boundary|border-collapse|border-color|border-image|border-image-outset|border-image-repeat|border-image-slice|border-image-source|border-image-width|border-left|border-left-color|border-left-style|border-left-width|border-radius|border-right|border-right-color|border-right-style|border-right-width|border-spacing|border-style|border-top|border-top-color|border-top-left-radius|border-top-right-radius|border-top-style|border-top-width|border-width|bottom|box-decoration-break|box-shadow|box-sizing|box-snap|box-suppress|break-after|break-before|break-inside|caption-side|caret|caret-animation|caret-color|caret-shape|chains|clear|clip|clip-path|clip-rule|color|color-interpolation-filters|column-count|column-fill|column-gap|column-rule|column-rule-color|column-rule-style|column-rule-width|column-span|column-width|columns|content|counter-increment|counter-reset|counter-set|crop|cue|cue-after|cue-before|cursor|direction|display|dominant-baseline|elevation|empty-cells|filter|flex|flex-basis|flex-direction|flex-flow|flex-grow|flex-shrink|flex-wrap|float|float-defer|float-offset|float-reference|flood-color|flood-opacity|flow|flow-from|flow-into|font|font-family|font-feature-settings|font-kerning|font-language-override|font-size|font-size-adjust|font-stretch|font-style|font-synthesis|font-variant|font-variant-alternates|font-variant-caps|font-variant-east-asian|font-variant-ligatures|font-variant-numeric|font-variant-position|font-weight|footnote-display|footnote-policy|glyph-orientation-vertical|grid|grid-area|grid-auto-columns|grid-auto-flow|grid-auto-rows|grid-column|grid-column-end|grid-column-gap|grid-column-start|gri
d-gap|grid-row|grid-row-end|grid-row-gap|grid-row-start|grid-template|grid-template-areas|grid-template-columns|grid-template-rows|hanging-punctuation|height|hyphenate-character|hyphenate-limit-chars|hyphenate-limit-last|hyphenate-limit-lines|hyphenate-limit-zone|hyphens|image-orientation|image-resolution|initial-letter|initial-letter-align|initial-letter-wrap|isolation|justify-content|justify-items|justify-self|left|letter-spacing|lighting-color|line-break|line-grid|line-height|line-snap|list-style|list-style-image|list-style-position|list-style-type|margin|margin-bottom|margin-left|margin-right|margin-top|marker-side|marquee-direction|marquee-loop|marquee-speed|marquee-style|mask|mask-border|mask-border-mode|mask-border-outset|mask-border-repeat|mask-border-slice|mask-border-source|mask-border-width|mask-clip|mask-composite|mask-image|mask-mode|mask-origin|mask-position|mask-repeat|mask-size|mask-type|max-height|max-lines|max-width|min-height|min-width|mix-blend-mode|motion|motion-offset|motion-path|motion-rotation|move-to|nav-down|nav-left|nav-right|nav-up|object-fit|object-position|offset-after|offset-before|offset-end|offset-start|opacity|order|orphans|outline|outline-color|outline-offset|outline-style|outline-width|overflow|overflow-style|overflow-wrap|overflow-x|overflow-y|padding|padding-bottom|padding-left|padding-right|padding-top|page|page-break-after|page-break-before|page-break-inside|page-policy|pause|pause-after|pause-before|perspective|perspective-origin|pitch|pitch-range|play-during|polar-angle|polar-distance|position|presentation-level|quotes|region-fragment|resize|rest|rest-after|rest-before|richness|right|rotation|rotation-point|ruby-align|ruby-merge|ruby-position|running|scroll-snap-coordinate|scroll-snap-destination|scroll-snap-points-x|scroll-snap-points-y|scroll-snap-type|shape-image-threshold|shape-inside|shape-margin|shape-outside|size|speak|speak-as|speak-header|speak-numeral|speak-punctuation|speech-rate|stress|string-set|tab-size|table-layout|text-align|text-align-last|text-combine-upright|text-decoration|text-decoration-color|text-decoration-line|text-decoration-skip|text-decoration-style|text-emphasis|text-emphasis-color|text-emphasis-position|text-emphasis-style|text-indent|text-justify|text-orientation|text-overflow|text-shadow|text-space-collapse|text-space-trim|text-spacing|text-transform|text-underline-position|text-wrap|top|transform|transform-origin|transform-style|transition|transition-delay|transition-duration|transition-property|transition-timing-function|unicode-bidi|user-select|vertical-align|visibility|voice-balance|voice-duration|voice-family|voice-pitch|voice-range|voice-rate|voice-stress|voice-volume|volume|white-space|widows|width|will-change|word-break|word-spacing|word-wrap|wrap-after|wrap-before|wrap-flow|wrap-inside|wrap-through|writing-mode|z-index)(\s*)(\:)`, ByGroups(Keyword, Text, Punctuation), Push("value-start")}, + {`(--[a-zA-Z_][\w-]*)(\s*)(\:)`, ByGroups(NameVariable, Text, Punctuation), Push("value-start")}, + {`([a-zA-Z_][\w-]*)(\s*)(\:)`, ByGroups(Name, Text, Punctuation), Push("value-start")}, + {`/\*(?:.|\n)*?\*/`, Comment, nil}, + }, + "value-start": { + Include("common-values"), + {Words(``, `\b`, `align-content`, `align-items`, `align-self`, `alignment-baseline`, `all`, `animation`, `animation-delay`, `animation-direction`, `animation-duration`, `animation-fill-mode`, `animation-iteration-count`, `animation-name`, `animation-play-state`, `animation-timing-function`, `appearance`, `azimuth`, `backface-visibility`, 
`background`, `background-attachment`, `background-blend-mode`, `background-clip`, `background-color`, `background-image`, `background-origin`, `background-position`, `background-repeat`, `background-size`, `baseline-shift`, `bookmark-label`, `bookmark-level`, `bookmark-state`, `border`, `border-bottom`, `border-bottom-color`, `border-bottom-left-radius`, `border-bottom-right-radius`, `border-bottom-style`, `border-bottom-width`, `border-boundary`, `border-collapse`, `border-color`, `border-image`, `border-image-outset`, `border-image-repeat`, `border-image-slice`, `border-image-source`, `border-image-width`, `border-left`, `border-left-color`, `border-left-style`, `border-left-width`, `border-radius`, `border-right`, `border-right-color`, `border-right-style`, `border-right-width`, `border-spacing`, `border-style`, `border-top`, `border-top-color`, `border-top-left-radius`, `border-top-right-radius`, `border-top-style`, `border-top-width`, `border-width`, `bottom`, `box-decoration-break`, `box-shadow`, `box-sizing`, `box-snap`, `box-suppress`, `break-after`, `break-before`, `break-inside`, `caption-side`, `caret`, `caret-animation`, `caret-color`, `caret-shape`, `chains`, `clear`, `clip`, `clip-path`, `clip-rule`, `color`, `color-interpolation-filters`, `column-count`, `column-fill`, `column-gap`, `column-rule`, `column-rule-color`, `column-rule-style`, `column-rule-width`, `column-span`, `column-width`, `columns`, `content`, `counter-increment`, `counter-reset`, `counter-set`, `crop`, `cue`, `cue-after`, `cue-before`, `cursor`, `direction`, `display`, `dominant-baseline`, `elevation`, `empty-cells`, `filter`, `flex`, `flex-basis`, `flex-direction`, `flex-flow`, `flex-grow`, `flex-shrink`, `flex-wrap`, `float`, `float-defer`, `float-offset`, `float-reference`, `flood-color`, `flood-opacity`, `flow`, `flow-from`, `flow-into`, `font`, `font-family`, `font-feature-settings`, `font-kerning`, `font-language-override`, `font-size`, `font-size-adjust`, `font-stretch`, `font-style`, `font-synthesis`, `font-variant`, `font-variant-alternates`, `font-variant-caps`, `font-variant-east-asian`, `font-variant-ligatures`, `font-variant-numeric`, `font-variant-position`, `font-weight`, `footnote-display`, `footnote-policy`, `glyph-orientation-vertical`, `grid`, `grid-area`, `grid-auto-columns`, `grid-auto-flow`, `grid-auto-rows`, `grid-column`, `grid-column-end`, `grid-column-gap`, `grid-column-start`, `grid-gap`, `grid-row`, `grid-row-end`, `grid-row-gap`, `grid-row-start`, `grid-template`, `grid-template-areas`, `grid-template-columns`, `grid-template-rows`, `hanging-punctuation`, `height`, `hyphenate-character`, `hyphenate-limit-chars`, `hyphenate-limit-last`, `hyphenate-limit-lines`, `hyphenate-limit-zone`, `hyphens`, `image-orientation`, `image-resolution`, `initial-letter`, `initial-letter-align`, `initial-letter-wrap`, `isolation`, `justify-content`, `justify-items`, `justify-self`, `left`, `letter-spacing`, `lighting-color`, `line-break`, `line-grid`, `line-height`, `line-snap`, `list-style`, `list-style-image`, `list-style-position`, `list-style-type`, `margin`, `margin-bottom`, `margin-left`, `margin-right`, `margin-top`, `marker-side`, `marquee-direction`, `marquee-loop`, `marquee-speed`, `marquee-style`, `mask`, `mask-border`, `mask-border-mode`, `mask-border-outset`, `mask-border-repeat`, `mask-border-slice`, `mask-border-source`, `mask-border-width`, `mask-clip`, `mask-composite`, `mask-image`, `mask-mode`, `mask-origin`, `mask-position`, `mask-repeat`, `mask-size`, `mask-type`, 
`max-height`, `max-lines`, `max-width`, `min-height`, `min-width`, `mix-blend-mode`, `motion`, `motion-offset`, `motion-path`, `motion-rotation`, `move-to`, `nav-down`, `nav-left`, `nav-right`, `nav-up`, `object-fit`, `object-position`, `offset-after`, `offset-before`, `offset-end`, `offset-start`, `opacity`, `order`, `orphans`, `outline`, `outline-color`, `outline-offset`, `outline-style`, `outline-width`, `overflow`, `overflow-style`, `overflow-wrap`, `overflow-x`, `overflow-y`, `padding`, `padding-bottom`, `padding-left`, `padding-right`, `padding-top`, `page`, `page-break-after`, `page-break-before`, `page-break-inside`, `page-policy`, `pause`, `pause-after`, `pause-before`, `perspective`, `perspective-origin`, `pitch`, `pitch-range`, `play-during`, `polar-angle`, `polar-distance`, `position`, `presentation-level`, `quotes`, `region-fragment`, `resize`, `rest`, `rest-after`, `rest-before`, `richness`, `right`, `rotation`, `rotation-point`, `ruby-align`, `ruby-merge`, `ruby-position`, `running`, `scroll-snap-coordinate`, `scroll-snap-destination`, `scroll-snap-points-x`, `scroll-snap-points-y`, `scroll-snap-type`, `shape-image-threshold`, `shape-inside`, `shape-margin`, `shape-outside`, `size`, `speak`, `speak-as`, `speak-header`, `speak-numeral`, `speak-punctuation`, `speech-rate`, `stress`, `string-set`, `tab-size`, `table-layout`, `text-align`, `text-align-last`, `text-combine-upright`, `text-decoration`, `text-decoration-color`, `text-decoration-line`, `text-decoration-skip`, `text-decoration-style`, `text-emphasis`, `text-emphasis-color`, `text-emphasis-position`, `text-emphasis-style`, `text-indent`, `text-justify`, `text-orientation`, `text-overflow`, `text-shadow`, `text-space-collapse`, `text-space-trim`, `text-spacing`, `text-transform`, `text-underline-position`, `text-wrap`, `top`, `transform`, `transform-origin`, `transform-style`, `transition`, `transition-delay`, `transition-duration`, `transition-property`, `transition-timing-function`, `unicode-bidi`, `user-select`, `vertical-align`, `visibility`, `voice-balance`, `voice-duration`, `voice-family`, `voice-pitch`, `voice-range`, `voice-rate`, `voice-stress`, `voice-volume`, `volume`, `white-space`, `widows`, `width`, `will-change`, `word-break`, `word-spacing`, `word-wrap`, `wrap-after`, `wrap-before`, `wrap-flow`, `wrap-inside`, `wrap-through`, `writing-mode`, `z-index`), Keyword, nil}, + {`\!important`, CommentPreproc, nil}, + {`/\*(?:.|\n)*?\*/`, Comment, nil}, + Include("numeric-values"), + {`[~^*!%&<>|+=@:./?-]+`, Operator, nil}, + {`[\[\](),]+`, Punctuation, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`[a-zA-Z_][\w-]*`, Name, nil}, + {`;`, Punctuation, Pop(1)}, + {`\}`, Punctuation, Pop(2)}, + }, + "function-start": { + Include("common-values"), + {`/\*(?:.|\n)*?\*/`, Comment, nil}, + Include("numeric-values"), + {`[*+/-]`, Operator, nil}, + {`[,]`, Punctuation, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`[a-zA-Z_-]\w*`, Name, nil}, + {`\)`, Punctuation, Pop(1)}, + }, + "common-values": { + {`\s+`, Text, nil}, + {Words(``, ``, `-ms-`, `mso-`, `-moz-`, `-o-`, `-xv-`, `-atsc-`, `-wap-`, `-khtml-`, `-webkit-`, `prince-`, `-ah-`, `-hp-`, `-ro-`, `-rim-`, `-tc-`), KeywordPseudo, nil}, + Include("urls"), + 
{`(attr|blackness|blend|blenda|blur|brightness|calc|circle|color-mod|contrast|counter|cubic-bezier|device-cmyk|drop-shadow|ellipse|gray|grayscale|hsl|hsla|hue|hue-rotate|hwb|image|inset|invert|lightness|linear-gradient|matrix|matrix3d|opacity|perspective|polygon|radial-gradient|rect|repeating-linear-gradient|repeating-radial-gradient|rgb|rgba|rotate|rotate3d|rotateX|rotateY|rotateZ|saturate|saturation|scale|scale3d|scaleX|scaleY|scaleZ|sepia|shade|skewX|skewY|steps|tint|toggle|translate|translate3d|translateX|translateY|translateZ|whiteness)(\()`, ByGroups(NameBuiltin, Punctuation), Push("function-start")}, + {`([a-zA-Z_][\w-]+)(\()`, ByGroups(NameFunction, Punctuation), Push("function-start")}, + {Words(``, `\b`, `absolute`, `alias`, `all`, `all-petite-caps`, `all-scroll`, `all-small-caps`, `allow-end`, `alpha`, `alternate`, `alternate-reverse`, `always`, `armenian`, `auto`, `avoid`, `avoid-column`, `avoid-page`, `backwards`, `balance`, `baseline`, `below`, `blink`, `block`, `bold`, `bolder`, `border-box`, `both`, `bottom`, `box-decoration`, `break-word`, `capitalize`, `cell`, `center`, `circle`, `clip`, `clone`, `close-quote`, `col-resize`, `collapse`, `color`, `color-burn`, `color-dodge`, `column`, `column-reverse`, `compact`, `condensed`, `contain`, `container`, `content-box`, `context-menu`, `copy`, `cover`, `crisp-edges`, `crosshair`, `currentColor`, `cursive`, `darken`, `dashed`, `decimal`, `decimal-leading-zero`, `default`, `descendants`, `difference`, `digits`, `disc`, `distribute`, `dot`, `dotted`, `double`, `double-circle`, `e-resize`, `each-line`, `ease`, `ease-in`, `ease-in-out`, `ease-out`, `edges`, `ellipsis`, `end`, `ew-resize`, `exclusion`, `expanded`, `extra-condensed`, `extra-expanded`, `fantasy`, `fill`, `fill-box`, `filled`, `first`, `fixed`, `flat`, `flex`, `flex-end`, `flex-start`, `flip`, `force-end`, `forwards`, `from-image`, `full-width`, `geometricPrecision`, `georgian`, `groove`, `hanging`, `hard-light`, `help`, `hidden`, `hide`, `horizontal`, `hue`, `icon`, `infinite`, `inherit`, `initial`, `ink`, `inline`, `inline-block`, `inline-flex`, `inline-table`, `inset`, `inside`, `inter-word`, `invert`, `isolate`, `italic`, `justify`, `large`, `larger`, `last`, `left`, `lighten`, `lighter`, `line-through`, `linear`, `list-item`, `local`, `loose`, `lower-alpha`, `lower-greek`, `lower-latin`, `lower-roman`, `lowercase`, `ltr`, `luminance`, `luminosity`, `mandatory`, `manipulation`, `manual`, `margin-box`, `match-parent`, `medium`, `mixed`, `monospace`, `move`, `multiply`, `n-resize`, `ne-resize`, `nesw-resize`, `no-close-quote`, `no-drop`, `no-open-quote`, `no-repeat`, `none`, `normal`, `not-allowed`, `nowrap`, `ns-resize`, `nw-resize`, `nwse-resize`, `objects`, `oblique`, `off`, `on`, `open`, `open-quote`, `optimizeLegibility`, `optimizeSpeed`, `outset`, `outside`, `over`, `overlay`, `overline`, `padding-box`, `page`, `pan-down`, `pan-left`, `pan-right`, `pan-up`, `pan-x`, `pan-y`, `paused`, `petite-caps`, `pixelated`, `pointer`, `preserve-3d`, `progress`, `proximity`, `relative`, `repeat`, `repeat no-repeat`, `repeat-x`, `repeat-y`, `reverse`, `ridge`, `right`, `round`, `row`, `row-resize`, `row-reverse`, `rtl`, `ruby`, `ruby-base`, `ruby-base-container`, `ruby-text`, `ruby-text-container`, `run-in`, `running`, `s-resize`, `sans-serif`, `saturation`, `scale-down`, `screen`, `scroll`, `se-resize`, `semi-condensed`, `semi-expanded`, `separate`, `serif`, `sesame`, `show`, `sideways`, `sideways-left`, `sideways-right`, `slice`, `small`, `small-caps`, `smaller`, `smooth`, 
`snap`, `soft-light`, `solid`, `space`, `space-around`, `space-between`, `spaces`, `square`, `start`, `static`, `step-end`, `step-start`, `sticky`, `stretch`, `strict`, `stroke-box`, `style`, `sw-resize`, `table`, `table-caption`, `table-cell`, `table-column`, `table-column-group`, `table-footer-group`, `table-header-group`, `table-row`, `table-row-group`, `text`, `thick`, `thin`, `titling-caps`, `to`, `top`, `triangle`, `ultra-condensed`, `ultra-expanded`, `under`, `underline`, `unicase`, `unset`, `upper-alpha`, `upper-latin`, `upper-roman`, `uppercase`, `upright`, `use-glyph-orientation`, `vertical`, `vertical-text`, `view-box`, `visible`, `w-resize`, `wait`, `wavy`, `weight`, `weight style`, `wrap`, `wrap-reverse`, `x-large`, `x-small`, `xx-large`, `xx-small`, `zoom-in`, `zoom-out`), KeywordConstant, nil}, + {Words(``, `\b`, `above`, `aural`, `behind`, `bidi-override`, `center-left`, `center-right`, `cjk-ideographic`, `continuous`, `crop`, `cross`, `embed`, `far-left`, `far-right`, `fast`, `faster`, `hebrew`, `high`, `higher`, `hiragana`, `hiragana-iroha`, `katakana`, `katakana-iroha`, `landscape`, `left-side`, `leftwards`, `level`, `loud`, `low`, `lower`, `message-box`, `middle`, `mix`, `narrower`, `once`, `portrait`, `right-side`, `rightwards`, `silent`, `slow`, `slower`, `small-caption`, `soft`, `spell-out`, `status-bar`, `super`, `text-bottom`, `text-top`, `wider`, `x-fast`, `x-high`, `x-loud`, `x-low`, `x-soft`, `yes`, `pre`, `pre-wrap`, `pre-line`), KeywordConstant, nil}, + {Words(``, `\b`, `aliceblue`, `antiquewhite`, `aqua`, `aquamarine`, `azure`, `beige`, `bisque`, `black`, `blanchedalmond`, `blue`, `blueviolet`, `brown`, `burlywood`, `cadetblue`, `chartreuse`, `chocolate`, `coral`, `cornflowerblue`, `cornsilk`, `crimson`, `cyan`, `darkblue`, `darkcyan`, `darkgoldenrod`, `darkgray`, `darkgreen`, `darkgrey`, `darkkhaki`, `darkmagenta`, `darkolivegreen`, `darkorange`, `darkorchid`, `darkred`, `darksalmon`, `darkseagreen`, `darkslateblue`, `darkslategray`, `darkslategrey`, `darkturquoise`, `darkviolet`, `deeppink`, `deepskyblue`, `dimgray`, `dimgrey`, `dodgerblue`, `firebrick`, `floralwhite`, `forestgreen`, `fuchsia`, `gainsboro`, `ghostwhite`, `gold`, `goldenrod`, `gray`, `green`, `greenyellow`, `grey`, `honeydew`, `hotpink`, `indianred`, `indigo`, `ivory`, `khaki`, `lavender`, `lavenderblush`, `lawngreen`, `lemonchiffon`, `lightblue`, `lightcoral`, `lightcyan`, `lightgoldenrodyellow`, `lightgray`, `lightgreen`, `lightgrey`, `lightpink`, `lightsalmon`, `lightseagreen`, `lightskyblue`, `lightslategray`, `lightslategrey`, `lightsteelblue`, `lightyellow`, `lime`, `limegreen`, `linen`, `magenta`, `maroon`, `mediumaquamarine`, `mediumblue`, `mediumorchid`, `mediumpurple`, `mediumseagreen`, `mediumslateblue`, `mediumspringgreen`, `mediumturquoise`, `mediumvioletred`, `midnightblue`, `mintcream`, `mistyrose`, `moccasin`, `navajowhite`, `navy`, `oldlace`, `olive`, `olivedrab`, `orange`, `orangered`, `orchid`, `palegoldenrod`, `palegreen`, `paleturquoise`, `palevioletred`, `papayawhip`, `peachpuff`, `peru`, `pink`, `plum`, `powderblue`, `purple`, `rebeccapurple`, `red`, `rosybrown`, `royalblue`, `saddlebrown`, `salmon`, `sandybrown`, `seagreen`, `seashell`, `sienna`, `silver`, `skyblue`, `slateblue`, `slategray`, `slategrey`, `snow`, `springgreen`, `steelblue`, `tan`, `teal`, `thistle`, `tomato`, `turquoise`, `violet`, `wheat`, `white`, `whitesmoke`, `yellow`, `yellowgreen`, `transparent`), KeywordConstant, nil}, + }, + "urls": { + {`(url)(\()(".*?")(\))`, ByGroups(NameBuiltin, 
Punctuation, LiteralStringDouble, Punctuation), nil}, + {`(url)(\()('.*?')(\))`, ByGroups(NameBuiltin, Punctuation, LiteralStringSingle, Punctuation), nil}, + {`(url)(\()(.*?)(\))`, ByGroups(NameBuiltin, Punctuation, LiteralStringOther, Punctuation), nil}, + }, + "numeric-values": { + {`\#[a-zA-Z0-9]{1,6}`, LiteralNumberHex, nil}, + {`[+\-]?[0-9]*[.][0-9]+`, LiteralNumberFloat, Push("numeric-end")}, + {`[+\-]?[0-9]+`, LiteralNumberInteger, Push("numeric-end")}, + }, + "numeric-end": { + {Words(``, `\b`, `deg`, `grad`, `rad`, `turn`, `Hz`, `kHz`, `em`, `ex`, `ch`, `rem`, `vh`, `vw`, `vmin`, `vmax`, `px`, `mm`, `cm`, `in`, `pt`, `pc`, `q`, `dpi`, `dpcm`, `dppx`, `s`, `ms`), KeywordType, nil}, + {`%`, KeywordType, nil}, + Default(Pop(1)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/c/cython.go b/vendor/github.com/alecthomas/chroma/lexers/c/cython.go new file mode 100644 index 00000000..701e2b79 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/c/cython.go @@ -0,0 +1,135 @@ +package c + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Cython lexer. +var Cython = internal.Register(MustNewLexer( + &Config{ + Name: "Cython", + Aliases: []string{"cython", "pyx", "pyrex"}, + Filenames: []string{"*.pyx", "*.pxd", "*.pxi"}, + MimeTypes: []string{"text/x-cython", "application/x-cython"}, + }, + Rules{ + "root": { + {`\n`, Text, nil}, + {`^(\s*)("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringDoc), nil}, + {`^(\s*)('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringDoc), nil}, + {`[^\S\n]+`, Text, nil}, + {`#.*$`, Comment, nil}, + {`[]{}:(),;[]`, Punctuation, nil}, + {`\\\n`, Text, nil}, + {`\\`, Text, nil}, + {`(in|is|and|or|not)\b`, OperatorWord, nil}, + {`(<)([a-zA-Z0-9.?]+)(>)`, ByGroups(Punctuation, KeywordType, Punctuation), nil}, + {`!=|==|<<|>>|[-~+/*%=<>&^|.?]`, Operator, nil}, + {`(from)(\d+)(<=)(\s+)(<)(\d+)(:)`, ByGroups(Keyword, LiteralNumberInteger, Operator, Name, Operator, Name, Punctuation), nil}, + Include("keywords"), + {`(def|property)(\s+)`, ByGroups(Keyword, Text), Push("funcname")}, + {`(cp?def)(\s+)`, ByGroups(Keyword, Text), Push("cdef")}, + {`(cdef)(:)`, ByGroups(Keyword, Punctuation), nil}, + {`(class|struct)(\s+)`, ByGroups(Keyword, Text), Push("classname")}, + {`(from)(\s+)`, ByGroups(Keyword, Text), Push("fromimport")}, + {`(c?import)(\s+)`, ByGroups(Keyword, Text), Push("import")}, + Include("builtins"), + Include("backtick"), + {`(?:[rR]|[uU][rR]|[rR][uU])"""`, LiteralString, Push("tdqs")}, + {`(?:[rR]|[uU][rR]|[rR][uU])'''`, LiteralString, Push("tsqs")}, + {`(?:[rR]|[uU][rR]|[rR][uU])"`, LiteralString, Push("dqs")}, + {`(?:[rR]|[uU][rR]|[rR][uU])'`, LiteralString, Push("sqs")}, + {`[uU]?"""`, LiteralString, Combined("stringescape", "tdqs")}, + {`[uU]?'''`, LiteralString, Combined("stringescape", "tsqs")}, + {`[uU]?"`, LiteralString, Combined("stringescape", "dqs")}, + {`[uU]?'`, LiteralString, Combined("stringescape", "sqs")}, + Include("name"), + Include("numbers"), + }, + "keywords": { + {Words(``, `\b`, `assert`, `break`, `by`, `continue`, `ctypedef`, `del`, `elif`, `else`, `except`, `except?`, `exec`, `finally`, `for`, `fused`, `gil`, `global`, `if`, `include`, `lambda`, `nogil`, `pass`, `print`, `raise`, `return`, `try`, `while`, `yield`, `as`, `with`), Keyword, nil}, + {`(DEF|IF|ELIF|ELSE)\b`, CommentPreproc, nil}, + }, + "builtins": { + {Words(`(?`, CommentPreproc, Pop(1)}, + 
{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil}, + {`\s+`, Text, nil}, + {`#.*?\n`, CommentSingle, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*\*/`, CommentMultiline, nil}, + {`/\*\*.*?\*/`, LiteralStringDoc, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameAttribute), nil}, + {`[~!%^&*+=|:.<>/@-]+`, Operator, nil}, + {`\?`, Operator, nil}, + {`[\[\]{}();,]+`, Punctuation, nil}, + {`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")}, + {`(function)(\s*)(?=\()`, ByGroups(Keyword, Text), nil}, + {`(function)(\s+)(&?)(\s*)`, ByGroups(Keyword, Text, Operator, Text), Push("functionname")}, + {`(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, NameConstant), nil}, + {`(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b`, Keyword, nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + Include("magicconstants"), + {`\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}`, NameVariable, nil}, + {`\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameVariable, nil}, + {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameOther, nil}, + {`(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil}, + {`0[0-7]+`, LiteralNumberOct, nil}, + {`0x[a-f0-9]+`, LiteralNumberHex, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`0b[01]+`, LiteralNumberBin, nil}, + {`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil}, + {"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil}, + {`"`, LiteralStringDouble, Push("string")}, + }, + "magicfuncs": { + {Words(``, `\b`, `__construct`, `__destruct`, `__call`, `__callStatic`, `__get`, `__set`, `__isset`, `__unset`, `__sleep`, `__wakeup`, `__toString`, `__invoke`, `__set_state`, `__clone`, `__debugInfo`), NameFunctionMagic, nil}, + }, + "magicconstants": { + {Words(``, `\b`, `__LINE__`, `__FILE__`, `__DIR__`, `__FUNCTION__`, `__CLASS__`, `__TRAIT__`, `__METHOD__`, `__NAMESPACE__`), NameConstant, nil}, + }, + "classname": { + {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameClass, Pop(1)}, + }, + "functionname": { + Include("magicfuncs"), + {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameFunction, Pop(1)}, + Default(Pop(1)), + }, + "string": { + {`"`, LiteralStringDouble, Pop(1)}, + {`[^{$"\\]+`, LiteralStringDouble, nil}, + {`\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})`, LiteralStringEscape, nil}, + {`\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?`, LiteralStringInterpol, nil}, + {`(\{\$\{)(.*?)(\}\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil}, + {`(\{)(\$.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil}, + {`(\$\{)(\S+)(\})`, 
ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil}, + {`[${\\]`, LiteralStringDouble, nil}, + }, + }, +).SetAnalyser(func(text string) float32 { + if strings.Contains(text, "/-]|as\b`, Operator, nil}, + {`[a-zA-Z_$]\w*:`, NameLabel, nil}, + {`[a-zA-Z_$]\w*`, Name, nil}, + {`[(){}\[\],.;]`, Punctuation, nil}, + {`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`\d+(\.\d*)?([eE][+-]?\d+)?`, LiteralNumber, nil}, + {`\.\d+([eE][+-]?\d+)?`, LiteralNumber, nil}, + {`\n`, Text, nil}, + }, + "class": { + {`[a-zA-Z_$]\w*`, NameClass, Pop(1)}, + }, + "import_decl": { + Include("string_literal"), + {`\s+`, Text, nil}, + {`\b(as|show|hide)\b`, Keyword, nil}, + {`[a-zA-Z_$]\w*`, Name, nil}, + {`\,`, Punctuation, nil}, + {`\;`, Punctuation, Pop(1)}, + }, + "string_literal": { + {`r"""([\w\W]*?)"""`, LiteralStringDouble, nil}, + {`r'''([\w\W]*?)'''`, LiteralStringSingle, nil}, + {`r"(.*?)"`, LiteralStringDouble, nil}, + {`r'(.*?)'`, LiteralStringSingle, nil}, + {`"""`, LiteralStringDouble, Push("string_double_multiline")}, + {`'''`, LiteralStringSingle, Push("string_single_multiline")}, + {`"`, LiteralStringDouble, Push("string_double")}, + {`'`, LiteralStringSingle, Push("string_single")}, + }, + "string_common": { + {`\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z'\"$\\])`, LiteralStringEscape, nil}, + {`(\$)([a-zA-Z_]\w*)`, ByGroups(LiteralStringInterpol, Name), nil}, + {`(\$\{)(.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil}, + }, + "string_double": { + {`"`, LiteralStringDouble, Pop(1)}, + {`[^"$\\\n]+`, LiteralStringDouble, nil}, + Include("string_common"), + {`\$+`, LiteralStringDouble, nil}, + }, + "string_double_multiline": { + {`"""`, LiteralStringDouble, Pop(1)}, + {`[^"$\\]+`, LiteralStringDouble, nil}, + Include("string_common"), + {`(\$|\")+`, LiteralStringDouble, nil}, + }, + "string_single": { + {`'`, LiteralStringSingle, Pop(1)}, + {`[^'$\\\n]+`, LiteralStringSingle, nil}, + Include("string_common"), + {`\$+`, LiteralStringSingle, nil}, + }, + "string_single_multiline": { + {`'''`, LiteralStringSingle, Pop(1)}, + {`[^\'$\\]+`, LiteralStringSingle, nil}, + Include("string_common"), + {`(\$|\')+`, LiteralStringSingle, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/diff.go b/vendor/github.com/alecthomas/chroma/lexers/d/diff.go new file mode 100644 index 00000000..264ed45c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/d/diff.go @@ -0,0 +1,29 @@ +package d + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Diff lexer. +var Diff = internal.Register(MustNewLexer( + &Config{ + Name: "Diff", + Aliases: []string{"diff", "udiff"}, + EnsureNL: true, + Filenames: []string{"*.diff", "*.patch"}, + MimeTypes: []string{"text/x-diff", "text/x-patch"}, + }, + Rules{ + "root": { + {` .*\n`, Text, nil}, + {`\+.*\n`, GenericInserted, nil}, + {`-.*\n`, GenericDeleted, nil}, + {`!.*\n`, GenericStrong, nil}, + {`@.*\n`, GenericSubheading, nil}, + {`([Ii]ndex|diff).*\n`, GenericHeading, nil}, + {`=.*\n`, GenericHeading, nil}, + {`.*\n`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/django.go b/vendor/github.com/alecthomas/chroma/lexers/d/django.go new file mode 100644 index 00000000..d72d99a3 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/d/django.go @@ -0,0 +1,53 @@ +package d + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Django/Jinja lexer. +var DjangoJinja = internal.Register(MustNewLexer( + &Config{ + Name: "Django/Jinja", + Aliases: []string{"django", "jinja"}, + Filenames: []string{}, + MimeTypes: []string{"application/x-django-templating", "application/x-jinja"}, + DotAll: true, + }, + Rules{ + "root": { + {`[^{]+`, Other, nil}, + {`\{\{`, CommentPreproc, Push("var")}, + {`\{[*#].*?[*#]\}`, Comment, nil}, + {`(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endcomment)(\s*-?)(%\})`, ByGroups(CommentPreproc, Text, Keyword, Text, CommentPreproc, Comment, CommentPreproc, Text, Keyword, Text, CommentPreproc), nil}, + {`(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endraw)(\s*-?)(%\})`, ByGroups(CommentPreproc, Text, Keyword, Text, CommentPreproc, Text, CommentPreproc, Text, Keyword, Text, CommentPreproc), nil}, + {`(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)`, ByGroups(CommentPreproc, Text, Keyword, Text, NameFunction), Push("block")}, + {`(\{%)(-?\s*)([a-zA-Z_]\w*)`, ByGroups(CommentPreproc, Text, Keyword), Push("block")}, + {`\{`, Other, nil}, + }, + "varnames": { + {`(\|)(\s*)([a-zA-Z_]\w*)`, ByGroups(Operator, Text, NameFunction), nil}, + {`(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)`, ByGroups(Keyword, Text, Keyword, Text, NameFunction), nil}, + {`(_|true|false|none|True|False|None)\b`, KeywordPseudo, nil}, + {`(in|as|reversed|recursive|not|and|or|is|if|else|import|with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b`, Keyword, nil}, + {`(loop|block|super|forloop)\b`, NameBuiltin, nil}, + {`[a-zA-Z_][\w-]*`, NameVariable, nil}, + {`\.\w+`, NameVariable, nil}, + {`:?"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`([{}()\[\]+\-*/,:~]|[><=]=?)`, Operator, nil}, + {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, + }, + "var": { + {`\s+`, Text, nil}, + {`(-?)(\}\})`, ByGroups(Text, CommentPreproc), Pop(1)}, + Include("varnames"), + }, + "block": { + {`\s+`, Text, nil}, + {`(-?)(%\})`, ByGroups(Text, CommentPreproc), Pop(1)}, + Include("varnames"), + {`.`, Punctuation, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/docker.go b/vendor/github.com/alecthomas/chroma/lexers/d/docker.go new file mode 100644 index 00000000..412646bc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/d/docker.go @@ -0,0 +1,27 @@ +package d + +import ( + . "github.com/alecthomas/chroma" // nolint + . "github.com/alecthomas/chroma/lexers/b" + "github.com/alecthomas/chroma/lexers/internal" +) + +// Docker lexer. 
+var Docker = internal.Register(MustNewLexer( + &Config{ + Name: "Docker", + Aliases: []string{"docker", "dockerfile"}, + Filenames: []string{"Dockerfile", "*.docker"}, + MimeTypes: []string{"text/x-dockerfile-config"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`^(ONBUILD)(\s+)((?:FROM|MAINTAINER|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|VOLUME|WORKDIR))\b`, ByGroups(NameKeyword, TextWhitespace, Keyword), nil}, + {`^((?:FROM|MAINTAINER|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|VOLUME|WORKDIR))\b(.*)`, ByGroups(Keyword, LiteralString), nil}, + {`#.*`, Comment, nil}, + {`RUN`, Keyword, nil}, + {`(.*\\\n)*.+`, Using(Bash), nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/d/dtd.go b/vendor/github.com/alecthomas/chroma/lexers/d/dtd.go new file mode 100644 index 00000000..99bf5d37 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/d/dtd.go @@ -0,0 +1,69 @@ +package d + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Dtd lexer. +var Dtd = internal.Register(MustNewLexer( + &Config{ + Name: "DTD", + Aliases: []string{"dtd"}, + Filenames: []string{"*.dtd"}, + MimeTypes: []string{"application/xml-dtd"}, + DotAll: true, + }, + Rules{ + "root": { + Include("common"), + {`(<!ELEMENT)(\s+)(\S+)`, ByGroups(Keyword, Text, NameTag), Push("element")}, + {`(<!ATTLIST)(\s+)(\S+)`, ByGroups(Keyword, Text, NameTag), Push("attlist")}, + {`(<!ENTITY)(\s+)(\S+)`, ByGroups(Keyword, Text, NameEntity), Push("entity")}, + {`(<!NOTATION)(\s+)(\S+)`, ByGroups(Keyword, Text, NameTag), Push("notation")}, + {`(<!\[)([^\[\s]+)(\s*)(\[)`, ByGroups(Keyword, NameEntity, Text, Keyword), nil}, + {`(<!DOCTYPE)(\s+)([^>\s]+)`, ByGroups(Keyword, Text, NameTag), nil}, + {`PUBLIC|SYSTEM`, KeywordConstant, nil}, + {`[\[\]>]`, Keyword, nil}, + }, + "common": { + {`\s+`, Text, nil}, + {`(%|&)[^;]*;`, NameEntity, nil}, + {`<!--`, Comment, Push("comment")}, + {`[(|)*,?+]`, Operator, nil}, + {`"[^"]*"`, LiteralStringDouble, nil}, + {`\'[^\']*\'`, LiteralStringSingle, nil}, + }, + "comment": { + {`[^-]+`, Comment, nil}, + {`-->`, Comment, Pop(1)}, + {`-`, Comment, nil}, + }, + "element": { + Include("common"), + {`EMPTY|ANY|#PCDATA`, KeywordConstant, nil}, + {`[^>\s|()?+*,]+`, NameTag, nil}, + {`>`, Keyword, Pop(1)}, + }, + "attlist": { + Include("common"), + {`CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION`, KeywordConstant, nil}, + {`#REQUIRED|#IMPLIED|#FIXED`, KeywordConstant, nil}, + {`xml:space|xml:lang`, KeywordReserved, nil}, + {`[^>\s|()?+*,]+`, NameAttribute, nil}, + {`>`, Keyword, Pop(1)}, + }, + "entity": { + Include("common"), + {`SYSTEM|PUBLIC|NDATA`, KeywordConstant, nil}, + {`[^>\s|()?+*,]+`, NameEntity, nil}, + {`>`, Keyword, Pop(1)}, + }, + "notation": { + Include("common"), + {`SYSTEM|PUBLIC`, KeywordConstant, nil}, + {`[^>\s|()?+*,]+`, NameAttribute, nil}, + {`>`, Keyword, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/ebnf.go b/vendor/github.com/alecthomas/chroma/lexers/e/ebnf.go new file mode 100644 index 00000000..42a3a379 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/e/ebnf.go @@ -0,0 +1,51 @@ +package e + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Ebnf lexer.
+var Ebnf = internal.Register(MustNewLexer( + &Config{ + Name: "EBNF", + Aliases: []string{"ebnf"}, + Filenames: []string{"*.ebnf"}, + MimeTypes: []string{"text/x-ebnf"}, + }, + Rules{ + "root": { + Include("whitespace"), + Include("comment_start"), + Include("identifier"), + {`=`, Operator, Push("production")}, + }, + "production": { + Include("whitespace"), + Include("comment_start"), + Include("identifier"), + {`"[^"]*"`, LiteralStringDouble, nil}, + {`'[^']*'`, LiteralStringSingle, nil}, + {`(\?[^?]*\?)`, NameEntity, nil}, + {`[\[\]{}(),|]`, Punctuation, nil}, + {`-`, Operator, nil}, + {`;`, Punctuation, Pop(1)}, + {`\.`, Punctuation, Pop(1)}, + }, + "whitespace": { + {`\s+`, Text, nil}, + }, + "comment_start": { + {`\(\*`, CommentMultiline, Push("comment")}, + }, + "comment": { + {`[^*)]`, CommentMultiline, nil}, + Include("comment_start"), + {`\*\)`, CommentMultiline, Pop(1)}, + {`[*)]`, CommentMultiline, nil}, + }, + "identifier": { + {`([a-zA-Z][\w \-]*)`, Keyword, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/elixir.go b/vendor/github.com/alecthomas/chroma/lexers/e/elixir.go new file mode 100644 index 00000000..23fa8af6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/e/elixir.go @@ -0,0 +1,270 @@ +package e + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Elixir lexer. +var Elixir = internal.Register(MustNewLexer( + &Config{ + Name: "Elixir", + Aliases: []string{"elixir", "ex", "exs"}, + Filenames: []string{"*.ex", "*.exs"}, + MimeTypes: []string{"text/x-elixir"}, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`#.*$`, CommentSingle, nil}, + {`(\?)(\\x\{)([\da-fA-F]+)(\})`, ByGroups(LiteralStringChar, LiteralStringEscape, LiteralNumberHex, LiteralStringEscape), nil}, + {`(\?)(\\x[\da-fA-F]{1,2})`, ByGroups(LiteralStringChar, LiteralStringEscape), nil}, + {`(\?)(\\[abdefnrstv])`, ByGroups(LiteralStringChar, LiteralStringEscape), nil}, + {`\?\\?.`, LiteralStringChar, nil}, + {`:::`, LiteralStringSymbol, nil}, + {`::`, Operator, nil}, + {`:(?:\.\.\.|<<>>|%\{\}|%|\{\})`, LiteralStringSymbol, nil}, + {`:(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&))`, LiteralStringSymbol, nil}, + {`:"`, LiteralStringSymbol, Push("string_double_atom")}, + {`:'`, LiteralStringSymbol, Push("string_single_atom")}, + {`((?:\.\.\.|<<>>|%\{\}|%|\{\})|(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&)))(:)(?=\s|\n)`, ByGroups(LiteralStringSymbol, Punctuation), nil}, + {`@(?:\.\.\.|[a-z_]\w*[!?]?)`, NameAttribute, nil}, + {`(?:\.\.\.|[a-z_]\w*[!?]?)`, Name, nil}, + {`(%?)([A-Z]\w*(?:\.[A-Z]\w*)*)`, ByGroups(Punctuation, NameClass), nil}, + {`\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>`, Operator, nil}, + {`\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~`, Operator, nil}, + {`\\\\|\<\<|\>\>|\=\>|\(|\)|\:|\;|\,|\[|\]`, Punctuation, nil}, + {`&\d`, NameEntity, nil}, + {`\<|\>|\+|\-|\*|\/|\!|\^|\&`, Operator, nil}, + {`0b[01]+`, LiteralNumberBin, nil}, + {`0o[0-7]+`, LiteralNumberOct, nil}, + {`0x[\da-fA-F]+`, LiteralNumberHex, 
nil}, + {`\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?`, LiteralNumberFloat, nil}, + {`\d(_?\d)*`, LiteralNumberInteger, nil}, + {`"""\s*`, LiteralStringHeredoc, Push("heredoc_double")}, + {`'''\s*$`, LiteralStringHeredoc, Push("heredoc_single")}, + {`"`, LiteralStringDouble, Push("string_double")}, + {`'`, LiteralStringSingle, Push("string_single")}, + Include("sigils"), + {`%\{`, Punctuation, Push("map_key")}, + {`\{`, Punctuation, Push("tuple")}, + }, + "heredoc_double": { + {`^\s*"""`, LiteralStringHeredoc, Pop(1)}, + Include("heredoc_interpol"), + }, + "heredoc_single": { + {`^\s*'''`, LiteralStringHeredoc, Pop(1)}, + Include("heredoc_interpol"), + }, + "heredoc_interpol": { + {`[^#\\\n]+`, LiteralStringHeredoc, nil}, + Include("escapes"), + {`\\.`, LiteralStringHeredoc, nil}, + {`\n+`, LiteralStringHeredoc, nil}, + Include("interpol"), + }, + "heredoc_no_interpol": { + {`[^\\\n]+`, LiteralStringHeredoc, nil}, + {`\\.`, LiteralStringHeredoc, nil}, + {`\n+`, LiteralStringHeredoc, nil}, + }, + "escapes": { + {`(\\x\{)([\da-fA-F]+)(\})`, ByGroups(LiteralStringEscape, LiteralNumberHex, LiteralStringEscape), nil}, + {`(\\x[\da-fA-F]{1,2})`, LiteralStringEscape, nil}, + {`(\\[abdefnrstv])`, LiteralStringEscape, nil}, + }, + "interpol": { + {`#\{`, LiteralStringInterpol, Push("interpol_string")}, + }, + "interpol_string": { + {`\}`, LiteralStringInterpol, Pop(1)}, + Include("root"), + }, + "map_key": { + Include("root"), + {`:`, Punctuation, Push("map_val")}, + {`=>`, Punctuation, Push("map_val")}, + {`\}`, Punctuation, Pop(1)}, + }, + "map_val": { + Include("root"), + {`,`, Punctuation, Pop(1)}, + {`(?=\})`, Punctuation, Pop(1)}, + }, + "tuple": { + Include("root"), + {`\}`, Punctuation, Pop(1)}, + }, + "string_double": { + {`[^#"\\]+`, LiteralStringDouble, nil}, + Include("escapes"), + {`\\.`, LiteralStringDouble, nil}, + {`(")`, ByGroups(LiteralStringDouble), Pop(1)}, + Include("interpol"), + }, + "string_single": { + {`[^#'\\]+`, LiteralStringSingle, nil}, + Include("escapes"), + {`\\.`, LiteralStringSingle, nil}, + {`(')`, ByGroups(LiteralStringSingle), Pop(1)}, + Include("interpol"), + }, + "string_double_atom": { + {`[^#"\\]+`, LiteralStringSymbol, nil}, + Include("escapes"), + {`\\.`, LiteralStringSymbol, nil}, + {`(")`, ByGroups(LiteralStringSymbol), Pop(1)}, + Include("interpol"), + }, + "string_single_atom": { + {`[^#'\\]+`, LiteralStringSymbol, nil}, + Include("escapes"), + {`\\.`, LiteralStringSymbol, nil}, + {`(')`, ByGroups(LiteralStringSymbol), Pop(1)}, + Include("interpol"), + }, + "sigils": { + {`(~[a-z])(""")`, ByGroups(LiteralStringOther, LiteralStringHeredoc), Push("triquot-end", "triquot-intp")}, + {`(~[A-Z])(""")`, ByGroups(LiteralStringOther, LiteralStringHeredoc), Push("triquot-end", "triquot-no-intp")}, + {`(~[a-z])(''')`, ByGroups(LiteralStringOther, LiteralStringHeredoc), Push("triapos-end", "triapos-intp")}, + {`(~[A-Z])(''')`, ByGroups(LiteralStringOther, LiteralStringHeredoc), Push("triapos-end", "triapos-no-intp")}, + {`~[a-z]\{`, LiteralStringOther, Push("cb-intp")}, + {`~[A-Z]\{`, LiteralStringOther, Push("cb-no-intp")}, + {`~[a-z]\[`, LiteralStringOther, Push("sb-intp")}, + {`~[A-Z]\[`, LiteralStringOther, Push("sb-no-intp")}, + {`~[a-z]\(`, LiteralStringOther, Push("pa-intp")}, + {`~[A-Z]\(`, LiteralStringOther, Push("pa-no-intp")}, + {`~[a-z]<`, LiteralStringOther, Push("ab-intp")}, + {`~[A-Z]<`, LiteralStringOther, Push("ab-no-intp")}, + {`~[a-z]/`, LiteralStringOther, Push("slas-intp")}, + {`~[A-Z]/`, LiteralStringOther, Push("slas-no-intp")}, + 
{`~[a-z]\|`, LiteralStringOther, Push("pipe-intp")}, + {`~[A-Z]\|`, LiteralStringOther, Push("pipe-no-intp")}, + {`~[a-z]"`, LiteralStringOther, Push("quot-intp")}, + {`~[A-Z]"`, LiteralStringOther, Push("quot-no-intp")}, + {`~[a-z]'`, LiteralStringOther, Push("apos-intp")}, + {`~[A-Z]'`, LiteralStringOther, Push("apos-no-intp")}, + }, + "triquot-end": { + {`[a-zA-Z]+`, LiteralStringOther, Pop(1)}, + Default(Pop(1)), + }, + "triquot-intp": { + {`^\s*"""`, LiteralStringHeredoc, Pop(1)}, + Include("heredoc_interpol"), + }, + "triquot-no-intp": { + {`^\s*"""`, LiteralStringHeredoc, Pop(1)}, + Include("heredoc_no_interpol"), + }, + "triapos-end": { + {`[a-zA-Z]+`, LiteralStringOther, Pop(1)}, + Default(Pop(1)), + }, + "triapos-intp": { + {`^\s*'''`, LiteralStringHeredoc, Pop(1)}, + Include("heredoc_interpol"), + }, + "triapos-no-intp": { + {`^\s*'''`, LiteralStringHeredoc, Pop(1)}, + Include("heredoc_no_interpol"), + }, + "cb-intp": { + {`[^#\}\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`\}[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "cb-no-intp": { + {`[^\}\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`\}[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "sb-intp": { + {`[^#\]\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`\][a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "sb-no-intp": { + {`[^\]\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`\][a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "pa-intp": { + {`[^#\)\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`\)[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "pa-no-intp": { + {`[^\)\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`\)[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "ab-intp": { + {`[^#>\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`>[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "ab-no-intp": { + {`[^>\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`>[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "slas-intp": { + {`[^#/\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`/[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "slas-no-intp": { + {`[^/\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`/[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "pipe-intp": { + {`[^#\|\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`\|[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "pipe-no-intp": { + {`[^\|\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`\|[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "quot-intp": { + {`[^#"\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`"[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "quot-no-intp": { + {`[^"\\]+`, LiteralStringOther, nil}, + {`\\.`, LiteralStringOther, nil}, + {`"[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + "apos-intp": { + {`[^#'\\]+`, LiteralStringOther, nil}, + Include("escapes"), + {`\\.`, LiteralStringOther, nil}, + {`'[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + Include("interpol"), + }, + "apos-no-intp": { + {`[^'\\]+`, LiteralStringOther, nil}, + {`\\.`, 
LiteralStringOther, nil}, + {`'[a-zA-Z]*`, LiteralStringOther, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/elm.go b/vendor/github.com/alecthomas/chroma/lexers/e/elm.go new file mode 100644 index 00000000..a71c6270 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/e/elm.go @@ -0,0 +1,59 @@ +package e + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Elm lexer. +var Elm = internal.Register(MustNewLexer( + &Config{ + Name: "Elm", + Aliases: []string{"elm"}, + Filenames: []string{"*.elm"}, + MimeTypes: []string{"text/x-elm"}, + }, + Rules{ + "root": { + {`\{-`, CommentMultiline, Push("comment")}, + {`--.*`, CommentSingle, nil}, + {`\s+`, Text, nil}, + {`"`, LiteralString, Push("doublequote")}, + {`^\s*module\s*`, KeywordNamespace, Push("imports")}, + {`^\s*import\s*`, KeywordNamespace, Push("imports")}, + {`\[glsl\|.*`, NameEntity, Push("shader")}, + {Words(``, `\b`, `alias`, `as`, `case`, `else`, `if`, `import`, `in`, `let`, `module`, `of`, `port`, `then`, `type`, `where`), KeywordReserved, nil}, + {`[A-Z]\w*`, KeywordType, nil}, + {`^main `, KeywordReserved, nil}, + {Words(`\(`, `\)`, `~`, `||`, `|>`, `|`, "`", `^`, `\`, `'`, `>>`, `>=`, `>`, `==`, `=`, `<~`, `<|`, `<=`, `<<`, `<-`, `<`, `::`, `:`, `/=`, `//`, `/`, `..`, `.`, `->`, `-`, `++`, `+`, `*`, `&&`, `%`), NameFunction, nil}, + {Words(``, ``, `~`, `||`, `|>`, `|`, "`", `^`, `\`, `'`, `>>`, `>=`, `>`, `==`, `=`, `<~`, `<|`, `<=`, `<<`, `<-`, `<`, `::`, `:`, `/=`, `//`, `/`, `..`, `.`, `->`, `-`, `++`, `+`, `*`, `&&`, `%`), NameFunction, nil}, + Include("numbers"), + {`[a-z_][a-zA-Z_\']*`, NameVariable, nil}, + {`[,()\[\]{}]`, Punctuation, nil}, + }, + "comment": { + {`-(?!\})`, CommentMultiline, nil}, + {`\{-`, CommentMultiline, Push("comment")}, + {`[^-}]`, CommentMultiline, nil}, + {`-\}`, CommentMultiline, Pop(1)}, + }, + "doublequote": { + {`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil}, + {`\\[nrfvb\\"]`, LiteralStringEscape, nil}, + {`[^"]`, LiteralString, nil}, + {`"`, LiteralString, Pop(1)}, + }, + "imports": { + {`\w+(\.\w+)*`, NameClass, Pop(1)}, + }, + "numbers": { + {`_?\d+\.(?=\d+)`, LiteralNumberFloat, nil}, + {`_?\d+`, LiteralNumberInteger, nil}, + }, + "shader": { + {`\|(?!\])`, NameEntity, nil}, + {`\|\]`, NameEntity, Pop(1)}, + {`.*\n`, NameEntity, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/emacs.go b/vendor/github.com/alecthomas/chroma/lexers/e/emacs.go new file mode 100644 index 00000000..78ffda12 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/e/emacs.go @@ -0,0 +1,582 @@ +package e + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var ( + emacsMacros = []string{ + "atomic-change-group", "case", "block", "cl-block", "cl-callf", "cl-callf2", + "cl-case", "cl-decf", "cl-declaim", "cl-declare", + "cl-define-compiler-macro", "cl-defmacro", "cl-defstruct", + "cl-defsubst", "cl-deftype", "cl-defun", "cl-destructuring-bind", + "cl-do", "cl-do*", "cl-do-all-symbols", "cl-do-symbols", "cl-dolist", + "cl-dotimes", "cl-ecase", "cl-etypecase", "eval-when", "cl-eval-when", "cl-flet", + "cl-flet*", "cl-function", "cl-incf", "cl-labels", "cl-letf", + "cl-letf*", "cl-load-time-value", "cl-locally", "cl-loop", + "cl-macrolet", "cl-multiple-value-bind", "cl-multiple-value-setq", + "cl-progv", "cl-psetf", "cl-psetq", "cl-pushnew", "cl-remf", + "cl-return", "cl-return-from", "cl-rotatef", "cl-shiftf", + "cl-symbol-macrolet", "cl-tagbody", "cl-the", "cl-typecase", + "combine-after-change-calls", "condition-case-unless-debug", "decf", + "declaim", "declare", "declare-function", "def-edebug-spec", + "defadvice", "defclass", "defcustom", "defface", "defgeneric", + "defgroup", "define-advice", "define-alternatives", + "define-compiler-macro", "define-derived-mode", "define-generic-mode", + "define-global-minor-mode", "define-globalized-minor-mode", + "define-minor-mode", "define-modify-macro", + "define-obsolete-face-alias", "define-obsolete-function-alias", + "define-obsolete-variable-alias", "define-setf-expander", + "define-skeleton", "defmacro", "defmethod", "defsetf", "defstruct", + "defsubst", "deftheme", "deftype", "defun", "defvar-local", + "delay-mode-hooks", "destructuring-bind", "do", "do*", + "do-all-symbols", "do-symbols", "dolist", "dont-compile", "dotimes", + "dotimes-with-progress-reporter", "ecase", "ert-deftest", "etypecase", + "eval-and-compile", "eval-when-compile", "flet", "ignore-errors", + "incf", "labels", "lambda", "letrec", "lexical-let", "lexical-let*", + "loop", "multiple-value-bind", "multiple-value-setq", "noreturn", + "oref", "oref-default", "oset", "oset-default", "pcase", + "pcase-defmacro", "pcase-dolist", "pcase-exhaustive", "pcase-let", + "pcase-let*", "pop", "psetf", "psetq", "push", "pushnew", "remf", + "return", "rotatef", "rx", "save-match-data", "save-selected-window", + "save-window-excursion", "setf", "setq-local", "shiftf", + "track-mouse", "typecase", "unless", "use-package", "when", + "while-no-input", "with-case-table", "with-category-table", + "with-coding-priority", "with-current-buffer", "with-demoted-errors", + "with-eval-after-load", "with-file-modes", "with-local-quit", + "with-output-to-string", "with-output-to-temp-buffer", + "with-parsed-tramp-file-name", "with-selected-frame", + "with-selected-window", "with-silent-modifications", "with-slots", + "with-syntax-table", "with-temp-buffer", "with-temp-file", + "with-temp-message", "with-timeout", "with-tramp-connection-property", + "with-tramp-file-property", "with-tramp-progress-reporter", + "with-wrapper-hook", "load-time-value", "locally", "macrolet", "progv", + "return-from", + } + + emacsSpecialForms = []string{ + "and", "catch", "cond", "condition-case", "defconst", "defvar", + "function", "if", "interactive", "let", "let*", "or", "prog1", + "prog2", "progn", "quote", "save-current-buffer", "save-excursion", + "save-restriction", "setq", "setq-default", "subr-arity", + "unwind-protect", "while", + } + + emacsBuiltinFunction = []string{ + "%", "*", "+", "-", "/", "/=", "1+", "1-", "<", "<=", "=", ">", ">=", + "Snarf-documentation", 
"abort-recursive-edit", "abs", + "accept-process-output", "access-file", "accessible-keymaps", "acos", + "active-minibuffer-window", "add-face-text-property", + "add-name-to-file", "add-text-properties", "all-completions", + "append", "apply", "apropos-internal", "aref", "arrayp", "aset", + "ash", "asin", "assoc", "assoc-string", "assq", "atan", "atom", + "autoload", "autoload-do-load", "backtrace", "backtrace--locals", + "backtrace-debug", "backtrace-eval", "backtrace-frame", + "backward-char", "backward-prefix-chars", "barf-if-buffer-read-only", + "base64-decode-region", "base64-decode-string", + "base64-encode-region", "base64-encode-string", "beginning-of-line", + "bidi-find-overridden-directionality", "bidi-resolved-levels", + "bitmap-spec-p", "bobp", "bolp", "bool-vector", + "bool-vector-count-consecutive", "bool-vector-count-population", + "bool-vector-exclusive-or", "bool-vector-intersection", + "bool-vector-not", "bool-vector-p", "bool-vector-set-difference", + "bool-vector-subsetp", "bool-vector-union", "boundp", + "buffer-base-buffer", "buffer-chars-modified-tick", + "buffer-enable-undo", "buffer-file-name", "buffer-has-markers-at", + "buffer-list", "buffer-live-p", "buffer-local-value", + "buffer-local-variables", "buffer-modified-p", "buffer-modified-tick", + "buffer-name", "buffer-size", "buffer-string", "buffer-substring", + "buffer-substring-no-properties", "buffer-swap-text", "bufferp", + "bury-buffer-internal", "byte-code", "byte-code-function-p", + "byte-to-position", "byte-to-string", "byteorder", + "call-interactively", "call-last-kbd-macro", "call-process", + "call-process-region", "cancel-kbd-macro-events", "capitalize", + "capitalize-region", "capitalize-word", "car", "car-less-than-car", + "car-safe", "case-table-p", "category-docstring", + "category-set-mnemonics", "category-table", "category-table-p", + "ccl-execute", "ccl-execute-on-string", "ccl-program-p", "cdr", + "cdr-safe", "ceiling", "char-after", "char-before", + "char-category-set", "char-charset", "char-equal", "char-or-string-p", + "char-resolve-modifiers", "char-syntax", "char-table-extra-slot", + "char-table-p", "char-table-parent", "char-table-range", + "char-table-subtype", "char-to-string", "char-width", "characterp", + "charset-after", "charset-id-internal", "charset-plist", + "charset-priority-list", "charsetp", "check-coding-system", + "check-coding-systems-region", "clear-buffer-auto-save-failure", + "clear-charset-maps", "clear-face-cache", "clear-font-cache", + "clear-image-cache", "clear-string", "clear-this-command-keys", + "close-font", "clrhash", "coding-system-aliases", + "coding-system-base", "coding-system-eol-type", "coding-system-p", + "coding-system-plist", "coding-system-priority-list", + "coding-system-put", "color-distance", "color-gray-p", + "color-supported-p", "combine-after-change-execute", + "command-error-default-function", "command-remapping", "commandp", + "compare-buffer-substrings", "compare-strings", + "compare-window-configurations", "completing-read", + "compose-region-internal", "compose-string-internal", + "composition-get-gstring", "compute-motion", "concat", "cons", + "consp", "constrain-to-field", "continue-process", + "controlling-tty-p", "coordinates-in-window-p", "copy-alist", + "copy-category-table", "copy-file", "copy-hash-table", "copy-keymap", + "copy-marker", "copy-sequence", "copy-syntax-table", "copysign", + "cos", "current-active-maps", "current-bidi-paragraph-direction", + "current-buffer", "current-case-table", "current-column", + 
"current-global-map", "current-idle-time", "current-indentation", + "current-input-mode", "current-local-map", "current-message", + "current-minor-mode-maps", "current-time", "current-time-string", + "current-time-zone", "current-window-configuration", + "cygwin-convert-file-name-from-windows", + "cygwin-convert-file-name-to-windows", "daemon-initialized", + "daemonp", "dbus--init-bus", "dbus-get-unique-name", + "dbus-message-internal", "debug-timer-check", "declare-equiv-charset", + "decode-big5-char", "decode-char", "decode-coding-region", + "decode-coding-string", "decode-sjis-char", "decode-time", + "default-boundp", "default-file-modes", "default-printer-name", + "default-toplevel-value", "default-value", "define-category", + "define-charset-alias", "define-charset-internal", + "define-coding-system-alias", "define-coding-system-internal", + "define-fringe-bitmap", "define-hash-table-test", "define-key", + "define-prefix-command", "delete", + "delete-all-overlays", "delete-and-extract-region", "delete-char", + "delete-directory-internal", "delete-field", "delete-file", + "delete-frame", "delete-other-windows-internal", "delete-overlay", + "delete-process", "delete-region", "delete-terminal", + "delete-window-internal", "delq", "describe-buffer-bindings", + "describe-vector", "destroy-fringe-bitmap", "detect-coding-region", + "detect-coding-string", "ding", "directory-file-name", + "directory-files", "directory-files-and-attributes", "discard-input", + "display-supports-face-attributes-p", "do-auto-save", "documentation", + "documentation-property", "downcase", "downcase-region", + "downcase-word", "draw-string", "dump-colors", "dump-emacs", + "dump-face", "dump-frame-glyph-matrix", "dump-glyph-matrix", + "dump-glyph-row", "dump-redisplay-history", "dump-tool-bar-row", + "elt", "emacs-pid", "encode-big5-char", "encode-char", + "encode-coding-region", "encode-coding-string", "encode-sjis-char", + "encode-time", "end-kbd-macro", "end-of-line", "eobp", "eolp", "eq", + "eql", "equal", "equal-including-properties", "erase-buffer", + "error-message-string", "eval", "eval-buffer", "eval-region", + "event-convert-list", "execute-kbd-macro", "exit-recursive-edit", + "exp", "expand-file-name", "expt", "external-debugging-output", + "face-attribute-relative-p", "face-attributes-as-vector", "face-font", + "fboundp", "fceiling", "fetch-bytecode", "ffloor", + "field-beginning", "field-end", "field-string", + "field-string-no-properties", "file-accessible-directory-p", + "file-acl", "file-attributes", "file-attributes-lessp", + "file-directory-p", "file-executable-p", "file-exists-p", + "file-locked-p", "file-modes", "file-name-absolute-p", + "file-name-all-completions", "file-name-as-directory", + "file-name-completion", "file-name-directory", + "file-name-nondirectory", "file-newer-than-file-p", "file-readable-p", + "file-regular-p", "file-selinux-context", "file-symlink-p", + "file-system-info", "file-system-info", "file-writable-p", + "fillarray", "find-charset-region", "find-charset-string", + "find-coding-systems-region-internal", "find-composition-internal", + "find-file-name-handler", "find-font", "find-operation-coding-system", + "float", "float-time", "floatp", "floor", "fmakunbound", + "following-char", "font-at", "font-drive-otf", "font-face-attributes", + "font-family-list", "font-get", "font-get-glyphs", + "font-get-system-font", "font-get-system-normal-font", "font-info", + "font-match-p", "font-otf-alternates", "font-put", + "font-shape-gstring", "font-spec", "font-variation-glyphs", 
+ "font-xlfd-name", "fontp", "fontset-font", "fontset-info", + "fontset-list", "fontset-list-all", "force-mode-line-update", + "force-window-update", "format", "format-mode-line", + "format-network-address", "format-time-string", "forward-char", + "forward-comment", "forward-line", "forward-word", + "frame-border-width", "frame-bottom-divider-width", + "frame-can-run-window-configuration-change-hook", "frame-char-height", + "frame-char-width", "frame-face-alist", "frame-first-window", + "frame-focus", "frame-font-cache", "frame-fringe-width", "frame-list", + "frame-live-p", "frame-or-buffer-changed-p", "frame-parameter", + "frame-parameters", "frame-pixel-height", "frame-pixel-width", + "frame-pointer-visible-p", "frame-right-divider-width", + "frame-root-window", "frame-scroll-bar-height", + "frame-scroll-bar-width", "frame-selected-window", "frame-terminal", + "frame-text-cols", "frame-text-height", "frame-text-lines", + "frame-text-width", "frame-total-cols", "frame-total-lines", + "frame-visible-p", "framep", "frexp", "fringe-bitmaps-at-pos", + "fround", "fset", "ftruncate", "funcall", "funcall-interactively", + "function-equal", "functionp", "gap-position", "gap-size", + "garbage-collect", "gc-status", "generate-new-buffer-name", "get", + "get-buffer", "get-buffer-create", "get-buffer-process", + "get-buffer-window", "get-byte", "get-char-property", + "get-char-property-and-overlay", "get-file-buffer", "get-file-char", + "get-internal-run-time", "get-load-suffixes", "get-pos-property", + "get-process", "get-screen-color", "get-text-property", + "get-unicode-property-internal", "get-unused-category", + "get-unused-iso-final-char", "getenv-internal", "gethash", + "gfile-add-watch", "gfile-rm-watch", "global-key-binding", + "gnutls-available-p", "gnutls-boot", "gnutls-bye", "gnutls-deinit", + "gnutls-error-fatalp", "gnutls-error-string", "gnutls-errorp", + "gnutls-get-initstage", "gnutls-peer-status", + "gnutls-peer-status-warning-describe", "goto-char", "gpm-mouse-start", + "gpm-mouse-stop", "group-gid", "group-real-gid", + "handle-save-session", "handle-switch-frame", "hash-table-count", + "hash-table-p", "hash-table-rehash-size", + "hash-table-rehash-threshold", "hash-table-size", "hash-table-test", + "hash-table-weakness", "iconify-frame", "identity", "image-flush", + "image-mask-p", "image-metadata", "image-size", "imagemagick-types", + "imagep", "indent-to", "indirect-function", "indirect-variable", + "init-image-library", "inotify-add-watch", "inotify-rm-watch", + "input-pending-p", "insert", "insert-and-inherit", + "insert-before-markers", "insert-before-markers-and-inherit", + "insert-buffer-substring", "insert-byte", "insert-char", + "insert-file-contents", "insert-startup-screen", "int86", + "integer-or-marker-p", "integerp", "interactive-form", "intern", + "intern-soft", "internal--track-mouse", "internal-char-font", + "internal-complete-buffer", "internal-copy-lisp-face", + "internal-default-process-filter", + "internal-default-process-sentinel", "internal-describe-syntax-value", + "internal-event-symbol-parse-modifiers", + "internal-face-x-get-resource", "internal-get-lisp-face-attribute", + "internal-lisp-face-attribute-values", "internal-lisp-face-empty-p", + "internal-lisp-face-equal-p", "internal-lisp-face-p", + "internal-make-lisp-face", "internal-make-var-non-special", + "internal-merge-in-global-face", + "internal-set-alternative-font-family-alist", + "internal-set-alternative-font-registry-alist", + "internal-set-font-selection-order", + 
"internal-set-lisp-face-attribute", + "internal-set-lisp-face-attribute-from-resource", + "internal-show-cursor", "internal-show-cursor-p", "interrupt-process", + "invisible-p", "invocation-directory", "invocation-name", "isnan", + "iso-charset", "key-binding", "key-description", + "keyboard-coding-system", "keymap-parent", "keymap-prompt", "keymapp", + "keywordp", "kill-all-local-variables", "kill-buffer", "kill-emacs", + "kill-local-variable", "kill-process", "last-nonminibuffer-frame", + "lax-plist-get", "lax-plist-put", "ldexp", "length", + "libxml-parse-html-region", "libxml-parse-xml-region", + "line-beginning-position", "line-end-position", "line-pixel-height", + "list", "list-fonts", "list-system-processes", "listp", "load", + "load-average", "local-key-binding", "local-variable-if-set-p", + "local-variable-p", "locale-info", "locate-file-internal", + "lock-buffer", "log", "logand", "logb", "logior", "lognot", "logxor", + "looking-at", "lookup-image", "lookup-image-map", "lookup-key", + "lower-frame", "lsh", "macroexpand", "make-bool-vector", + "make-byte-code", "make-category-set", "make-category-table", + "make-char", "make-char-table", "make-directory-internal", + "make-frame-invisible", "make-frame-visible", "make-hash-table", + "make-indirect-buffer", "make-keymap", "make-list", + "make-local-variable", "make-marker", "make-network-process", + "make-overlay", "make-serial-process", "make-sparse-keymap", + "make-string", "make-symbol", "make-symbolic-link", "make-temp-name", + "make-terminal-frame", "make-variable-buffer-local", + "make-variable-frame-local", "make-vector", "makunbound", + "map-char-table", "map-charset-chars", "map-keymap", + "map-keymap-internal", "mapatoms", "mapc", "mapcar", "mapconcat", + "maphash", "mark-marker", "marker-buffer", "marker-insertion-type", + "marker-position", "markerp", "match-beginning", "match-data", + "match-end", "matching-paren", "max", "max-char", "md5", "member", + "memory-info", "memory-limit", "memory-use-counts", "memq", "memql", + "menu-bar-menu-at-x-y", "menu-or-popup-active-p", + "menu-or-popup-active-p", "merge-face-attribute", "message", + "message-box", "message-or-box", "min", + "minibuffer-completion-contents", "minibuffer-contents", + "minibuffer-contents-no-properties", "minibuffer-depth", + "minibuffer-prompt", "minibuffer-prompt-end", + "minibuffer-selected-window", "minibuffer-window", "minibufferp", + "minor-mode-key-binding", "mod", "modify-category-entry", + "modify-frame-parameters", "modify-syntax-entry", + "mouse-pixel-position", "mouse-position", "move-overlay", + "move-point-visually", "move-to-column", "move-to-window-line", + "msdos-downcase-filename", "msdos-long-file-names", "msdos-memget", + "msdos-memput", "msdos-mouse-disable", "msdos-mouse-enable", + "msdos-mouse-init", "msdos-mouse-p", "msdos-remember-default-colors", + "msdos-set-keyboard", "msdos-set-mouse-buttons", + "multibyte-char-to-unibyte", "multibyte-string-p", "narrow-to-region", + "natnump", "nconc", "network-interface-info", + "network-interface-list", "new-fontset", "newline-cache-check", + "next-char-property-change", "next-frame", "next-overlay-change", + "next-property-change", "next-read-file-uses-dialog-p", + "next-single-char-property-change", "next-single-property-change", + "next-window", "nlistp", "nreverse", "nth", "nthcdr", "null", + "number-or-marker-p", "number-to-string", "numberp", + "open-dribble-file", "open-font", "open-termscript", + "optimize-char-table", "other-buffer", "other-window-for-scrolling", + 
"overlay-buffer", "overlay-end", "overlay-get", "overlay-lists", + "overlay-properties", "overlay-put", "overlay-recenter", + "overlay-start", "overlayp", "overlays-at", "overlays-in", + "parse-partial-sexp", "play-sound-internal", "plist-get", + "plist-member", "plist-put", "point", "point-marker", "point-max", + "point-max-marker", "point-min", "point-min-marker", + "pos-visible-in-window-p", "position-bytes", "posix-looking-at", + "posix-search-backward", "posix-search-forward", "posix-string-match", + "posn-at-point", "posn-at-x-y", "preceding-char", + "prefix-numeric-value", "previous-char-property-change", + "previous-frame", "previous-overlay-change", + "previous-property-change", "previous-single-char-property-change", + "previous-single-property-change", "previous-window", "prin1", + "prin1-to-string", "princ", "print", "process-attributes", + "process-buffer", "process-coding-system", "process-command", + "process-connection", "process-contact", "process-datagram-address", + "process-exit-status", "process-filter", "process-filter-multibyte-p", + "process-id", "process-inherit-coding-system-flag", "process-list", + "process-mark", "process-name", "process-plist", + "process-query-on-exit-flag", "process-running-child-p", + "process-send-eof", "process-send-region", "process-send-string", + "process-sentinel", "process-status", "process-tty-name", + "process-type", "processp", "profiler-cpu-log", + "profiler-cpu-running-p", "profiler-cpu-start", "profiler-cpu-stop", + "profiler-memory-log", "profiler-memory-running-p", + "profiler-memory-start", "profiler-memory-stop", "propertize", + "purecopy", "put", "put-text-property", + "put-unicode-property-internal", "puthash", "query-font", + "query-fontset", "quit-process", "raise-frame", "random", "rassoc", + "rassq", "re-search-backward", "re-search-forward", "read", + "read-buffer", "read-char", "read-char-exclusive", + "read-coding-system", "read-command", "read-event", + "read-from-minibuffer", "read-from-string", "read-function", + "read-key-sequence", "read-key-sequence-vector", + "read-no-blanks-input", "read-non-nil-coding-system", "read-string", + "read-variable", "recent-auto-save-p", "recent-doskeys", + "recent-keys", "recenter", "recursion-depth", "recursive-edit", + "redirect-debugging-output", "redirect-frame-focus", "redisplay", + "redraw-display", "redraw-frame", "regexp-quote", "region-beginning", + "region-end", "register-ccl-program", "register-code-conversion-map", + "remhash", "remove-list-of-text-properties", "remove-text-properties", + "rename-buffer", "rename-file", "replace-match", + "reset-this-command-lengths", "resize-mini-window-internal", + "restore-buffer-modified-p", "resume-tty", "reverse", "round", + "run-hook-with-args", "run-hook-with-args-until-failure", + "run-hook-with-args-until-success", "run-hook-wrapped", "run-hooks", + "run-window-configuration-change-hook", "run-window-scroll-functions", + "safe-length", "scan-lists", "scan-sexps", "scroll-down", + "scroll-left", "scroll-other-window", "scroll-right", "scroll-up", + "search-backward", "search-forward", "secure-hash", "select-frame", + "select-window", "selected-frame", "selected-window", + "self-insert-command", "send-string-to-terminal", "sequencep", + "serial-process-configure", "set", "set-buffer", + "set-buffer-auto-saved", "set-buffer-major-mode", + "set-buffer-modified-p", "set-buffer-multibyte", "set-case-table", + "set-category-table", "set-char-table-extra-slot", + "set-char-table-parent", "set-char-table-range", "set-charset-plist", 
+ "set-charset-priority", "set-coding-system-priority", + "set-cursor-size", "set-default", "set-default-file-modes", + "set-default-toplevel-value", "set-file-acl", "set-file-modes", + "set-file-selinux-context", "set-file-times", "set-fontset-font", + "set-frame-height", "set-frame-position", "set-frame-selected-window", + "set-frame-size", "set-frame-width", "set-fringe-bitmap-face", + "set-input-interrupt-mode", "set-input-meta-mode", "set-input-mode", + "set-keyboard-coding-system-internal", "set-keymap-parent", + "set-marker", "set-marker-insertion-type", "set-match-data", + "set-message-beep", "set-minibuffer-window", + "set-mouse-pixel-position", "set-mouse-position", + "set-network-process-option", "set-output-flow-control", + "set-process-buffer", "set-process-coding-system", + "set-process-datagram-address", "set-process-filter", + "set-process-filter-multibyte", + "set-process-inherit-coding-system-flag", "set-process-plist", + "set-process-query-on-exit-flag", "set-process-sentinel", + "set-process-window-size", "set-quit-char", + "set-safe-terminal-coding-system-internal", "set-screen-color", + "set-standard-case-table", "set-syntax-table", + "set-terminal-coding-system-internal", "set-terminal-local-value", + "set-terminal-parameter", "set-text-properties", "set-time-zone-rule", + "set-visited-file-modtime", "set-window-buffer", + "set-window-combination-limit", "set-window-configuration", + "set-window-dedicated-p", "set-window-display-table", + "set-window-fringes", "set-window-hscroll", "set-window-margins", + "set-window-new-normal", "set-window-new-pixel", + "set-window-new-total", "set-window-next-buffers", + "set-window-parameter", "set-window-point", "set-window-prev-buffers", + "set-window-redisplay-end-trigger", "set-window-scroll-bars", + "set-window-start", "set-window-vscroll", "setcar", "setcdr", + "setplist", "show-face-resources", "signal", "signal-process", "sin", + "single-key-description", "skip-chars-backward", "skip-chars-forward", + "skip-syntax-backward", "skip-syntax-forward", "sleep-for", "sort", + "sort-charsets", "special-variable-p", "split-char", + "split-window-internal", "sqrt", "standard-case-table", + "standard-category-table", "standard-syntax-table", "start-kbd-macro", + "start-process", "stop-process", "store-kbd-macro-event", "string", + "string-as-multibyte", "string-as-unibyte", "string-bytes", + "string-collate-equalp", "string-collate-lessp", "string-equal", + "string-lessp", "string-make-multibyte", "string-make-unibyte", + "string-match", "string-to-char", "string-to-multibyte", + "string-to-number", "string-to-syntax", "string-to-unibyte", + "string-width", "stringp", "subr-name", "subrp", + "subst-char-in-region", "substitute-command-keys", + "substitute-in-file-name", "substring", "substring-no-properties", + "suspend-emacs", "suspend-tty", "suspicious-object", "sxhash", + "symbol-function", "symbol-name", "symbol-plist", "symbol-value", + "symbolp", "syntax-table", "syntax-table-p", "system-groups", + "system-move-file-to-trash", "system-name", "system-users", "tan", + "terminal-coding-system", "terminal-list", "terminal-live-p", + "terminal-local-value", "terminal-name", "terminal-parameter", + "terminal-parameters", "terpri", "test-completion", + "text-char-description", "text-properties-at", "text-property-any", + "text-property-not-all", "this-command-keys", + "this-command-keys-vector", "this-single-command-keys", + "this-single-command-raw-keys", "time-add", "time-less-p", + "time-subtract", "tool-bar-get-system-style", 
"tool-bar-height", + "tool-bar-pixel-width", "top-level", "trace-redisplay", + "trace-to-stderr", "translate-region-internal", "transpose-regions", + "truncate", "try-completion", "tty-display-color-cells", + "tty-display-color-p", "tty-no-underline", + "tty-suppress-bold-inverse-default-colors", "tty-top-frame", + "tty-type", "type-of", "undo-boundary", "unencodable-char-position", + "unhandled-file-name-directory", "unibyte-char-to-multibyte", + "unibyte-string", "unicode-property-table-internal", "unify-charset", + "unintern", "unix-sync", "unlock-buffer", "upcase", "upcase-initials", + "upcase-initials-region", "upcase-region", "upcase-word", + "use-global-map", "use-local-map", "user-full-name", + "user-login-name", "user-real-login-name", "user-real-uid", + "user-uid", "variable-binding-locus", "vconcat", "vector", + "vector-or-char-table-p", "vectorp", "verify-visited-file-modtime", + "vertical-motion", "visible-frame-list", "visited-file-modtime", + "w16-get-clipboard-data", "w16-selection-exists-p", + "w16-set-clipboard-data", "w32-battery-status", + "w32-default-color-map", "w32-define-rgb-color", + "w32-display-monitor-attributes-list", "w32-frame-menu-bar-size", + "w32-frame-rect", "w32-get-clipboard-data", + "w32-get-codepage-charset", "w32-get-console-codepage", + "w32-get-console-output-codepage", "w32-get-current-locale-id", + "w32-get-default-locale-id", "w32-get-keyboard-layout", + "w32-get-locale-info", "w32-get-valid-codepages", + "w32-get-valid-keyboard-layouts", "w32-get-valid-locale-ids", + "w32-has-winsock", "w32-long-file-name", "w32-reconstruct-hot-key", + "w32-register-hot-key", "w32-registered-hot-keys", + "w32-selection-exists-p", "w32-send-sys-command", + "w32-set-clipboard-data", "w32-set-console-codepage", + "w32-set-console-output-codepage", "w32-set-current-locale", + "w32-set-keyboard-layout", "w32-set-process-priority", + "w32-shell-execute", "w32-short-file-name", "w32-toggle-lock-key", + "w32-unload-winsock", "w32-unregister-hot-key", "w32-window-exists-p", + "w32notify-add-watch", "w32notify-rm-watch", + "waiting-for-user-input-p", "where-is-internal", "widen", + "widget-apply", "widget-get", "widget-put", + "window-absolute-pixel-edges", "window-at", "window-body-height", + "window-body-width", "window-bottom-divider-width", "window-buffer", + "window-combination-limit", "window-configuration-frame", + "window-configuration-p", "window-dedicated-p", + "window-display-table", "window-edges", "window-end", "window-frame", + "window-fringes", "window-header-line-height", "window-hscroll", + "window-inside-absolute-pixel-edges", "window-inside-edges", + "window-inside-pixel-edges", "window-left-child", + "window-left-column", "window-line-height", "window-list", + "window-list-1", "window-live-p", "window-margins", + "window-minibuffer-p", "window-mode-line-height", "window-new-normal", + "window-new-pixel", "window-new-total", "window-next-buffers", + "window-next-sibling", "window-normal-size", "window-old-point", + "window-parameter", "window-parameters", "window-parent", + "window-pixel-edges", "window-pixel-height", "window-pixel-left", + "window-pixel-top", "window-pixel-width", "window-point", + "window-prev-buffers", "window-prev-sibling", + "window-redisplay-end-trigger", "window-resize-apply", + "window-resize-apply-total", "window-right-divider-width", + "window-scroll-bar-height", "window-scroll-bar-width", + "window-scroll-bars", "window-start", "window-system", + "window-text-height", "window-text-pixel-size", "window-text-width", + 
"window-top-child", "window-top-line", "window-total-height", + "window-total-width", "window-use-time", "window-valid-p", + "window-vscroll", "windowp", "write-char", "write-region", + "x-backspace-delete-keys-p", "x-change-window-property", + "x-change-window-property", "x-close-connection", + "x-close-connection", "x-create-frame", "x-create-frame", + "x-delete-window-property", "x-delete-window-property", + "x-disown-selection-internal", "x-display-backing-store", + "x-display-backing-store", "x-display-color-cells", + "x-display-color-cells", "x-display-grayscale-p", + "x-display-grayscale-p", "x-display-list", "x-display-list", + "x-display-mm-height", "x-display-mm-height", "x-display-mm-width", + "x-display-mm-width", "x-display-monitor-attributes-list", + "x-display-pixel-height", "x-display-pixel-height", + "x-display-pixel-width", "x-display-pixel-width", "x-display-planes", + "x-display-planes", "x-display-save-under", "x-display-save-under", + "x-display-screens", "x-display-screens", "x-display-visual-class", + "x-display-visual-class", "x-family-fonts", "x-file-dialog", + "x-file-dialog", "x-file-dialog", "x-focus-frame", "x-frame-geometry", + "x-frame-geometry", "x-get-atom-name", "x-get-resource", + "x-get-selection-internal", "x-hide-tip", "x-hide-tip", + "x-list-fonts", "x-load-color-file", "x-menu-bar-open-internal", + "x-menu-bar-open-internal", "x-open-connection", "x-open-connection", + "x-own-selection-internal", "x-parse-geometry", "x-popup-dialog", + "x-popup-menu", "x-register-dnd-atom", "x-select-font", + "x-select-font", "x-selection-exists-p", "x-selection-owner-p", + "x-send-client-message", "x-server-max-request-size", + "x-server-max-request-size", "x-server-vendor", "x-server-vendor", + "x-server-version", "x-server-version", "x-show-tip", "x-show-tip", + "x-synchronize", "x-synchronize", "x-uses-old-gtk-dialog", + "x-window-property", "x-window-property", "x-wm-set-size-hint", + "xw-color-defined-p", "xw-color-defined-p", "xw-color-values", + "xw-color-values", "xw-display-color-p", "xw-display-color-p", + "yes-or-no-p", "zlib-available-p", "zlib-decompress-region", + "forward-point", + } + + emacsBuiltinFunctionHighlighted = []string{ + "defvaralias", "provide", "require", + "with-no-warnings", "define-widget", "with-electric-help", + "throw", "defalias", "featurep", + } + + emacsLambdaListKeywords = []string{ + "&allow-other-keys", "&aux", "&body", "&environment", "&key", "&optional", + "&rest", "&whole", + } + + emacsErrorKeywords = []string{ + "cl-assert", "cl-check-type", "error", "signal", + "user-error", "warn", + } +) + +// EmacsLisp lexer. 
+var EmacsLisp = internal.Register(TypeRemappingLexer(MustNewLexer( + &Config{ + Name: "EmacsLisp", + Aliases: []string{"emacs", "elisp", "emacs-lisp"}, + Filenames: []string{"*.el"}, + MimeTypes: []string{"text/x-elisp", "application/x-elisp"}, + }, + Rules{ + "root": { + Default(Push("body")), + }, + "body": { + {`\s+`, Text, nil}, + {`;.*$`, CommentSingle, nil}, + {`"`, LiteralString, Push("string")}, + {`\?([^\\]|\\.)`, LiteralStringChar, nil}, + {`:((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, NameBuiltin, nil}, + {`::((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, LiteralStringSymbol, nil}, + {`'((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, LiteralStringSymbol, nil}, + {`'`, Operator, nil}, + {"`", Operator, nil}, + {"[-+]?\\d+\\.?(?=[ \"()\\]\\'\\n,;`])", LiteralNumberInteger, nil}, + {"[-+]?\\d+/\\d+(?=[ \"()\\]\\'\\n,;`])", LiteralNumber, nil}, + {"[-+]?(\\d*\\.\\d+([defls][-+]?\\d+)?|\\d+(\\.\\d*)?[defls][-+]?\\d+)(?=[ \"()\\]\\'\\n,;`])", LiteralNumberFloat, nil}, + {`\[|\]`, Punctuation, nil}, + {`#:((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, LiteralStringSymbol, nil}, + {`#\^\^?`, Operator, nil}, + {`#\'`, NameFunction, nil}, + {`#[bB][+-]?[01]+(/[01]+)?`, LiteralNumberBin, nil}, + {`#[oO][+-]?[0-7]+(/[0-7]+)?`, LiteralNumberOct, nil}, + {`#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?`, LiteralNumberHex, nil}, + {`#\d+r[+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?`, LiteralNumber, nil}, + {`#\d+=`, Operator, nil}, + {`#\d+#`, Operator, nil}, + {`(,@|,|\.|:)`, Operator, nil}, + {"(t|nil)(?=[ \"()\\]\\'\\n,;`])", NameConstant, nil}, + {`\*((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)\*`, NameVariableGlobal, nil}, + {`((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)`, NameVariable, nil}, + {`#\(`, Operator, Push("body")}, + {`\(`, Punctuation, Push("body")}, + {`\)`, Punctuation, Pop(1)}, + }, + "string": { + {"[^\"\\\\`]+", LiteralString, nil}, + {"`((?:\\\\.|[\\w!$%&*+-/<=>?@^{}~|])(?:\\\\.|[\\w!$%&*+-/<=>?@^{}~|]|[#.:])*)\\'", LiteralStringSymbol, nil}, + {"`", LiteralString, nil}, + {`\\.`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`"`, LiteralString, Pop(1)}, + }, + }, +), TypeMapping{ + {NameVariable, NameFunction, emacsBuiltinFunction}, + {NameVariable, NameBuiltin, emacsSpecialForms}, + {NameVariable, NameException, emacsErrorKeywords}, + {NameVariable, NameBuiltin, append(emacsBuiltinFunctionHighlighted, emacsMacros...)}, + {NameVariable, KeywordPseudo, emacsLambdaListKeywords}, +})) diff --git a/vendor/github.com/alecthomas/chroma/lexers/e/erlang.go b/vendor/github.com/alecthomas/chroma/lexers/e/erlang.go new file mode 100644 index 00000000..63cd59a4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/e/erlang.go @@ -0,0 +1,66 @@ +package e + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Erlang lexer. 
+var Erlang = internal.Register(MustNewLexer( + &Config{ + Name: "Erlang", + Aliases: []string{"erlang"}, + Filenames: []string{"*.erl", "*.hrl", "*.es", "*.escript"}, + MimeTypes: []string{"text/x-erlang"}, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`%.*\n`, Comment, nil}, + {Words(``, `\b`, `after`, `begin`, `case`, `catch`, `cond`, `end`, `fun`, `if`, `let`, `of`, `query`, `receive`, `try`, `when`), Keyword, nil}, + {Words(``, `\b`, `abs`, `append_element`, `apply`, `atom_to_list`, `binary_to_list`, `bitstring_to_list`, `binary_to_term`, `bit_size`, `bump_reductions`, `byte_size`, `cancel_timer`, `check_process_code`, `delete_module`, `demonitor`, `disconnect_node`, `display`, `element`, `erase`, `exit`, `float`, `float_to_list`, `fun_info`, `fun_to_list`, `function_exported`, `garbage_collect`, `get`, `get_keys`, `group_leader`, `hash`, `hd`, `integer_to_list`, `iolist_to_binary`, `iolist_size`, `is_atom`, `is_binary`, `is_bitstring`, `is_boolean`, `is_builtin`, `is_float`, `is_function`, `is_integer`, `is_list`, `is_number`, `is_pid`, `is_port`, `is_process_alive`, `is_record`, `is_reference`, `is_tuple`, `length`, `link`, `list_to_atom`, `list_to_binary`, `list_to_bitstring`, `list_to_existing_atom`, `list_to_float`, `list_to_integer`, `list_to_pid`, `list_to_tuple`, `load_module`, `localtime_to_universaltime`, `make_tuple`, `md5`, `md5_final`, `md5_update`, `memory`, `module_loaded`, `monitor`, `monitor_node`, `node`, `nodes`, `open_port`, `phash`, `phash2`, `pid_to_list`, `port_close`, `port_command`, `port_connect`, `port_control`, `port_call`, `port_info`, `port_to_list`, `process_display`, `process_flag`, `process_info`, `purge_module`, `put`, `read_timer`, `ref_to_list`, `register`, `resume_process`, `round`, `send`, `send_after`, `send_nosuspend`, `set_cookie`, `setelement`, `size`, `spawn`, `spawn_link`, `spawn_monitor`, `spawn_opt`, `split_binary`, `start_timer`, `statistics`, `suspend_process`, `system_flag`, `system_info`, `system_monitor`, `system_profile`, `term_to_binary`, `tl`, `trace`, `trace_delivered`, `trace_info`, `trace_pattern`, `trunc`, `tuple_size`, `tuple_to_list`, `universaltime_to_localtime`, `unlink`, `unregister`, `whereis`), NameBuiltin, nil}, + {Words(``, `\b`, `and`, `andalso`, `band`, `bnot`, `bor`, `bsl`, `bsr`, `bxor`, `div`, `not`, `or`, `orelse`, `rem`, `xor`), OperatorWord, nil}, + {`^-`, Punctuation, Push("directive")}, + {`(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)`, Operator, nil}, + {`"`, LiteralString, Push("string")}, + {`<<`, NameLabel, nil}, + {`>>`, NameLabel, nil}, + {`((?:[a-z]\w*|'[^\n']*[^\\]'))(:)`, ByGroups(NameNamespace, Punctuation), nil}, + {`(?:^|(?<=:))((?:[a-z]\w*|'[^\n']*[^\\]'))(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, + {`[+-]?(?:[2-9]|[12][0-9]|3[0-6])#[0-9a-zA-Z]+`, LiteralNumberInteger, nil}, + {`[+-]?\d+`, LiteralNumberInteger, nil}, + {`[+-]?\d+.\d+`, LiteralNumberFloat, nil}, + {`[]\[:_@\".{}()|;,]`, Punctuation, nil}, + {`(?:[A-Z_]\w*)`, NameVariable, nil}, + {`(?:[a-z]\w*|'[^\n']*[^\\]')`, Name, nil}, + {`\?(?:(?:[A-Z_]\w*)|(?:[a-z]\w*|'[^\n']*[^\\]'))`, NameConstant, nil}, + {`\$(?:(?:\\(?:[bdefnrstv\'"\\]|[0-7][0-7]?[0-7]?|(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})|\^[a-zA-Z]))|\\[ %]|[^\\])`, LiteralStringChar, nil}, + {`#(?:[a-z]\w*|'[^\n']*[^\\]')(:?\.(?:[a-z]\w*|'[^\n']*[^\\]'))?`, NameLabel, nil}, + {`\A#!.+\n`, CommentHashbang, nil}, + {`#\{`, Punctuation, Push("map_key")}, + }, + "string": { + 
{`(?:\\(?:[bdefnrstv\'"\\]|[0-7][0-7]?[0-7]?|(?:x[0-9a-fA-F]{2}|x\{[0-9a-fA-F]+\})|\^[a-zA-Z]))`, LiteralStringEscape, nil}, + {`"`, LiteralString, Pop(1)}, + {`~[0-9.*]*[~#+BPWXb-ginpswx]`, LiteralStringInterpol, nil}, + {`[^"\\~]+`, LiteralString, nil}, + {`~`, LiteralString, nil}, + }, + "directive": { + {`(define)(\s*)(\()((?:(?:[A-Z_]\w*)|(?:[a-z]\w*|'[^\n']*[^\\]')))`, ByGroups(NameEntity, Text, Punctuation, NameConstant), Pop(1)}, + {`(record)(\s*)(\()((?:(?:[A-Z_]\w*)|(?:[a-z]\w*|'[^\n']*[^\\]')))`, ByGroups(NameEntity, Text, Punctuation, NameLabel), Pop(1)}, + {`(?:[a-z]\w*|'[^\n']*[^\\]')`, NameEntity, Pop(1)}, + }, + "map_key": { + Include("root"), + {`=>`, Punctuation, Push("map_val")}, + {`:=`, Punctuation, Push("map_val")}, + {`\}`, Punctuation, Pop(1)}, + }, + "map_val": { + Include("root"), + {`,`, Punctuation, Pop(1)}, + {`(?=\})`, Punctuation, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/factor.go b/vendor/github.com/alecthomas/chroma/lexers/f/factor.go new file mode 100644 index 00000000..26c0d562 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/f/factor.go @@ -0,0 +1,115 @@ +package f + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Factor lexer. +var Factor = internal.Register(MustNewLexer( + &Config{ + Name: "Factor", + Aliases: []string{"factor"}, + Filenames: []string{"*.factor"}, + MimeTypes: []string{"text/x-factor"}, + }, + Rules{ + "root": { + {`#!.*$`, CommentPreproc, nil}, + Default(Push("base")), + }, + "base": { + {`\s+`, Text, nil}, + {`((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction), nil}, + {`(M:[:]?)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass, Text, NameFunction), nil}, + {`(C:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction, Text, NameClass), nil}, + {`(GENERIC:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction), nil}, + {`(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction, Text, NameFunction), nil}, + {`\(\s`, NameFunction, Push("stackeffect")}, + {`;\s`, Keyword, nil}, + {`(USING:)(\s+)`, ByGroups(KeywordNamespace, Text), Push("vocabs")}, + {`(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameNamespace), nil}, + {`(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameNamespace, Text, NameNamespace), nil}, + {`(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)`, ByGroups(KeywordNamespace, Text, NameNamespace, Text), Push("words")}, + {`(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+=>\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameFunction, Text, NameNamespace, Text, NameFunction), nil}, + {`(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameFunction, Text, NameFunction), nil}, + {`(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)`, ByGroups(KeywordNamespace, Text, NameFunction), nil}, + {`(TUPLE:|ERROR:)(\s+)(\S+)(\s+<\s+)(\S+)`, ByGroups(Keyword, Text, NameClass, Text, NameClass), Push("slots")}, + {`(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass), Push("slots")}, + {`(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass), nil}, + {`(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)`, ByGroups(Keyword, Text, NameClass, Text, NameClass), nil}, + {`(C:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction, Text, NameClass), nil}, + {`(INSTANCE:)(\s+)(\S+)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass, Text, NameClass), nil}, + {`(SLOT:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction), 
nil}, + {`(SINGLETON:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass), nil}, + {`SINGLETONS:`, Keyword, Push("classes")}, + {`(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameFunction), nil}, + {`SYMBOLS:\s`, Keyword, Push("words")}, + {`SYNTAX:\s`, Keyword, nil}, + {`ALIEN:\s`, Keyword, nil}, + {`(STRUCT:)(\s+)(\S+)`, ByGroups(Keyword, Text, NameClass), nil}, + {`(FUNCTION:)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)`, ByGroups(KeywordNamespace, Text, NameFunction, Text), nil}, + {`(FUNCTION-ALIAS:)(\s+)(\S+)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)`, ByGroups(KeywordNamespace, Text, NameFunction, Text, NameFunction, Text), nil}, + {`(?:)\s`, KeywordNamespace, nil}, + {`"""\s+(?:.|\n)*?\s+"""`, LiteralString, nil}, + {`"(?:\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`\S+"\s+(?:\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`CHAR:\s+(?:\\[\\abfnrstv]|[^\\]\S*)\s`, LiteralStringChar, nil}, + {`!\s+.*$`, Comment, nil}, + {`#!\s+.*$`, Comment, nil}, + {`/\*\s+(?:.|\n)*?\s\*/\s`, Comment, nil}, + {`[tf]\s`, NameConstant, nil}, + {`[\\$]\s+\S+`, NameConstant, nil}, + {`M\\\s+\S+\s+\S+`, NameConstant, nil}, + {`[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s`, LiteralNumber, nil}, + {`[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s`, LiteralNumber, nil}, + {`0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s`, LiteralNumber, nil}, + {`NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s`, LiteralNumber, nil}, + {`0b[01]+\s`, LiteralNumberBin, nil}, + {`0o[0-7]+\s`, LiteralNumberOct, nil}, + {`(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s`, LiteralNumber, nil}, + {`(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s`, LiteralNumber, nil}, + {`(?:deprecated|final|foldable|flushable|inline|recursive)\s`, Keyword, nil}, + {Words(``, `\s`, `-rot`, `2bi`, `2bi@`, `2bi*`, `2curry`, `2dip`, `2drop`, `2dup`, `2keep`, `2nip`, `2over`, `2tri`, `2tri@`, `2tri*`, `3bi`, `3curry`, `3dip`, `3drop`, `3dup`, `3keep`, `3tri`, `4dip`, `4drop`, `4dup`, `4keep`, ``, `=`, `>boolean`, `clone`, `?`, `?execute`, `?if`, `and`, `assert`, `assert=`, `assert?`, `bi`, `bi-curry`, `bi-curry@`, `bi-curry*`, `bi@`, `bi*`, `boa`, `boolean`, `boolean?`, `both?`, `build`, `call`, `callstack`, `callstack>array`, `callstack?`, `clear`, `(clone)`, `compose`, `compose?`, `curry`, `curry?`, `datastack`, `die`, `dip`, `do`, `drop`, `dup`, `dupd`, `either?`, `eq?`, `equal?`, `execute`, `hashcode`, `hashcode*`, `identity-hashcode`, `identity-tuple`, `identity-tuple?`, `if`, `if*`, `keep`, `loop`, `most`, `new`, `nip`, `not`, `null`, `object`, `or`, `over`, `pick`, `prepose`, `retainstack`, `rot`, `same?`, `swap`, `swapd`, `throw`, `tri`, `tri-curry`, `tri-curry@`, `tri-curry*`, `tri@`, `tri*`, `tuple`, `tuple?`, `unless`, `unless*`, `until`, `when`, `when*`, `while`, `with`, `wrapper`, `wrapper?`, `xor`), NameBuiltin, nil}, + {Words(``, `\s`, `2cache`, ``, `>alist`, `?at`, `?of`, `assoc`, `assoc-all?`, `assoc-any?`, `assoc-clone-like`, `assoc-combine`, `assoc-diff`, `assoc-diff!`, `assoc-differ`, `assoc-each`, `assoc-empty?`, `assoc-filter`, `assoc-filter!`, `assoc-filter-as`, `assoc-find`, `assoc-hashcode`, `assoc-intersect`, `assoc-like`, `assoc-map`, `assoc-map-as`, `assoc-partition`, `assoc-refine`, `assoc-size`, `assoc-stack`, `assoc-subset?`, `assoc-union`, `assoc-union!`, `assoc=`, `assoc>map`, `assoc?`, `at`, `at+`, `at*`, `cache`, `change-at`, `clear-assoc`, `delete-at`, `delete-at*`, `enum`, `enum?`, `extract-keys`, `inc-at`, `key?`, `keys`, `map>assoc`, `maybe-set-at`, `new-assoc`, `of`, 
`push-at`, `rename-at`, `set-at`, `sift-keys`, `sift-values`, `substitute`, `unzip`, `value-at`, `value-at*`, `value?`, `values`, `zip`), NameBuiltin, nil}, + {Words(``, `\s`, `2cleave`, `2cleave>quot`, `3cleave`, `3cleave>quot`, `4cleave`, `4cleave>quot`, `alist>quot`, `call-effect`, `case`, `case-find`, `case>quot`, `cleave`, `cleave>quot`, `cond`, `cond>quot`, `deep-spread>quot`, `execute-effect`, `linear-case-quot`, `no-case`, `no-case?`, `no-cond`, `no-cond?`, `recursive-hashcode`, `shallow-spread>quot`, `spread`, `to-fixed-point`, `wrong-values`, `wrong-values?`), NameBuiltin, nil}, + {Words(``, `\s`, `-`, `/`, `/f`, `/i`, `/mod`, `2/`, `2^`, `<`, `<=`, ``, `>`, `>=`, `>bignum`, `>fixnum`, `>float`, `>integer`, `(all-integers?)`, `(each-integer)`, `(find-integer)`, `*`, `+`, `?1+`, `abs`, `align`, `all-integers?`, `bignum`, `bignum?`, `bit?`, `bitand`, `bitnot`, `bitor`, `bits>double`, `bits>float`, `bitxor`, `complex`, `complex?`, `denominator`, `double>bits`, `each-integer`, `even?`, `find-integer`, `find-last-integer`, `fixnum`, `fixnum?`, `float`, `float>bits`, `float?`, `fp-bitwise=`, `fp-infinity?`, `fp-nan-payload`, `fp-nan?`, `fp-qnan?`, `fp-sign`, `fp-snan?`, `fp-special?`, `if-zero`, `imaginary-part`, `integer`, `integer>fixnum`, `integer>fixnum-strict`, `integer?`, `log2`, `log2-expects-positive`, `log2-expects-positive?`, `mod`, `neg`, `neg?`, `next-float`, `next-power-of-2`, `number`, `number=`, `number?`, `numerator`, `odd?`, `out-of-fixnum-range`, `out-of-fixnum-range?`, `power-of-2?`, `prev-float`, `ratio`, `ratio?`, `rational`, `rational?`, `real`, `real-part`, `real?`, `recip`, `rem`, `sgn`, `shift`, `sq`, `times`, `u<`, `u<=`, `u>`, `u>=`, `unless-zero`, `unordered?`, `when-zero`, `zero?`), NameBuiltin, nil}, + {Words(``, `\s`, `1sequence`, `2all?`, `2each`, `2map`, `2map-as`, `2map-reduce`, `2reduce`, `2selector`, `2sequence`, `3append`, `3append-as`, `3each`, `3map`, `3map-as`, `3sequence`, `4sequence`, ``, ``, ``, `?first`, `?last`, `?nth`, `?second`, `?set-nth`, `accumulate`, `accumulate!`, `accumulate-as`, `all?`, `any?`, `append`, `append!`, `append-as`, `assert-sequence`, `assert-sequence=`, `assert-sequence?`, `binary-reduce`, `bounds-check`, `bounds-check?`, `bounds-error`, `bounds-error?`, `but-last`, `but-last-slice`, `cartesian-each`, `cartesian-map`, `cartesian-product`, `change-nth`, `check-slice`, `check-slice-error`, `clone-like`, `collapse-slice`, `collector`, `collector-for`, `concat`, `concat-as`, `copy`, `count`, `cut`, `cut-slice`, `cut*`, `delete-all`, `delete-slice`, `drop-prefix`, `each`, `each-from`, `each-index`, `empty?`, `exchange`, `filter`, `filter!`, `filter-as`, `find`, `find-from`, `find-index`, `find-index-from`, `find-last`, `find-last-from`, `first`, `first2`, `first3`, `first4`, `flip`, `follow`, `fourth`, `glue`, `halves`, `harvest`, `head`, `head-slice`, `head-slice*`, `head*`, `head?`, `if-empty`, `immutable`, `immutable-sequence`, `immutable-sequence?`, `immutable?`, `index`, `index-from`, `indices`, `infimum`, `infimum-by`, `insert-nth`, `interleave`, `iota`, `iota-tuple`, `iota-tuple?`, `join`, `join-as`, `last`, `last-index`, `last-index-from`, `length`, `lengthen`, `like`, `longer`, `longer?`, `longest`, `map`, `map!`, `map-as`, `map-find`, `map-find-last`, `map-index`, `map-integers`, `map-reduce`, `map-sum`, `max-length`, `member-eq?`, `member?`, `midpoint@`, `min-length`, `mismatch`, `move`, `new-like`, `new-resizable`, `new-sequence`, `non-negative-integer-expected`, `non-negative-integer-expected?`, `nth`, `nths`, 
`pad-head`, `pad-tail`, `padding`, `partition`, `pop`, `pop*`, `prefix`, `prepend`, `prepend-as`, `produce`, `produce-as`, `product`, `push`, `push-all`, `push-either`, `push-if`, `reduce`, `reduce-index`, `remove`, `remove!`, `remove-eq`, `remove-eq!`, `remove-nth`, `remove-nth!`, `repetition`, `repetition?`, `replace-slice`, `replicate`, `replicate-as`, `rest`, `rest-slice`, `reverse`, `reverse!`, `reversed`, `reversed?`, `second`, `selector`, `selector-for`, `sequence`, `sequence-hashcode`, `sequence=`, `sequence?`, `set-first`, `set-fourth`, `set-last`, `set-length`, `set-nth`, `set-second`, `set-third`, `short`, `shorten`, `shorter`, `shorter?`, `shortest`, `sift`, `slice`, `slice-error`, `slice-error?`, `slice?`, `snip`, `snip-slice`, `start`, `start*`, `subseq`, `subseq?`, `suffix`, `suffix!`, `sum`, `sum-lengths`, `supremum`, `supremum-by`, `surround`, `tail`, `tail-slice`, `tail-slice*`, `tail*`, `tail?`, `third`, `trim`, `trim-head`, `trim-head-slice`, `trim-slice`, `trim-tail`, `trim-tail-slice`, `unclip`, `unclip-last`, `unclip-last-slice`, `unclip-slice`, `unless-empty`, `virtual-exemplar`, `virtual-sequence`, `virtual-sequence?`, `virtual@`, `when-empty`), NameBuiltin, nil}, + {Words(``, `\s`, `+@`, `change`, `change-global`, `counter`, `dec`, `get`, `get-global`, `global`, `inc`, `init-namespaces`, `initialize`, `is-global`, `make-assoc`, `namespace`, `namestack`, `off`, `on`, `set`, `set-global`, `set-namestack`, `toggle`, `with-global`, `with-scope`, `with-variable`, `with-variables`), NameBuiltin, nil}, + {Words(``, `\s`, `1array`, `2array`, `3array`, `4array`, ``, `>array`, `array`, `array?`, `pair`, `pair?`, `resize-array`), NameBuiltin, nil}, + {Words(``, `\s`, `(each-stream-block-slice)`, `(each-stream-block)`, `(stream-contents-by-block)`, `(stream-contents-by-element)`, `(stream-contents-by-length-or-block)`, `(stream-contents-by-length)`, `+byte+`, `+character+`, `bad-seek-type`, `bad-seek-type?`, `bl`, `contents`, `each-block`, `each-block-size`, `each-block-slice`, `each-line`, `each-morsel`, `each-stream-block`, `each-stream-block-slice`, `each-stream-line`, `error-stream`, `flush`, `input-stream`, `input-stream?`, `invalid-read-buffer`, `invalid-read-buffer?`, `lines`, `nl`, `output-stream`, `output-stream?`, `print`, `read`, `read-into`, `read-partial`, `read-partial-into`, `read-until`, `read1`, `readln`, `seek-absolute`, `seek-absolute?`, `seek-end`, `seek-end?`, `seek-input`, `seek-output`, `seek-relative`, `seek-relative?`, `stream-bl`, `stream-contents`, `stream-contents*`, `stream-copy`, `stream-copy*`, `stream-element-type`, `stream-flush`, `stream-length`, `stream-lines`, `stream-nl`, `stream-print`, `stream-read`, `stream-read-into`, `stream-read-partial`, `stream-read-partial-into`, `stream-read-partial-unsafe`, `stream-read-unsafe`, `stream-read-until`, `stream-read1`, `stream-readln`, `stream-seek`, `stream-seekable?`, `stream-tell`, `stream-write`, `stream-write1`, `tell-input`, `tell-output`, `with-error-stream`, `with-error-stream*`, `with-error>output`, `with-input-output+error-streams`, `with-input-output+error-streams*`, `with-input-stream`, `with-input-stream*`, `with-output-stream`, `with-output-stream*`, `with-output>error`, `with-output+error-stream`, `with-output+error-stream*`, `with-streams`, `with-streams*`, `write`, `write1`), NameBuiltin, nil}, + {Words(``, `\s`, `1string`, ``, `>string`, `resize-string`, `string`, `string?`), NameBuiltin, nil}, + {Words(``, `\s`, `1vector`, ``, `>vector`, `?push`, `vector`, `vector?`), NameBuiltin, 
nil}, + {Words(``, `\s`, ``, ``, ``, `attempt-all`, `attempt-all-error`, `attempt-all-error?`, `callback-error-hook`, `callcc0`, `callcc1`, `cleanup`, `compute-restarts`, `condition`, `condition?`, `continuation`, `continuation?`, `continue`, `continue-restart`, `continue-with`, `current-continuation`, `error`, `error-continuation`, `error-in-thread`, `error-thread`, `ifcc`, `ignore-errors`, `in-callback?`, `original-error`, `recover`, `restart`, `restart?`, `restarts`, `rethrow`, `rethrow-restarts`, `return`, `return-continuation`, `thread-error-hook`, `throw-continue`, `throw-restarts`, `with-datastack`, `with-return`), NameBuiltin, nil}, + {`\S+`, Text, nil}, + }, + "stackeffect": { + {`\s+`, Text, nil}, + {`\(\s+`, NameFunction, Push("stackeffect")}, + {`\)\s`, NameFunction, Pop(1)}, + {`--\s`, NameFunction, nil}, + {`\S+`, NameVariable, nil}, + }, + "slots": { + {`\s+`, Text, nil}, + {`;\s`, Keyword, Pop(1)}, + {`(\{\s+)(\S+)(\s+[^}]+\s+\}\s)`, ByGroups(Text, NameVariable, Text), nil}, + {`\S+`, NameVariable, nil}, + }, + "vocabs": { + {`\s+`, Text, nil}, + {`;\s`, Keyword, Pop(1)}, + {`\S+`, NameNamespace, nil}, + }, + "classes": { + {`\s+`, Text, nil}, + {`;\s`, Keyword, Pop(1)}, + {`\S+`, NameClass, nil}, + }, + "words": { + {`\s+`, Text, nil}, + {`;\s`, Keyword, Pop(1)}, + {`\S+`, NameFunction, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/fish.go b/vendor/github.com/alecthomas/chroma/lexers/f/fish.go new file mode 100644 index 00000000..185fc92b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/f/fish.go @@ -0,0 +1,65 @@ +package f + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Fish lexer. +var Fish = internal.Register(MustNewLexer( + &Config{ + Name: "Fish", + Aliases: []string{"fish", "fishshell"}, + Filenames: []string{"*.fish", "*.load"}, + MimeTypes: []string{"application/x-fish"}, + }, + Rules{ + "root": { + Include("basic"), + Include("data"), + Include("interp"), + }, + "interp": { + {`\$\(\(`, Keyword, Push("math")}, + {`\(`, Keyword, Push("paren")}, + {`\$#?(\w+|.)`, NameVariable, nil}, + }, + "basic": { + {`\b(begin|end|if|else|while|break|for|in|return|function|block|case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|cd|count|test)(\s*)\b`, ByGroups(Keyword, Text), nil}, + {`\b(alias|bg|bind|breakpoint|builtin|command|commandline|complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|fish_indent|fish_pager|fish_prompt|fish_right_prompt|fish_update_completions|fishd|funced|funcsave|functions|help|history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|pushd|random|read|set_color|source|status|trap|type|ulimit|umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)`, NameBuiltin, nil}, + {`#.*\n`, Comment, nil}, + {`\\[\w\W]`, LiteralStringEscape, nil}, + {`(\b\w+)(\s*)(=)`, ByGroups(NameVariable, Text, Operator), nil}, + {`[\[\]()=]`, Operator, nil}, + {`<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2`, LiteralString, nil}, + }, + "data": { + {`(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'`, LiteralStringSingle, nil}, + {`(?s)'.*?'`, LiteralStringSingle, nil}, + {`;`, Punctuation, nil}, + {`&|\||\^|<|>`, Operator, nil}, + {`\s+`, Text, nil}, + {`\d+(?= |\Z)`, LiteralNumber, nil}, + {"[^=\\s\\[\\]{}()$\"\\'`\\\\<&|;]+", Text, nil}, + }, + "string": { + {`"`, LiteralStringDouble, Pop(1)}, + {`(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+`, 
LiteralStringDouble, nil}, + Include("interp"), + }, + "paren": { + {`\)`, Keyword, Pop(1)}, + Include("root"), + }, + "math": { + {`\)\)`, Keyword, Pop(1)}, + {`[-+*/%^|&]|\*\*|\|\|`, Operator, nil}, + {`\d+#\d+`, LiteralNumber, nil}, + {`\d+#(?! )`, LiteralNumber, nil}, + {`\d+`, LiteralNumber, nil}, + Include("root"), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/forth.go b/vendor/github.com/alecthomas/chroma/lexers/f/forth.go new file mode 100644 index 00000000..3fb3aa78 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/f/forth.go @@ -0,0 +1,40 @@ +package f + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Forth lexer. +var Forth = internal.Register(MustNewLexer( + &Config{ + Name: "Forth", + Aliases: []string{"forth"}, + Filenames: []string{"*.frt", "*.fs"}, + MimeTypes: []string{"application/x-forth"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`\\.*?\n`, CommentSingle, nil}, + {`\([\s].*?\)`, CommentSingle, nil}, + {`(:|variable|constant|value|buffer:)(\s+)`, ByGroups(KeywordNamespace, Text), Push("worddef")}, + {`([.sc]")(\s+?)`, ByGroups(LiteralString, Text), Push("stringdef")}, + {`(blk|block|buffer|evaluate|flush|load|save-buffers|update|empty-buffers|list|refill|scr|thru|\#s|\*\/mod|\+loop|\/mod|0<|0=|1\+|1-|2!|2\*|2\/|2@|2drop|2dup|2over|2swap|>body|>in|>number|>r|\?dup|abort|abort\"|abs|accept|align|aligned|allot|and|base|begin|bl|c!|c,|c@|cell\+|cells|char|char\+|chars|constant|count|cr|create|decimal|depth|do|does>|drop|dup|else|emit|environment\?|evaluate|execute|exit|fill|find|fm\/mod|here|hold|i|if|immediate|invert|j|key|leave|literal|loop|lshift|m\*|max|min|mod|move|negate|or|over|postpone|quit|r>|r@|recurse|repeat|rot|rshift|s\"|s>d|sign|sm\/rem|source|space|spaces|state|swap|then|type|u\.|u\<|um\*|um\/mod|unloop|until|variable|while|word|xor|\[char\]|\[\'\]|@|!|\#|<\#|\#>|:|;|\+|-|\*|\/|,|<|>|\|1\+|1-|\.|\.r|0<>|0>|2>r|2r>|2r@|:noname|\?do|again|c\"|case|compile,|endcase|endof|erase|false|hex|marker|nip|of|pad|parse|pick|refill|restore-input|roll|save-input|source-id|to|true|tuck|u\.r|u>|unused|value|within|\[compile\]|\#tib|convert|expect|query|span|tib|2constant|2literal|2variable|d\+|d-|d\.|d\.r|d0<|d0=|d2\*|d2\/|d<|d=|d>s|dabs|dmax|dmin|dnegate|m\*\/|m\+|2rot|du<|catch|throw|abort|abort\"|at-xy|key\?|page|ekey|ekey>char|ekey\?|emit\?|ms|time&date|BIN|CLOSE-FILE|CREATE-FILE|DELETE-FILE|FILE-POSITION|FILE-SIZE|INCLUDE-FILE|INCLUDED|OPEN-FILE|R\/O|R\/W|READ-FILE|READ-LINE|REPOSITION-FILE|RESIZE-FILE|S\"|SOURCE-ID|W/O|WRITE-FILE|WRITE-LINE|FILE-STATUS|FLUSH-FILE|REFILL|RENAME-FILE|>float|d>f|f!|f\*|f\+|f-|f\/|f0<|f0=|f<|f>d|f@|falign|faligned|fconstant|fdepth|fdrop|fdup|fliteral|float\+|floats|floor|fmax|fmin|fnegate|fover|frot|fround|fswap|fvariable|represent|df!|df@|dfalign|dfaligned|dfloat\+|dfloats|f\*\*|f\.|fabs|facos|facosh|falog|fasin|fasinh|fatan|fatan2|fatanh|fcos|fcosh|fe\.|fexp|fexpm1|fln|flnp1|flog|fs\.|fsin|fsincos|fsinh|fsqrt|ftan|ftanh|f~|precision|set-precision|sf!|sf@|sfalign|sfaligned|sfloat\+|sfloats|\(local\)|to|locals\||allocate|free|resize|definitions|find|forth-wordlist|get-current|get-order|search-wordlist|set-current|set-order|wordlist|also|forth|only|order|previous|-trailing|\/string|blank|cmove|cmove>|compare|search|sliteral|.s|dump|see|words|;code|ahead|assembler|bye|code|cs-pick|cs-roll|editor|state|\[else\]|\[if\]|\[then\]|forget|defer|defer@|defer!|action-of|begin-structure|field:|buffer:|parse-name|buf
fer:|traverse-wordlist|n>r|nr>|2value|fvalue|name>interpret|name>compile|name>string|cfield:|end-structure)\s`, Keyword, nil}, + {`(\$[0-9A-F]+)`, LiteralNumberHex, nil}, + {`(\#|%|&|\-|\+)?[0-9]+`, LiteralNumberInteger, nil}, + {`(\#|%|&|\-|\+)?[0-9.]+`, KeywordType, nil}, + {`(@i|!i|@e|!e|pause|noop|turnkey|sleep|itype|icompare|sp@|sp!|rp@|rp!|up@|up!|>a|a>|a@|a!|a@+|a@-|>b|b>|b@|b!|b@+|b@-|find-name|1ms|sp0|rp0|\(evaluate\)|int-trap|int!)\s`, NameConstant, nil}, + {`(do-recognizer|r:fail|recognizer:|get-recognizers|set-recognizers|r:float|r>comp|r>int|r>post|r:name|r:word|r:dnum|r:num|recognizer|forth-recognizer|rec:num|rec:float|rec:word)\s`, NameDecorator, nil}, + {`(Evalue|Rvalue|Uvalue|Edefer|Rdefer|Udefer)(\s+)`, ByGroups(KeywordNamespace, Text), Push("worddef")}, + {`[^\s]+(?=[\s])`, NameFunction, nil}, + }, + "worddef": { + {`\S+`, NameClass, Pop(1)}, + }, + "stringdef": { + {`[^"]+`, LiteralString, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/fortran.go b/vendor/github.com/alecthomas/chroma/lexers/f/fortran.go new file mode 100644 index 00000000..6c57afa6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/f/fortran.go @@ -0,0 +1,47 @@ +package f + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Fortran lexer. +var Fortran = internal.Register(MustNewLexer( + &Config{ + Name: "Fortran", + Aliases: []string{"fortran"}, + Filenames: []string{"*.f03", "*.f90", "*.F03", "*.F90"}, + MimeTypes: []string{"text/x-fortran"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`^#.*\n`, CommentPreproc, nil}, + {`!.*\n`, Comment, nil}, + Include("strings"), + Include("core"), + {`[a-z][\w$]*`, Name, nil}, + Include("nums"), + {`[\s]+`, Text, nil}, + }, + "core": { + {Words(`\b`, `\s*\b`, `ABSTRACT`, `ACCEPT`, `ALL`, `ALLSTOP`, `ALLOCATABLE`, `ALLOCATE`, `ARRAY`, `ASSIGN`, `ASSOCIATE`, `ASYNCHRONOUS`, `BACKSPACE`, `BIND`, `BLOCK`, `BLOCKDATA`, `BYTE`, `CALL`, `CASE`, `CLASS`, `CLOSE`, `CODIMENSION`, `COMMON`, `CONCURRRENT`, `CONTIGUOUS`, `CONTAINS`, `CONTINUE`, `CRITICAL`, `CYCLE`, `DATA`, `DEALLOCATE`, `DECODE`, `DEFERRED`, `DIMENSION`, `DO`, `ELEMENTAL`, `ELSE`, `ENCODE`, `END`, `ENTRY`, `ENUM`, `ENUMERATOR`, `EQUIVALENCE`, `EXIT`, `EXTENDS`, `EXTERNAL`, `EXTRINSIC`, `FILE`, `FINAL`, `FORALL`, `FORMAT`, `FUNCTION`, `GENERIC`, `GOTO`, `IF`, `IMAGES`, `IMPLICIT`, `IMPORT`, `IMPURE`, `INCLUDE`, `INQUIRE`, `INTENT`, `INTERFACE`, `INTRINSIC`, `IS`, `LOCK`, `MEMORY`, `MODULE`, `NAMELIST`, `NULLIFY`, `NONE`, `NON_INTRINSIC`, `NON_OVERRIDABLE`, `NOPASS`, `OPEN`, `OPTIONAL`, `OPTIONS`, `PARAMETER`, `PASS`, `PAUSE`, `POINTER`, `PRINT`, `PRIVATE`, `PROGRAM`, `PROCEDURE`, `PROTECTED`, `PUBLIC`, `PURE`, `READ`, `RECURSIVE`, `RESULT`, `RETURN`, `REWIND`, `SAVE`, `SELECT`, `SEQUENCE`, `STOP`, `SUBMODULE`, `SUBROUTINE`, `SYNC`, `SYNCALL`, `SYNCIMAGES`, `SYNCMEMORY`, `TARGET`, `THEN`, `TYPE`, `UNLOCK`, `USE`, `VALUE`, `VOLATILE`, `WHERE`, `WRITE`, `WHILE`), Keyword, nil}, + {Words(`\b`, `\s*\b`, `CHARACTER`, `COMPLEX`, `DOUBLE PRECISION`, `DOUBLE COMPLEX`, `INTEGER`, `LOGICAL`, `REAL`, `C_INT`, `C_SHORT`, `C_LONG`, `C_LONG_LONG`, `C_SIGNED_CHAR`, `C_SIZE_T`, `C_INT8_T`, `C_INT16_T`, `C_INT32_T`, `C_INT64_T`, `C_INT_LEAST8_T`, `C_INT_LEAST16_T`, `C_INT_LEAST32_T`, `C_INT_LEAST64_T`, `C_INT_FAST8_T`, `C_INT_FAST16_T`, `C_INT_FAST32_T`, `C_INT_FAST64_T`, `C_INTMAX_T`, `C_INTPTR_T`, `C_FLOAT`, `C_DOUBLE`, `C_LONG_DOUBLE`, `C_FLOAT_COMPLEX`, `C_DOUBLE_COMPLEX`, `C_LONG_DOUBLE_COMPLEX`, `C_BOOL`, 
`C_CHAR`, `C_PTR`, `C_FUNPTR`), KeywordType, nil}, + {`(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)`, Operator, nil}, + {`(::)`, KeywordDeclaration, nil}, + {`[()\[\],:&%;.]`, Punctuation, nil}, + {Words(`\b`, `\s*\b`, `Abort`, `Abs`, `Access`, `AChar`, `ACos`, `ACosH`, `AdjustL`, `AdjustR`, `AImag`, `AInt`, `Alarm`, `All`, `Allocated`, `ALog`, `AMax`, `AMin`, `AMod`, `And`, `ANInt`, `Any`, `ASin`, `ASinH`, `Associated`, `ATan`, `ATanH`, `Atomic_Define`, `Atomic_Ref`, `BesJ`, `BesJN`, `Bessel_J0`, `Bessel_J1`, `Bessel_JN`, `Bessel_Y0`, `Bessel_Y1`, `Bessel_YN`, `BesY`, `BesYN`, `BGE`, `BGT`, `BLE`, `BLT`, `Bit_Size`, `BTest`, `CAbs`, `CCos`, `Ceiling`, `CExp`, `Char`, `ChDir`, `ChMod`, `CLog`, `Cmplx`, `Command_Argument_Count`, `Complex`, `Conjg`, `Cos`, `CosH`, `Count`, `CPU_Time`, `CShift`, `CSin`, `CSqRt`, `CTime`, `C_Loc`, `C_Associated`, `C_Null_Ptr`, `C_Null_Funptr`, `C_F_Pointer`, `C_F_ProcPointer`, `C_Null_Char`, `C_Alert`, `C_Backspace`, `C_Form_Feed`, `C_FunLoc`, `C_Sizeof`, `C_New_Line`, `C_Carriage_Return`, `C_Horizontal_Tab`, `C_Vertical_Tab`, `DAbs`, `DACos`, `DASin`, `DATan`, `Date_and_Time`, `DbesJ`, `DbesJN`, `DbesY`, `DbesYN`, `Dble`, `DCos`, `DCosH`, `DDiM`, `DErF`, `DErFC`, `DExp`, `Digits`, `DiM`, `DInt`, `DLog`, `DMax`, `DMin`, `DMod`, `DNInt`, `Dot_Product`, `DProd`, `DSign`, `DSinH`, `DShiftL`, `DShiftR`, `DSin`, `DSqRt`, `DTanH`, `DTan`, `DTime`, `EOShift`, `Epsilon`, `ErF`, `ErFC`, `ErFC_Scaled`, `ETime`, `Execute_Command_Line`, `Exit`, `Exp`, `Exponent`, `Extends_Type_Of`, `FDate`, `FGet`, `FGetC`, `FindLoc`, `Float`, `Floor`, `Flush`, `FNum`, `FPutC`, `FPut`, `Fraction`, `FSeek`, `FStat`, `FTell`, `Gamma`, `GError`, `GetArg`, `Get_Command`, `Get_Command_Argument`, `Get_Environment_Variable`, `GetCWD`, `GetEnv`, `GetGId`, `GetLog`, `GetPId`, `GetUId`, `GMTime`, `HostNm`, `Huge`, `Hypot`, `IAbs`, `IAChar`, `IAll`, `IAnd`, `IAny`, `IArgC`, `IBClr`, `IBits`, `IBSet`, `IChar`, `IDate`, `IDiM`, `IDInt`, `IDNInt`, `IEOr`, `IErrNo`, `IFix`, `Imag`, `ImagPart`, `Image_Index`, `Index`, `Int`, `IOr`, `IParity`, `IRand`, `IsaTty`, `IShft`, `IShftC`, `ISign`, `Iso_C_Binding`, `Is_Contiguous`, `Is_Iostat_End`, `Is_Iostat_Eor`, `ITime`, `Kill`, `Kind`, `LBound`, `LCoBound`, `Len`, `Len_Trim`, `LGe`, `LGt`, `Link`, `LLe`, `LLt`, `LnBlnk`, `Loc`, `Log`, `Log_Gamma`, `Logical`, `Long`, `LShift`, `LStat`, `LTime`, `MaskL`, `MaskR`, `MatMul`, `Max`, `MaxExponent`, `MaxLoc`, `MaxVal`, `MClock`, `Merge`, `Merge_Bits`, `Move_Alloc`, `Min`, `MinExponent`, `MinLoc`, `MinVal`, `Mod`, `Modulo`, `MvBits`, `Nearest`, `New_Line`, `NInt`, `Norm2`, `Not`, `Null`, `Num_Images`, `Or`, `Pack`, `Parity`, `PError`, `Precision`, `Present`, `Product`, `Radix`, `Rand`, `Random_Number`, `Random_Seed`, `Range`, `Real`, `RealPart`, `Rename`, `Repeat`, `Reshape`, `RRSpacing`, `RShift`, `Same_Type_As`, `Scale`, `Scan`, `Second`, `Selected_Char_Kind`, `Selected_Int_Kind`, `Selected_Real_Kind`, `Set_Exponent`, `Shape`, `ShiftA`, `ShiftL`, `ShiftR`, `Short`, `Sign`, `Signal`, `SinH`, `Sin`, `Sleep`, `Sngl`, `Spacing`, `Spread`, `SqRt`, `SRand`, `Stat`, `Storage_Size`, `Sum`, `SymLnk`, `System`, `System_Clock`, `Tan`, `TanH`, `Time`, `This_Image`, `Tiny`, `TrailZ`, `Transfer`, `Transpose`, `Trim`, `TtyNam`, `UBound`, `UCoBound`, `UMask`, `Unlink`, `Unpack`, `Verify`, `XOr`, `ZAbs`, `ZCos`, `ZExp`, `ZLog`, `ZSin`, `ZSqRt`), NameBuiltin, nil}, + {`\.(true|false)\.`, NameBuiltin, nil}, + {`\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.`, OperatorWord, nil}, + }, + "strings": { + {`(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"`, 
LiteralStringDouble, nil}, + {`(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'`, LiteralStringSingle, nil}, + }, + "nums": { + {`\d+(?![.e])(_[a-z]\w+)?`, LiteralNumberInteger, nil}, + {`[+-]?\d*\.\d+([ed][-+]?\d+)?(_[a-z]\w+)?`, LiteralNumberFloat, nil}, + {`[+-]?\d+\.\d*([ed][-+]?\d+)?(_[a-z]\w+)?`, LiteralNumberFloat, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/f/fsharp.go b/vendor/github.com/alecthomas/chroma/lexers/f/fsharp.go new file mode 100644 index 00000000..d00f63dd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/f/fsharp.go @@ -0,0 +1,94 @@ +package f + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Fsharp lexer. +var Fsharp = internal.Register(MustNewLexer( + &Config{ + Name: "FSharp", + Aliases: []string{"fsharp"}, + Filenames: []string{"*.fs", "*.fsi"}, + MimeTypes: []string{"text/x-fsharp"}, + }, + Rules{ + "escape-sequence": { + {`\\[\\"\'ntbrafv]`, LiteralStringEscape, nil}, + {`\\[0-9]{3}`, LiteralStringEscape, nil}, + {`\\u[0-9a-fA-F]{4}`, LiteralStringEscape, nil}, + {`\\U[0-9a-fA-F]{8}`, LiteralStringEscape, nil}, + }, + "root": { + {`\s+`, Text, nil}, + {`\(\)|\[\]`, NameBuiltinPseudo, nil}, + {`\b(?|-|\\.\\.|\\.|::|:=|:>|:|;;|;|<-|<\\]|<|>\\]|>|\\?\\?|\\?|\\[<|\\[\\||\\[|\\]|_|`|\\{|\\|\\]|\\||\\}|~|<@@|<@|=|@>|@@>)", Operator, nil}, + {`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil}, + {`\b(and|or|not)\b`, OperatorWord, nil}, + {`\b(sbyte|byte|char|nativeint|unativeint|float32|single|float|double|int8|uint8|int16|uint16|int32|uint32|int64|uint64|decimal|unit|bool|string|list|exn|obj|enum)\b`, KeywordType, nil}, + {`#[ \t]*(if|endif|else|line|nowarn|light|\d+)\b.*?\n`, CommentPreproc, nil}, + {`[^\W\d][\w']*`, Name, nil}, + {`\d[\d_]*[uU]?[yslLnQRZINGmM]?`, LiteralNumberInteger, nil}, + {`0[xX][\da-fA-F][\da-fA-F_]*[uU]?[yslLn]?[fF]?`, LiteralNumberHex, nil}, + {`0[oO][0-7][0-7_]*[uU]?[yslLn]?`, LiteralNumberOct, nil}, + {`0[bB][01][01_]*[uU]?[yslLn]?`, LiteralNumberBin, nil}, + {`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)[fFmM]?`, LiteralNumberFloat, nil}, + {`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?`, LiteralStringChar, nil}, + {`'.'`, LiteralStringChar, nil}, + {`'`, Keyword, nil}, + {`@?"`, LiteralStringDouble, Push("string")}, + {`[~?][a-z][\w\']*:`, NameVariable, nil}, + }, + "dotted": { + {`\s+`, Text, nil}, + {`\.`, Punctuation, nil}, + {`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil}, + {`[A-Z][\w\']*`, Name, Pop(1)}, + {`[a-z_][\w\']*`, Name, Pop(1)}, + Default(Pop(1)), + }, + "comment": { + {`[^(*)@"]+`, Comment, nil}, + {`\(\*`, Comment, Push()}, + {`\*\)`, Comment, Pop(1)}, + {`@"`, LiteralString, Push("lstring")}, + {`"""`, LiteralString, Push("tqs")}, + {`"`, LiteralString, Push("string")}, + {`[(*)@]`, Comment, nil}, + }, + "string": { + {`[^\\"]+`, LiteralString, nil}, + Include("escape-sequence"), + {`\\\n`, LiteralString, nil}, + {`\n`, LiteralString, nil}, + {`"B?`, LiteralString, Pop(1)}, + }, + "lstring": { + {`[^"]+`, LiteralString, nil}, + {`\n`, LiteralString, nil}, + {`""`, LiteralString, nil}, + {`"B?`, LiteralString, Pop(1)}, + }, + "tqs": { + {`[^"]+`, LiteralString, nil}, + {`\n`, LiteralString, nil}, + {`"""B?`, LiteralString, Pop(1)}, + {`"`, LiteralString, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/gas.go b/vendor/github.com/alecthomas/chroma/lexers/g/gas.go new file mode 100644 index 00000000..a9228068 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/g/gas.go 
@@ -0,0 +1,55 @@ +package g + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Gas lexer. +var Gas = internal.Register(MustNewLexer( + &Config{ + Name: "GAS", + Aliases: []string{"gas", "asm"}, + Filenames: []string{"*.s", "*.S"}, + MimeTypes: []string{"text/x-gas"}, + }, + Rules{ + "root": { + Include("whitespace"), + {`(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+):`, NameLabel, nil}, + {`\.(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameAttribute, Push("directive-args")}, + {`lock|rep(n?z)?|data\d+`, NameAttribute, nil}, + {`(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameFunction, Push("instruction-args")}, + {`[\r\n]+`, Text, nil}, + }, + "directive-args": { + {`(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameConstant, nil}, + {`"(\\"|[^"])*"`, LiteralString, nil}, + {`@(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameAttribute, nil}, + {`(?:0[xX][a-zA-Z0-9]+|\d+)`, LiteralNumberInteger, nil}, + {`[\r\n]+`, Text, Pop(1)}, + Include("punctuation"), + Include("whitespace"), + }, + "instruction-args": { + {`([a-z0-9]+)( )(<)((?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+))(>)`, ByGroups(LiteralNumberHex, Text, Punctuation, NameConstant, Punctuation), nil}, + {`([a-z0-9]+)( )(<)((?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+))([-+])((?:0[xX][a-zA-Z0-9]+|\d+))(>)`, ByGroups(LiteralNumberHex, Text, Punctuation, NameConstant, Punctuation, LiteralNumberInteger, Punctuation), nil}, + {`(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameConstant, nil}, + {`(?:0[xX][a-zA-Z0-9]+|\d+)`, LiteralNumberInteger, nil}, + {`%(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)`, NameVariable, nil}, + {`$(?:0[xX][a-zA-Z0-9]+|\d+)`, LiteralNumberInteger, nil}, + {`$'(.|\\')'`, LiteralStringChar, nil}, + {`[\r\n]+`, Text, Pop(1)}, + Include("punctuation"), + Include("whitespace"), + }, + "whitespace": { + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`[;#].*?\n`, Comment, nil}, + }, + "punctuation": { + {`[-*,.()\[\]!:]+`, Punctuation, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/gdscript.go b/vendor/github.com/alecthomas/chroma/lexers/g/gdscript.go new file mode 100644 index 00000000..bfe30637 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/g/gdscript.go @@ -0,0 +1,124 @@ +package g + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// GDScript lexer. 
+var GDScript = internal.Register(MustNewLexer( + &Config{ + Name: "GDScript", + Aliases: []string{"gdscript", "gd"}, + Filenames: []string{"*.gd"}, + MimeTypes: []string{"text/x-gdscript", "application/x-gdscript"}, + }, + Rules{ + "root": { + {`\n`, Text, nil}, + {`^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, + {`^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, + {`[^\S\n]+`, Text, nil}, + {`#.*$`, CommentSingle, nil}, + {`[]{}:(),;[]`, Punctuation, nil}, + {`\\\n`, Text, nil}, + {`\\`, Text, nil}, + {`(in|and|or|not)\b`, OperatorWord, nil}, + {`!=|==|<<|>>|&&|\+=|-=|\*=|/=|%=|&=|\|=|\|\||[-~+/*%=<>&^.!|$]`, Operator, nil}, + Include("keywords"), + {`(def)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("funcname")}, + {`(class)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("classname")}, + Include("builtins"), + {`([rR]|[uUbB][rR]|[rR][uUbB])(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("tdqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("tsqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("dqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("sqs")}, + {`([uUbB]?)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "tdqs")}, + {`([uUbB]?)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "tsqs")}, + {`([uUbB]?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "dqs")}, + {`([uUbB]?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "sqs")}, + Include("name"), + Include("numbers"), + }, + "keywords": { + {Words(``, `\b`, + `if`, `elif`, `else`, `for`, `do`, + `while`, `switch`, `case`, `break`, `continue`, + `pass`, `return`, `class`, `extends`, `tool`, + `signal`, `func`, `static`, `const`, `enum`, + `var`, `onready`, `export`, `setget`, `breakpoint`), Keyword, nil}, + }, + "builtins": { + {Words(`(?)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, + {`<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>`, Other, nil}, + {`<\s*py:[a-zA-Z0-9]+`, NameTag, Push("pytag")}, + {`<\s*[a-zA-Z0-9:.]+`, NameTag, Push("tag")}, + Include("variable"), + {`[<$]`, Other, nil}, + }, + "pytag": { + {`\s+`, Text, nil}, + {`[\w:-]+\s*=`, NameAttribute, Push("pyattr")}, + {`/?\s*>`, NameTag, Pop(1)}, + }, + "pyattr": { + {`(")(.*?)(")`, ByGroups(LiteralString, Using(Python), LiteralString), Pop(1)}, + {`(')(.*?)(')`, ByGroups(LiteralString, Using(Python), LiteralString), Pop(1)}, + {`[^\s>]+`, LiteralString, Pop(1)}, + }, + "tag": { + {`\s+`, Text, nil}, + {`py:[\w-]+\s*=`, NameAttribute, Push("pyattr")}, + {`[\w:-]+\s*=`, NameAttribute, Push("attr")}, + {`/?\s*>`, NameTag, Pop(1)}, + }, + "attr": { + {`"`, LiteralString, Push("attr-dstring")}, + {`'`, LiteralString, Push("attr-sstring")}, + {`[^\s>]*`, LiteralString, Pop(1)}, + }, + "attr-dstring": { + {`"`, LiteralString, Pop(1)}, + Include("strings"), + {`'`, LiteralString, nil}, + }, + "attr-sstring": { + {`'`, LiteralString, Pop(1)}, + Include("strings"), + {`'`, LiteralString, nil}, + }, + "strings": { + {`[^"'$]+`, LiteralString, nil}, + Include("variable"), + }, + "variable": { + {`(?>|<=?|>=?|==?|&&?|\^|\|\|?`, Operator, nil}, + {`[?:]`, Operator, nil}, + {`\bdefined\b`, Operator, nil}, + {`[;{}(),\[\]]`, Punctuation, nil}, + 
{`[+-]?\d*\.\d+([eE][-+]?\d+)?`, LiteralNumberFloat, nil}, + {`[+-]?\d+\.\d*([eE][-+]?\d+)?`, LiteralNumberFloat, nil}, + {`0[xX][0-9a-fA-F]*`, LiteralNumberHex, nil}, + {`0[0-7]*`, LiteralNumberOct, nil}, + {`[1-9][0-9]*`, LiteralNumberInteger, nil}, + {Words(`\b`, `\b`, `attribute`, `const`, `uniform`, `varying`, `centroid`, `break`, `continue`, `do`, `for`, `while`, `if`, `else`, `in`, `out`, `inout`, `float`, `int`, `void`, `bool`, `true`, `false`, `invariant`, `discard`, `return`, `mat2`, `mat3mat4`, `mat2x2`, `mat3x2`, `mat4x2`, `mat2x3`, `mat3x3`, `mat4x3`, `mat2x4`, `mat3x4`, `mat4x4`, `vec2`, `vec3`, `vec4`, `ivec2`, `ivec3`, `ivec4`, `bvec2`, `bvec3`, `bvec4`, `sampler1D`, `sampler2D`, `sampler3DsamplerCube`, `sampler1DShadow`, `sampler2DShadow`, `struct`), Keyword, nil}, + {Words(`\b`, `\b`, `asm`, `class`, `union`, `enum`, `typedef`, `template`, `this`, `packed`, `goto`, `switch`, `default`, `inline`, `noinline`, `volatile`, `public`, `static`, `extern`, `external`, `interface`, `long`, `short`, `double`, `half`, `fixed`, `unsigned`, `lowp`, `mediump`, `highp`, `precision`, `input`, `output`, `hvec2`, `hvec3`, `hvec4`, `dvec2`, `dvec3`, `dvec4`, `fvec2`, `fvec3`, `fvec4`, `sampler2DRect`, `sampler3DRect`, `sampler2DRectShadow`, `sizeof`, `cast`, `namespace`, `using`), Keyword, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + {`\.`, Punctuation, nil}, + {`\s+`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/gnuplot.go b/vendor/github.com/alecthomas/chroma/lexers/g/gnuplot.go new file mode 100644 index 00000000..77c6363c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/g/gnuplot.go @@ -0,0 +1,117 @@ +package g + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Gnuplot lexer. 
+var Gnuplot = internal.Register(MustNewLexer( + &Config{ + Name: "Gnuplot", + Aliases: []string{"gnuplot"}, + Filenames: []string{"*.plot", "*.plt"}, + MimeTypes: []string{"text/x-gnuplot"}, + }, + Rules{ + "root": { + Include("whitespace"), + {`bind\b|bin\b|bi\b`, Keyword, Push("bind")}, + {`exit\b|exi\b|ex\b|quit\b|qui\b|qu\b|q\b`, Keyword, Push("quit")}, + {`fit\b|fi\b|f\b`, Keyword, Push("fit")}, + {`(if)(\s*)(\()`, ByGroups(Keyword, Text, Punctuation), Push("if")}, + {`else\b`, Keyword, nil}, + {`pause\b|paus\b|pau\b|pa\b`, Keyword, Push("pause")}, + {`plot\b|plo\b|pl\b|p\b|replot\b|replo\b|repl\b|rep\b|splot\b|splo\b|spl\b|sp\b`, Keyword, Push("plot")}, + {`save\b|sav\b|sa\b`, Keyword, Push("save")}, + {`set\b|se\b`, Keyword, Push("genericargs", "optionarg")}, + {`show\b|sho\b|sh\b|unset\b|unse\b|uns\b`, Keyword, Push("noargs", "optionarg")}, + {`lower\b|lowe\b|low\b|raise\b|rais\b|rai\b|ra\b|call\b|cal\b|ca\b|cd\b|clear\b|clea\b|cle\b|cl\b|help\b|hel\b|he\b|h\b|\?\b|history\b|histor\b|histo\b|hist\b|his\b|hi\b|load\b|loa\b|lo\b|l\b|print\b|prin\b|pri\b|pr\b|pwd\b|reread\b|rerea\b|rere\b|rer\b|re\b|reset\b|rese\b|res\b|screendump\b|screendum\b|screendu\b|screend\b|screen\b|scree\b|scre\b|scr\b|shell\b|shel\b|she\b|system\b|syste\b|syst\b|sys\b|sy\b|update\b|updat\b|upda\b|upd\b|up\b`, Keyword, Push("genericargs")}, + {`pwd\b|reread\b|rerea\b|rere\b|rer\b|re\b|reset\b|rese\b|res\b|screendump\b|screendum\b|screendu\b|screend\b|screen\b|scree\b|scre\b|scr\b|shell\b|shel\b|she\b|test\b`, Keyword, Push("noargs")}, + {`([a-zA-Z_]\w*)(\s*)(=)`, ByGroups(NameVariable, Text, Operator), Push("genericargs")}, + {`([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)`, ByGroups(NameFunction, Text, Operator), Push("genericargs")}, + {`@[a-zA-Z_]\w*`, NameConstant, nil}, + {`;`, Keyword, nil}, + }, + "comment": { + {`[^\\\n]`, Comment, nil}, + {`\\\n`, Comment, nil}, + {`\\`, Comment, nil}, + Default(Pop(1)), + }, + "whitespace": { + {`#`, Comment, Push("comment")}, + {`[ \t\v\f]+`, Text, nil}, + }, + "noargs": { + Include("whitespace"), + {`;`, Punctuation, Pop(1)}, + {`\n`, Text, Pop(1)}, + }, + "dqstring": { + {`"`, LiteralString, Pop(1)}, + {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})`, LiteralStringEscape, nil}, + {`[^\\"\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + {`\n`, LiteralString, Pop(1)}, + }, + "sqstring": { + {`''`, LiteralString, nil}, + {`'`, LiteralString, Pop(1)}, + {`[^\\'\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + {`\n`, LiteralString, Pop(1)}, + }, + "genericargs": { + Include("noargs"), + {`"`, LiteralString, Push("dqstring")}, + {`'`, LiteralString, Push("sqstring")}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\.\d+)`, LiteralNumberFloat, nil}, + {`-?\d+`, LiteralNumberInteger, nil}, + {`[,.~!%^&*+=|?:<>/-]`, Operator, nil}, + {`[{}()\[\]]`, Punctuation, nil}, + {`(eq|ne)\b`, OperatorWord, nil}, + {`([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + {`@[a-zA-Z_]\w*`, NameConstant, nil}, + {`\\\n`, Text, nil}, + }, + "optionarg": { + Include("whitespace"), + 
{`all\b|al\b|a\b|angles\b|angle\b|angl\b|ang\b|an\b|arrow\b|arro\b|arr\b|ar\b|autoscale\b|autoscal\b|autosca\b|autosc\b|autos\b|auto\b|aut\b|au\b|bars\b|bar\b|ba\b|b\b|border\b|borde\b|bord\b|bor\b|boxwidth\b|boxwidt\b|boxwid\b|boxwi\b|boxw\b|box\b|clabel\b|clabe\b|clab\b|cla\b|cl\b|clip\b|cli\b|cl\b|c\b|cntrparam\b|cntrpara\b|cntrpar\b|cntrpa\b|cntrp\b|cntr\b|cnt\b|cn\b|contour\b|contou\b|conto\b|cont\b|con\b|co\b|data\b|dat\b|da\b|datafile\b|datafil\b|datafi\b|dataf\b|data\b|dgrid3d\b|dgrid3\b|dgrid\b|dgri\b|dgr\b|dg\b|dummy\b|dumm\b|dum\b|du\b|encoding\b|encodin\b|encodi\b|encod\b|enco\b|enc\b|decimalsign\b|decimalsig\b|decimalsi\b|decimals\b|decimal\b|decima\b|decim\b|deci\b|dec\b|fit\b|fontpath\b|fontpat\b|fontpa\b|fontp\b|font\b|format\b|forma\b|form\b|for\b|fo\b|function\b|functio\b|functi\b|funct\b|func\b|fun\b|fu\b|functions\b|function\b|functio\b|functi\b|funct\b|func\b|fun\b|fu\b|grid\b|gri\b|gr\b|g\b|hidden3d\b|hidden3\b|hidden\b|hidde\b|hidd\b|hid\b|historysize\b|historysiz\b|historysi\b|historys\b|history\b|histor\b|histo\b|hist\b|his\b|isosamples\b|isosample\b|isosampl\b|isosamp\b|isosam\b|isosa\b|isos\b|iso\b|is\b|key\b|ke\b|k\b|keytitle\b|keytitl\b|keytit\b|keyti\b|keyt\b|label\b|labe\b|lab\b|la\b|linestyle\b|linestyl\b|linesty\b|linest\b|lines\b|line\b|lin\b|li\b|ls\b|loadpath\b|loadpat\b|loadpa\b|loadp\b|load\b|loa\b|locale\b|local\b|loca\b|loc\b|logscale\b|logscal\b|logsca\b|logsc\b|logs\b|log\b|macros\b|macro\b|macr\b|mac\b|mapping\b|mappin\b|mappi\b|mapp\b|map\b|mapping3d\b|mapping3\b|mapping\b|mappin\b|mappi\b|mapp\b|map\b|margin\b|margi\b|marg\b|mar\b|lmargin\b|lmargi\b|lmarg\b|lmar\b|rmargin\b|rmargi\b|rmarg\b|rmar\b|tmargin\b|tmargi\b|tmarg\b|tmar\b|bmargin\b|bmargi\b|bmarg\b|bmar\b|mouse\b|mous\b|mou\b|mo\b|multiplot\b|multiplo\b|multipl\b|multip\b|multi\b|mxtics\b|mxtic\b|mxti\b|mxt\b|nomxtics\b|nomxtic\b|nomxti\b|nomxt\b|mx2tics\b|mx2tic\b|mx2ti\b|mx2t\b|nomx2tics\b|nomx2tic\b|nomx2ti\b|nomx2t\b|mytics\b|mytic\b|myti\b|myt\b|nomytics\b|nomytic\b|nomyti\b|nomyt\b|my2tics\b|my2tic\b|my2ti\b|my2t\b|nomy2tics\b|nomy2tic\b|nomy2ti\b|nomy2t\b|mztics\b|mztic\b|mzti\b|mzt\b|nomztics\b|nomztic\b|nomzti\b|nomzt\b|mcbtics\b|mcbtic\b|mcbti\b|mcbt\b|nomcbtics\b|nomcbtic\b|nomcbti\b|nomcbt\b|offsets\b|offset\b|offse\b|offs\b|off\b|of\b|origin\b|origi\b|orig\b|ori\b|or\b|output\b|outpu\b|outp\b|out\b|ou\b|o\b|parametric\b|parametri\b|parametr\b|paramet\b|parame\b|param\b|para\b|par\b|pa\b|pm3d\b|pm3\b|pm\b|palette\b|palett\b|palet\b|pale\b|pal\b|colorbox\b|colorbo\b|colorb\b|plot\b|plo\b|pl\b|p\b|pointsize\b|pointsiz\b|pointsi\b|points\b|point\b|poin\b|poi\b|polar\b|pola\b|pol\b|print\b|prin\b|pri\b|pr\b|object\b|objec\b|obje\b|obj\b|samples\b|sample\b|sampl\b|samp\b|sam\b|sa\b|size\b|siz\b|si\b|style\b|styl\b|sty\b|st\b|surface\b|surfac\b|surfa\b|surf\b|sur\b|su\b|table\b|terminal\b|termina\b|termin\b|termi\b|term\b|ter\b|te\b|t\b|termoptions\b|termoption\b|termoptio\b|termopti\b|termopt\b|termop\b|termo\b|tics\b|tic\b|ti\b|ticscale\b|ticscal\b|ticsca\b|ticsc\b|ticslevel\b|ticsleve\b|ticslev\b|ticsle\b|ticsl\b|timefmt\b|timefm\b|timef\b|timestamp\b|timestam\b|timesta\b|timest\b|times\b|time\b|tim\b|title\b|titl\b|tit\b|variables\b|variable\b|variabl\b|variab\b|varia\b|vari\b|var\b|va\b|v\b|version\b|versio\b|versi\b|vers\b|ver\b|ve\b|view\b|vie\b|vi\b|xyplane\b|xyplan\b|xypla\b|xypl\b|xyp\b|xdata\b|xdat\b|xda\b|x2data\b|x2dat\b|x2da\b|ydata\b|ydat\b|yda\b|y2data\b|y2dat\b|y2da\b|zdata\b|zdat\b|zda\b|cbdata\b|cbdat\b|cbda\b|xlabel\b|xlabe\b|xlab\b|xla\b|xl\b|x2label\b|x2labe\b
|x2lab\b|x2la\b|x2l\b|ylabel\b|ylabe\b|ylab\b|yla\b|yl\b|y2label\b|y2labe\b|y2lab\b|y2la\b|y2l\b|zlabel\b|zlabe\b|zlab\b|zla\b|zl\b|cblabel\b|cblabe\b|cblab\b|cbla\b|cbl\b|xtics\b|xtic\b|xti\b|noxtics\b|noxtic\b|noxti\b|x2tics\b|x2tic\b|x2ti\b|nox2tics\b|nox2tic\b|nox2ti\b|ytics\b|ytic\b|yti\b|noytics\b|noytic\b|noyti\b|y2tics\b|y2tic\b|y2ti\b|noy2tics\b|noy2tic\b|noy2ti\b|ztics\b|ztic\b|zti\b|noztics\b|noztic\b|nozti\b|cbtics\b|cbtic\b|cbti\b|nocbtics\b|nocbtic\b|nocbti\b|xdtics\b|xdtic\b|xdti\b|noxdtics\b|noxdtic\b|noxdti\b|x2dtics\b|x2dtic\b|x2dti\b|nox2dtics\b|nox2dtic\b|nox2dti\b|ydtics\b|ydtic\b|ydti\b|noydtics\b|noydtic\b|noydti\b|y2dtics\b|y2dtic\b|y2dti\b|noy2dtics\b|noy2dtic\b|noy2dti\b|zdtics\b|zdtic\b|zdti\b|nozdtics\b|nozdtic\b|nozdti\b|cbdtics\b|cbdtic\b|cbdti\b|nocbdtics\b|nocbdtic\b|nocbdti\b|xmtics\b|xmtic\b|xmti\b|noxmtics\b|noxmtic\b|noxmti\b|x2mtics\b|x2mtic\b|x2mti\b|nox2mtics\b|nox2mtic\b|nox2mti\b|ymtics\b|ymtic\b|ymti\b|noymtics\b|noymtic\b|noymti\b|y2mtics\b|y2mtic\b|y2mti\b|noy2mtics\b|noy2mtic\b|noy2mti\b|zmtics\b|zmtic\b|zmti\b|nozmtics\b|nozmtic\b|nozmti\b|cbmtics\b|cbmtic\b|cbmti\b|nocbmtics\b|nocbmtic\b|nocbmti\b|xrange\b|xrang\b|xran\b|xra\b|xr\b|x2range\b|x2rang\b|x2ran\b|x2ra\b|x2r\b|yrange\b|yrang\b|yran\b|yra\b|yr\b|y2range\b|y2rang\b|y2ran\b|y2ra\b|y2r\b|zrange\b|zrang\b|zran\b|zra\b|zr\b|cbrange\b|cbrang\b|cbran\b|cbra\b|cbr\b|rrange\b|rrang\b|rran\b|rra\b|rr\b|trange\b|trang\b|tran\b|tra\b|tr\b|urange\b|urang\b|uran\b|ura\b|ur\b|vrange\b|vrang\b|vran\b|vra\b|vr\b|xzeroaxis\b|xzeroaxi\b|xzeroax\b|xzeroa\b|x2zeroaxis\b|x2zeroaxi\b|x2zeroax\b|x2zeroa\b|yzeroaxis\b|yzeroaxi\b|yzeroax\b|yzeroa\b|y2zeroaxis\b|y2zeroaxi\b|y2zeroax\b|y2zeroa\b|zzeroaxis\b|zzeroaxi\b|zzeroax\b|zzeroa\b|zeroaxis\b|zeroaxi\b|zeroax\b|zeroa\b|zero\b|zer\b|ze\b|z\b`, NameBuiltin, Pop(1)}, + }, + "bind": { + {`!`, Keyword, Pop(1)}, + {`allwindows\b|allwindow\b|allwindo\b|allwind\b|allwin\b|allwi\b|allw\b|all\b`, NameBuiltin, nil}, + Include("genericargs"), + }, + "quit": { + {`gnuplot\b`, Keyword, nil}, + Include("noargs"), + }, + "fit": { + {`via\b`, NameBuiltin, nil}, + Include("plot"), + }, + "if": { + {`\)`, Punctuation, Pop(1)}, + Include("genericargs"), + }, + "pause": { + {`(mouse|any|button1|button2|button3)\b`, NameBuiltin, nil}, + {`keypress\b|keypres\b|keypre\b|keypr\b|keyp\b|key\b`, NameBuiltin, nil}, + Include("genericargs"), + }, + "plot": { + {`axes\b|axe\b|ax\b|axis\b|axi\b|binary\b|binar\b|bina\b|bin\b|every\b|ever\b|eve\b|ev\b|index\b|inde\b|ind\b|in\b|i\b|matrix\b|matri\b|matr\b|mat\b|smooth\b|smoot\b|smoo\b|smo\b|sm\b|s\b|thru\b|title\b|titl\b|tit\b|ti\b|t\b|notitle\b|notitl\b|notit\b|noti\b|not\b|using\b|usin\b|usi\b|us\b|u\b|with\b|wit\b|wi\b|w\b`, NameBuiltin, nil}, + Include("genericargs"), + }, + "save": { + {`functions\b|function\b|functio\b|functi\b|funct\b|func\b|fun\b|fu\b|f\b|set\b|se\b|s\b|terminal\b|termina\b|termin\b|termi\b|term\b|ter\b|te\b|t\b|variables\b|variable\b|variabl\b|variab\b|varia\b|vari\b|var\b|va\b|v\b`, NameBuiltin, nil}, + Include("genericargs"), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/go.go b/vendor/github.com/alecthomas/chroma/lexers/g/go.go new file mode 100644 index 00000000..54c23c3c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/g/go.go @@ -0,0 +1,113 @@ +package g + +import ( + "strings" + + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/h" + "github.com/alecthomas/chroma/lexers/internal" +) + +// Go lexer. 
+var Go = internal.Register(MustNewLexer( + &Config{ + Name: "Go", + Aliases: []string{"go", "golang"}, + Filenames: []string{"*.go"}, + MimeTypes: []string{"text/x-gosrc"}, + }, + Rules{ + "root": { + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`//(.*?)\n`, CommentSingle, nil}, + {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, + {`(import|package)\b`, KeywordNamespace, nil}, + {`(var|func|struct|map|chan|type|interface|const)\b`, KeywordDeclaration, nil}, + {Words(``, `\b`, `break`, `default`, `select`, `case`, `defer`, `go`, `else`, `goto`, `switch`, `fallthrough`, `if`, `range`, `continue`, `for`, `return`), Keyword, nil}, + {`(true|false|iota|nil)\b`, KeywordConstant, nil}, + {Words(``, `\b(\()`, `uint`, `uint8`, `uint16`, `uint32`, `uint64`, `int`, `int8`, `int16`, `int32`, `int64`, `float`, `float32`, `float64`, `complex64`, `complex128`, `byte`, `rune`, `string`, `bool`, `error`, `uintptr`, `print`, `println`, `panic`, `recover`, `close`, `complex`, `real`, `imag`, `len`, `cap`, `append`, `copy`, `delete`, `new`, `make`), ByGroups(NameBuiltin, Punctuation), nil}, + {Words(``, `\b`, `uint`, `uint8`, `uint16`, `uint32`, `uint64`, `int`, `int8`, `int16`, `int32`, `int64`, `float`, `float32`, `float64`, `complex64`, `complex128`, `byte`, `rune`, `string`, `bool`, `error`, `uintptr`), KeywordType, nil}, + {`\d+i`, LiteralNumber, nil}, + {`\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil}, + {`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil}, + {`\d+[Ee][-+]\d+i`, LiteralNumber, nil}, + {`\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil}, + {`\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil}, + {`0[0-7]+`, LiteralNumberOct, nil}, + {`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`(0|[1-9][0-9]*)`, LiteralNumberInteger, nil}, + {`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil}, + {"(`)([^`]*)(`)", ByGroups(LiteralString, Using(TypeRemappingLexer(GoTextTemplate, TypeMapping{{Other, LiteralString, nil}})), LiteralString), nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\||<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])`, Operator, nil}, + {`[|^<>=!()\[\]{}.,;:]`, Punctuation, nil}, + {`[^\W\d]\w*`, NameOther, nil}, + }, + }, +).SetAnalyser(func(text string) float32 { + if strings.Contains(text, "fmt.") && strings.Contains(text, "package ") { + return 0.5 + } + if strings.Contains(text, "package ") { + return 0.1 + } + return 0.0 +})) + +var goTemplateRules = Rules{ + "root": { + {`{{[-]?`, CommentPreproc, Push("template")}, + {`[^{]+`, Other, nil}, + {`{`, Other, nil}, + }, + "template": { + {`[-]?}}`, CommentPreproc, Pop(1)}, + {`/\*.*?\*/`, Comment, nil}, + {`(?=}})`, CommentPreproc, Pop(1)}, // Terminate the pipeline + {`\(`, Operator, Push("subexpression")}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + Include("expression"), + }, + "subexpression": { + {`\)`, Operator, Pop(1)}, + Include("expression"), + }, + "expression": { + {`\s+`, Whitespace, nil}, + {`\(`, Operator, Push("subexpression")}, + {`(range|if|else|while|with|template|end|true|false|nil|and|call|html|index|js|len|not|or|print|printf|println|urlquery|eq|ne|lt|le|gt|ge)\b`, Keyword, nil}, + {`\||:=`, Operator, nil}, + {`[$]?[^\W\d]\w*`, NameOther, nil}, + {`[$]?\.(?:[^\W\d]\w*)?`, NameAttribute, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`\d+i`, LiteralNumber, nil}, + {`\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil}, + 
{`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil}, + {`\d+[Ee][-+]\d+i`, LiteralNumber, nil}, + {`\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil}, + {`\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil}, + {`0[0-7]+`, LiteralNumberOct, nil}, + {`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`(0|[1-9][0-9]*)`, LiteralNumberInteger, nil}, + {`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil}, + {"`[^`]*`", LiteralString, nil}, + }, +} + +var GoHTMLTemplate = internal.Register(DelegatingLexer(h.HTML, MustNewLexer( + &Config{ + Name: "Go HTML Template", + Aliases: []string{"go-html-template"}, + }, + goTemplateRules, +))) + +var GoTextTemplate = internal.Register(MustNewLexer( + &Config{ + Name: "Go Text Template", + Aliases: []string{"go-text-template"}, + }, + goTemplateRules, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/g/groovy.go b/vendor/github.com/alecthomas/chroma/lexers/g/groovy.go new file mode 100644 index 00000000..a395415a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/g/groovy.go @@ -0,0 +1,58 @@ +package g + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Groovy lexer. +var Groovy = internal.Register(MustNewLexer( + &Config{ + Name: "Groovy", + Aliases: []string{"groovy"}, + Filenames: []string{"*.groovy", "*.gradle"}, + MimeTypes: []string{"text/x-groovy"}, + DotAll: true, + }, + Rules{ + "root": { + {`#!(.*?)$`, CommentPreproc, Push("base")}, + Default(Push("base")), + }, + "base": { + {`^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, + {`[^\S\n]+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`@[a-zA-Z_][\w.]*`, NameDecorator, nil}, + {`(as|assert|break|case|catch|continue|default|do|else|finally|for|if|in|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b`, Keyword, nil}, + {`(abstract|const|enum|extends|final|implements|native|private|protected|public|static|strictfp|super|synchronized|throws|transient|volatile)\b`, KeywordDeclaration, nil}, + {`(def|boolean|byte|char|double|float|int|long|short|void)\b`, KeywordType, nil}, + {`(package)(\s+)`, ByGroups(KeywordNamespace, Text), nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`(class|interface)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, + {`(import)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`""".*?"""`, LiteralStringDouble, nil}, + {`'''.*?'''`, LiteralStringSingle, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`\$/((?!/\$).)*/\$`, LiteralString, nil}, + {`/(\\\\|\\"|[^/])*/`, LiteralString, nil}, + {`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil}, + {`(\.)([a-zA-Z_]\w*)`, ByGroups(Operator, NameAttribute), nil}, + {`[a-zA-Z_]\w*:`, NameLabel, nil}, + {`[a-zA-Z_$]\w*`, Name, nil}, + {`[~^*!%&\[\](){}<>|+=:;,./?-]`, Operator, nil}, + {`[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`[0-9]+L?`, LiteralNumberInteger, nil}, + {`\n`, Text, nil}, + }, + "class": { + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + }, + "import": { + {`[\w.]+\*?`, NameNamespace, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/handlebars.go b/vendor/github.com/alecthomas/chroma/lexers/h/handlebars.go new file mode 100644 index 
00000000..07072da5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/handlebars.go @@ -0,0 +1,56 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Handlebars lexer. +var Handlebars = internal.Register(MustNewLexer( + &Config{ + Name: "Handlebars", + Aliases: []string{"handlebars"}, + Filenames: []string{"*.handlebars"}, + MimeTypes: []string{}, + }, + Rules{ + "root": { + {`[^{]+`, Other, nil}, + {`\{\{!.*\}\}`, Comment, nil}, + {`(\{\{\{)(\s*)`, ByGroups(CommentSpecial, Text), Push("tag")}, + {`(\{\{)(\s*)`, ByGroups(CommentPreproc, Text), Push("tag")}, + }, + "tag": { + {`\s+`, Text, nil}, + {`\}\}\}`, CommentSpecial, Pop(1)}, + {`\}\}`, CommentPreproc, Pop(1)}, + {`([#/]*)(each|if|unless|else|with|log|in(?:line)?)`, ByGroups(Keyword, Keyword), nil}, + {`#\*inline`, Keyword, nil}, + {`([#/])([\w-]+)`, ByGroups(NameFunction, NameFunction), nil}, + {`([\w-]+)(=)`, ByGroups(NameAttribute, Operator), nil}, + {`(>)(\s*)(@partial-block)`, ByGroups(Keyword, Text, Keyword), nil}, + {`(#?>)(\s*)([\w-]+)`, ByGroups(Keyword, Text, NameVariable), nil}, + {`(>)(\s*)(\()`, ByGroups(Keyword, Text, Punctuation), Push("dynamic-partial")}, + Include("generic"), + }, + "dynamic-partial": { + {`\s+`, Text, nil}, + {`\)`, Punctuation, Pop(1)}, + {`(lookup)(\s+)(\.|this)(\s+)`, ByGroups(Keyword, Text, NameVariable, Text), nil}, + {`(lookup)(\s+)(\S+)`, ByGroups(Keyword, Text, UsingSelf("variable")), nil}, + {`[\w-]+`, NameFunction, nil}, + Include("generic"), + }, + "variable": { + {`[a-zA-Z][\w-]*`, NameVariable, nil}, + {`\.[\w-]+`, NameVariable, nil}, + {`(this\/|\.\/|(\.\.\/)+)[\w-]+`, NameVariable, nil}, + }, + "generic": { + Include("variable"), + {`:?"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/haskell.go b/vendor/github.com/alecthomas/chroma/lexers/h/haskell.go new file mode 100644 index 00000000..b018eab4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/haskell.go @@ -0,0 +1,99 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Haskell lexer. 
+var Haskell = internal.Register(MustNewLexer( + &Config{ + Name: "Haskell", + Aliases: []string{"haskell", "hs"}, + Filenames: []string{"*.hs"}, + MimeTypes: []string{"text/x-haskell"}, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`--(?![!#$%&*+./<=>?@^|_~:\\]).*?$`, CommentSingle, nil}, + {`\{-`, CommentMultiline, Push("comment")}, + {`\bimport\b`, KeywordReserved, Push("import")}, + {`\bmodule\b`, KeywordReserved, Push("module")}, + {`\berror\b`, NameException, nil}, + {`\b(case|class|data|default|deriving|do|else|family|if|in|infix[lr]?|instance|let|newtype|of|then|type|where|_)(?!\')\b`, KeywordReserved, nil}, + {`'[^\\]'`, LiteralStringChar, nil}, + {`^[_a-zµß-öø-ÿāăąćĉċčďđēĕėęěĝğġģĥħĩīĭįıijĵķ-ĸĺļľŀłńņň-ʼnŋōŏőœŕŗřśŝşšţťŧũūŭůűųŵŷźżž-ƀƃƅƈƌ-ƍƒƕƙ-ƛƞơƣƥƨƪ-ƫƭưƴƶƹ-ƺƽ-ƿdžljnjǎǐǒǔǖǘǚǜ-ǝǟǡǣǥǧǩǫǭǯ-ǰdzǵǹǻǽǿȁȃȅȇȉȋȍȏȑȓȕȗșțȝȟȡȣȥȧȩȫȭȯȱȳ-ȹȼȿ-ɀɂɇɉɋɍɏ-ʓʕ-ʯͱͳͷͻ-ͽΐά-ώϐ-ϑϕ-ϗϙϛϝϟϡϣϥϧϩϫϭϯ-ϳϵϸϻ-ϼа-џѡѣѥѧѩѫѭѯѱѳѵѷѹѻѽѿҁҋҍҏґғҕҗҙқҝҟҡңҥҧҩҫҭүұҳҵҷҹһҽҿӂӄӆӈӊӌӎ-ӏӑӓӕӗәӛӝӟӡӣӥӧөӫӭӯӱӳӵӷӹӻӽӿԁԃԅԇԉԋԍԏԑԓԕԗԙԛԝԟԡԣԥԧա-ևᴀ-ᴫᵫ-ᵷᵹ-ᶚḁḃḅḇḉḋḍḏḑḓḕḗḙḛḝḟḡḣḥḧḩḫḭḯḱḳḵḷḹḻḽḿṁṃṅṇṉṋṍṏṑṓṕṗṙṛṝṟṡṣṥṧṩṫṭṯṱṳṵṷṹṻṽṿẁẃẅẇẉẋẍẏẑẓẕ-ẝẟạảấầẩẫậắằẳẵặẹẻẽếềểễệỉịọỏốồổỗộớờởỡợụủứừửữựỳỵỷỹỻỽỿ-ἇἐ-ἕἠ-ἧἰ-ἷὀ-ὅὐ-ὗὠ-ὧὰ-ώᾀ-ᾇᾐ-ᾗᾠ-ᾧᾰ-ᾴᾶ-ᾷιῂ-ῄῆ-ῇῐ-ΐῖ-ῗῠ-ῧῲ-ῴῶ-ῷℊℎ-ℏℓℯℴℹℼ-ℽⅆ-ⅉⅎↄⰰ-ⱞⱡⱥ-ⱦⱨⱪⱬⱱⱳ-ⱴⱶ-ⱻⲁⲃⲅⲇⲉⲋⲍⲏⲑⲓⲕⲗⲙⲛⲝⲟⲡⲣⲥⲧⲩⲫⲭⲯⲱⲳⲵⲷⲹⲻⲽⲿⳁⳃⳅⳇⳉⳋⳍⳏⳑⳓⳕⳗⳙⳛⳝⳟⳡⳣ-ⳤⳬⳮⳳⴀ-ⴥⴧⴭꙁꙃꙅꙇꙉꙋꙍꙏꙑꙓꙕꙗꙙꙛꙝꙟꙡꙣꙥꙧꙩꙫꙭꚁꚃꚅꚇꚉꚋꚍꚏꚑꚓꚕꚗꜣꜥꜧꜩꜫꜭꜯ-ꜱꜳꜵꜷꜹꜻꜽꜿꝁꝃꝅꝇꝉꝋꝍꝏꝑꝓꝕꝗꝙꝛꝝꝟꝡꝣꝥꝧꝩꝫꝭꝯꝱ-ꝸꝺꝼꝿꞁꞃꞅꞇꞌꞎꞑꞓꞡꞣꞥꞧꞩꟺff-stﬓ-ﬗa-z𐐨-𐑏𝐚-𝐳𝑎-𝑔𝑖-𝑧𝒂-𝒛𝒶-𝒹𝒻𝒽-𝓃𝓅-𝓏𝓪-𝔃𝔞-𝔷𝕒-𝕫𝖆-𝖟𝖺-𝗓𝗮-𝘇𝘢-𝘻𝙖-𝙯𝚊-𝚥𝛂-𝛚𝛜-𝛡𝛼-𝜔𝜖-𝜛𝜶-𝝎𝝐-𝝕𝝰-𝞈𝞊-𝞏𝞪-𝟂𝟄-𝟉𝟋][\w\']*`, NameFunction, nil}, + {`'?[_a-zµß-öø-ÿāăąćĉċčďđēĕėęěĝğġģĥħĩīĭįıijĵķ-ĸĺļľŀłńņň-ʼnŋōŏőœŕŗřśŝşšţťŧũūŭůűųŵŷźżž-ƀƃƅƈƌ-ƍƒƕƙ-ƛƞơƣƥƨƪ-ƫƭưƴƶƹ-ƺƽ-ƿdžljnjǎǐǒǔǖǘǚǜ-ǝǟǡǣǥǧǩǫǭǯ-ǰdzǵǹǻǽǿȁȃȅȇȉȋȍȏȑȓȕȗșțȝȟȡȣȥȧȩȫȭȯȱȳ-ȹȼȿ-ɀɂɇɉɋɍɏ-ʓʕ-ʯͱͳͷͻ-ͽΐά-ώϐ-ϑϕ-ϗϙϛϝϟϡϣϥϧϩϫϭϯ-ϳϵϸϻ-ϼа-џѡѣѥѧѩѫѭѯѱѳѵѷѹѻѽѿҁҋҍҏґғҕҗҙқҝҟҡңҥҧҩҫҭүұҳҵҷҹһҽҿӂӄӆӈӊӌӎ-ӏӑӓӕӗәӛӝӟӡӣӥӧөӫӭӯӱӳӵӷӹӻӽӿԁԃԅԇԉԋԍԏԑԓԕԗԙԛԝԟԡԣԥԧա-ևᴀ-ᴫᵫ-ᵷᵹ-ᶚḁḃḅḇḉḋḍḏḑḓḕḗḙḛḝḟḡḣḥḧḩḫḭḯḱḳḵḷḹḻḽḿṁṃṅṇṉṋṍṏṑṓṕṗṙṛṝṟṡṣṥṧṩṫṭṯṱṳṵṷṹṻṽṿẁẃẅẇẉẋẍẏẑẓẕ-ẝẟạảấầẩẫậắằẳẵặẹẻẽếềểễệỉịọỏốồổỗộớờởỡợụủứừửữựỳỵỷỹỻỽỿ-ἇἐ-ἕἠ-ἧἰ-ἷὀ-ὅὐ-ὗὠ-ὧὰ-ώᾀ-ᾇᾐ-ᾗᾠ-ᾧᾰ-ᾴᾶ-ᾷιῂ-ῄῆ-ῇῐ-ΐῖ-ῗῠ-ῧῲ-ῴῶ-ῷℊℎ-ℏℓℯℴℹℼ-ℽⅆ-ⅉⅎↄⰰ-ⱞⱡⱥ-ⱦⱨⱪⱬⱱⱳ-ⱴⱶ-ⱻⲁⲃⲅⲇⲉⲋⲍⲏⲑⲓⲕⲗⲙⲛⲝⲟⲡⲣⲥⲧⲩⲫⲭⲯⲱⲳⲵⲷⲹⲻⲽⲿⳁⳃⳅⳇⳉⳋⳍⳏⳑⳓⳕⳗⳙⳛⳝⳟⳡⳣ-ⳤⳬⳮⳳⴀ-ⴥⴧⴭꙁꙃꙅꙇꙉꙋꙍꙏꙑꙓꙕꙗꙙꙛꙝꙟꙡꙣꙥꙧꙩꙫꙭꚁꚃꚅꚇꚉꚋꚍꚏꚑꚓꚕꚗꜣꜥꜧꜩꜫꜭꜯ-ꜱꜳꜵꜷꜹꜻꜽꜿꝁꝃꝅꝇꝉꝋꝍꝏꝑꝓꝕꝗꝙꝛꝝꝟꝡꝣꝥꝧꝩꝫꝭꝯꝱ-ꝸꝺꝼꝿꞁꞃꞅꞇꞌꞎꞑꞓꞡꞣꞥꞧꞩꟺff-stﬓ-ﬗa-z𐐨-𐑏𝐚-𝐳𝑎-𝑔𝑖-𝑧𝒂-𝒛𝒶-𝒹𝒻𝒽-𝓃𝓅-𝓏𝓪-𝔃𝔞-𝔷𝕒-𝕫𝖆-𝖟𝖺-𝗓𝗮-𝘇𝘢-𝘻𝙖-𝙯𝚊-𝚥𝛂-𝛚𝛜-𝛡𝛼-𝜔𝜖-𝜛𝜶-𝝎𝝐-𝝕𝝰-𝞈𝞊-𝞏𝞪-𝟂𝟄-𝟉𝟋][\w']*`, Name, nil}, + {`('')?[A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ-ℍℐ-ℒℕℙ-ℝℤΩℨK-ℭℰ-ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w\']*`, KeywordType, nil}, + 
{`(')[A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ-ℍℐ-ℒℕℙ-ℝℤΩℨK-ℭℰ-ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w\']*`, KeywordType, nil}, + {`(')\[[^\]]*\]`, KeywordType, nil}, + {`(')\([^)]*\)`, KeywordType, nil}, + {`\\(?![:!#$%&*+.\\/<=>?@^|~-]+)`, NameFunction, nil}, + {`(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)`, OperatorWord, nil}, + {`:[:!#$%&*+.\\/<=>?@^|~-]*`, KeywordType, nil}, + {`[:!#$%&*+.\\/<=>?@^|~-]+`, Operator, nil}, + {`\d+[eE][+-]?\d+`, LiteralNumberFloat, nil}, + {`\d+\.\d+([eE][+-]?\d+)?`, LiteralNumberFloat, nil}, + {`0[oO][0-7]+`, LiteralNumberOct, nil}, + {`0[xX][\da-fA-F]+`, LiteralNumberHex, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`'`, LiteralStringChar, Push("character")}, + {`"`, LiteralString, Push("string")}, + {`\[\]`, KeywordType, nil}, + {`\(\)`, NameBuiltin, nil}, + {"[][(),;`{}]", Punctuation, nil}, + }, + "import": { + {`\s+`, Text, nil}, + {`"`, LiteralString, Push("string")}, + {`\)`, Punctuation, Pop(1)}, + {`qualified\b`, Keyword, nil}, + {`([A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ-ℍℐ-ℒℕℙ-ℝℤΩℨK-ℭℰ-ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w.]*)(\s+)(as)(\s+)([A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ-ℍℐ-ℒℕℙ-ℝℤΩℨK-ℭℰ-ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w.]*)`, ByGroups(NameNamespace, Text, Keyword, Text, Name), Pop(1)}, + 
{`([A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ-ℍℐ-ℒℕℙ-ℝℤΩℨK-ℭℰ-ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w.]*)(\s+)(hiding)(\s+)(\()`, ByGroups(NameNamespace, Text, Keyword, Text, Punctuation), Push("funclist")}, + {`([A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ-ℍℐ-ℒℕℙ-ℝℤΩℨK-ℭℰ-ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w.]*)(\s+)(\()`, ByGroups(NameNamespace, Text, Punctuation), Push("funclist")}, + {`[\w.]+`, NameNamespace, Pop(1)}, + }, + "module": { + {`\s+`, Text, nil}, + {`([A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ-ℍℐ-ℒℕℙ-ℝℤΩℨK-ℭℰ-ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w.]*)(\s+)(\()`, ByGroups(NameNamespace, Text, Punctuation), Push("funclist")}, + {`[A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ-ℍℐ-ℒℕℙ-ℝℤΩℨK-ℭℰ-ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊][\w.]*`, NameNamespace, Pop(1)}, + }, + "funclist": { + {`\s+`, Text, nil}, + 
{`[A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ-ℍℐ-ℒℕℙ-ℝℤΩℨK-ℭℰ-ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊]\w*`, KeywordType, nil}, + {`(_[\w\']+|[a-zµß-öø-ÿāăąćĉċčďđēĕėęěĝğġģĥħĩīĭįıijĵķ-ĸĺļľŀłńņň-ʼnŋōŏőœŕŗřśŝşšţťŧũūŭůűųŵŷźżž-ƀƃƅƈƌ-ƍƒƕƙ-ƛƞơƣƥƨƪ-ƫƭưƴƶƹ-ƺƽ-ƿdžljnjǎǐǒǔǖǘǚǜ-ǝǟǡǣǥǧǩǫǭǯ-ǰdzǵǹǻǽǿȁȃȅȇȉȋȍȏȑȓȕȗșțȝȟȡȣȥȧȩȫȭȯȱȳ-ȹȼȿ-ɀɂɇɉɋɍɏ-ʓʕ-ʯͱͳͷͻ-ͽΐά-ώϐ-ϑϕ-ϗϙϛϝϟϡϣϥϧϩϫϭϯ-ϳϵϸϻ-ϼа-џѡѣѥѧѩѫѭѯѱѳѵѷѹѻѽѿҁҋҍҏґғҕҗҙқҝҟҡңҥҧҩҫҭүұҳҵҷҹһҽҿӂӄӆӈӊӌӎ-ӏӑӓӕӗәӛӝӟӡӣӥӧөӫӭӯӱӳӵӷӹӻӽӿԁԃԅԇԉԋԍԏԑԓԕԗԙԛԝԟԡԣԥԧա-ևᴀ-ᴫᵫ-ᵷᵹ-ᶚḁḃḅḇḉḋḍḏḑḓḕḗḙḛḝḟḡḣḥḧḩḫḭḯḱḳḵḷḹḻḽḿṁṃṅṇṉṋṍṏṑṓṕṗṙṛṝṟṡṣṥṧṩṫṭṯṱṳṵṷṹṻṽṿẁẃẅẇẉẋẍẏẑẓẕ-ẝẟạảấầẩẫậắằẳẵặẹẻẽếềểễệỉịọỏốồổỗộớờởỡợụủứừửữựỳỵỷỹỻỽỿ-ἇἐ-ἕἠ-ἧἰ-ἷὀ-ὅὐ-ὗὠ-ὧὰ-ώᾀ-ᾇᾐ-ᾗᾠ-ᾧᾰ-ᾴᾶ-ᾷιῂ-ῄῆ-ῇῐ-ΐῖ-ῗῠ-ῧῲ-ῴῶ-ῷℊℎ-ℏℓℯℴℹℼ-ℽⅆ-ⅉⅎↄⰰ-ⱞⱡⱥ-ⱦⱨⱪⱬⱱⱳ-ⱴⱶ-ⱻⲁⲃⲅⲇⲉⲋⲍⲏⲑⲓⲕⲗⲙⲛⲝⲟⲡⲣⲥⲧⲩⲫⲭⲯⲱⲳⲵⲷⲹⲻⲽⲿⳁⳃⳅⳇⳉⳋⳍⳏⳑⳓⳕⳗⳙⳛⳝⳟⳡⳣ-ⳤⳬⳮⳳⴀ-ⴥⴧⴭꙁꙃꙅꙇꙉꙋꙍꙏꙑꙓꙕꙗꙙꙛꙝꙟꙡꙣꙥꙧꙩꙫꙭꚁꚃꚅꚇꚉꚋꚍꚏꚑꚓꚕꚗꜣꜥꜧꜩꜫꜭꜯ-ꜱꜳꜵꜷꜹꜻꜽꜿꝁꝃꝅꝇꝉꝋꝍꝏꝑꝓꝕꝗꝙꝛꝝꝟꝡꝣꝥꝧꝩꝫꝭꝯꝱ-ꝸꝺꝼꝿꞁꞃꞅꞇꞌꞎꞑꞓꞡꞣꞥꞧꞩꟺff-stﬓ-ﬗa-z𐐨-𐑏𝐚-𝐳𝑎-𝑔𝑖-𝑧𝒂-𝒛𝒶-𝒹𝒻𝒽-𝓃𝓅-𝓏𝓪-𝔃𝔞-𝔷𝕒-𝕫𝖆-𝖟𝖺-𝗓𝗮-𝘇𝘢-𝘻𝙖-𝙯𝚊-𝚥𝛂-𝛚𝛜-𝛡𝛼-𝜔𝜖-𝜛𝜶-𝝎𝝐-𝝕𝝰-𝞈𝞊-𝞏𝞪-𝟂𝟄-𝟉𝟋][\w\']*)`, NameFunction, nil}, + {`--(?![!#$%&*+./<=>?@^|_~:\\]).*?$`, CommentSingle, nil}, + {`\{-`, CommentMultiline, Push("comment")}, + {`,`, Punctuation, nil}, + {`[:!#$%&*+.\\/<=>?@^|~-]+`, Operator, nil}, + {`\(`, Punctuation, Push("funclist", "funclist")}, + {`\)`, Punctuation, Pop(2)}, + }, + "comment": { + {`[^-{}]+`, CommentMultiline, nil}, + {`\{-`, CommentMultiline, Push()}, + {`-\}`, CommentMultiline, Pop(1)}, + {`[-{}]`, CommentMultiline, nil}, + }, + "character": { + {`[^\\']'`, LiteralStringChar, Pop(1)}, + {`\\`, LiteralStringEscape, Push("escape")}, + {`'`, LiteralStringChar, Pop(1)}, + }, + "string": { + {`[^\\"]+`, LiteralString, nil}, + {`\\`, LiteralStringEscape, Push("escape")}, + {`"`, LiteralString, Pop(1)}, + }, + "escape": { + {`[abfnrtv"\'&\\]`, LiteralStringEscape, Pop(1)}, + {`\^[][A-ZÀ-ÖØ-ÞĀĂĄĆĈĊČĎĐĒĔĖĘĚĜĞĠĢĤĦĨĪĬĮİIJĴĶĹĻĽĿŁŃŅŇŊŌŎŐŒŔŖŘŚŜŞŠŢŤŦŨŪŬŮŰŲŴŶŸ-ŹŻŽƁ-ƂƄƆ-ƇƉ-ƋƎ-ƑƓ-ƔƖ-ƘƜ-ƝƟ-ƠƢƤƦ-ƧƩƬƮ-ƯƱ-ƳƵƷ-ƸƼDŽLJNJǍǏǑǓǕǗǙǛǞǠǢǤǦǨǪǬǮDZǴǶ-ǸǺǼǾȀȂȄȆȈȊȌȎȐȒȔȖȘȚȜȞȠȢȤȦȨȪȬȮȰȲȺ-ȻȽ-ȾɁɃ-ɆɈɊɌɎͰͲͶΆΈ-ΊΌΎ-ΏΑ-ΡΣ-ΫϏϒ-ϔϘϚϜϞϠϢϤϦϨϪϬϮϴϷϹ-ϺϽ-ЯѠѢѤѦѨѪѬѮѰѲѴѶѸѺѼѾҀҊҌҎҐҒҔҖҘҚҜҞҠҢҤҦҨҪҬҮҰҲҴҶҸҺҼҾӀ-ӁӃӅӇӉӋӍӐӒӔӖӘӚӜӞӠӢӤӦӨӪӬӮӰӲӴӶӸӺӼӾԀԂԄԆԈԊԌԎԐԒԔԖԘԚԜԞԠԢԤԦԱ-ՖႠ-ჅჇჍḀḂḄḆḈḊḌḎḐḒḔḖḘḚḜḞḠḢḤḦḨḪḬḮḰḲḴḶḸḺḼḾṀṂṄṆṈṊṌṎṐṒṔṖṘṚṜṞṠṢṤṦṨṪṬṮṰṲṴṶṸṺṼṾẀẂẄẆẈẊẌẎẐẒẔẞẠẢẤẦẨẪẬẮẰẲẴẶẸẺẼẾỀỂỄỆỈỊỌỎỐỒỔỖỘỚỜỞỠỢỤỦỨỪỬỮỰỲỴỶỸỺỼỾἈ-ἏἘ-ἝἨ-ἯἸ-ἿὈ-ὍὙὛὝὟὨ-ὯᾸ-ΆῈ-ΉῘ-ΊῨ-ῬῸ-Ώℂℇℋ-ℍℐ-ℒℕℙ-ℝℤΩℨK-ℭℰ-ℳℾ-ℿⅅↃⰀ-ⰮⱠⱢ-ⱤⱧⱩⱫⱭ-ⱰⱲⱵⱾ-ⲀⲂⲄⲆⲈⲊⲌⲎⲐⲒⲔⲖⲘⲚⲜⲞⲠⲢⲤⲦⲨⲪⲬⲮⲰⲲⲴⲶⲸⲺⲼⲾⳀⳂⳄⳆⳈⳊⳌⳎⳐⳒⳔⳖⳘⳚⳜⳞⳠⳢⳫⳭⳲꙀꙂꙄꙆꙈꙊꙌꙎꙐꙒꙔꙖꙘꙚꙜꙞꙠꙢꙤꙦꙨꙪꙬꚀꚂꚄꚆꚈꚊꚌꚎꚐꚒꚔꚖꜢꜤꜦꜨꜪꜬꜮꜲꜴꜶꜸꜺꜼꜾꝀꝂꝄꝆꝈꝊꝌꝎꝐꝒꝔꝖꝘꝚꝜꝞꝠꝢꝤꝦꝨꝪꝬꝮꝹꝻꝽ-ꝾꞀꞂꞄꞆꞋꞍꞐꞒꞠꞢꞤꞦꞨꞪA-Z𐐀-𐐧𝐀-𝐙𝐴-𝑍𝑨-𝒁𝒜𝒞-𝒟𝒢𝒥-𝒦𝒩-𝒬𝒮-𝒵𝓐-𝓩𝔄-𝔅𝔇-𝔊𝔍-𝔔𝔖-𝔜𝔸-𝔹𝔻-𝔾𝕀-𝕄𝕆𝕊-𝕐𝕬-𝖅𝖠-𝖹𝗔-𝗭𝘈-𝘡𝘼-𝙕𝙰-𝚉𝚨-𝛀𝛢-𝛺𝜜-𝜴𝝖-𝝮𝞐-𝞨𝟊@^_]`, LiteralStringEscape, Pop(1)}, + {`NUL|SOH|[SE]TX|EOT|ENQ|ACK|BEL|BS|HT|LF|VT|FF|CR|S[OI]|DLE|DC[1-4]|NAK|SYN|ETB|CAN|EM|SUB|ESC|[FGRU]S|SP|DEL`, LiteralStringEscape, Pop(1)}, + {`o[0-7]+`, LiteralStringEscape, Pop(1)}, + {`x[\da-fA-F]+`, LiteralStringEscape, Pop(1)}, + {`\d+`, LiteralStringEscape, Pop(1)}, + {`\s+\\`, 
LiteralStringEscape, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/haxe.go b/vendor/github.com/alecthomas/chroma/lexers/h/haxe.go new file mode 100644 index 00000000..9bdcfa58 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/haxe.go @@ -0,0 +1,641 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Haxe lexer. +var Haxe = internal.Register(MustNewLexer( + &Config{ + Name: "Haxe", + Aliases: []string{"hx", "haxe", "hxsl"}, + Filenames: []string{"*.hx", "*.hxsl"}, + MimeTypes: []string{"text/haxe", "text/x-haxe", "text/x-hx"}, + DotAll: true, + }, + Rules{ + "root": { + Include("spaces"), + Include("meta"), + {`(?:package)\b`, KeywordNamespace, Push("semicolon", "package")}, + {`(?:import)\b`, KeywordNamespace, Push("semicolon", "import")}, + {`(?:using)\b`, KeywordNamespace, Push("semicolon", "using")}, + {`(?:extern|private)\b`, KeywordDeclaration, nil}, + {`(?:abstract)\b`, KeywordDeclaration, Push("abstract")}, + {`(?:class|interface)\b`, KeywordDeclaration, Push("class")}, + {`(?:enum)\b`, KeywordDeclaration, Push("enum")}, + {`(?:typedef)\b`, KeywordDeclaration, Push("typedef")}, + {`(?=.)`, Text, Push("expr-statement")}, + }, + "spaces": { + {`\s+`, Text, nil}, + {`//[^\n\r]*`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`(#)(if|elseif|else|end|error)\b`, CommentPreproc, MutatorFunc(haxePreProcMutator)}, + }, + "string-single-interpol": { + {`\$\{`, LiteralStringInterpol, Push("string-interpol-close", "expr")}, + {`\$\$`, LiteralStringEscape, nil}, + {`\$(?=(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+))`, LiteralStringInterpol, Push("ident")}, + Include("string-single"), + }, + "string-single": { + {`'`, LiteralStringSingle, Pop(1)}, + {`\\.`, LiteralStringEscape, nil}, + {`.`, LiteralStringSingle, nil}, + }, + "string-double": { + {`"`, LiteralStringDouble, Pop(1)}, + {`\\.`, LiteralStringEscape, nil}, + {`.`, LiteralStringDouble, nil}, + }, + "string-interpol-close": { + {`\$(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, LiteralStringInterpol, nil}, + {`\}`, LiteralStringInterpol, Pop(1)}, + }, + "package": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, nil}, + {`\.`, Punctuation, Push("import-ident")}, + Default(Pop(1)), + }, + "import": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, nil}, + {`\*`, Keyword, nil}, + {`\.`, Punctuation, Push("import-ident")}, + {`in`, KeywordNamespace, Push("ident")}, + Default(Pop(1)), + }, + "import-ident": { + Include("spaces"), + {`\*`, Keyword, Pop(1)}, + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, Pop(1)}, + }, + "using": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, nil}, + {`\.`, Punctuation, Push("import-ident")}, + Default(Pop(1)), + }, + "preproc-error": { + {`\s+`, CommentPreproc, nil}, + {`'`, LiteralStringSingle, Push("#pop", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "string-double")}, + Default(Pop(1)), + }, + "preproc-expr": { + {`\s+`, CommentPreproc, nil}, + {`\!`, CommentPreproc, nil}, + {`\(`, CommentPreproc, Push("#pop", "preproc-parenthesis")}, + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, CommentPreproc, Pop(1)}, + {`\.[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+\.[0-9]+`, LiteralNumberFloat, 
nil}, + {`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`'`, LiteralStringSingle, Push("#pop", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "string-double")}, + }, + "preproc-parenthesis": { + {`\s+`, CommentPreproc, nil}, + {`\)`, CommentPreproc, Pop(1)}, + Default(Push("preproc-expr-in-parenthesis")), + }, + "preproc-expr-chain": { + {`\s+`, CommentPreproc, nil}, + {`(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|/|\-|=>|=)`, CommentPreproc, Push("#pop", "preproc-expr-in-parenthesis")}, + Default(Pop(1)), + }, + "preproc-expr-in-parenthesis": { + {`\s+`, CommentPreproc, nil}, + {`\!`, CommentPreproc, nil}, + {`\(`, CommentPreproc, Push("#pop", "preproc-expr-chain", "preproc-parenthesis")}, + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, CommentPreproc, Push("#pop", "preproc-expr-chain")}, + {`\.[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+\.[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, Push("#pop", "preproc-expr-chain")}, + {`[0-9]+`, LiteralNumberInteger, Push("#pop", "preproc-expr-chain")}, + {`'`, LiteralStringSingle, Push("#pop", "preproc-expr-chain", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "preproc-expr-chain", "string-double")}, + }, + "abstract": { + Include("spaces"), + Default(Pop(1), Push("abstract-body"), Push("abstract-relation"), Push("abstract-opaque"), Push("type-param-constraint"), Push("type-name")), + }, + "abstract-body": { + Include("spaces"), + {`\{`, Punctuation, Push("#pop", "class-body")}, + }, + "abstract-opaque": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "parenthesis-close", "type")}, + Default(Pop(1)), + }, + "abstract-relation": { + Include("spaces"), + {`(?:to|from)`, KeywordDeclaration, Push("type")}, + {`,`, Punctuation, nil}, + Default(Pop(1)), + }, + "meta": { + Include("spaces"), + {`@`, NameDecorator, Push("meta-body", "meta-ident", "meta-colon")}, + }, + "meta-colon": { + Include("spaces"), + {`:`, NameDecorator, Pop(1)}, + Default(Pop(1)), + }, + "meta-ident": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameDecorator, Pop(1)}, + }, + "meta-body": { + Include("spaces"), + {`\(`, NameDecorator, Push("#pop", "meta-call")}, + Default(Pop(1)), + }, + "meta-call": { + Include("spaces"), + {`\)`, NameDecorator, Pop(1)}, + Default(Pop(1), Push("meta-call-sep"), Push("expr")), + }, + "meta-call-sep": { + Include("spaces"), + {`\)`, NameDecorator, Pop(1)}, + {`,`, Punctuation, Push("#pop", "meta-call")}, + }, + "typedef": { + Include("spaces"), + Default(Pop(1), Push("typedef-body"), Push("type-param-constraint"), Push("type-name")), + }, + "typedef-body": { + Include("spaces"), + {`=`, Operator, Push("#pop", "optional-semicolon", "type")}, + }, + "enum": { + Include("spaces"), + Default(Pop(1), Push("enum-body"), Push("bracket-open"), Push("type-param-constraint"), Push("type-name")), + }, + "enum-body": { + Include("spaces"), + Include("meta"), + {`\}`, Punctuation, Pop(1)}, + 
{`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("enum-member", "type-param-constraint")}, + }, + "enum-member": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "semicolon", "flag", "function-param")}, + Default(Pop(1), Push("semicolon"), Push("flag")), + }, + "class": { + Include("spaces"), + Default(Pop(1), Push("class-body"), Push("bracket-open"), Push("extends"), Push("type-param-constraint"), Push("type-name")), + }, + "extends": { + Include("spaces"), + {`(?:extends|implements)\b`, KeywordDeclaration, Push("type")}, + {`,`, Punctuation, nil}, + Default(Pop(1)), + }, + "bracket-open": { + Include("spaces"), + {`\{`, Punctuation, Pop(1)}, + }, + "bracket-close": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + }, + "class-body": { + Include("spaces"), + Include("meta"), + {`\}`, Punctuation, Pop(1)}, + {`(?:static|public|private|override|dynamic|inline|macro)\b`, KeywordDeclaration, nil}, + Default(Push("class-member")), + }, + "class-member": { + Include("spaces"), + {`(var)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "var")}, + {`(function)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "class-method")}, + }, + "function-local": { + Include("spaces"), + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameFunction, Push("#pop", "optional-expr", "flag", "function-param", "parenthesis-open", "type-param-constraint")}, + Default(Pop(1), Push("optional-expr"), Push("flag"), Push("function-param"), Push("parenthesis-open"), Push("type-param-constraint")), + }, + "optional-expr": { + Include("spaces"), + Include("expr"), + Default(Pop(1)), + }, + "class-method": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameFunction, Push("#pop", "optional-expr", "flag", "function-param", "parenthesis-open", "type-param-constraint")}, + }, + "function-param": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + {`\?`, Punctuation, nil}, + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "function-param-sep", "assign", "flag")}, + }, + "function-param-sep": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "function-param")}, + }, + "prop-get-set": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "parenthesis-close", "prop-get-set-opt", "comma", "prop-get-set-opt")}, + Default(Pop(1)), + }, + "prop-get-set-opt": { + Include("spaces"), + {`(?:default|null|never|dynamic|get|set)\b`, Keyword, Pop(1)}, + 
{`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Text, Pop(1)}, + }, + "expr-statement": { + Include("spaces"), + Default(Pop(1), Push("optional-semicolon"), Push("expr")), + }, + "expr": { + Include("spaces"), + {`@`, NameDecorator, Push("#pop", "optional-expr", "meta-body", "meta-ident", "meta-colon")}, + {`(?:\+\+|\-\-|~(?!/)|!|\-)`, Operator, nil}, + {`\(`, Punctuation, Push("#pop", "expr-chain", "parenthesis")}, + {`(?:static|public|private|override|dynamic|inline)\b`, KeywordDeclaration, nil}, + {`(?:function)\b`, KeywordDeclaration, Push("#pop", "expr-chain", "function-local")}, + {`\{`, Punctuation, Push("#pop", "expr-chain", "bracket")}, + {`(?:true|false|null)\b`, KeywordConstant, Push("#pop", "expr-chain")}, + {`(?:this)\b`, Keyword, Push("#pop", "expr-chain")}, + {`(?:cast)\b`, Keyword, Push("#pop", "expr-chain", "cast")}, + {`(?:try)\b`, Keyword, Push("#pop", "catch", "expr")}, + {`(?:var)\b`, KeywordDeclaration, Push("#pop", "var")}, + {`(?:new)\b`, Keyword, Push("#pop", "expr-chain", "new")}, + {`(?:switch)\b`, Keyword, Push("#pop", "switch")}, + {`(?:if)\b`, Keyword, Push("#pop", "if")}, + {`(?:do)\b`, Keyword, Push("#pop", "do")}, + {`(?:while)\b`, Keyword, Push("#pop", "while")}, + {`(?:for)\b`, Keyword, Push("#pop", "for")}, + {`(?:untyped|throw)\b`, Keyword, nil}, + {`(?:return)\b`, Keyword, Push("#pop", "optional-expr")}, + {`(?:macro)\b`, Keyword, Push("#pop", "macro")}, + {`(?:continue|break)\b`, Keyword, Pop(1)}, + {`(?:\$\s*[a-z]\b|\$(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)))`, Name, Push("#pop", "dollar")}, + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "expr-chain")}, + {`\.[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`[0-9]+\.[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, Push("#pop", "expr-chain")}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, Push("#pop", "expr-chain")}, + {`[0-9]+`, LiteralNumberInteger, Push("#pop", "expr-chain")}, + {`'`, LiteralStringSingle, Push("#pop", "expr-chain", "string-single-interpol")}, + {`"`, LiteralStringDouble, Push("#pop", "expr-chain", "string-double")}, + {`~/(\\\\|\\/|[^/\n])*/[gimsu]*`, LiteralStringRegex, Push("#pop", "expr-chain")}, + {`\[`, Punctuation, Push("#pop", "expr-chain", "array-decl")}, + }, + "expr-chain": { + Include("spaces"), + {`(?:\+\+|\-\-)`, Operator, nil}, + {`(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|/|\-|=>|=)`, Operator, Push("#pop", "expr")}, + {`(?:in)\b`, Keyword, Push("#pop", "expr")}, + {`\?`, Operator, Push("#pop", "expr", "ternary", "expr")}, + 
{`(\.)((?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+))`, ByGroups(Punctuation, Name), nil}, + {`\[`, Punctuation, Push("array-access")}, + {`\(`, Punctuation, Push("call")}, + Default(Pop(1)), + }, + "macro": { + Include("spaces"), + Include("meta"), + {`:`, Punctuation, Push("#pop", "type")}, + {`(?:extern|private)\b`, KeywordDeclaration, nil}, + {`(?:abstract)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "abstract")}, + {`(?:class|interface)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "macro-class")}, + {`(?:enum)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "enum")}, + {`(?:typedef)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "typedef")}, + Default(Pop(1), Push("expr")), + }, + "macro-class": { + {`\{`, Punctuation, Push("#pop", "class-body")}, + Include("class"), + }, + "cast": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "parenthesis-close", "cast-type", "expr")}, + Default(Pop(1), Push("expr")), + }, + "cast-type": { + Include("spaces"), + {`,`, Punctuation, Push("#pop", "type")}, + Default(Pop(1)), + }, + "catch": { + Include("spaces"), + {`(?:catch)\b`, Keyword, Push("expr", "function-param", "parenthesis-open")}, + Default(Pop(1)), + }, + "do": { + Include("spaces"), + Default(Pop(1), Push("do-while"), Push("expr")), + }, + "do-while": { + Include("spaces"), + {`(?:while)\b`, Keyword, Push("#pop", "parenthesis", "parenthesis-open")}, + }, + "while": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "expr", "parenthesis")}, + }, + "for": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "expr", "parenthesis")}, + }, + "if": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "else", "optional-semicolon", "expr", "parenthesis")}, + }, + "else": { + Include("spaces"), + {`(?:else)\b`, Keyword, Push("#pop", "expr")}, + Default(Pop(1)), + }, + "switch": { + Include("spaces"), + Default(Pop(1), Push("switch-body"), Push("bracket-open"), Push("expr")), + }, + "switch-body": { + Include("spaces"), + {`(?:case|default)\b`, Keyword, Push("case-block", "case")}, + {`\}`, Punctuation, Pop(1)}, + }, + "case": { + Include("spaces"), + {`:`, Punctuation, Pop(1)}, + Default(Pop(1), Push("case-sep"), Push("case-guard"), Push("expr")), + }, + "case-sep": { + Include("spaces"), + {`:`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "case")}, + }, + "case-guard": { + Include("spaces"), + {`(?:if)\b`, Keyword, Push("#pop", "parenthesis", "parenthesis-open")}, + Default(Pop(1)), + }, + "case-block": { + Include("spaces"), + {`(?!(?:case|default)\b|\})`, Keyword, Push("expr-statement")}, + Default(Pop(1)), + }, + "new": { + Include("spaces"), + Default(Pop(1), Push("call"), Push("parenthesis-open"), Push("type")), + }, + "array-decl": { + Include("spaces"), + {`\]`, Punctuation, Pop(1)}, + Default(Pop(1), Push("array-decl-sep"), Push("expr")), + }, + "array-decl-sep": { + Include("spaces"), + {`\]`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "array-decl")}, + }, + "array-access": { + Include("spaces"), + Default(Pop(1), Push("array-access-close"), Push("expr")), + }, + "array-access-close": { + Include("spaces"), + {`\]`, Punctuation, Pop(1)}, + }, + "comma": { + Include("spaces"), + {`,`, Punctuation, Pop(1)}, + }, + 
"colon": { + Include("spaces"), + {`:`, Punctuation, Pop(1)}, + }, + "semicolon": { + Include("spaces"), + {`;`, Punctuation, Pop(1)}, + }, + "optional-semicolon": { + Include("spaces"), + {`;`, Punctuation, Pop(1)}, + Default(Pop(1)), + }, + "ident": { + Include("spaces"), + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Pop(1)}, + }, + "dollar": { + Include("spaces"), + {`\{`, Punctuation, Push("#pop", "expr-chain", "bracket-close", "expr")}, + Default(Pop(1), Push("expr-chain")), + }, + "type-name": { + Include("spaces"), + {`_*[A-Z]\w*`, Name, Pop(1)}, + }, + "type-full-name": { + Include("spaces"), + {`\.`, Punctuation, Push("ident")}, + Default(Pop(1)), + }, + "type": { + Include("spaces"), + {`\?`, Punctuation, nil}, + {`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "type-check", "type-full-name")}, + {`\{`, Punctuation, Push("#pop", "type-check", "type-struct")}, + {`\(`, Punctuation, Push("#pop", "type-check", "type-parenthesis")}, + }, + "type-parenthesis": { + Include("spaces"), + Default(Pop(1), Push("parenthesis-close"), Push("type")), + }, + "type-check": { + Include("spaces"), + {`->`, Punctuation, Push("#pop", "type")}, + {`<(?!=)`, Punctuation, Push("type-param")}, + Default(Pop(1)), + }, + "type-struct": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + {`\?`, Punctuation, nil}, + {`>`, Punctuation, Push("comma", "type")}, + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "type-struct-sep", "type", "colon")}, + Include("class-body"), + }, + "type-struct-sep": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "type-struct")}, + }, + "type-param-type": { + {`\.[0-9]+`, LiteralNumberFloat, Pop(1)}, + {`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, Pop(1)}, + {`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, Pop(1)}, + {`[0-9]+\.[0-9]+`, LiteralNumberFloat, Pop(1)}, + {`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, Pop(1)}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, Pop(1)}, + {`[0-9]+`, LiteralNumberInteger, Pop(1)}, + {`'`, LiteralStringSingle, Push("#pop", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "string-double")}, + {`~/(\\\\|\\/|[^/\n])*/[gim]*`, LiteralStringRegex, Pop(1)}, + {`\[`, Operator, Push("#pop", "array-decl")}, + Include("type"), + }, + "type-param": { + Include("spaces"), + Default(Pop(1), Push("type-param-sep"), Push("type-param-type")), + }, + "type-param-sep": { + Include("spaces"), + {`>`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "type-param")}, + }, + "type-param-constraint": { + Include("spaces"), + {`<(?!=)`, Punctuation, Push("#pop", "type-param-constraint-sep", "type-param-constraint-flag", "type-name")}, + Default(Pop(1)), + }, + "type-param-constraint-sep": { + Include("spaces"), + {`>`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "type-param-constraint-sep", "type-param-constraint-flag", "type-name")}, + }, + "type-param-constraint-flag": { + Include("spaces"), + {`:`, Punctuation, Push("#pop", "type-param-constraint-flag-type")}, + Default(Pop(1)), + }, + "type-param-constraint-flag-type": { + Include("spaces"), + {`\(`, Punctuation, Push("#pop", "type-param-constraint-flag-type-sep", "type")}, + 
Default(Pop(1), Push("type")), + }, + "type-param-constraint-flag-type-sep": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("type")}, + }, + "parenthesis": { + Include("spaces"), + Default(Pop(1), Push("parenthesis-close"), Push("flag"), Push("expr")), + }, + "parenthesis-open": { + Include("spaces"), + {`\(`, Punctuation, Pop(1)}, + }, + "parenthesis-close": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + }, + "var": { + Include("spaces"), + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Text, Push("#pop", "var-sep", "assign", "flag", "prop-get-set")}, + }, + "var-sep": { + Include("spaces"), + {`,`, Punctuation, Push("#pop", "var")}, + Default(Pop(1)), + }, + "assign": { + Include("spaces"), + {`=`, Operator, Push("#pop", "expr")}, + Default(Pop(1)), + }, + "flag": { + Include("spaces"), + {`:`, Punctuation, Push("#pop", "type")}, + Default(Pop(1)), + }, + "ternary": { + Include("spaces"), + {`:`, Operator, Pop(1)}, + }, + "call": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + Default(Pop(1), Push("call-sep"), Push("expr")), + }, + "call-sep": { + Include("spaces"), + {`\)`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "call")}, + }, + "bracket": { + Include("spaces"), + {`(?!(?:\$\s*[a-z]\b|\$(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+))))(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "bracket-check")}, + {`'`, LiteralStringSingle, Push("#pop", "bracket-check", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "bracket-check", "string-double")}, + Default(Pop(1), Push("block")), + }, + "bracket-check": { + Include("spaces"), + {`:`, Punctuation, Push("#pop", "object-sep", "expr")}, + Default(Pop(1), Push("block"), Push("optional-semicolon"), Push("expr-chain")), + }, + "block": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + Default(Push("expr-statement")), + }, + "object": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + Default(Pop(1), Push("object-sep"), Push("expr"), Push("colon"), Push("ident-or-string")), + }, + "ident-or-string": { + Include("spaces"), + {`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Pop(1)}, + {`'`, LiteralStringSingle, Push("#pop", "string-single")}, + {`"`, LiteralStringDouble, Push("#pop", "string-double")}, + }, + "object-sep": { + Include("spaces"), + {`\}`, Punctuation, Pop(1)}, + {`,`, Punctuation, Push("#pop", "object")}, + }, + }, +)) + +func haxePreProcMutator(state *LexerState) error { + stack, ok := state.Get("haxe-pre-proc").([][]string) + if !ok { + stack = [][]string{} + } + + proc := state.Groups[2] + if proc == "if" { + stack = append(stack, state.Stack) + } else if proc == "else" || proc == "elseif" { + if len(stack) > 0 
{ + state.Stack = stack[len(stack)-1] + } + } else if proc == "end" { + stack = stack[:len(stack)-1] + } + + if proc == "if" || proc == "elseif" { + state.Stack = append(state.Stack, "preproc-expr") + } + + if proc == "error" { + state.Stack = append(state.Stack, "preproc-error") + } + state.Set("haxe-pre-proc", stack) + return nil +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/hexdump.go b/vendor/github.com/alecthomas/chroma/lexers/h/hexdump.go new file mode 100644 index 00000000..8b7e7bdd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/hexdump.go @@ -0,0 +1,67 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Hexdump lexer. +var Hexdump = internal.Register(MustNewLexer( + &Config{ + Name: "Hexdump", + Aliases: []string{"hexdump"}, + Filenames: []string{}, + MimeTypes: []string{}, + }, + Rules{ + "root": { + {`\n`, Text, nil}, + Include("offset"), + {`([0-9A-Ha-h]{2})(\-)([0-9A-Ha-h]{2})`, ByGroups(LiteralNumberHex, Punctuation, LiteralNumberHex), nil}, + {`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil}, + {`(\s{2,3})(\>)(.{16})(\<)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), Push("bracket-strings")}, + {`(\s{2,3})(\|)(.{16})(\|)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), Push("piped-strings")}, + {`(\s{2,3})(\>)(.{1,15})(\<)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil}, + {`(\s{2,3})(\|)(.{1,15})(\|)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil}, + {`(\s{2,3})(.{1,15})$`, ByGroups(Text, LiteralString), nil}, + {`(\s{2,3})(.{16}|.{20})$`, ByGroups(Text, LiteralString), Push("nonpiped-strings")}, + {`\s`, Text, nil}, + {`^\*`, Punctuation, nil}, + }, + "offset": { + {`^([0-9A-Ha-h]+)(:)`, ByGroups(NameLabel, Punctuation), Push("offset-mode")}, + {`^[0-9A-Ha-h]+`, NameLabel, nil}, + }, + "offset-mode": { + {`\s`, Text, Pop(1)}, + {`[0-9A-Ha-h]+`, NameLabel, nil}, + {`:`, Punctuation, nil}, + }, + "piped-strings": { + {`\n`, Text, nil}, + Include("offset"), + {`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil}, + {`(\s{2,3})(\|)(.{1,16})(\|)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil}, + {`\s`, Text, nil}, + {`^\*`, Punctuation, nil}, + }, + "bracket-strings": { + {`\n`, Text, nil}, + Include("offset"), + {`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil}, + {`(\s{2,3})(\>)(.{1,16})(\<)$`, ByGroups(Text, Punctuation, LiteralString, Punctuation), nil}, + {`\s`, Text, nil}, + {`^\*`, Punctuation, nil}, + }, + "nonpiped-strings": { + {`\n`, Text, nil}, + Include("offset"), + {`([0-9A-Ha-h]{2})(\-)([0-9A-Ha-h]{2})`, ByGroups(LiteralNumberHex, Punctuation, LiteralNumberHex), nil}, + {`[0-9A-Ha-h]{2}`, LiteralNumberHex, nil}, + {`(\s{19,})(.{1,20}?)$`, ByGroups(Text, LiteralString), nil}, + {`(\s{2,3})(.{1,20})$`, ByGroups(Text, LiteralString), nil}, + {`\s`, Text, nil}, + {`^\*`, Punctuation, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/html.go b/vendor/github.com/alecthomas/chroma/lexers/h/html.go new file mode 100644 index 00000000..07fc27ef --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/html.go @@ -0,0 +1,59 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + . "github.com/alecthomas/chroma/lexers/c" // nolint + "github.com/alecthomas/chroma/lexers/internal" + . "github.com/alecthomas/chroma/lexers/j" // nolint +) + +// HTML lexer. 
+var HTML = internal.Register(MustNewLexer( + &Config{ + Name: "HTML", + Aliases: []string{"html"}, + Filenames: []string{"*.html", "*.htm", "*.xhtml", "*.xslt"}, + MimeTypes: []string{"text/html", "application/xhtml+xml"}, + NotMultiline: true, + DotAll: true, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`[^<&]+`, Text, nil}, + {`&\S*?;`, NameEntity, nil}, + {`\<\!\[CDATA\[.*?\]\]\>`, CommentPreproc, nil}, + {`<!--`, Comment, Push("comment")}, + {`<\?.*?\?>`, CommentPreproc, nil}, + {`<![^>]*>`, CommentPreproc, nil}, + {`(<)(\s*)(script)(\s*)`, ByGroups(Punctuation, Text, NameTag, Text), Push("script-content", "tag")}, + {`(<)(\s*)(style)(\s*)`, ByGroups(Punctuation, Text, NameTag, Text), Push("style-content", "tag")}, + {`(<)(\s*)([\w:.-]+)`, ByGroups(Punctuation, Text, NameTag), Push("tag")}, + {`(<)(\s*)(/)(\s*)([\w:.-]+)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation, Text, NameTag, Text, Punctuation), nil}, + }, + "comment": { + {`[^-]+`, Comment, nil}, + {`-->`, Comment, Pop(1)}, + {`-`, Comment, nil}, + }, + "tag": { + {`\s+`, Text, nil}, + {`([\w:-]+\s*)(=)(\s*)`, ByGroups(NameAttribute, Operator, Text), Push("attr")}, + {`[\w:-]+`, NameAttribute, nil}, + {`(/?)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation), Pop(1)}, + }, + "script-content": { + {`(<)(\s*)(/)(\s*)(script)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation, Text, NameTag, Text, Punctuation), Pop(1)}, + {`.+?(?=<\s*/\s*script\s*>)`, Using(Javascript), nil}, + }, + "style-content": { + {`(<)(\s*)(/)(\s*)(style)(\s*)(>)`, ByGroups(Punctuation, Text, Punctuation, Text, NameTag, Text, Punctuation), Pop(1)}, + {`.+?(?=<\s*/\s*style\s*>)`, Using(CSS), nil}, + }, + "attr": { + {`".*?"`, LiteralString, Pop(1)}, + {`'.*?'`, LiteralString, Pop(1)}, + {`[^\s>]+`, LiteralString, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/http.go b/vendor/github.com/alecthomas/chroma/lexers/h/http.go new file mode 100644 index 00000000..67bc71a4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/http.go @@ -0,0 +1,131 @@ +package h + +import ( + "strings" + + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// HTTP lexer. +var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer( + &Config{ + Name: "HTTP", + Aliases: []string{"http"}, + Filenames: []string{}, + MimeTypes: []string{}, + NotMultiline: true, + DotAll: true, + }, + Rules{ + "root": { + {`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)(1\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")}, + {`(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")}, + }, + "headers": { + {`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil}, + {`([\t ]+)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpContinuousHeaderBlock), nil}, + {`\r?\n`, Text, Push("content")}, + }, + "content": { + {`.+`, EmitterFunc(httpContentBlock), nil}, + }, + }, +))) + +func httpContentBlock(groups []string, lexer Lexer) Iterator { + tokens := []*Token{ + {Generic, groups[0]}, + } + return Literator(tokens...) + +} + +func httpHeaderBlock(groups []string, lexer Lexer) Iterator { + tokens := []*Token{ + {Name, groups[1]}, + {Text, groups[2]}, + {Operator, groups[3]}, + {Text, groups[4]}, + {Literal, groups[5]}, + {Text, groups[6]}, + } + return Literator(tokens...) +} + +func httpContinuousHeaderBlock(groups []string, lexer Lexer) Iterator { + tokens := []*Token{ + {Text, groups[1]}, + {Literal, groups[2]}, + {Text, groups[3]}, + } + return Literator(tokens...)
+} + +func httpBodyContentTypeLexer(lexer Lexer) Lexer { return &httpBodyContentTyper{lexer} } + +type httpBodyContentTyper struct{ Lexer } + +func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { + var contentType string + var isContentType bool + var subIterator Iterator + + it, err := d.Lexer.Tokenise(options, text) + if err != nil { + return nil, err + } + + return func() *Token { + for token := it(); token != nil; token = it() { + switch { + case token.Type == Name && strings.ToLower(token.Value) == "content-type": + { + isContentType = true + } + case token.Type == Literal && isContentType: + { + contentType = strings.TrimSpace(token.Value) + pos := strings.Index(contentType, ";") + if pos > 0 { + contentType = strings.TrimSpace(contentType[:pos]) + } + } + case token.Type == Generic && contentType != "": + { + lexer := internal.MatchMimeType(contentType) + + // application/calendar+xml can be treated as application/xml + // if there's not a better match. + if lexer == nil && strings.Contains(contentType, "+") { + slashPos := strings.Index(contentType, "/") + plusPos := strings.LastIndex(contentType, "+") + contentType = contentType[:slashPos+1] + contentType[plusPos+1:] + lexer = internal.MatchMimeType(contentType) + } + + if lexer == nil { + token.Type = Text + } else { + subIterator, err = lexer.Tokenise(nil, token.Value) + if err != nil { + panic(err) + } + return nil + } + } + + } + + return token + } + + if subIterator != nil { + for token := subIterator(); token != nil; token = subIterator() { + return token + } + } + return nil + + }, nil +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/h/hy.go b/vendor/github.com/alecthomas/chroma/lexers/h/hy.go new file mode 100644 index 00000000..17385e86 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/h/hy.go @@ -0,0 +1,51 @@ +package h + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Hy lexer. 
+var Hy = internal.Register(MustNewLexer( + &Config{ + Name: "Hy", + Aliases: []string{"hylang"}, + Filenames: []string{"*.hy"}, + MimeTypes: []string{"text/x-hy", "application/x-hy"}, + }, + Rules{ + "root": { + {`;.*$`, CommentSingle, nil}, + {`[,\s]+`, Text, nil}, + {`-?\d+\.\d+`, LiteralNumberFloat, nil}, + {`-?\d+`, LiteralNumberInteger, nil}, + {`0[0-7]+j?`, LiteralNumberOct, nil}, + {`0[xX][a-fA-F0-9]+`, LiteralNumberHex, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, + {`\\(.|[a-z]+)`, LiteralStringChar, nil}, + {`^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringDoc), nil}, + {`^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringDoc), nil}, + {`::?(?!#)[\w!$%*+<=>?/.#-]+`, LiteralStringSymbol, nil}, + {"~@|[`\\'#^~&@]", Operator, nil}, + Include("py-keywords"), + Include("py-builtins"), + {Words(``, ` `, `cond`, `for`, `->`, `->>`, `car`, `cdr`, `first`, `rest`, `let`, `when`, `unless`, `import`, `do`, `progn`, `get`, `slice`, `assoc`, `with-decorator`, `,`, `list_comp`, `kwapply`, `~`, `is`, `in`, `is-not`, `not-in`, `quasiquote`, `unquote`, `unquote-splice`, `quote`, `|`, `<<=`, `>>=`, `foreach`, `while`, `eval-and-compile`, `eval-when-compile`), Keyword, nil}, + {Words(``, ` `, `def`, `defn`, `defun`, `defmacro`, `defclass`, `lambda`, `fn`, `setv`), KeywordDeclaration, nil}, + {Words(``, ` `, `cycle`, `dec`, `distinct`, `drop`, `even?`, `filter`, `inc`, `instance?`, `iterable?`, `iterate`, `iterator?`, `neg?`, `none?`, `nth`, `numeric?`, `odd?`, `pos?`, `remove`, `repeat`, `repeatedly`, `take`, `take_nth`, `take_while`, `zero?`), NameBuiltin, nil}, + {`(?<=\()(?!#)[\w!$%*+<=>?/.#-]+`, NameFunction, nil}, + {`(?!#)[\w!$%*+<=>?/.#-]+`, NameVariable, nil}, + {`(\[|\])`, Punctuation, nil}, + {`(\{|\})`, Punctuation, nil}, + {`(\(|\))`, Punctuation, nil}, + }, + "py-keywords": { + {Words(``, `\b`, `assert`, `break`, `continue`, `del`, `elif`, `else`, `except`, `exec`, `finally`, `for`, `global`, `if`, `lambda`, `pass`, `print`, `raise`, `return`, `try`, `while`, `yield`, `yield from`, `as`, `with`), Keyword, nil}, + }, + "py-builtins": { + {Words(`(??@^|_~:\\]).*?)$`, ByGroups(Text, CommentSingle), nil}, + {`(\s*)(\|{3}.*?)$`, ByGroups(Text, CommentSingle), nil}, + {`(\s*)(\{-)`, ByGroups(Text, CommentMultiline), Push("comment")}, + {`^(\s*)([^\s(){}]+)(\s*)(:)(\s*)`, ByGroups(Text, NameFunction, Text, OperatorWord, Text), nil}, + {`\b(case|class|data|default|using|do|else|if|in|infix[lr]?|instance|rewrite|auto|namespace|codata|mutual|private|public|abstract|total|partial|let|proof|of|then|static|where|_|with|pattern|term|syntax|prefix|postulate|parameters|record|dsl|impossible|implicit|tactics|intros|intro|compute|refine|exact|trivial)(?!\')\b`, KeywordReserved, nil}, + {`(import|module)(\s+)`, ByGroups(KeywordReserved, Text), Push("module")}, + {`('')?[A-Z][\w\']*`, KeywordType, nil}, + {`[a-z][\w\']*`, Text, nil}, + {`(<-|::|->|=>|=)`, OperatorWord, nil}, + {`([(){}\[\]:!#$%&*+.\\/<=>?@^|~-]+)`, OperatorWord, nil}, + {`\d+[eE][+-]?\d+`, LiteralNumberFloat, nil}, + {`\d+\.\d+([eE][+-]?\d+)?`, LiteralNumberFloat, nil}, + {`0[xX][\da-fA-F]+`, LiteralNumberHex, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`'`, LiteralStringChar, Push("character")}, + {`"`, LiteralString, Push("string")}, + {`[^\s(){}]+`, Text, nil}, + {`\s+?`, Text, nil}, + }, + "module": { + {`\s+`, Text, nil}, + {`([A-Z][\w.]*)(\s+)(\()`, ByGroups(NameNamespace, Text, Punctuation), Push("funclist")}, + {`[A-Z][\w.]*`, 
NameNamespace, Pop(1)}, + }, + "funclist": { + {`\s+`, Text, nil}, + {`[A-Z]\w*`, KeywordType, nil}, + {`(_[\w\']+|[a-z][\w\']*)`, NameFunction, nil}, + {`--.*$`, CommentSingle, nil}, + {`\{-`, CommentMultiline, Push("comment")}, + {`,`, Punctuation, nil}, + {`[:!#$%&*+.\\/<=>?@^|~-]+`, Operator, nil}, + {`\(`, Punctuation, Push("funclist", "funclist")}, + {`\)`, Punctuation, Pop(2)}, + }, + "comment": { + {`[^-{}]+`, CommentMultiline, nil}, + {`\{-`, CommentMultiline, Push()}, + {`-\}`, CommentMultiline, Pop(1)}, + {`[-{}]`, CommentMultiline, nil}, + }, + "character": { + {`[^\\']`, LiteralStringChar, nil}, + {`\\`, LiteralStringEscape, Push("escape")}, + {`'`, LiteralStringChar, Pop(1)}, + }, + "string": { + {`[^\\"]+`, LiteralString, nil}, + {`\\`, LiteralStringEscape, Push("escape")}, + {`"`, LiteralString, Pop(1)}, + }, + "escape": { + {`[abfnrtv"\'&\\]`, LiteralStringEscape, Pop(1)}, + {`\^[][A-Z@^_]`, LiteralStringEscape, Pop(1)}, + {`NUL|SOH|[SE]TX|EOT|ENQ|ACK|BEL|BS|HT|LF|VT|FF|CR|S[OI]|DLE|DC[1-4]|NAK|SYN|ETB|CAN|EM|SUB|ESC|[FGRU]S|SP|DEL`, LiteralStringEscape, Pop(1)}, + {`o[0-7]+`, LiteralStringEscape, Pop(1)}, + {`x[\da-fA-F]+`, LiteralStringEscape, Pop(1)}, + {`\d+`, LiteralStringEscape, Pop(1)}, + {`\s+\\`, LiteralStringEscape, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/i/ini.go b/vendor/github.com/alecthomas/chroma/lexers/i/ini.go new file mode 100644 index 00000000..39b5edd2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/i/ini.go @@ -0,0 +1,25 @@ +package i + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Ini lexer. +var Ini = internal.Register(MustNewLexer( + &Config{ + Name: "INI", + Aliases: []string{"ini", "cfg", "dosini"}, + Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig"}, + MimeTypes: []string{"text/x-ini", "text/inf"}, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`[;#].*`, CommentSingle, nil}, + {`\[.*?\]$`, Keyword, nil}, + {`(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)`, ByGroups(NameAttribute, Text, Operator, Text, LiteralString), nil}, + {`(.+?)$`, NameAttribute, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/i/io.go b/vendor/github.com/alecthomas/chroma/lexers/i/io.go new file mode 100644 index 00000000..840feeaa --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/i/io.go @@ -0,0 +1,40 @@ +package i + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Io lexer. 
+var Io = internal.Register(MustNewLexer( + &Config{ + Name: "Io", + Aliases: []string{"io"}, + Filenames: []string{"*.io"}, + MimeTypes: []string{"text/x-iosrc"}, + }, + Rules{ + "root": { + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`//(.*?)\n`, CommentSingle, nil}, + {`#(.*?)\n`, CommentSingle, nil}, + {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, + {`/\+`, CommentMultiline, Push("nestedcomment")}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}`, Operator, nil}, + {`(clone|do|doFile|doString|method|for|if|else|elseif|then)\b`, Keyword, nil}, + {`(nil|false|true)\b`, NameConstant, nil}, + {`(Object|list|List|Map|args|Sequence|Coroutine|File)\b`, NameBuiltin, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + {`(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + }, + "nestedcomment": { + {`[^+/]+`, CommentMultiline, nil}, + {`/\+`, CommentMultiline, Push()}, + {`\+/`, CommentMultiline, Pop(1)}, + {`[+/]`, CommentMultiline, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/internal/api.go b/vendor/github.com/alecthomas/chroma/lexers/internal/api.go new file mode 100644 index 00000000..f1b67143 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/internal/api.go @@ -0,0 +1,159 @@ +package internal + +import ( + "path/filepath" + "sort" + "strings" + + "github.com/danwakefield/fnmatch" + + "github.com/alecthomas/chroma" +) + +// Registry of Lexers. +var Registry = struct { + Lexers chroma.Lexers + byName map[string]chroma.Lexer + byAlias map[string]chroma.Lexer +}{ + byName: map[string]chroma.Lexer{}, + byAlias: map[string]chroma.Lexer{}, +} + +// Names of all lexers, optionally including aliases. +func Names(withAliases bool) []string { + out := []string{} + for _, lexer := range Registry.Lexers { + config := lexer.Config() + out = append(out, config.Name) + if withAliases { + out = append(out, config.Aliases...) + } + } + sort.Strings(out) + return out +} + +// Get a Lexer by name, alias or file extension. +func Get(name string) chroma.Lexer { + candidates := chroma.PrioritisedLexers{} + if lexer := Registry.byName[name]; lexer != nil { + candidates = append(candidates, lexer) + } + if lexer := Registry.byAlias[name]; lexer != nil { + candidates = append(candidates, lexer) + } + if lexer := Registry.byName[strings.ToLower(name)]; lexer != nil { + candidates = append(candidates, lexer) + } + if lexer := Registry.byAlias[strings.ToLower(name)]; lexer != nil { + candidates = append(candidates, lexer) + } + // Try file extension. + if lexer := Match("filename." + name); lexer != nil { + candidates = append(candidates, lexer) + } + // Try exact filename. + if lexer := Match(name); lexer != nil { + candidates = append(candidates, lexer) + } + if len(candidates) == 0 { + return nil + } + sort.Sort(candidates) + return candidates[0] +} + +// MatchMimeType attempts to find a lexer for the given MIME type. +func MatchMimeType(mimeType string) chroma.Lexer { + matched := chroma.PrioritisedLexers{} + for _, l := range Registry.Lexers { + for _, lmt := range l.Config().MimeTypes { + if mimeType == lmt { + matched = append(matched, l) + } + } + } + if len(matched) != 0 { + sort.Sort(matched) + return matched[0] + } + return nil +} + +// Match returns the first lexer matching filename. +func Match(filename string) chroma.Lexer { + filename = filepath.Base(filename) + matched := chroma.PrioritisedLexers{} + // First, try primary filename matches. 
+ for _, lexer := range Registry.Lexers { + config := lexer.Config() + for _, glob := range config.Filenames { + if fnmatch.Match(glob, filename, 0) { + matched = append(matched, lexer) + } + } + } + if len(matched) > 0 { + sort.Sort(matched) + return matched[0] + } + matched = nil + // Next, try filename aliases. + for _, lexer := range Registry.Lexers { + config := lexer.Config() + for _, glob := range config.AliasFilenames { + if fnmatch.Match(glob, filename, 0) { + matched = append(matched, lexer) + } + } + } + if len(matched) > 0 { + sort.Sort(matched) + return matched[0] + } + return nil +} + +// Analyse text content and return the "best" lexer.. +func Analyse(text string) chroma.Lexer { + var picked chroma.Lexer + highest := float32(0.0) + for _, lexer := range Registry.Lexers { + if analyser, ok := lexer.(chroma.Analyser); ok { + weight := analyser.AnalyseText(text) + if weight > highest { + picked = lexer + highest = weight + } + } + } + return picked +} + +// Register a Lexer with the global registry. +func Register(lexer chroma.Lexer) chroma.Lexer { + config := lexer.Config() + Registry.byName[config.Name] = lexer + Registry.byName[strings.ToLower(config.Name)] = lexer + for _, alias := range config.Aliases { + Registry.byAlias[alias] = lexer + Registry.byAlias[strings.ToLower(alias)] = lexer + } + Registry.Lexers = append(Registry.Lexers, lexer) + return lexer +} + +// Used for the fallback lexer as well as the explicit plaintext lexer +var PlaintextRules = chroma.Rules{ + "root": []chroma.Rule{ + {`.+`, chroma.Text, nil}, + {`\n`, chroma.Text, nil}, + }, +} + +// Fallback lexer if no other is found. +var Fallback chroma.Lexer = chroma.MustNewLexer(&chroma.Config{ + Name: "fallback", + Filenames: []string{"*"}, +}, PlaintextRules) diff --git a/vendor/github.com/alecthomas/chroma/lexers/j/java.go b/vendor/github.com/alecthomas/chroma/lexers/j/java.go new file mode 100644 index 00000000..c6b9a762 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/j/java.go @@ -0,0 +1,51 @@ +package j + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Java lexer. 
+var Java = internal.Register(MustNewLexer( + &Config{ + Name: "Java", + Aliases: []string{"java"}, + Filenames: []string{"*.java"}, + MimeTypes: []string{"text/x-java"}, + DotAll: true, + }, + Rules{ + "root": { + {`[^\S\n]+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*.*?\*/`, CommentMultiline, nil}, + {`(assert|break|case|catch|continue|default|do|else|finally|for|if|goto|instanceof|new|return|switch|this|throw|try|while)\b`, Keyword, nil}, + {`((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, + {`@[^\W\d][\w.]*`, NameDecorator, nil}, + {`(abstract|const|enum|extends|final|implements|native|private|protected|public|static|strictfp|super|synchronized|throws|transient|volatile)\b`, KeywordDeclaration, nil}, + {`(boolean|byte|char|double|float|int|long|short|void)\b`, KeywordType, nil}, + {`(package)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`(class|interface)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, + {`(import(?:\s+static)?)(\s+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil}, + {`(\.)((?:[^\W\d]|\$)[\w$]*)`, ByGroups(Operator, NameAttribute), nil}, + {`^\s*([^\W\d]|\$)[\w$]*:`, NameLabel, nil}, + {`([^\W\d]|\$)[\w$]*`, Name, nil}, + {`([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFdD]?`, LiteralNumberFloat, nil}, + {`0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?`, LiteralNumberHex, nil}, + {`0[bB][01][01_]*[lL]?`, LiteralNumberBin, nil}, + {`0[0-7_]+[lL]?`, LiteralNumberOct, nil}, + {`0|[1-9][0-9_]*[lL]?`, LiteralNumberInteger, nil}, + {`[~^*!%&\[\](){}<>|+=:;,./?-]`, Operator, nil}, + {`\n`, Text, nil}, + }, + "class": { + {`([^\W\d]|\$)[\w$]*`, NameClass, Pop(1)}, + }, + "import": { + {`[\w.]+\*?`, NameNamespace, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/j/javascript.go b/vendor/github.com/alecthomas/chroma/lexers/j/javascript.go new file mode 100644 index 00000000..8301e412 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/j/javascript.go @@ -0,0 +1,72 @@ +package j + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var JavascriptRules = Rules{ + "commentsandwhitespace": { + {`\s+`, Text, nil}, + {``, `||`, `&&`, `>`, `<`, `>=`, `≥`, `<=`, `≤`, `==`, `===`, `≡`, `!=`, `≠`, `!==`, `≢`, `.>`, `.<`, `.>=`, `.≥`, `.<=`, `.≤`, `.==`, `.!=`, `.≠`, `.=`, `.!`, `<:`, `>:`, `∈`, `∉`, `∋`, `∌`, `⊆`, `⊈`, `⊂`, `⊄`, `⊊`, `|>`, `<|`, `:`, `+`, `-`, `.+`, `.-`, `|`, `∪`, `$`, `<<`, `>>`, `>>>`, `.<<`, `.>>`, `.>>>`, `*`, `/`, `./`, `÷`, `.÷`, `%`, `⋅`, `.%`, `.*`, `\`, `.\`, `&`, `∩`, `//`, `.//`, `^`, `.^`, `::`, `.`, `+`, `-`, `!`, `~`, `√`, `∛`, `∜`), Operator, nil}, + {`'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'`, LiteralStringChar, nil}, + {`(?<=[.\w)\]])\'+`, Operator, nil}, + {`"""`, LiteralString, Push("tqstring")}, + {`"`, LiteralString, Push("string")}, + {`r"""`, LiteralStringRegex, Push("tqregex")}, + {`r"`, LiteralStringRegex, Push("regex")}, + {"`", LiteralStringBacktick, Push("command")}, + {`(?:[a-zA-Z_¡-￿]|[𐀀-􏿿])(?:[a-zA-Z_0-9¡-￿]|[𐀀-􏿿])*!*`, Name, nil}, + {`@(?:[a-zA-Z_¡-￿]|[𐀀-􏿿])(?:[a-zA-Z_0-9¡-￿]|[𐀀-􏿿])*!*`, NameDecorator, nil}, + {`(\d+(_\d+)+\.\d*|\d*\.\d+(_\d+)+)([eEf][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`\d+(_\d+)+[eEf][+-]?[0-9]+`, LiteralNumberFloat, nil}, + {`\d+[eEf][+-]?[0-9]+`, LiteralNumberFloat, nil}, + {`0b[01]+(_[01]+)+`, LiteralNumberBin, nil}, + {`0b[01]+`, LiteralNumberBin, nil}, + {`0o[0-7]+(_[0-7]+)+`, LiteralNumberOct, nil}, + {`0o[0-7]+`, LiteralNumberOct, nil}, + {`0x[a-fA-F0-9]+(_[a-fA-F0-9]+)+`, LiteralNumberHex, nil}, + {`0x[a-fA-F0-9]+`, LiteralNumberHex, nil}, + {`\d+(_\d+)+`, LiteralNumberInteger, nil}, + {`\d+`, LiteralNumberInteger, nil}, + }, + "blockcomment": { + {`[^=#]`, CommentMultiline, nil}, + {`#=`, CommentMultiline, Push()}, + {`=#`, CommentMultiline, Pop(1)}, + {`[=#]`, CommentMultiline, nil}, + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)`, LiteralStringEscape, nil}, + {`\$(?:[a-zA-Z_¡-￿]|[𐀀-􏿿])(?:[a-zA-Z_0-9¡-￿]|[𐀀-􏿿])*!*`, LiteralStringInterpol, nil}, + {`(\$)(\()`, ByGroups(LiteralStringInterpol, Punctuation), Push("in-intp")}, + {`%[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]`, LiteralStringInterpol, nil}, + {`.|\s`, LiteralString, nil}, + }, + "tqstring": { + {`"""`, LiteralString, Pop(1)}, + {`\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)`, LiteralStringEscape, nil}, + {`\$(?:[a-zA-Z_¡-￿]|[𐀀-􏿿])(?:[a-zA-Z_0-9¡-￿]|[𐀀-􏿿])*!*`, LiteralStringInterpol, nil}, + {`(\$)(\()`, ByGroups(LiteralStringInterpol, Punctuation), Push("in-intp")}, + {`.|\s`, LiteralString, nil}, + }, + "regex": { + {`"`, LiteralStringRegex, Pop(1)}, + {`\\"`, LiteralStringRegex, nil}, + {`.|\s`, LiteralStringRegex, nil}, + }, + "tqregex": { + {`"""`, LiteralStringRegex, Pop(1)}, + {`.|\s`, LiteralStringRegex, nil}, + }, + "command": { + {"`", LiteralStringBacktick, Pop(1)}, + {`\$(?:[a-zA-Z_¡-￿]|[𐀀-􏿿])(?:[a-zA-Z_0-9¡-￿]|[𐀀-􏿿])*!*`, LiteralStringInterpol, nil}, + {`(\$)(\()`, ByGroups(LiteralStringInterpol, Punctuation), Push("in-intp")}, + {`.|\s`, LiteralStringBacktick, nil}, + }, + "in-intp": { + {`\(`, Punctuation, Push()}, + {`\)`, Punctuation, Pop(1)}, + Include("root"), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/j/jungle.go b/vendor/github.com/alecthomas/chroma/lexers/j/jungle.go new file mode 100644 index 00000000..83d46a48 --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/lexers/j/jungle.go @@ -0,0 +1,50 @@ +package j + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var Jungle = internal.Register(MustNewLexer( + &Config{ + Name: "Jungle", + Aliases: []string{"jungle"}, + Filenames: []string{"*.jungle"}, + MimeTypes: []string{"text/x-jungle"}, + }, + Rules{ + "root": { + {`[^\S\n]+`, Text, nil}, + {`\n`, Text, nil}, + {`#(\n|[\w\W]*?[^#]\n)`, CommentSingle, nil}, + {`^(?=\S)`, None, Push("instruction")}, + {`[\.;\[\]\(\)\$]`, Punctuation, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "instruction": { + {`[^\S\n]+`, Text, nil}, + {`=`, Operator, Push("value")}, + {`(?=\S)`, None, Push("var")}, + Default(Pop(1)), + }, + "value": { + {`[^\S\n]+`, Text, nil}, + {`\$\(`, Punctuation, Push("var")}, + {`[;\[\]\(\)\$]`, Punctuation, nil}, + {`#(\n|[\w\W]*?[^#]\n)`, CommentSingle, nil}, + {`[\w_\-\.\/\\]+`, Text, nil}, + Default(Pop(1)), + }, + "var": { + {`[^\S\n]+`, Text, nil}, + {`\b(((re)?source|barrel)Path|excludeAnnotations|annotations|lang)\b`, NameBuiltin, nil}, + {`\bbase\b`, NameConstant, nil}, + {`\b(ind|zsm|hrv|ces|dan|dut|eng|fin|fre|deu|gre|hun|ita|nob|po[lr]|rus|sl[ov]|spa|swe|ara|heb|zh[st]|jpn|kor|tha|vie|bul|tur)`, NameConstant, nil}, + {`\b((semi)?round|rectangle)(-\d+x\d+)?\b`, NameConstant, nil}, + {`[\.;\[\]\(\$]`, Punctuation, nil}, + {`\)`, Punctuation, Pop(1)}, + {`[a-zA-Z_]\w*`, Name, nil}, + Default(Pop(1)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/k/kotlin.go b/vendor/github.com/alecthomas/chroma/lexers/k/kotlin.go new file mode 100644 index 00000000..bf6fd315 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/k/kotlin.go @@ -0,0 +1,55 @@ +package k + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var kotlinIdentifier = "_A-Z\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua
68c\ua68e\ua690\ua692\ua694\ua696\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa\uff21-\uff3aa-z\u00b5\u00df-\u00f6\u00f8-\u00ff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0561-\u0587\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cb
f\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7fa\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\uaa70\uaadd\uaaf3-\uaaf4\uff70\uff9e-\uff9f\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef][A-Z\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c
\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa\uff21-\uff3aa-z\u00b5\u00df-\u00f6\u00f8-\u00ff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0561-\u0587\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1
e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7fa\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\uaa70\uaadd\uaaf3-\uaaf4\uff70\uff9e-\uff9f\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f\u00ad\u0600-\u0604\u061c\u06dd\u070f\u180e\u200b-\u200f\u202a-\u202e\u2060-\u2064\u2066-\u206f\ufeff\ufff9-\ufffb\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08e4-\u08fe\u0900-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u096
3\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a1b\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1dc0-\u1de6\u1dfc-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31-\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe26\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u19b0-\u19c0\u19c8-\u19c9\u1a19-\u1a1a\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1bac-\u1bad\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec]*|`@?[_A-Z\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u0
1e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa\uff21-\uff3aa-z\u00b5\u00df-\u00f6\u00f8-\u00ff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0
188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0561-\u0587\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7fa\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fb
c\u1fcc\u1ffc\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\uaa70\uaadd\uaaf3-\uaaf4\uff70\uff9e-\uff9f\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef][A-Z\u00c0-\u00d6\u00d8-\u00de\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038a\u038c\u038e-\u038f\u0391-\u03a1\u03a3-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0524\u0526\u0531-\u0556\u10a0-\u10c5\u10c7\u10cd\u1e00\u1e02\u1e04\u1e06\u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59\u1f5b\u1f5d\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c70\u2c72\u2c75\u2c7e-\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\
u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\u2ceb\u2ced\u2cf2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua660\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\ua78d\ua790\ua792\ua7a0\ua7a2\ua7a4\ua7a6\ua7a8\ua7aa\uff21-\uff3aa-z\u00b5\u00df-\u00f6\u00f8-\u00ff\u0101\u0103\u0105\u0107\u0109\u010b\u010d\u010f\u0111\u0113\u0115\u0117\u0119\u011b\u011d\u011f\u0121\u0123\u0125\u0127\u0129\u012b\u012d\u012f\u0131\u0133\u0135\u0137-\u0138\u013a\u013c\u013e\u0140\u0142\u0144\u0146\u0148-\u0149\u014b\u014d\u014f\u0151\u0153\u0155\u0157\u0159\u015b\u015d\u015f\u0161\u0163\u0165\u0167\u0169\u016b\u016d\u016f\u0171\u0173\u0175\u0177\u017a\u017c\u017e-\u0180\u0183\u0185\u0188\u018c-\u018d\u0192\u0195\u0199-\u019b\u019e\u01a1\u01a3\u01a5\u01a8\u01aa-\u01ab\u01ad\u01b0\u01b4\u01b6\u01b9-\u01ba\u01bd-\u01bf\u01c6\u01c9\u01cc\u01ce\u01d0\u01d2\u01d4\u01d6\u01d8\u01da\u01dc-\u01dd\u01df\u01e1\u01e3\u01e5\u01e7\u01e9\u01eb\u01ed\u01ef-\u01f0\u01f3\u01f5\u01f9\u01fb\u01fd\u01ff\u0201\u0203\u0205\u0207\u0209\u020b\u020d\u020f\u0211\u0213\u0215\u0217\u0219\u021b\u021d\u021f\u0221\u0223\u0225\u0227\u0229\u022b\u022d\u022f\u0231\u0233-\u0239\u023c\u023f-\u0240\u0242\u0247\u0249\u024b\u024d\u024f-\u0293\u0295-\u02af\u0371\u0373\u0377\u037b-\u037d\u0390\u03ac-\u03ce\u03d0-\u03d1\u03d5-\u03d7\u03d9\u03db\u03dd\u03df\u03e1\u03e3\u03e5\u03e7\u03e9\u03eb\u03ed\u03ef-\u03f3\u03f5\u03f8\u03fb-\u03fc\u0430-\u045f\u0461\u0463\u0465\u0467\u0469\u046b\u046d\u046f\u0471\u0473\u0475\u0477\u0479\u047b\u047d\u047f\u0481\u048b\u048d\u048f\u0491\u0493\u0495\u0497\u0499\u049b\u049d\u049f\u04a1\u04a3\u04a5\u04a7\u04a9\u04ab\u04ad\u04af\u04b1\u04b3\u04b5\u04b7\u04b9\u04bb\u04bd\u04bf\u04c2\u04c4\u04c6\u04c8\u04ca\u04cc\u04ce-\u04cf\u04d1\u04d3\u04d5\u04d7\u04d9\u04db\u04dd\u04df\u04e1\u04e3\u04e5\u04e7\u04e9\u04eb\u04ed\u04ef\u04f1\u04f3\u04f5\u04f7\u04f9\u04fb\u04fd\u04ff\u0501\u0503\u0505\u0507\u0509\u050b\u050d\u050f\u0511\u0513\u0515\u0517\u0519\u051b\u051d\u051f\u0521\u0523\u0525\u0527\u0561-\u0587\u1d00-\u1d2b\u1d6b-\u1d77\u1d79-\u1d9a\u1e01\u1e03\u1e05\u1e07\u1e09\u1e0b\u1e0d\u1e0f\u1e11\u1e13\u1e15\u1e17\u1e19\u1e1b\u1e1d\u1e1f\u1e21\u1e23\u1e25\u1e27\u1e29\u1e2b\u1e2d\u1e2f\u1e31\u1e33\u1e35\u1e37\u1e39\u1e3b\u1e3d\u1e3f\u1e41\u1e43\u1e45\u1e47\u1e49\u1e4b\u1e4d\u1e4f\u1e51\u1e53\u1e55\u1e57\u1e59\u1e5b\u1e5d\u1e5f\u1e61\u1e63\u1e65\u1e67\u1e69\u1e6b\u1e6d\u1e6f\u1e71\u1e73\u1e75\u1e77\u1e79\u1e7b\u1e7d\u1e7f\u1e81\u1e83\u1e85\u1e87\u1e89\u1e8b\u1e8d\u1e8f\u1e91\u1e93\u1e95-\u1e9d\u1e9f\u1ea1\u1ea3\u1ea5\u1ea7\u1ea9\u1eab\u1ead\u1eaf\u1eb1\u1eb3\u1eb5\u1eb7\u1eb9\u1ebb\u1ebd\u1ebf\u1ec1\u1ec3\u1ec5\u1ec7\u1ec9\u1ecb\u1ecd\u1ecf\u1ed1\u1ed3\u1ed5\u1ed7\u1ed9\u1edb\u1edd\u1edf\u1ee1\u1ee3\u1ee5\u1ee7\u1ee9\u1eeb\u1eed\u1eef\u1ef1\u1ef3\u1ef5\u1ef7\u1ef9\u1efb\u1efd\u1eff-\u1f07\u1f10-\u1f15\u1f20-\u1f27\u1f30-\u1f37\u1f40-\u1f45\u1f50-\u1f57\u1f60-\u1f67\u1f70-\u1f7d\u1f80-\u1f87\u1f90-\u1f97\u1fa0-\u1fa7\u1fb0-\u1fb4\u1fb6-\u1fb7\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fc7\u1fd0-\u
1fd3\u1fd6-\u1fd7\u1fe0-\u1fe7\u1ff2-\u1ff4\u1ff6-\u1ff7\u210a\u210e-\u210f\u2113\u212f\u2134\u2139\u213c-\u213d\u2146-\u2149\u214e\u2184\u2c30-\u2c5e\u2c61\u2c65-\u2c66\u2c68\u2c6a\u2c6c\u2c71\u2c73-\u2c74\u2c76-\u2c7b\u2c81\u2c83\u2c85\u2c87\u2c89\u2c8b\u2c8d\u2c8f\u2c91\u2c93\u2c95\u2c97\u2c99\u2c9b\u2c9d\u2c9f\u2ca1\u2ca3\u2ca5\u2ca7\u2ca9\u2cab\u2cad\u2caf\u2cb1\u2cb3\u2cb5\u2cb7\u2cb9\u2cbb\u2cbd\u2cbf\u2cc1\u2cc3\u2cc5\u2cc7\u2cc9\u2ccb\u2ccd\u2ccf\u2cd1\u2cd3\u2cd5\u2cd7\u2cd9\u2cdb\u2cdd\u2cdf\u2ce1\u2ce3-\u2ce4\u2cec\u2cee\u2cf3\u2d00-\u2d25\u2d27\u2d2d\ua641\ua643\ua645\ua647\ua649\ua64b\ua64d\ua64f\ua651\ua653\ua655\ua657\ua659\ua65b\ua65d\ua65f\ua661\ua663\ua665\ua667\ua669\ua66b\ua66d\ua681\ua683\ua685\ua687\ua689\ua68b\ua68d\ua68f\ua691\ua693\ua695\ua697\ua723\ua725\ua727\ua729\ua72b\ua72d\ua72f-\ua731\ua733\ua735\ua737\ua739\ua73b\ua73d\ua73f\ua741\ua743\ua745\ua747\ua749\ua74b\ua74d\ua74f\ua751\ua753\ua755\ua757\ua759\ua75b\ua75d\ua75f\ua761\ua763\ua765\ua767\ua769\ua76b\ua76d\ua76f\ua771-\ua778\ua77a\ua77c\ua77f\ua781\ua783\ua785\ua787\ua78c\ua78e\ua791\ua793\ua7a1\ua7a3\ua7a5\ua7a7\ua7a9\ua7fa\ufb00-\ufb06\ufb13-\ufb17\uff41-\uff5a\u01c5\u01c8\u01cb\u01f2\u1f88-\u1f8f\u1f98-\u1f9f\u1fa8-\u1faf\u1fbc\u1fcc\u1ffc\u02b0-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0374\u037a\u0559\u0640\u06e5-\u06e6\u07f4-\u07f5\u07fa\u081a\u0824\u0828\u0971\u0e46\u0ec6\u10fc\u17d7\u1843\u1aa7\u1c78-\u1c7d\u1d2c-\u1d6a\u1d78\u1d9b-\u1dbf\u2071\u207f\u2090-\u209c\u2c7c-\u2c7d\u2d6f\u2e2f\u3005\u3031-\u3035\u303b\u309d-\u309e\u30fc-\u30fe\ua015\ua4f8-\ua4fd\ua60c\ua67f\ua717-\ua71f\ua770\ua788\ua7f8-\ua7f9\ua9cf\uaa70\uaadd\uaaf3-\uaaf4\uff70\uff9e-\uff9f\u16ee-\u16f0\u2160-\u2182\u2185-\u2188\u3007\u3021-\u3029\u3038-\u303a\ua6e6-\ua6ef0-9\u0660-\u0669\u06f0-\u06f9\u07c0-\u07c9\u0966-\u096f\u09e6-\u09ef\u0a66-\u0a6f\u0ae6-\u0aef\u0b66-\u0b6f\u0be6-\u0bef\u0c66-\u0c6f\u0ce6-\u0cef\u0d66-\u0d6f\u0e50-\u0e59\u0ed0-\u0ed9\u0f20-\u0f29\u1040-\u1049\u1090-\u1099\u17e0-\u17e9\u1810-\u1819\u1946-\u194f\u19d0-\u19d9\u1a80-\u1a89\u1a90-\u1a99\u1b50-\u1b59\u1bb0-\u1bb9\u1c40-\u1c49\u1c50-\u1c59\ua620-\ua629\ua8d0-\ua8d9\ua900-\ua909\ua9d0-\ua9d9\uaa50-\uaa59\uabf0-\uabf9\uff10-\uff19_\u203f-\u2040\u2054\ufe33-\ufe34\ufe4d-\ufe4f\uff3f\u00ad\u0600-\u0604\u061c\u06dd\u070f\u180e\u200b-\u200f\u202a-\u202e\u2060-\u2064\u2066-\u206f\ufeff\ufff9-\ufffb\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1-\u05c2\u05c4-\u05c5\u05c7\u0610-\u061a\u064b-\u065f\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7-\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08e4-\u08fe\u0900-\u0902\u093a\u093c\u0941-\u0948\u094d\u0951-\u0957\u0962-\u0963\u0981\u09bc\u09c1-\u09c4\u09cd\u09e2-\u09e3\u0a01-\u0a02\u0a3c\u0a41-\u0a42\u0a47-\u0a48\u0a4b-\u0a4d\u0a51\u0a70-\u0a71\u0a75\u0a81-\u0a82\u0abc\u0ac1-\u0ac5\u0ac7-\u0ac8\u0acd\u0ae2-\u0ae3\u0b01\u0b3c\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b62-\u0b63\u0b82\u0bc0\u0bcd\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55-\u0c56\u0c62-\u0c63\u0cbc\u0cbf\u0cc6\u0ccc-\u0ccd\u0ce2-\u0ce3\u0d41-\u0d44\u0d4d\u0d62-\u0d63\u0dca\u0dd2-\u0dd4\u0dd6\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb-\u0ebc\u0ec8-\u0ecd\u0f18-\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86-\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039-\u103a\u103d-\u103e\u1058-\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085-\u1086\u108d\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752-\u1753\u1772-\u1773\u17b4-\u17b5\u17b7-\u17bd\u
17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927-\u1928\u1932\u1939-\u193b\u1a17-\u1a18\u1a1b\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80-\u1b81\u1ba2-\u1ba5\u1ba8-\u1ba9\u1bab\u1be6\u1be8-\u1be9\u1bed\u1bef-\u1bf1\u1c2c-\u1c33\u1c36-\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1cf4\u1dc0-\u1de6\u1dfc-\u1dff\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302d\u3099-\u309a\ua66f\ua674-\ua67d\ua69f\ua6f0-\ua6f1\ua802\ua806\ua80b\ua825-\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31-\uaa32\uaa35-\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4\uaab7-\uaab8\uaabe-\uaabf\uaac1\uaaec-\uaaed\uaaf6\uabe5\uabe8\uabed\ufb1e\ufe00-\ufe0f\ufe20-\ufe26\u0903\u093b\u093e-\u0940\u0949-\u094c\u094e-\u094f\u0982-\u0983\u09be-\u09c0\u09c7-\u09c8\u09cb-\u09cc\u09d7\u0a03\u0a3e-\u0a40\u0a83\u0abe-\u0ac0\u0ac9\u0acb-\u0acc\u0b02-\u0b03\u0b3e\u0b40\u0b47-\u0b48\u0b4b-\u0b4c\u0b57\u0bbe-\u0bbf\u0bc1-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcc\u0bd7\u0c01-\u0c03\u0c41-\u0c44\u0c82-\u0c83\u0cbe\u0cc0-\u0cc4\u0cc7-\u0cc8\u0cca-\u0ccb\u0cd5-\u0cd6\u0d02-\u0d03\u0d3e-\u0d40\u0d46-\u0d48\u0d4a-\u0d4c\u0d57\u0d82-\u0d83\u0dcf-\u0dd1\u0dd8-\u0ddf\u0df2-\u0df3\u0f3e-\u0f3f\u0f7f\u102b-\u102c\u1031\u1038\u103b-\u103c\u1056-\u1057\u1062-\u1064\u1067-\u106d\u1083-\u1084\u1087-\u108c\u108f\u109a-\u109c\u17b6\u17be-\u17c5\u17c7-\u17c8\u1923-\u1926\u1929-\u192b\u1930-\u1931\u1933-\u1938\u19b0-\u19c0\u19c8-\u19c9\u1a19-\u1a1a\u1a55\u1a57\u1a61\u1a63-\u1a64\u1a6d-\u1a72\u1b04\u1b35\u1b3b\u1b3d-\u1b41\u1b43-\u1b44\u1b82\u1ba1\u1ba6-\u1ba7\u1baa\u1bac-\u1bad\u1be7\u1bea-\u1bec\u1bee\u1bf2-\u1bf3\u1c24-\u1c2b\u1c34-\u1c35\u1ce1\u1cf2-\u1cf3\u302e-\u302f\ua823-\ua824\ua827\ua880-\ua881\ua8b4-\ua8c3\ua952-\ua953\ua983\ua9b4-\ua9b5\ua9ba-\ua9bb\ua9bd-\ua9c0\uaa2f-\uaa30\uaa33-\uaa34\uaa4d\uaa7b\uaaeb\uaaee-\uaaef\uaaf5\uabe3-\uabe4\uabe6-\uabe7\uabe9-\uabea\uabec" + +// Kotlin lexer. 
+var Kotlin = internal.Register(MustNewLexer( + &Config{ + Name: "Kotlin", + Aliases: []string{"kotlin"}, + Filenames: []string{"*.kt"}, + MimeTypes: []string{"text/x-kotlin"}, + DotAll: true, + }, + Rules{ + "root": { + {`^\s*\[.*?\]`, NameAttribute, nil}, + {`[^\S\n]+`, Text, nil}, + {`\\\n`, Text, nil}, + {`//[^\n]*\n?`, CommentSingle, nil}, + {`/[*].*?[*]/`, CommentMultiline, nil}, + {`\n`, Text, nil}, + {`::|!!|\?[:.]`, Operator, nil}, + {`[~!%^&*()+=|\[\]:;,.<>/?-]`, Punctuation, nil}, + {`[{}]`, Punctuation, nil}, + {`"""[^"]*"""`, LiteralString, nil}, + {`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil}, + {`'\\.'|'[^\\]'`, LiteralStringChar, nil}, + {`[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, + {`(companion)(\s+)(object)`, ByGroups(Keyword, Text, Keyword), nil}, + {`(class|interface|object)(\s+)`, ByGroups(Keyword, Text), Push("class")}, + {`(package|import)(\s+)`, ByGroups(Keyword, Text), Push("package")}, + {`(val|var)(\s+)`, ByGroups(Keyword, Text), Push("property")}, + {`(fun)(\s+)(<[^>]*>\s+)?`, ByGroups(Keyword, Text, Text), Push("function")}, + {`(abstract|actual|annotation|as|break|by|catch|class|companion|const|constructor|continue|crossinline|data|do|dynamic|else|enum|expect|external|false|final|finally|for|fun|get|if|import|in|infix|inline|inner|interface|internal|is|lateinit|noinline|null|object|open|operator|out|override|package|private|protected|public|reified|return|sealed|set|super|suspend|tailrec|this|throw|true|try|val|var|vararg|when|where|while)\b`, Keyword, nil}, + {"(@?[" + kotlinIdentifier + "]*`)", Name, nil}, + }, + "package": { + {`\S+`, NameNamespace, Pop(1)}, + }, + "class": { + {"(@?[" + kotlinIdentifier + "]*`)", NameClass, Pop(1)}, + }, + "property": { + {"(@?[" + kotlinIdentifier + " ]*`)", NameProperty, Pop(1)}, + }, + "function": { + {"(@?[" + kotlinIdentifier + " ]*`)", NameFunction, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/l/lighttpd.go b/vendor/github.com/alecthomas/chroma/lexers/l/lighttpd.go new file mode 100644 index 00000000..799b77c3 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/l/lighttpd.go @@ -0,0 +1,30 @@ +package l + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Lighttpd Configuration File lexer. +var Lighttpd = internal.Register(MustNewLexer( + &Config{ + Name: "Lighttpd configuration file", + Aliases: []string{"lighty", "lighttpd"}, + Filenames: []string{}, + MimeTypes: []string{"text/x-lighttpd-conf"}, + }, + Rules{ + "root": { + {`#.*\n`, CommentSingle, nil}, + {`/\S*`, Name, nil}, + {`[a-zA-Z._-]+`, Keyword, nil}, + {`\d+\.\d+\.\d+\.\d+(?:/\d+)?`, LiteralNumber, nil}, + {`[0-9]+`, LiteralNumber, nil}, + {`=>|=~|\+=|==|=|\+`, Operator, nil}, + {`\$[A-Z]+`, NameBuiltin, nil}, + {`[(){}\[\],]`, Punctuation, nil}, + {`"([^"\\]*(?:\\.[^"\\]*)*)"`, LiteralStringDouble, nil}, + {`\s+`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/l/llvm.go b/vendor/github.com/alecthomas/chroma/lexers/l/llvm.go new file mode 100644 index 00000000..8f5b0b12 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/l/llvm.go @@ -0,0 +1,43 @@ +package l + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Llvm lexer. 
+var Llvm = internal.Register(MustNewLexer( + &Config{ + Name: "LLVM", + Aliases: []string{"llvm"}, + Filenames: []string{"*.ll"}, + MimeTypes: []string{"text/x-llvm"}, + }, + Rules{ + "root": { + Include("whitespace"), + {`([-a-zA-Z$._][\w\-$.]*|"[^"]*?")\s*:`, NameLabel, nil}, + Include("keyword"), + {`%([-a-zA-Z$._][\w\-$.]*|"[^"]*?")`, NameVariable, nil}, + {`@([-a-zA-Z$._][\w\-$.]*|"[^"]*?")`, NameVariableGlobal, nil}, + {`%\d+`, NameVariableAnonymous, nil}, + {`@\d+`, NameVariableGlobal, nil}, + {`#\d+`, NameVariableGlobal, nil}, + {`!([-a-zA-Z$._][\w\-$.]*|"[^"]*?")`, NameVariable, nil}, + {`!\d+`, NameVariableAnonymous, nil}, + {`c?"[^"]*?"`, LiteralString, nil}, + {`0[xX][a-fA-F0-9]+`, LiteralNumber, nil}, + {`-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?`, LiteralNumber, nil}, + {`[=<>{}\[\]()*.,!]|x\b`, Punctuation, nil}, + }, + "whitespace": { + {`(\n|\s)+`, Text, nil}, + {`;.*?\n`, Comment, nil}, + }, + "keyword": { + {Words(``, `\b`, `begin`, `end`, `true`, `false`, `declare`, `define`, `global`, `constant`, `private`, `linker_private`, `internal`, `available_externally`, `linkonce`, `linkonce_odr`, `weak`, `weak_odr`, `appending`, `dllimport`, `dllexport`, `common`, `default`, `hidden`, `protected`, `extern_weak`, `external`, `thread_local`, `zeroinitializer`, `undef`, `null`, `to`, `tail`, `target`, `triple`, `datalayout`, `volatile`, `nuw`, `nsw`, `nnan`, `ninf`, `nsz`, `arcp`, `fast`, `exact`, `inbounds`, `align`, `addrspace`, `section`, `alias`, `module`, `asm`, `sideeffect`, `gc`, `dbg`, `linker_private_weak`, `attributes`, `blockaddress`, `initialexec`, `localdynamic`, `localexec`, `prefix`, `unnamed_addr`, `ccc`, `fastcc`, `coldcc`, `x86_stdcallcc`, `x86_fastcallcc`, `arm_apcscc`, `arm_aapcscc`, `arm_aapcs_vfpcc`, `ptx_device`, `ptx_kernel`, `intel_ocl_bicc`, `msp430_intrcc`, `spir_func`, `spir_kernel`, `x86_64_sysvcc`, `x86_64_win64cc`, `x86_thiscallcc`, `cc`, `c`, `signext`, `zeroext`, `inreg`, `sret`, `nounwind`, `noreturn`, `noalias`, `nocapture`, `byval`, `nest`, `readnone`, `readonly`, `inlinehint`, `noinline`, `alwaysinline`, `optsize`, `ssp`, `sspreq`, `noredzone`, `noimplicitfloat`, `naked`, `builtin`, `cold`, `nobuiltin`, `noduplicate`, `nonlazybind`, `optnone`, `returns_twice`, `sanitize_address`, `sanitize_memory`, `sanitize_thread`, `sspstrong`, `uwtable`, `returned`, `type`, `opaque`, `eq`, `ne`, `slt`, `sgt`, `sle`, `sge`, `ult`, `ugt`, `ule`, `uge`, `oeq`, `one`, `olt`, `ogt`, `ole`, `oge`, `ord`, `uno`, `ueq`, `une`, `x`, `acq_rel`, `acquire`, `alignstack`, `atomic`, `catch`, `cleanup`, `filter`, `inteldialect`, `max`, `min`, `monotonic`, `nand`, `personality`, `release`, `seq_cst`, `singlethread`, `umax`, `umin`, `unordered`, `xchg`, `add`, `fadd`, `sub`, `fsub`, `mul`, `fmul`, `udiv`, `sdiv`, `fdiv`, `urem`, `srem`, `frem`, `shl`, `lshr`, `ashr`, `and`, `or`, `xor`, `icmp`, `fcmp`, `phi`, `call`, `trunc`, `zext`, `sext`, `fptrunc`, `fpext`, `uitofp`, `sitofp`, `fptoui`, `fptosi`, `inttoptr`, `ptrtoint`, `bitcast`, `addrspacecast`, `select`, `va_arg`, `ret`, `br`, `switch`, `invoke`, `unwind`, `unreachable`, `indirectbr`, `landingpad`, `resume`, `malloc`, `alloca`, `free`, `load`, `store`, `getelementptr`, `extractelement`, `insertelement`, `shufflevector`, `getresult`, `extractvalue`, `insertvalue`, `atomicrmw`, `cmpxchg`, `fence`, `allocsize`, `amdgpu_cs`, `amdgpu_gs`, `amdgpu_kernel`, `amdgpu_ps`, `amdgpu_vs`, `any`, `anyregcc`, `argmemonly`, `avr_intrcc`, `avr_signalcc`, `caller`, `catchpad`, `catchret`, `catchswitch`, `cleanuppad`, `cleanupret`, 
`comdat`, `convergent`, `cxx_fast_tlscc`, `deplibs`, `dereferenceable`, `dereferenceable_or_null`, `distinct`, `exactmatch`, `externally_initialized`, `from`, `ghccc`, `hhvm_ccc`, `hhvmcc`, `ifunc`, `inaccessiblemem_or_argmemonly`, `inaccessiblememonly`, `inalloca`, `jumptable`, `largest`, `local_unnamed_addr`, `minsize`, `musttail`, `noduplicates`, `none`, `nonnull`, `norecurse`, `notail`, `preserve_allcc`, `preserve_mostcc`, `prologue`, `safestack`, `samesize`, `source_filename`, `swiftcc`, `swifterror`, `swiftself`, `webkit_jscc`, `within`, `writeonly`, `x86_intrcc`, `x86_vectorcallcc`), Keyword, nil}, + {Words(``, ``, `void`, `half`, `float`, `double`, `x86_fp80`, `fp128`, `ppc_fp128`, `label`, `metadata`, `token`), KeywordType, nil}, + {`i[1-9]\d*`, Keyword, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/l/lua.go b/vendor/github.com/alecthomas/chroma/lexers/l/lua.go new file mode 100644 index 00000000..c397de06 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/l/lua.go @@ -0,0 +1,75 @@ +package l + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Lua lexer. +var Lua = internal.Register(MustNewLexer( + &Config{ + Name: "Lua", + Aliases: []string{"lua"}, + Filenames: []string{"*.lua", "*.wlua"}, + MimeTypes: []string{"text/x-lua", "application/x-lua"}, + }, + Rules{ + "root": { + {`#!.*`, CommentPreproc, nil}, + Default(Push("base")), + }, + "ws": { + {`(?:--\[(=*)\[[\w\W]*?\](\1)\])`, CommentMultiline, nil}, + {`(?:--.*$)`, CommentSingle, nil}, + {`(?:\s+)`, Text, nil}, + }, + "base": { + Include("ws"), + {`(?i)0x[\da-f]*(\.[\da-f]*)?(p[+-]?\d+)?`, LiteralNumberHex, nil}, + {`(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?`, LiteralNumberFloat, nil}, + {`(?i)\d+e[+-]?\d+`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`(?s)\[(=*)\[.*?\]\1\]`, LiteralString, nil}, + {`::`, Punctuation, Push("label")}, + {`\.{3}`, Punctuation, nil}, + {`[=<>|~&+\-*/%#^]+|\.\.`, Operator, nil}, + {`[\[\]{}().,:;]`, Punctuation, nil}, + {`(and|or|not)\b`, OperatorWord, nil}, + {`(break|do|else|elseif|end|for|if|in|repeat|return|then|until|while)\b`, KeywordReserved, nil}, + {`goto\b`, KeywordReserved, Push("goto")}, + {`(local)\b`, KeywordDeclaration, nil}, + {`(true|false|nil)\b`, KeywordConstant, nil}, + {`(function)\b`, KeywordReserved, Push("funcname")}, + {`[A-Za-z_]\w*(\.[A-Za-z_]\w*)?`, Name, nil}, + {`'`, LiteralStringSingle, Combined("stringescape", "sqs")}, + {`"`, LiteralStringDouble, Combined("stringescape", "dqs")}, + }, + "funcname": { + Include("ws"), + {`[.:]`, Punctuation, nil}, + {`(?:[^\W\d]\w*)(?=(?:(?:--\[(=*)\[[\w\W]*?\](\2)\])|(?:--.*$)|(?:\s+))*[.:])`, NameClass, nil}, + {`(?:[^\W\d]\w*)`, NameFunction, Pop(1)}, + {`\(`, Punctuation, Pop(1)}, + }, + "goto": { + Include("ws"), + {`(?:[^\W\d]\w*)`, NameLabel, Pop(1)}, + }, + "label": { + Include("ws"), + {`::`, Punctuation, Pop(1)}, + {`(?:[^\W\d]\w*)`, NameLabel, nil}, + }, + "stringescape": { + {`\\([abfnrtv\\"\']|[\r\n]{1,2}|z\s*|x[0-9a-fA-F]{2}|\d{1,3}|u\{[0-9a-fA-F]+\})`, LiteralStringEscape, nil}, + }, + "sqs": { + {`'`, LiteralStringSingle, Pop(1)}, + {`[^\\']+`, LiteralStringSingle, nil}, + }, + "dqs": { + {`"`, LiteralStringDouble, Pop(1)}, + {`[^\\"]+`, LiteralStringDouble, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/lexers.go b/vendor/github.com/alecthomas/chroma/lexers/lexers.go new file mode 100644 index 00000000..2897299c --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/lexers/lexers.go @@ -0,0 +1,59 @@ +// Package lexers contains the registry of all lexers. +// +// Sub-packages contain lexer implementations. +package lexers + +// nolint: golint +import ( + "github.com/alecthomas/chroma" + _ "github.com/alecthomas/chroma/lexers/a" + _ "github.com/alecthomas/chroma/lexers/b" + _ "github.com/alecthomas/chroma/lexers/c" + _ "github.com/alecthomas/chroma/lexers/circular" + _ "github.com/alecthomas/chroma/lexers/d" + _ "github.com/alecthomas/chroma/lexers/e" + _ "github.com/alecthomas/chroma/lexers/f" + _ "github.com/alecthomas/chroma/lexers/g" + _ "github.com/alecthomas/chroma/lexers/h" + _ "github.com/alecthomas/chroma/lexers/i" + "github.com/alecthomas/chroma/lexers/internal" + _ "github.com/alecthomas/chroma/lexers/j" + _ "github.com/alecthomas/chroma/lexers/k" + _ "github.com/alecthomas/chroma/lexers/l" + _ "github.com/alecthomas/chroma/lexers/m" + _ "github.com/alecthomas/chroma/lexers/n" + _ "github.com/alecthomas/chroma/lexers/o" + _ "github.com/alecthomas/chroma/lexers/p" + _ "github.com/alecthomas/chroma/lexers/q" + _ "github.com/alecthomas/chroma/lexers/r" + _ "github.com/alecthomas/chroma/lexers/s" + _ "github.com/alecthomas/chroma/lexers/t" + _ "github.com/alecthomas/chroma/lexers/v" + _ "github.com/alecthomas/chroma/lexers/w" + _ "github.com/alecthomas/chroma/lexers/x" + _ "github.com/alecthomas/chroma/lexers/y" +) + +// Registry of Lexers. +var Registry = internal.Registry + +// Names of all lexers, optionally including aliases. +func Names(withAliases bool) []string { return internal.Names(withAliases) } + +// Get a Lexer by name, alias or file extension. +func Get(name string) chroma.Lexer { return internal.Get(name) } + +// MatchMimeType attempts to find a lexer for the given MIME type. +func MatchMimeType(mimeType string) chroma.Lexer { return internal.MatchMimeType(mimeType) } + +// Match returns the first lexer matching filename. +func Match(filename string) chroma.Lexer { return internal.Match(filename) } + +// Analyse text content and return the "best" lexer.. +func Analyse(text string) chroma.Lexer { return internal.Analyse(text) } + +// Register a Lexer with the global registry. +func Register(lexer chroma.Lexer) chroma.Lexer { return internal.Register(lexer) } + +// Fallback lexer if no other is found. +var Fallback = internal.Fallback diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/make.go b/vendor/github.com/alecthomas/chroma/lexers/m/make.go new file mode 100644 index 00000000..eb9d9e68 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/make.go @@ -0,0 +1,54 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + . "github.com/alecthomas/chroma/lexers/b" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Makefile lexer. 
+var Makefile = internal.Register(MustNewLexer( + &Config{ + Name: "Base Makefile", + Aliases: []string{"make", "makefile", "mf", "bsdmake"}, + Filenames: []string{"*.mak", "*.mk", "Makefile", "makefile", "Makefile.*", "GNUmakefile"}, + MimeTypes: []string{"text/x-makefile"}, + EnsureNL: true, + }, + Rules{ + "root": { + {`^(?:[\t ]+.*\n|\n)+`, Using(Bash), nil}, + {`\$[<@$+%?|*]`, Keyword, nil}, + {`\s+`, Text, nil}, + {`#.*?\n`, Comment, nil}, + {`(export)(\s+)(?=[\w${}\t -]+\n)`, ByGroups(Keyword, Text), Push("export")}, + {`export\s+`, Keyword, nil}, + {`([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)`, ByGroups(NameVariable, Text, Operator, Text, Using(Bash)), nil}, + {`(?s)"(\\\\|\\.|[^"\\])*"`, LiteralStringDouble, nil}, + {`(?s)'(\\\\|\\.|[^'\\])*'`, LiteralStringSingle, nil}, + {`([^\n:]+)(:+)([ \t]*)`, ByGroups(NameFunction, Operator, Text), Push("block-header")}, + {`\$\(`, Keyword, Push("expansion")}, + }, + "expansion": { + {`[^$a-zA-Z_()]+`, Text, nil}, + {`[a-zA-Z_]+`, NameVariable, nil}, + {`\$`, Keyword, nil}, + {`\(`, Keyword, Push()}, + {`\)`, Keyword, Pop(1)}, + }, + "export": { + {`[\w${}-]+`, NameVariable, nil}, + {`\n`, Text, Pop(1)}, + {`\s+`, Text, nil}, + }, + "block-header": { + {`[,|]`, Punctuation, nil}, + {`#.*?\n`, Comment, Pop(1)}, + {`\\\n`, Text, nil}, + {`\$\(`, Keyword, Push("expansion")}, + {`[a-zA-Z_]+`, Name, nil}, + {`\n`, Text, Pop(1)}, + {`.`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/mako.go b/vendor/github.com/alecthomas/chroma/lexers/m/mako.go new file mode 100644 index 00000000..f7c140d5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/mako.go @@ -0,0 +1,60 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" + . "github.com/alecthomas/chroma/lexers/p" // nolint +) + +// Mako lexer. +var Mako = internal.Register(MustNewLexer( + &Config{ + Name: "Mako", + Aliases: []string{"mako"}, + Filenames: []string{"*.mao"}, + MimeTypes: []string{"application/x-mako"}, + }, + Rules{ + "root": { + {`(\s*)(%)(\s*end(?:\w+))(\n|\Z)`, ByGroups(Text, CommentPreproc, Keyword, Other), nil}, + {`(\s*)(%)([^\n]*)(\n|\Z)`, ByGroups(Text, CommentPreproc, Using(Python), Other), nil}, + {`(\s*)(##[^\n]*)(\n|\Z)`, ByGroups(Text, CommentPreproc, Other), nil}, + {`(?s)<%doc>.*?</%doc>`, CommentPreproc, nil}, + {`(<%)([\w.:]+)`, ByGroups(CommentPreproc, NameBuiltin), Push("tag")}, + {`(</%)([\w.:]+)(>)`, ByGroups(CommentPreproc, NameBuiltin, CommentPreproc), nil}, + {`<%(?=([\w.:]+))`, CommentPreproc, Push("ondeftags")}, + {`(<%(?:!?))(.*?)(%>)(?s)`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, + {`(\$\{)(.*?)(\})`, ByGroups(CommentPreproc, Using(Python), CommentPreproc), nil}, + {`(?sx) + (.+?) # anything, followed by: + (?: + (?<=\n)(?=%|\#\#) | # an eval or comment line + (?=\#\*) | # multiline comment + (?=</?%) | # a python block + # call start or end + (?=\$\{) | # a substitution + (?<=\n)(?=\s*%) | + # - don't consume + (\\\n) | # an escaped newline + \Z # end of string + ) + `, ByGroups(Other, Operator), nil}, + {`\s+`, Text, nil}, + }, + "ondeftags": { + {`<%`, CommentPreproc, nil}, + {`(?<=<%)(include|inherit|namespace|page)`, NameBuiltin, nil}, + Include("tag"), + }, + "tag": { + {`((?:\w+)\s*=)(\s*)(".*?")`, ByGroups(NameAttribute, Text, LiteralString), nil}, + {`/?\s*>`, CommentPreproc, Pop(1)}, + {`\s+`, Text, nil}, + }, + "attr": { + {`".*?"`, LiteralString, Pop(1)}, + {`'.*?'`, LiteralString, Pop(1)}, + {`[^\s>]+`, LiteralString, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/markdown.go b/vendor/github.com/alecthomas/chroma/lexers/m/markdown.go new file mode 100644 index 00000000..b4690e38 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/markdown.go @@ -0,0 +1,47 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Markdown lexer.
+var Markdown = internal.Register(MustNewLexer( + &Config{ + Name: "markdown", + Aliases: []string{"md", "mkd"}, + Filenames: []string{"*.md", "*.mkd", "*.markdown"}, + MimeTypes: []string{"text/x-markdown"}, + }, + Rules{ + "root": { + {`^(#)([^#].+\n)`, ByGroups(GenericHeading, Text), nil}, + {`^(#{2,6})(.+\n)`, ByGroups(GenericSubheading, Text), nil}, + {`^(\s*)([*-] )(\[[ xX]\])( .+\n)`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, + {`^(\s*)([*-])(\s)(.+\n)`, ByGroups(Text, Keyword, Text, UsingSelf("inline")), nil}, + {`^(\s*)([0-9]+\.)( .+\n)`, ByGroups(Text, Keyword, UsingSelf("inline")), nil}, + {`^(\s*>\s)(.+\n)`, ByGroups(Keyword, GenericEmph), nil}, + {"^(```\\n)([\\w\\W]*?)(^```$)", ByGroups(String, Text, String), nil}, + {"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)", + UsingByGroup( + internal.Get, + 2, 4, + String, String, String, Text, String, + ), + nil, + }, + Include("inline"), + }, + "inline": { + {`\\.`, Text, nil}, + {`(\s)([*_][^*_]+[*_])(\W|\n)`, ByGroups(Text, GenericEmph, Text), nil}, + {`(\s)((\*\*|__).*\3)((?=\W|\n))`, ByGroups(Text, GenericStrong, None, Text), nil}, + {`(\s)(~~[^~]+~~)((?=\W|\n))`, ByGroups(Text, GenericDeleted, Text), nil}, + {"`[^`]+`", LiteralStringBacktick, nil}, + {`[@#][\w/:]+`, NameEntity, nil}, + {`(!?\[)([^]]+)(\])(\()([^)]+)(\))`, ByGroups(Text, NameTag, Text, Text, NameAttribute, Text), nil}, + {`[^\\\s]+`, Text, nil}, + {`.`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/mason.go b/vendor/github.com/alecthomas/chroma/lexers/m/mason.go new file mode 100644 index 00000000..5c70ab0a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/mason.go @@ -0,0 +1,43 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + . "github.com/alecthomas/chroma/lexers/h" // nolint + "github.com/alecthomas/chroma/lexers/internal" + . "github.com/alecthomas/chroma/lexers/p" // nolint +) + +// Mason lexer. +var Mason = internal.Register(MustNewLexer( + &Config{ + Name: "Mason", + Aliases: []string{"mason"}, + Filenames: []string{"*.m", "*.mhtml", "*.mc", "*.mi", "autohandler", "dhandler"}, + MimeTypes: []string{"application/x-mason"}, + Priority: 0.1, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`(<%doc>)(.*?)(</%doc>)(?s)`, ByGroups(NameTag, CommentMultiline, NameTag), nil}, + {`(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)`, ByGroups(NameTag, Text, NameFunction, NameTag, UsingSelf("root"), NameTag), nil}, + {`(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)`, ByGroups(NameTag, NameFunction, NameTag, Using(Perl), NameTag), nil}, + {`(<&[^|])(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Perl), NameTag), nil}, + {`(<&\|)(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Perl), NameTag), nil}, + {`</&>`, NameTag, nil}, + {`(<%!?)(.*?)(%>)(?s)`, ByGroups(NameTag, Using(Perl), NameTag), nil}, + {`(?<=^)#[^\n]*(\n|\Z)`, Comment, nil}, + {`(?<=^)(%)([^\n]*)(\n|\Z)`, ByGroups(NameTag, Using(Perl), Other), nil}, + {`(?sx) + (.+?)
# anything, followed by: + (?: + (?<=\n)(?=[%#]) | # an eval or comment line + (?=`, `:>`, `/.`, `+`, `-`, `*`, `/`, `^`, `&&`, `||`, `!`, `<>`, `|`, `/;`, `?`, `@`, `//`, `/@`, `@@`, `@@@`, `~~`, `===`, `&`, `<`, `>`, `<=`, `>=`), Operator, nil}, + {Words(``, ``, `,`, `;`, `(`, `)`, `[`, `]`, `{`, `}`), Punctuation, nil}, + {`".*?"`, LiteralString, nil}, + {`\s+`, TextWhitespace, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/minizinc.go b/vendor/github.com/alecthomas/chroma/lexers/m/minizinc.go new file mode 100644 index 00000000..ccf4eed9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/minizinc.go @@ -0,0 +1,41 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// MiniZinc lexer. +var MZN = internal.Register(MustNewLexer( + &Config{ + Name: "MiniZinc", + Aliases: []string{"minizinc", "MZN", "mzn"}, + Filenames: []string{"*.mzn", "*.dzn", "*.fzn"}, + MimeTypes: []string{"text/minizinc"}, + }, + Rules{ + "root": { + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`\%(.*?)\n`, CommentSingle, nil}, + {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {Words(`\b`, `\b`, `ann`, `annotation`, `any`, `constraint`, `function`, `include`, `list`, `of`, `op`, `output`, `minimize`, `maximize`, `par`, `predicate`, `record`, `satisfy`, `solve`, `test`, `type`, `var`), Keyword, nil}, + {Words(`\b`, `\b`, `array`, `set`, `bool`, `enum`, `float`, `int`, `string`, `tuple`), KeywordType, nil}, + {Words(`\b`, `\b`, `for`, `forall`, `if`, `then`, `else`, `endif`, `where`), Keyword, nil}, + {Words(`\b`, `\b`, `abort`, `abs`, `acosh`, `array_intersect`, `array_union`, `array1d`, `array2d`, `array3d`, `array4d`, `array5d`, `array6d`, `asin`, `assert`, `atan`, `bool2int`, `card`, `ceil`, `concat`, `cos`, `cosh`, `dom`, `dom_array`, `dom_size`, `fix`, `exp`, `floor`, `index_set`, `index_set_1of2`, `index_set_2of2`, `index_set_1of3`, `index_set_2of3`, `index_set_3of3`, `int2float`, `is_fixed`, `join`, `lb`, `lb_array`, `length`, `ln`, `log`, `log2`, `log10`, `min`, `max`, `pow`, `product`, `round`, `set2array`, `show`, `show_int`, `show_float`, `sin`, `sinh`, `sqrt`, `sum`, `tan`, `tanh`, `trace`, `ub`, `ub_array`), NameBuiltin, nil}, + {`(not|<->|->|<-|\\/|xor|/\\)`, Operator, nil}, + {`(<|>|<=|>=|==|=|!=)`, Operator, nil}, + {`(\+|-|\*|/|div|mod)`, Operator, nil}, + {Words(`\b`, `\b`, `in`, `subset`, `superset`, `union`, `diff`, `symdiff`, `intersect`), Operator, nil}, + {`(\\|\.\.|\+\+)`, Operator, nil}, + {`[|()\[\]{},:;]`, Punctuation, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {`([+-]?)\d+(\.(?!\.)\d*)?([eE][-+]?\d+)?`, LiteralNumber, nil}, + {`::\s*([^\W\d]\w*)(\s*\([^\)]*\))?`, NameDecorator, nil}, + {`\b([^\W\d]\w*)\b(\()`, ByGroups(NameFunction, Punctuation), nil}, + {`[^\W\d]\w*`, NameOther, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/modula2.go b/vendor/github.com/alecthomas/chroma/lexers/m/modula2.go new file mode 100644 index 00000000..6fadb79f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/modula2.go @@ -0,0 +1,115 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Modula-2 lexer. 
+var Modula2 = internal.Register(MustNewLexer( + &Config{ + Name: "Modula-2", + Aliases: []string{"modula2", "m2"}, + Filenames: []string{"*.def", "*.mod"}, + MimeTypes: []string{"text/x-modula2"}, + DotAll: true, + }, + Rules{ + "whitespace": { + {`\n+`, Text, nil}, + {`\s+`, Text, nil}, + }, + "dialecttags": { + {`\(\*!m2pim\*\)`, CommentSpecial, nil}, + {`\(\*!m2iso\*\)`, CommentSpecial, nil}, + {`\(\*!m2r10\*\)`, CommentSpecial, nil}, + {`\(\*!objm2\*\)`, CommentSpecial, nil}, + {`\(\*!m2iso\+aglet\*\)`, CommentSpecial, nil}, + {`\(\*!m2pim\+gm2\*\)`, CommentSpecial, nil}, + {`\(\*!m2iso\+p1\*\)`, CommentSpecial, nil}, + {`\(\*!m2iso\+xds\*\)`, CommentSpecial, nil}, + }, + "identifiers": { + {`([a-zA-Z_$][\w$]*)`, Name, nil}, + }, + "prefixed_number_literals": { + {`0b[01]+(\'[01]+)*`, LiteralNumberBin, nil}, + {`0[ux][0-9A-F]+(\'[0-9A-F]+)*`, LiteralNumberHex, nil}, + }, + "plain_number_literals": { + {`[0-9]+(\'[0-9]+)*\.[0-9]+(\'[0-9]+)*[eE][+-]?[0-9]+(\'[0-9]+)*`, LiteralNumberFloat, nil}, + {`[0-9]+(\'[0-9]+)*\.[0-9]+(\'[0-9]+)*`, LiteralNumberFloat, nil}, + {`[0-9]+(\'[0-9]+)*`, LiteralNumberInteger, nil}, + }, + "suffixed_number_literals": { + {`[0-7]+B`, LiteralNumberOct, nil}, + {`[0-7]+C`, LiteralNumberOct, nil}, + {`[0-9A-F]+H`, LiteralNumberHex, nil}, + }, + "string_literals": { + {`'(\\\\|\\'|[^'])*'`, LiteralString, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + }, + "digraph_operators": { + {`\*\.`, Operator, nil}, + {`\+>`, Operator, nil}, + {`<>`, Operator, nil}, + {`<=`, Operator, nil}, + {`>=`, Operator, nil}, + {`==`, Operator, nil}, + {`::`, Operator, nil}, + {`:=`, Operator, nil}, + {`\+\+`, Operator, nil}, + {`--`, Operator, nil}, + }, + "unigraph_operators": { + {`[+-]`, Operator, nil}, + {`[*/]`, Operator, nil}, + {`\\`, Operator, nil}, + {`[=#<>]`, Operator, nil}, + {`\^`, Operator, nil}, + {`@`, Operator, nil}, + {`&`, Operator, nil}, + {`~`, Operator, nil}, + {"`", Operator, nil}, + }, + "digraph_punctuation": { + {`\.\.`, Punctuation, nil}, + {`<<`, Punctuation, nil}, + {`>>`, Punctuation, nil}, + {`->`, Punctuation, nil}, + {`\|#`, Punctuation, nil}, + {`##`, Punctuation, nil}, + {`\|\*`, Punctuation, nil}, + }, + "unigraph_punctuation": { + {`[()\[\]{},.:;|]`, Punctuation, nil}, + {`!`, Punctuation, nil}, + {`\?`, Punctuation, nil}, + }, + "comments": { + {`^//.*?\n`, CommentSingle, nil}, + {`\(\*([^$].*?)\*\)`, CommentMultiline, nil}, + {`/\*(.*?)\*/`, CommentMultiline, nil}, + }, + "pragmas": { + {`<\*.*?\*>`, CommentPreproc, nil}, + {`\(\*\$.*?\*\)`, CommentPreproc, nil}, + }, + "root": { + Include("whitespace"), + Include("dialecttags"), + Include("pragmas"), + Include("comments"), + Include("identifiers"), + Include("suffixed_number_literals"), + Include("prefixed_number_literals"), + Include("plain_number_literals"), + Include("string_literals"), + Include("digraph_punctuation"), + Include("digraph_operators"), + Include("unigraph_punctuation"), + Include("unigraph_operators"), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/monkeyc.go b/vendor/github.com/alecthomas/chroma/lexers/m/monkeyc.go new file mode 100644 index 00000000..828ff0bf --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/monkeyc.go @@ -0,0 +1,62 @@ +package m + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var MonkeyC = internal.Register(MustNewLexer( + &Config{ + Name: "MonkeyC", + Aliases: []string{"monkeyc"}, + Filenames: []string{"*.mc"}, + MimeTypes: []string{"text/x-monkeyc"}, + }, + Rules{ + "root": { + {`[^\S\n]+`, Text, nil}, + {`\n`, Text, nil}, + {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, + {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, + {`:[a-zA-Z_][\w_\.]*`, StringSymbol, nil}, + {`[{}\[\]\(\),;:\.]`, Punctuation, nil}, + {`[&~\|\^!+\-*\/%=?]`, Operator, nil}, + {`=>|[+-]=|&&|\|\||>>|<<|[<>]=?|[!=]=`, Operator, nil}, + {`\b(and|or|instanceof|has|extends|new)`, OperatorWord, nil}, + {Words(``, `\b`, `NaN`, `null`, `true`, `false`), KeywordConstant, nil}, + {`(using)((?:\s|\\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + {`(class)((?:\s|\\\\s)+)`, ByGroups(KeywordDeclaration, Text), Push("class")}, + {`(function)((?:\s|\\\\s)+)`, ByGroups(KeywordDeclaration, Text), Push("function")}, + {`(module)((?:\s|\\\\s)+)`, ByGroups(KeywordDeclaration, Text), Push("module")}, + {`\b(if|else|for|switch|case|while|break|continue|default|do|try|catch|finally|return|throw|extends|function)\b`, Keyword, nil}, + {`\b(const|enum|hidden|public|protected|private|static)\b`, KeywordType, nil}, + {`\bvar\b`, KeywordDeclaration, nil}, + {`\b(Activity(Monitor|Recording)?|Ant(Plus)?|Application|Attention|Background|Communications|Cryptography|FitContributor|Graphics|Gregorian|Lang|Math|Media|Persisted(Content|Locations)|Position|Properties|Sensor(History|Logging)?|Storage|StringUtil|System|Test|Time(r)?|Toybox|UserProfile|WatchUi|Rez|Drawables|Strings|Fonts|method)\b`, NameBuiltin, nil}, + {`\b(me|self|\$)\b`, NameBuiltinPseudo, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^''])*'`, LiteralStringSingle, nil}, + {`-?(0x[0-9a-fA-F]+l?)`, NumberHex, nil}, + {`-?([0-9]+(\.[0-9]+[df]?|[df]))\b`, NumberFloat, nil}, + {`-?([0-9]+l?)`, NumberInteger, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "import": { + {`([a-zA-Z_][\w_\.]*)(?:(\s+)(as)(\s+)([a-zA-Z_][\w_]*))?`, ByGroups(NameNamespace, Text, KeywordNamespace, Text, NameNamespace), nil}, + Default(Pop(1)), + }, + "class": { + {`([a-zA-Z_][\w_\.]*)(?:(\s+)(extends)(\s+)([a-zA-Z_][\w_\.]*))?`, ByGroups(NameClass, Text, KeywordDeclaration, Text, NameClass), nil}, + Default(Pop(1)), + }, + "function": { + {`initialize`, NameFunctionMagic, nil}, + {`[a-zA-Z_][\w_\.]*`, NameFunction, nil}, + Default(Pop(1)), + }, + "module": { + {`[a-zA-Z_][\w_\.]*`, NameNamespace, nil}, + Default(Pop(1)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/mwscript.go b/vendor/github.com/alecthomas/chroma/lexers/m/mwscript.go new file mode 100644 index 00000000..b86bbdfe --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/mwscript.go @@ -0,0 +1,53 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// MorrowindScript lexer. 
+var MorrowindScript = internal.Register(MustNewLexer( + &Config{ + Name: "MorrowindScript", + Aliases: []string{"morrowind", "mwscript"}, + Filenames: []string{}, + MimeTypes: []string{}, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`;.*$`, Comment, nil}, + {`(["'])(?:(?=(\\?))\2.)*?\1`, LiteralString, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`[0-9]+\.[0-9]*(?!\.)`, LiteralNumberFloat, nil}, + Include("keywords"), + Include("types"), + Include("builtins"), + Include("punct"), + Include("operators"), + {`\n`, Text, nil}, + {`\S+\s+`, Text, nil}, + {`[a-zA-Z0-9_]\w*`, Name, nil}, + }, + "keywords": { + {`(?i)(begin|if|else|elseif|endif|while|endwhile|return|to)\b`, Keyword, nil}, + {`(?i)(end)\b`, Keyword, nil}, + {`(?i)(end)\w+.*$`, Text, nil}, + {`[\w+]->[\w+]`, Operator, nil}, + }, + "builtins": { + {`(?i)(Activate|AddItem|AddSoulGem|AddSpell|AddToLevCreature|AddToLevItem|AddTopic|AIActivate|AIEscort|AIEscortCell|AIFollow|AiFollowCell|AITravel|AIWander|BecomeWerewolf|Cast|ChangeWeather|Choice|ClearForceJump|ClearForceMoveJump|ClearForceRun|ClearForceSneak|ClearInfoActor|Disable|DisableLevitation|DisablePlayerControls|DisablePlayerFighting|DisablePlayerJumping|DisablePlayerLooking|DisablePlayerMagic|DisablePlayerViewSwitch|DisableTeleporting|DisableVanityMode|DontSaveObject|Drop|Enable|EnableBirthMenu|EnableClassMenu|EnableInventoryMenu|EnableLevelUpMenu|EnableLevitation|EnableMagicMenu|EnableMapMenu|EnableNameMenu|EnablePlayerControls|EnablePlayerFighting|EnablePlayerJumping|EnablePlayerLooking|EnablePlayerMagic|EnablePlayerViewSwitch|EnableRaceMenu|EnableRest|EnableStatsMenu|EnableTeleporting|EnableVanityMode|Equip|ExplodeSpell|Face|FadeIn|FadeOut|FadeTo|Fall|ForceGreeting|ForceJump|ForceRun|ForceSneak|Flee|GotoJail|HurtCollidingActor|HurtStandingActor|Journal|Lock|LoopGroup|LowerRank|MenuTest|MessageBox|ModAcrobatics|ModAgility|ModAlarm|ModAlchemy|ModAlteration|ModArmorBonus|ModArmorer|ModAthletics|ModAttackBonus|ModAxe|ModBlock|ModBluntWeapon|ModCastPenalty|ModChameleon|ModConjuration|ModCurrentFatigue|ModCurrentHealth|ModCurrentMagicka|ModDefendBonus|ModDestruction|ModDisposition|ModEnchant|ModEndurance|ModFactionReaction|ModFatigue|ModFight|ModFlee|ModFlying|ModHandToHand|ModHealth|ModHeavyArmor|ModIllusion|ModIntelligence|ModInvisible|ModLightArmor|ModLongBlade|ModLuck|ModMagicka|ModMarksman|ModMediumArmor|ModMercantile|ModMysticism|ModParalysis|ModPCCrimeLevel|ModPCFacRep|ModPersonality|ModRegion|ModReputation|ModResistBlight|ModResistCorprus|ModResistDisease|ModResistFire|ModResistFrost|ModResistMagicka|ModResistNormalWeapons|ModResistParalysis|ModResistPoison|ModResistShock|ModRestoration|ModScale|ModSecurity|ModShortBlade|ModSilence|ModSneak|ModSpear|ModSpeechcraft|ModSpeed|ModStrength|ModSuperJump|ModSwimSpeed|ModUnarmored|ModWaterBreathing|ModWaterLevel|ModWaterWalking|ModWillpower|Move|MoveWorld|PayFine|PayFineThief|PCClearExpelled|PCExpell|PCForce1stPerson|PCForce3rdPerson|PCJoinFaction|PCLowerRank|PCRaiseRank|PlaceAtMe|PlaceAtPC|PlaceItem|PlaceItemCell|PlayBink|PlayGroup|PlayLoopSound3D|PlayLoopSound3DVP|PlaySound|PlaySound3D|PlaySound3DVP|PlaySoundVP|Position|PositionCell|RaiseRank|RemoveEffects|RemoveFromLevCreature|RemoveFromLevItem|RemoveItem|RemoveSoulgem|RemoveSpell|RemoveSpellEffects|ResetActors|Resurrect|Rotate|RotateWorld|Say|StartScript|[S|s]et|SetAcrobatics|SetAgility|SetAlarm|SetAlchemy|SetAlteration|SetAngle|SetArmorBonus|SetArmorer|SetAthletics|SetAtStart|SetAttackBonus|SetAxe|SetBlock|SetBluntWeapon|SetCastPenalty|SetChameleon|SetConjuration|Se
tDelete|SetDefendBonus|SetDestruction|SetDisposition|SetEnchant|SetEndurance|SetFactionReaction|SetFatigue|SetFight|SetFlee|SetFlying|SetHandToHand|SetHealth|SetHeavyArmor|SetIllusion|SetIntelligence|SetInvisible|SetJournalIndex|SetLightArmor|SetLevel|SetLongBlade|SetLuck|SetMagicka|SetMarksman|SetMediumArmor|SetMercantile|SetMysticism|SetParalysis|SetPCCCrimeLevel|SetPCFacRep|SetPersonality|SetPos|SetReputation|SetResistBlight|SetResistCorprus|SetResistDisease|SetResistFire|SetResistFrost|SetResistMagicka|SetResistNormalWeapons|SetResistParalysis|SetResistPoison|SetResistShock|SetRestoration|SetScale|SetSecurity|SetShortBlade|SetSilence|SetSneak|SetSpear|SetSpeechcraft|SetSpeed|SetStrength|SetSuperJump|SetSwimSpeed|SetUnarmored|SetWaterBreathing|SetWaterlevel|SetWaterWalking|SetWerewolfAcrobatics|SetWillpower|ShowMap|ShowRestMenu|SkipAnim|StartCombat|StopCombat|StopScript|StopSound|StreamMusic|TurnMoonRed|TurnMoonWhite|UndoWerewolf|Unlock|WakeUpPC|CenterOnCell|CenterOnExterior|FillMap|FixMe|ToggleAI|ToggleCollision|ToggleFogOfWar|ToggleGodMode|ToggleMenus|ToggleSky|ToggleWorld|ToggleVanityMode|CellChanged|GetAcrobatics|GetAgility|GetAIPackageDone|GetAlarm|GetAlchemy|GetAlteration|GetAngle|GetArmorBonus|GetArmorer|GetAthletics|GetAttackBonus|GetAttacked|GetArmorType,|GetAxe|GetBlightDisease|GetBlock|GetBluntWeapon|GetButtonPressed|GetCastPenalty|GetChameleon|GetCollidingActor|GetCollidingPC|GetCommonDisease|GetConjuration|GetCurrentAIPackage|GetCurrentTime|GetCurrentWeather|GetDeadCount|GetDefendBonus|GetDestruction|GetDetected|GetDisabled|GetDisposition|GetDistance|GetEffect|GetEnchant|GetEndurance|GetFatigue|GetFight|GetFlee|GetFlying|GetForceJump|GetForceRun|GetForceSneak|GetHandToHand|GetHealth|GetHealthGetRatio|GetHeavyArmor|GetIllusion|GetIntelligence|GetInterior|GetInvisible|GetItemCount|GetJournalIndex|GetLightArmor|GetLineOfSight|GetLOS|GetLevel|GetLocked|GetLongBlade|GetLuck|GetMagicka|GetMarksman|GetMasserPhase|GetSecundaPhase|GetMediumArmor|GetMercantile|GetMysticism|GetParalysis|GetPCCell|GetPCCrimeLevel|GetPCinJail|GetPCJumping|GetPCRank|GetPCRunning|GetPCSleep|GetPCSneaking|GetPCTraveling|GetPersonality|GetPlayerControlsDisabled|GetPlayerFightingDisabled|GetPlayerJumpingDisabled|GetPlayerLookingDisabled|GetPlayerMagicDisabled|GetPos|GetRace|GetReputation|GetResistBlight|GetResistCorprus|GetResistDisease|GetResistFire|GetResistFrost|GetResistMagicka|GetResistNormalWeapons|GetResistParalysis|GetResistPoison|GetResistShock|GetRestoration|GetScale|GetSecondsPassed|GetSecurity|GetShortBlade|GetSilence|GetSneak|GetSoundPlaying|GetSpear|GetSpeechcraft|GetSpeed|GetSpell|GetSpellEffects|GetSpellReadied|GetSquareRoot|GetStandingActor|GetStandingPC|GetStrength|GetSuperJump|GetSwimSpeed|GetTarget|GetUnarmored|GetVanityModeDisabled|GetWaterBreathing|GetWaterLevel|GetWaterWalking|GetWeaponDrawn|GetWeaponType|GetWerewolfKills|GetWillpower|GetWindSpeed|HasItemEquipped|HasSoulgem|HitAttemptOnMe|HitOnMe|IsWerewolf|MenuMode|OnActivate|OnDeath|OnKnockout|OnMurder|PCExpelled|PCGet3rdPerson|PCKnownWerewolf|Random|RepairedOnMe|SameFaction|SayDone|ScriptRunning|AllowWereWolfForceGreeting|Companion|MinimumProfit|NoFlee|NoHello|NoIdle|NoLore|OnPCAdd|OnPCDrop|OnPCEquip|OnPCHitMe|OnPCRepair|PCSkipEquip|OnPCSoulGemUse|StayOutside|CrimeGoldDiscount|CrimeGoldTurnIn|Day|DaysPassed|GameHour|Month|NPCVoiceDistance|PCRace|PCWerewolf|PCVampire|TimeScale|VampClan|Year)\b`, NameBuiltin, nil}, + 
{`(?i)(sEffectWaterBreathing|sEffectSwiftSwim|sEffectWaterWalking|sEffectShield|sEffectFireShield|sEffectLightningShield|sEffectFrostShield|sEffectBurden|sEffectFeather|sEffectJump|sEffectLevitate|sEffectSlowFall|sEffectLock|sEffectOpen|sEffectFireDamage|sEffectShockDamage|sEffectFrostDamage|sEffectDrainAttribute|sEffectDrainHealth|sEffectDrainSpellpoints|sEffectDrainFatigue|sEffectDrainSkill|sEffectDamageAttribute|sEffectDamageHealth|sEffectDamageMagicka|sEffectDamageFatigue|sEffectDamageSkill|sEffectPoison|sEffectWeaknessToFire|sEffectWeaknessToFrost|sEffectWeaknessToShock|sEffectWeaknessToMagicka|sEffectWeaknessToCommonDisease|sEffectWeaknessToBlightDisease|sEffectWeaknessToCorprusDisease|sEffectWeaknessToPoison|sEffectWeaknessToNormalWeapons|sEffectDisintegrateWeapon|sEffectDisintegrateArmor|sEffectInvisibility|sEffectChameleon|sEffectLight|sEffectSanctuary|sEffectNightEye|sEffectCharm|sEffectParalyze|sEffectSilence|sEffectBlind|sEffectSound|sEffectCalmHumanoid|sEffectCalmCreature|sEffectFrenzyHumanoid|sEffectFrenzyCreature|sEffectDemoralizeHumanoid|sEffectDemoralizeCreature|sEffectRallyHumanoid|sEffectRallyCreature|sEffectDispel|sEffectSoultrap|sEffectTelekinesis|sEffectMark|sEffectRecall|sEffectDivineIntervention|sEffectAlmsiviIntervention|sEffectDetectAnimal|sEffectDetectEnchantment|sEffectDetectKey|sEffectSpellAbsorption|sEffectReflect|sEffectCureCommonDisease|sEffectCureBlightDisease|sEffectCureCorprusDisease|sEffectCurePoison|sEffectCureParalyzation|sEffectRestoreAttribute|sEffectRestoreHealth|sEffectRestoreSpellPoints|sEffectRestoreFatigue|sEffectRestoreSkill|sEffectFortifyAttribute|sEffectFortifyHealth|sEffectFortifySpellpoints|sEffectFortifyFatigue|sEffectFortifySkill|sEffectFortifyMagickaMultiplier|sEffectAbsorbAttribute|sEffectAbsorbHealth|sEffectAbsorbSpellPoints|sEffectAbsorbFatigue|sEffectAbsorbSkill|sEffectResistFire|sEffectResistFrost|sEffectResistShock|sEffectResistMagicka|sEffectResistCommonDisease|sEffectResistBlightDisease|sEffectResistCorprusDisease|sEffectResistPoison|sEffectResistNormalWeapons|sEffectResistParalysis|sEffectRemoveCurse|sEffectTurnUndead|sEffectSummonScamp|sEffectSummonClannfear|sEffectSummonDaedroth|sEffectSummonDremora|sEffectSummonAncestralGhost|sEffectSummonSkeletalMinion|sEffectSummonLeastBonewalker|sEffectSummonGreaterBonewalker|sEffectSummonBonelord|sEffectSummonWingedTwilight|sEffectSummonHunger|sEffectSummonGoldensaint|sEffectSummonFlameAtronach|sEffectSummonFrostAtronach|sEffectSummonStormAtronach|sEffectFortifyAttackBonus|sEffectCommandCreatures|sEffectCommandHumanoids|sEffectBoundDagger|sEffectBoundLongsword|sEffectBoundMace|sEffectBoundBattleAxe|sEffectBoundSpear|sEffectBoundLongbow|sEffectExtraSpell|sEffectBoundCuirass|sEffectBoundHelm|sEffectBoundBoots|sEffectBoundShield|sEffectBoundGloves|sEffectCorpus|sEffectVampirism|sEffectSummonCenturionSphere|sEffectSunDamage|sEffectStuntedMagicka)`, NameBuiltin, nil}, + }, + "types": { + {`(?i)(short|long|float)\b`, KeywordType, nil}, + }, + "punct": { + {`[()]`, Punctuation, nil}, + }, + "operators": { + {`[#=,./%+\-?]`, Operator, nil}, + {`(==|<=|<|>=|>|!=)`, Operator, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/m/myghty.go b/vendor/github.com/alecthomas/chroma/lexers/m/myghty.go new file mode 100644 index 00000000..02a20eae --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/m/myghty.go @@ -0,0 +1,40 @@ +package m + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" + . 
"github.com/alecthomas/chroma/lexers/p" // nolint +) + +// Myghty lexer. +var Myghty = internal.Register(MustNewLexer( + &Config{ + Name: "Myghty", + Aliases: []string{"myghty"}, + Filenames: []string{"*.myt", "autodelegate"}, + MimeTypes: []string{"application/x-myghty"}, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`(<%(?:def|method))(\s*)(.*?)(>)(.*?)()(?s)`, ByGroups(NameTag, Text, NameFunction, NameTag, UsingSelf("root"), NameTag), nil}, + {`(<%\w+)(.*?)(>)(.*?)()(?s)`, ByGroups(NameTag, NameFunction, NameTag, Using(Python), NameTag), nil}, + {`(<&[^|])(.*?)(,.*?)?(&>)`, ByGroups(NameTag, NameFunction, Using(Python), NameTag), nil}, + {`(<&\|)(.*?)(,.*?)?(&>)(?s)`, ByGroups(NameTag, NameFunction, Using(Python), NameTag), nil}, + {``, NameTag, nil}, + {`(<%!?)(.*?)(%>)(?s)`, ByGroups(NameTag, Using(Python), NameTag), nil}, + {`(?<=^)#[^\n]*(\n|\Z)`, Comment, nil}, + {`(?<=^)(%)([^\n]*)(\n|\Z)`, ByGroups(NameTag, Using(Python), Other), nil}, + {`(?sx) + (.+?) # anything, followed by: + (?: + (?<=\n)(?=[%#]) | # an eval or comment line + (?==~!@#%^&|`?-]", Operator, nil}, + {`\b(tinyint|smallint|mediumint|int|integer|bigint|date|datetime|time|bit|bool|tinytext|mediumtext|longtext|text|tinyblob|mediumblob|longblob|blob|float|double|double\s+precision|real|numeric|dec|decimal|timestamp|year|char|varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?`, ByGroups(KeywordType, Text, Punctuation), nil}, + {`\b(add|all|alter|analyze|and|as|asc|asensitive|before|between|bigint|binary|blob|both|by|call|cascade|case|change|char|character|check|collate|column|condition|constraint|continue|convert|create|cross|current_date|current_time|current_timestamp|current_user|cursor|database|databases|day_hour|day_microsecond|day_minute|day_second|dec|decimal|declare|default|delayed|delete|desc|describe|deterministic|distinct|distinctrow|div|double|drop|dual|each|else|elseif|enclosed|escaped|exists|exit|explain|fetch|flush|float|float4|float8|for|force|foreign|from|fulltext|grant|group|having|high_priority|hour_microsecond|hour_minute|hour_second|if|ignore|in|index|infile|inner|inout|insensitive|insert|int|int1|int2|int3|int4|int8|integer|interval|into|is|iterate|join|key|keys|kill|leading|leave|left|like|limit|lines|load|localtime|localtimestamp|lock|long|loop|low_priority|match|minute_microsecond|minute_second|mod|modifies|natural|no_write_to_binlog|not|numeric|on|optimize|option|optionally|or|order|out|outer|outfile|precision|primary|procedure|purge|raid0|read|reads|real|references|regexp|release|rename|repeat|replace|require|restrict|return|revoke|right|rlike|schema|schemas|second_microsecond|select|sensitive|separator|set|show|smallint|soname|spatial|specific|sql|sql_big_result|sql_calc_found_rows|sql_small_result|sqlexception|sqlstate|sqlwarning|ssl|starting|straight_join|table|terminated|then|to|trailing|trigger|undo|union|unique|unlock|unsigned|update|usage|use|using|utc_date|utc_time|utc_timestamp|values|varying|when|where|while|with|write|x509|xor|year_month|zerofill)\b`, Keyword, nil}, + {`\b(auto_increment|engine|charset|tables)\b`, KeywordPseudo, nil}, + {`(true|false|null)`, NameConstant, nil}, + {`([a-z_]\w*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, + {`[a-z_]\w*`, Name, nil}, + {`@[a-z0-9]*[._]*[a-z0-9]*`, NameVariable, nil}, + {`[;:()\[\],.]`, Punctuation, nil}, + }, + "multiline-comments": { + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[^/*]+`, CommentMultiline, nil}, + {`[/*]`, CommentMultiline, nil}, + }, + 
"string": { + {`[^']+`, LiteralStringSingle, nil}, + {`''`, LiteralStringSingle, nil}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "double-string": { + {`[^"]+`, LiteralStringDouble, nil}, + {`""`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/n/nasm.go b/vendor/github.com/alecthomas/chroma/lexers/n/nasm.go new file mode 100644 index 00000000..d769d157 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/n/nasm.go @@ -0,0 +1,59 @@ +package n + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Nasm lexer. +var Nasm = internal.Register(MustNewLexer( + &Config{ + Name: "NASM", + Aliases: []string{"nasm"}, + Filenames: []string{"*.asm", "*.ASM"}, + MimeTypes: []string{"text/x-nasm"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`^\s*%`, CommentPreproc, Push("preproc")}, + Include("whitespace"), + {`[a-z$._?][\w$.?#@~]*:`, NameLabel, nil}, + {`([a-z$._?][\w$.?#@~]*)(\s+)(equ)`, ByGroups(NameConstant, KeywordDeclaration, KeywordDeclaration), Push("instruction-args")}, + {`BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|EXPORT|LIBRARY|MODULE`, Keyword, Push("instruction-args")}, + {`(?:res|d)[bwdqt]|times`, KeywordDeclaration, Push("instruction-args")}, + {`[a-z$._?][\w$.?#@~]*`, NameFunction, Push("instruction-args")}, + {`[\r\n]+`, Text, nil}, + }, + "instruction-args": { + {"\"(\\\\\"|[^\"\\n])*\"|'(\\\\'|[^'\\n])*'|`(\\\\`|[^`\\n])*`", LiteralString, nil}, + {`(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)`, LiteralNumberHex, nil}, + {`[0-7]+q`, LiteralNumberOct, nil}, + {`[01]+b`, LiteralNumberBin, nil}, + {`[0-9]+\.e?[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + Include("punctuation"), + {`r[0-9][0-5]?[bwd]|[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]`, NameBuiltin, nil}, + {`[a-z$._?][\w$.?#@~]*`, NameVariable, nil}, + {`[\r\n]+`, Text, Pop(1)}, + Include("whitespace"), + }, + "preproc": { + {`[^;\n]+`, CommentPreproc, nil}, + {`;.*?\n`, CommentSingle, Pop(1)}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "whitespace": { + {`\n`, Text, nil}, + {`[ \t]+`, Text, nil}, + {`;.*`, CommentSingle, nil}, + }, + "punctuation": { + {`[,():\[\]]+`, Punctuation, nil}, + {`[&|^<>+*/%~-]+`, Operator, nil}, + {`[$]+`, KeywordConstant, nil}, + {`seg|wrt|strict`, OperatorWord, nil}, + {`byte|[dq]?word`, KeywordType, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/n/newspeak.go b/vendor/github.com/alecthomas/chroma/lexers/n/newspeak.go new file mode 100644 index 00000000..1a6a37bd --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/n/newspeak.go @@ -0,0 +1,55 @@ +package n + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Newspeak lexer. 
+var Newspeak = internal.Register(MustNewLexer( + &Config{ + Name: "Newspeak", + Aliases: []string{"newspeak"}, + Filenames: []string{"*.ns2"}, + MimeTypes: []string{"text/x-newspeak"}, + }, + Rules{ + "root": { + {`\b(Newsqueak2)\b`, KeywordDeclaration, nil}, + {`'[^']*'`, LiteralString, nil}, + {`\b(class)(\s+)(\w+)(\s*)`, ByGroups(KeywordDeclaration, Text, NameClass, Text), nil}, + {`\b(mixin|self|super|private|public|protected|nil|true|false)\b`, Keyword, nil}, + {`(\w+\:)(\s*)([a-zA-Z_]\w+)`, ByGroups(NameFunction, Text, NameVariable), nil}, + {`(\w+)(\s*)(=)`, ByGroups(NameAttribute, Text, Operator), nil}, + {`<\w+>`, CommentSpecial, nil}, + Include("expressionstat"), + Include("whitespace"), + }, + "expressionstat": { + {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`:\w+`, NameVariable, nil}, + {`(\w+)(::)`, ByGroups(NameVariable, Operator), nil}, + {`\w+:`, NameFunction, nil}, + {`\w+`, NameVariable, nil}, + {`\(|\)`, Punctuation, nil}, + {`\[|\]`, Punctuation, nil}, + {`\{|\}`, Punctuation, nil}, + {`(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)`, Operator, nil}, + {`\.|;`, Punctuation, nil}, + Include("whitespace"), + Include("literals"), + }, + "literals": { + {`\$.`, LiteralString, nil}, + {`'[^']*'`, LiteralString, nil}, + {`#'[^']*'`, LiteralStringSymbol, nil}, + {`#\w+:?`, LiteralStringSymbol, nil}, + {`#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+`, LiteralStringSymbol, nil}, + }, + "whitespace": { + {`\s+`, Text, nil}, + {`"[^"]*"`, Comment, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/n/nginx.go b/vendor/github.com/alecthomas/chroma/lexers/n/nginx.go new file mode 100644 index 00000000..840d100b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/n/nginx.go @@ -0,0 +1,47 @@ +package n + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Nginx Configuration File lexer. +var Nginx = internal.Register(MustNewLexer( + &Config{ + Name: "Nginx configuration file", + Aliases: []string{"nginx"}, + Filenames: []string{"nginx.conf"}, + MimeTypes: []string{"text/x-nginx-conf"}, + }, + Rules{ + "root": { + {`(include)(\s+)([^\s;]+)`, ByGroups(Keyword, Text, Name), nil}, + {`[^\s;#]+`, Keyword, Push("stmt")}, + Include("base"), + }, + "block": { + {`\}`, Punctuation, Pop(2)}, + {`[^\s;#]+`, KeywordNamespace, Push("stmt")}, + Include("base"), + }, + "stmt": { + {`\{`, Punctuation, Push("block")}, + {`;`, Punctuation, Pop(1)}, + Include("base"), + }, + "base": { + {`#.*\n`, CommentSingle, nil}, + {`on|off`, NameConstant, nil}, + {`\$[^\s;#()]+`, NameVariable, nil}, + {`([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Punctuation, LiteralNumberInteger), nil}, + {`[a-z-]+/[a-z-+]+`, LiteralString, nil}, + {`[0-9]+[km]?\b`, LiteralNumberInteger, nil}, + {`(~)(\s*)([^\s{]+)`, ByGroups(Punctuation, Text, LiteralStringRegex), nil}, + {`[:=~]`, Punctuation, nil}, + {`[^\s;#{}$]+`, LiteralString, nil}, + {`/[^\s;#]*`, Name, nil}, + {`\s+`, Text, nil}, + {`[$;]`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/n/nim.go b/vendor/github.com/alecthomas/chroma/lexers/n/nim.go new file mode 100644 index 00000000..1f7ec792 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/n/nim.go @@ -0,0 +1,92 @@ +package n + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Nim lexer. 
+var Nim = internal.Register(MustNewLexer( + &Config{ + Name: "Nim", + Aliases: []string{"nim", "nimrod"}, + Filenames: []string{"*.nim", "*.nimrod"}, + MimeTypes: []string{"text/x-nim"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`##.*$`, LiteralStringDoc, nil}, + {`#.*$`, Comment, nil}, + {`[*=><+\-/@$~&%!?|\\\[\]]`, Operator, nil}, + {"\\.\\.|\\.|,|\\[\\.|\\.\\]|\\{\\.|\\.\\}|\\(\\.|\\.\\)|\\{|\\}|\\(|\\)|:|\\^|`|;", Punctuation, nil}, + {`(?:[\w]+)"`, LiteralString, Push("rdqs")}, + {`"""`, LiteralString, Push("tdqs")}, + {`"`, LiteralString, Push("dqs")}, + {`'`, LiteralStringChar, Push("chars")}, + {`(a_?n_?d_?|o_?r_?|n_?o_?t_?|x_?o_?r_?|s_?h_?l_?|s_?h_?r_?|d_?i_?v_?|m_?o_?d_?|i_?n_?|n_?o_?t_?i_?n_?|i_?s_?|i_?s_?n_?o_?t_?)\b`, OperatorWord, nil}, + {`(p_?r_?o_?c_?\s)(?![(\[\]])`, Keyword, Push("funcname")}, + {`(a_?d_?d_?r_?|a_?n_?d_?|a_?s_?|a_?s_?m_?|a_?t_?o_?m_?i_?c_?|b_?i_?n_?d_?|b_?l_?o_?c_?k_?|b_?r_?e_?a_?k_?|c_?a_?s_?e_?|c_?a_?s_?t_?|c_?o_?n_?c_?e_?p_?t_?|c_?o_?n_?s_?t_?|c_?o_?n_?t_?i_?n_?u_?e_?|c_?o_?n_?v_?e_?r_?t_?e_?r_?|d_?e_?f_?e_?r_?|d_?i_?s_?c_?a_?r_?d_?|d_?i_?s_?t_?i_?n_?c_?t_?|d_?i_?v_?|d_?o_?|e_?l_?i_?f_?|e_?l_?s_?e_?|e_?n_?d_?|e_?n_?u_?m_?|e_?x_?c_?e_?p_?t_?|e_?x_?p_?o_?r_?t_?|f_?i_?n_?a_?l_?l_?y_?|f_?o_?r_?|f_?u_?n_?c_?|i_?f_?|i_?n_?|y_?i_?e_?l_?d_?|i_?n_?t_?e_?r_?f_?a_?c_?e_?|i_?s_?|i_?s_?n_?o_?t_?|i_?t_?e_?r_?a_?t_?o_?r_?|l_?e_?t_?|m_?a_?c_?r_?o_?|m_?e_?t_?h_?o_?d_?|m_?i_?x_?i_?n_?|m_?o_?d_?|n_?o_?t_?|n_?o_?t_?i_?n_?|o_?b_?j_?e_?c_?t_?|o_?f_?|o_?r_?|o_?u_?t_?|p_?r_?o_?c_?|p_?t_?r_?|r_?a_?i_?s_?e_?|r_?e_?f_?|r_?e_?t_?u_?r_?n_?|s_?h_?a_?r_?e_?d_?|s_?h_?l_?|s_?h_?r_?|s_?t_?a_?t_?i_?c_?|t_?e_?m_?p_?l_?a_?t_?e_?|t_?r_?y_?|t_?u_?p_?l_?e_?|t_?y_?p_?e_?|w_?h_?e_?n_?|w_?h_?i_?l_?e_?|w_?i_?t_?h_?|w_?i_?t_?h_?o_?u_?t_?|x_?o_?r_?)\b`, Keyword, nil}, + {`(f_?r_?o_?m_?|i_?m_?p_?o_?r_?t_?|i_?n_?c_?l_?u_?d_?e_?)\b`, KeywordNamespace, nil}, + {`(v_?a_?r)\b`, KeywordDeclaration, nil}, + {`(i_?n_?t_?|i_?n_?t_?8_?|i_?n_?t_?1_?6_?|i_?n_?t_?3_?2_?|i_?n_?t_?6_?4_?|f_?l_?o_?a_?t_?|f_?l_?o_?a_?t_?3_?2_?|f_?l_?o_?a_?t_?6_?4_?|b_?o_?o_?l_?|c_?h_?a_?r_?|r_?a_?n_?g_?e_?|a_?r_?r_?a_?y_?|s_?e_?q_?|s_?e_?t_?|s_?t_?r_?i_?n_?g_?)\b`, KeywordType, nil}, + {`(n_?i_?l_?|t_?r_?u_?e_?|f_?a_?l_?s_?e_?)\b`, KeywordPseudo, nil}, + {`\b_\b`, Name, nil}, // Standalone _ used as discardable variable identifier + {`\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*`, Name, nil}, + {`[0-9][0-9_]*(?=([e.]|\'f(32|64)))`, LiteralNumberFloat, Push("float-suffix", "float-number")}, + {`0x[a-f0-9][a-f0-9_]*`, LiteralNumberHex, Push("int-suffix")}, + {`0b[01][01_]*`, LiteralNumberBin, Push("int-suffix")}, + {`0o[0-7][0-7_]*`, LiteralNumberOct, Push("int-suffix")}, + {`[0-9][0-9_]*`, LiteralNumberInteger, Push("int-suffix")}, + {`\s+`, Text, nil}, + {`.+$`, Error, nil}, + }, + "chars": { + {`\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})`, LiteralStringEscape, nil}, + {`'`, LiteralStringChar, Pop(1)}, + {`.`, LiteralStringChar, nil}, + }, + "strings": { + {`(? 
<= < >= > *")...), Operator, nil}, + {`[;:]`, Punctuation, nil}, + }, + "comment": { + {`\*/`, CommentMultiline, Pop(1)}, + {`.|\n`, CommentMultiline, nil}, + }, + "paren": { + {`\)`, Punctuation, Pop(1)}, + Include("root"), + }, + "list": { + {`\]`, Punctuation, Pop(1)}, + Include("root"), + }, + "qstring": { + {`"`, StringDouble, Pop(1)}, + {`\${`, StringInterpol, Push("interpol")}, + {`\\.`, StringEscape, nil}, + {`.|\n`, StringDouble, nil}, + }, + "istring": { + {`''\$`, StringEscape, nil}, // "$" + {`'''`, StringEscape, nil}, // "''" + {`''\\.`, StringEscape, nil}, // "\." + {`''`, StringSingle, Pop(1)}, + {`\${`, StringInterpol, Push("interpol")}, + // The next rule is important: "$" escapes any symbol except "{"! + {`\$.`, StringSingle, nil}, // "$." + {`.|\n`, StringSingle, nil}, + }, + "scope": { + {`}:`, Punctuation, Pop(1)}, + {`}`, Punctuation, Pop(1)}, + {`in` + nixb, Keyword, Pop(1)}, + {`\${`, StringInterpol, Push("interpol")}, + Include("root"), // "==" has to be above "=" + {Words(``, ``, strings.Fields("= ? ,")...), Operator, nil}, + }, + "interpol": { + {`}`, StringInterpol, Pop(1)}, + Include("root"), + }, + "id": { + {`[a-zA-Z_][a-zA-Z0-9_'-]*`, Name, nil}, + }, + "uri": { + {`[a-zA-Z][a-zA-Z0-9+.-]*:[a-zA-Z0-9%/?:@&=+$,_.!~*'-]+`, StringDoc, nil}, + }, + "path": { + {`[a-zA-Z0-9._+-]*(/[a-zA-Z0-9._+-]+)+`, StringRegex, nil}, + {`~(/[a-zA-Z0-9._+-]+)+/?`, StringRegex, nil}, + {`<[a-zA-Z0-9._+-]+(/[a-zA-Z0-9._+-]+)*>`, StringRegex, nil}, + }, + "int": { + {`-?[0-9]+` + nixb, NumberInteger, nil}, + }, + "float": { + {`-?(([1-9][0-9]*\.[0-9]*)|(0?\.[0-9]+))([Ee][+-]?[0-9]+)?` + nixb, NumberFloat, nil}, + }, + "space": { + {`[ \t\r\n]+`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/o/objectivec.go b/vendor/github.com/alecthomas/chroma/lexers/o/objectivec.go new file mode 100644 index 00000000..e3d0b1c4 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/o/objectivec.go @@ -0,0 +1,165 @@ +package o + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Objective-C lexer. 
+var ObjectiveC = internal.Register(MustNewLexer( + &Config{ + Name: "Objective-C", + Aliases: []string{"objective-c", "objectivec", "obj-c", "objc"}, + Filenames: []string{"*.m", "*.h"}, + MimeTypes: []string{"text/x-objective-c"}, + }, + Rules{ + "statements": { + {`@"`, LiteralString, Push("string")}, + {`@(YES|NO)`, LiteralNumber, nil}, + {`@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'`, LiteralStringChar, nil}, + {`@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?`, LiteralNumberFloat, nil}, + {`@(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`@0x[0-9a-fA-F]+[Ll]?`, LiteralNumberHex, nil}, + {`@0[0-7]+[Ll]?`, LiteralNumberOct, nil}, + {`@\d+[Ll]?`, LiteralNumberInteger, nil}, + {`@\(`, Literal, Push("literal_number")}, + {`@\[`, Literal, Push("literal_array")}, + {`@\{`, Literal, Push("literal_dictionary")}, + {Words(``, `\b`, `@selector`, `@private`, `@protected`, `@public`, `@encode`, `@synchronized`, `@try`, `@throw`, `@catch`, `@finally`, `@end`, `@property`, `@synthesize`, `__bridge`, `__bridge_transfer`, `__autoreleasing`, `__block`, `__weak`, `__strong`, `weak`, `strong`, `copy`, `retain`, `assign`, `unsafe_unretained`, `atomic`, `nonatomic`, `readonly`, `readwrite`, `setter`, `getter`, `typeof`, `in`, `out`, `inout`, `release`, `class`, `@dynamic`, `@optional`, `@required`, `@autoreleasepool`), Keyword, nil}, + {Words(``, `\b`, `id`, `instancetype`, `Class`, `IMP`, `SEL`, `BOOL`, `IBOutlet`, `IBAction`, `unichar`), KeywordType, nil}, + {`@(true|false|YES|NO)\n`, NameBuiltin, nil}, + {`(YES|NO|nil|self|super)\b`, NameBuiltin, nil}, + {`(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b`, KeywordType, nil}, + {`(TRUE|FALSE)\b`, NameBuiltin, nil}, + {`(@interface|@implementation)(\s+)`, ByGroups(Keyword, Text), Push("#pop", "oc_classname")}, + {`(@class|@protocol)(\s+)`, ByGroups(Keyword, Text), Push("#pop", "oc_forward_classname")}, + {`@`, Punctuation, nil}, + {`(L?)(")`, ByGroups(LiteralStringAffix, LiteralString), Push("string")}, + {`(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')`, ByGroups(LiteralStringAffix, LiteralStringChar, LiteralStringChar, LiteralStringChar), nil}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, + {`0[0-7]+[LlUu]*`, LiteralNumberOct, nil}, + {`\d+[LlUu]*`, LiteralNumberInteger, nil}, + {`\*/`, Error, nil}, + {`[~!%^&*+=|?:<>/-]`, Operator, nil}, + {`[()\[\],.]`, Punctuation, nil}, + {Words(``, `\b`, `asm`, `auto`, `break`, `case`, `const`, `continue`, `default`, `do`, `else`, `enum`, `extern`, `for`, `goto`, `if`, `register`, `restricted`, `return`, `sizeof`, `static`, `struct`, `switch`, `typedef`, `union`, `volatile`, `while`), Keyword, nil}, + {`(bool|int|long|float|short|double|char|unsigned|signed|void)\b`, KeywordType, nil}, + {Words(``, `\b`, `inline`, `_inline`, `__inline`, `naked`, `restrict`, `thread`, `typename`), KeywordReserved, nil}, + {`(__m(128i|128d|128|64))\b`, KeywordReserved, nil}, + {Words(`__`, `\b`, `asm`, `int8`, `based`, `except`, `int16`, `stdcall`, `cdecl`, `fastcall`, `int32`, `declspec`, `finally`, `int64`, `try`, `leave`, `wchar_t`, `w64`, `unaligned`, `raise`, `noop`, `identifier`, `forceinline`, `assume`), KeywordReserved, nil}, + {`(true|false|NULL)\b`, NameBuiltin, nil}, + {`([a-zA-Z_]\w*)(\s*)(:)(?!:)`, ByGroups(NameLabel, Text, Punctuation), nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "oc_classname": { + 
{`([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)`, ByGroups(NameClass, Text, NameClass, Text, Punctuation), Push("#pop", "oc_ivars")}, + {`([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?`, ByGroups(NameClass, Text, NameClass), Pop(1)}, + {`([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)`, ByGroups(NameClass, Text, NameLabel, Text, Punctuation), Push("#pop", "oc_ivars")}, + {`([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))`, ByGroups(NameClass, Text, NameLabel), Pop(1)}, + {`([a-zA-Z$_][\w$]*)(\s*)(\{)`, ByGroups(NameClass, Text, Punctuation), Push("#pop", "oc_ivars")}, + {`([a-zA-Z$_][\w$]*)`, NameClass, Pop(1)}, + }, + "oc_forward_classname": { + {`([a-zA-Z$_][\w$]*)(\s*,\s*)`, ByGroups(NameClass, Text), Push("oc_forward_classname")}, + {`([a-zA-Z$_][\w$]*)(\s*;?)`, ByGroups(NameClass, Text), Pop(1)}, + }, + "oc_ivars": { + Include("whitespace"), + Include("statements"), + {`;`, Punctuation, nil}, + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + }, + "root": { + {`^([-+])(\s*)(\(.*?\))?(\s*)([a-zA-Z$_][\w$]*:?)`, ByGroups(Punctuation, Text, UsingSelf("root"), Text, NameFunction), Push("method")}, + Include("whitespace"), + {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), Push("function")}, + {`((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)`, ByGroups(UsingSelf("root"), NameFunction, UsingSelf("root"), UsingSelf("root"), Punctuation), nil}, + Default(Push("statement")), + }, + "method": { + Include("whitespace"), + {`,`, Punctuation, nil}, + {`\.\.\.`, Punctuation, nil}, + {`(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)`, ByGroups(UsingSelf("root"), Text, NameVariable), nil}, + {`[a-zA-Z$_][\w$]*:`, NameFunction, nil}, + {`;`, Punctuation, Pop(1)}, + {`\{`, Punctuation, Push("function")}, + Default(Pop(1)), + }, + "literal_number": { + {`\(`, Punctuation, Push("literal_number_inner")}, + {`\)`, Literal, Pop(1)}, + Include("statement"), + }, + "literal_number_inner": { + {`\(`, Punctuation, Push()}, + {`\)`, Punctuation, Pop(1)}, + Include("statement"), + }, + "literal_array": { + {`\[`, Punctuation, Push("literal_array_inner")}, + {`\]`, Literal, Pop(1)}, + Include("statement"), + }, + "literal_array_inner": { + {`\[`, Punctuation, Push()}, + {`\]`, Punctuation, Pop(1)}, + Include("statement"), + }, + "literal_dictionary": { + {`\}`, Literal, Pop(1)}, + Include("statement"), + }, + "whitespace": { + {`^#if\s+0`, CommentPreproc, Push("if0")}, + {`^#`, CommentPreproc, Push("macro")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("if0")}, + {`^(\s*(?:/[*].*?[*]/\s*)?)(#)`, ByGroups(UsingSelf("root"), CommentPreproc), Push("macro")}, + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`//(\n|[\w\W]*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?[*][\w\W]*?[*](\\\n)?/`, CommentMultiline, nil}, + {`/(\\\n)?[*][\w\W]*`, CommentMultiline, nil}, + }, + "statement": { + Include("whitespace"), + Include("statements"), + {`[{}]`, Punctuation, nil}, + {`;`, Punctuation, Pop(1)}, + }, + "function": { + Include("whitespace"), + Include("statements"), + {`;`, Punctuation, nil}, + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})`, LiteralStringEscape, nil}, + {`[^\\"\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + 
}, + "macro": { + {`(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)`, ByGroups(CommentPreproc, Text, CommentPreprocFile), nil}, + {`[^/\n]+`, CommentPreproc, nil}, + {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {`/`, CommentPreproc, nil}, + {`(?<=\\)\n`, CommentPreproc, nil}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "if0": { + {`^\s*#if.*?(?|\\[<|\\[|\\?\\?|\\?|>\\}|>]|>|=|<-|<|;;|;|:>|:=|::|:|\\.\\.|\\.|->|-\\.|-|,|\\+|\\*|\\)|\\(|&&|&|#|!=)", Operator, nil}, + {`([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]`, Operator, nil}, + {`\b(and|asr|land|lor|lsl|lxor|mod|or)\b`, OperatorWord, nil}, + {`\b(unit|int|float|bool|string|char|list|array)\b`, KeywordType, nil}, + {`[^\W\d][\w']*`, Name, nil}, + {`-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)`, LiteralNumberFloat, nil}, + {`0[xX][\da-fA-F][\da-fA-F_]*`, LiteralNumberHex, nil}, + {`0[oO][0-7][0-7_]*`, LiteralNumberOct, nil}, + {`0[bB][01][01_]*`, LiteralNumberBin, nil}, + {`\d[\d_]*`, LiteralNumberInteger, nil}, + {`'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'`, LiteralStringChar, nil}, + {`'.'`, LiteralStringChar, nil}, + {`'`, Keyword, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`[~?][a-z][\w\']*:`, NameVariable, nil}, + }, + "comment": { + {`[^(*)]+`, Comment, nil}, + {`\(\*`, Comment, Push()}, + {`\*\)`, Comment, Pop(1)}, + {`[(*)]`, Comment, nil}, + }, + "string": { + {`[^\\"]+`, LiteralStringDouble, nil}, + Include("escape-sequence"), + {`\\\n`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "dotted": { + {`\s+`, Text, nil}, + {`\.`, Punctuation, nil}, + {`[A-Z][\w\']*(?=\s*\.)`, NameNamespace, nil}, + {`[A-Z][\w\']*`, NameClass, Pop(1)}, + {`[a-z_][\w\']*`, Name, Pop(1)}, + Default(Pop(1)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/o/octave.go b/vendor/github.com/alecthomas/chroma/lexers/o/octave.go new file mode 100644 index 00000000..c23b586f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/o/octave.go @@ -0,0 +1,46 @@ +package o + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Octave lexer. 
+var Octave = internal.Register(MustNewLexer( + &Config{ + Name: "Octave", + Aliases: []string{"octave"}, + Filenames: []string{"*.m"}, + MimeTypes: []string{"text/octave"}, + }, + Rules{ + "root": { + {`[%#].*$`, Comment, nil}, + {`^\s*function`, Keyword, Push("deffunc")}, + {Words(``, `\b`, `__FILE__`, `__LINE__`, `break`, `case`, `catch`, `classdef`, `continue`, `do`, `else`, `elseif`, `end`, `end_try_catch`, `end_unwind_protect`, `endclassdef`, `endevents`, `endfor`, `endfunction`, `endif`, `endmethods`, `endproperties`, `endswitch`, `endwhile`, `events`, `for`, `function`, `get`, `global`, `if`, `methods`, `otherwise`, `persistent`, `properties`, `return`, `set`, `static`, `switch`, `try`, `until`, `unwind_protect`, `unwind_protect_cleanup`, `while`), Keyword, nil}, + {Words(``, `\b`, `addlistener`, `addpath`, `addproperty`, `all`, `and`, `any`, `argnames`, `argv`, `assignin`, `atexit`, `autoload`, `available_graphics_toolkits`, `beep_on_error`, `bitand`, `bitmax`, `bitor`, `bitshift`, `bitxor`, `cat`, `cell`, `cellstr`, `char`, `class`, `clc`, `columns`, `command_line_path`, `completion_append_char`, `completion_matches`, `complex`, `confirm_recursive_rmdir`, `cputime`, `crash_dumps_octave_core`, `ctranspose`, `cumprod`, `cumsum`, `debug_on_error`, `debug_on_interrupt`, `debug_on_warning`, `default_save_options`, `dellistener`, `diag`, `diff`, `disp`, `doc_cache_file`, `do_string_escapes`, `double`, `drawnow`, `e`, `echo_executing_commands`, `eps`, `eq`, `errno`, `errno_list`, `error`, `eval`, `evalin`, `exec`, `exist`, `exit`, `eye`, `false`, `fclear`, `fclose`, `fcntl`, `fdisp`, `feof`, `ferror`, `feval`, `fflush`, `fgetl`, `fgets`, `fieldnames`, `file_in_loadpath`, `file_in_path`, `filemarker`, `filesep`, `find_dir_in_path`, `fixed_point_format`, `fnmatch`, `fopen`, `fork`, `formula`, `fprintf`, `fputs`, `fread`, `freport`, `frewind`, `fscanf`, `fseek`, `fskipl`, `ftell`, `functions`, `fwrite`, `ge`, `genpath`, `get`, `getegid`, `getenv`, `geteuid`, `getgid`, `getpgrp`, `getpid`, `getppid`, `getuid`, `glob`, `gt`, `gui_mode`, `history_control`, `history_file`, `history_size`, `history_timestamp_format_string`, `home`, `horzcat`, `hypot`, `ifelse`, `ignore_function_time_stamp`, `inferiorto`, `info_file`, `info_program`, `inline`, `input`, `intmax`, `intmin`, `ipermute`, `is_absolute_filename`, `isargout`, `isbool`, `iscell`, `iscellstr`, `ischar`, `iscomplex`, `isempty`, `isfield`, `isfloat`, `isglobal`, `ishandle`, `isieee`, `isindex`, `isinteger`, `islogical`, `ismatrix`, `ismethod`, `isnull`, `isnumeric`, `isobject`, `isreal`, `is_rooted_relative_filename`, `issorted`, `isstruct`, `isvarname`, `kbhit`, `keyboard`, `kill`, `lasterr`, `lasterror`, `lastwarn`, `ldivide`, `le`, `length`, `link`, `linspace`, `logical`, `lstat`, `lt`, `make_absolute_filename`, `makeinfo_program`, `max_recursion_depth`, `merge`, `methods`, `mfilename`, `minus`, `mislocked`, `mkdir`, `mkfifo`, `mkstemp`, `mldivide`, `mlock`, `mouse_wheel_zoom`, `mpower`, `mrdivide`, `mtimes`, `munlock`, `nargin`, `nargout`, `native_float_format`, `ndims`, `ne`, `nfields`, `nnz`, `norm`, `not`, `numel`, `nzmax`, `octave_config_info`, `octave_core_file_limit`, `octave_core_file_name`, `octave_core_file_options`, `ones`, `or`, `output_max_field_width`, `output_precision`, `page_output_immediately`, `page_screen_output`, `path`, `pathsep`, `pause`, `pclose`, `permute`, `pi`, `pipe`, `plus`, `popen`, `power`, `print_empty_dimensions`, `printf`, `print_struct_array_contents`, `prod`, `program_invocation_name`, `program_name`, 
`putenv`, `puts`, `pwd`, `quit`, `rats`, `rdivide`, `readdir`, `readlink`, `read_readline_init_file`, `realmax`, `realmin`, `rehash`, `rename`, `repelems`, `re_read_readline_init_file`, `reset`, `reshape`, `resize`, `restoredefaultpath`, `rethrow`, `rmdir`, `rmfield`, `rmpath`, `rows`, `save_header_format_string`, `save_precision`, `saving_history`, `scanf`, `set`, `setenv`, `shell_cmd`, `sighup_dumps_octave_core`, `sigterm_dumps_octave_core`, `silent_functions`, `single`, `size`, `size_equal`, `sizemax`, `sizeof`, `sleep`, `source`, `sparse_auto_mutate`, `split_long_rows`, `sprintf`, `squeeze`, `sscanf`, `stat`, `stderr`, `stdin`, `stdout`, `strcmp`, `strcmpi`, `string_fill_char`, `strncmp`, `strncmpi`, `struct`, `struct_levels_to_print`, `strvcat`, `subsasgn`, `subsref`, `sum`, `sumsq`, `superiorto`, `suppress_verbose_help_message`, `symlink`, `system`, `tic`, `tilde_expand`, `times`, `tmpfile`, `tmpnam`, `toc`, `toupper`, `transpose`, `true`, `typeinfo`, `umask`, `uminus`, `uname`, `undo_string_escapes`, `unlink`, `uplus`, `upper`, `usage`, `usleep`, `vec`, `vectorize`, `vertcat`, `waitpid`, `warning`, `warranty`, `whos_line_format`, `yes_or_no`, `zeros`, `inf`, `Inf`, `nan`, `NaN`, `close`, `load`, `who`, `whos`, `accumarray`, `accumdim`, `acosd`, `acotd`, `acscd`, `addtodate`, `allchild`, `ancestor`, `anova`, `arch_fit`, `arch_rnd`, `arch_test`, `area`, `arma_rnd`, `arrayfun`, `ascii`, `asctime`, `asecd`, `asind`, `assert`, `atand`, `autoreg_matrix`, `autumn`, `axes`, `axis`, `bar`, `barh`, `bartlett`, `bartlett_test`, `beep`, `betacdf`, `betainv`, `betapdf`, `betarnd`, `bicgstab`, `bicubic`, `binary`, `binocdf`, `binoinv`, `binopdf`, `binornd`, `bitcmp`, `bitget`, `bitset`, `blackman`, `blanks`, `blkdiag`, `bone`, `box`, `brighten`, `calendar`, `cast`, `cauchy_cdf`, `cauchy_inv`, `cauchy_pdf`, `cauchy_rnd`, `caxis`, `celldisp`, `center`, `cgs`, `chisquare_test_homogeneity`, `chisquare_test_independence`, `circshift`, `cla`, `clabel`, `clf`, `clock`, `cloglog`, `closereq`, `colon`, `colorbar`, `colormap`, `colperm`, `comet`, `common_size`, `commutation_matrix`, `compan`, `compare_versions`, `compass`, `computer`, `cond`, `condest`, `contour`, `contourc`, `contourf`, `contrast`, `conv`, `convhull`, `cool`, `copper`, `copyfile`, `cor`, `corrcoef`, `cor_test`, `cosd`, `cotd`, `cov`, `cplxpair`, `cross`, `cscd`, `cstrcat`, `csvread`, `csvwrite`, `ctime`, `cumtrapz`, `curl`, `cut`, `cylinder`, `date`, `datenum`, `datestr`, `datetick`, `datevec`, `dblquad`, `deal`, `deblank`, `deconv`, `delaunay`, `delaunayn`, `delete`, `demo`, `detrend`, `diffpara`, `diffuse`, `dir`, `discrete_cdf`, `discrete_inv`, `discrete_pdf`, `discrete_rnd`, `display`, `divergence`, `dlmwrite`, `dos`, `dsearch`, `dsearchn`, `duplication_matrix`, `durbinlevinson`, `ellipsoid`, `empirical_cdf`, `empirical_inv`, `empirical_pdf`, `empirical_rnd`, `eomday`, `errorbar`, `etime`, `etreeplot`, `example`, `expcdf`, `expinv`, `expm`, `exppdf`, `exprnd`, `ezcontour`, `ezcontourf`, `ezmesh`, `ezmeshc`, `ezplot`, `ezpolar`, `ezsurf`, `ezsurfc`, `factor`, `factorial`, `fail`, `fcdf`, `feather`, `fftconv`, `fftfilt`, `fftshift`, `figure`, `fileattrib`, `fileparts`, `fill`, `findall`, `findobj`, `findstr`, `finv`, `flag`, `flipdim`, `fliplr`, `flipud`, `fpdf`, `fplot`, `fractdiff`, `freqz`, `freqz_plot`, `frnd`, `fsolve`, `f_test_regression`, `ftp`, `fullfile`, `fzero`, `gamcdf`, `gaminv`, `gampdf`, `gamrnd`, `gca`, `gcbf`, `gcbo`, `gcf`, `genvarname`, `geocdf`, `geoinv`, `geopdf`, `geornd`, `getfield`, `ginput`, `glpk`, `gls`, 
`gplot`, `gradient`, `graphics_toolkit`, `gray`, `grid`, `griddata`, `griddatan`, `gtext`, `gunzip`, `gzip`, `hadamard`, `hamming`, `hankel`, `hanning`, `hggroup`, `hidden`, `hilb`, `hist`, `histc`, `hold`, `hot`, `hotelling_test`, `housh`, `hsv`, `hurst`, `hygecdf`, `hygeinv`, `hygepdf`, `hygernd`, `idivide`, `ifftshift`, `image`, `imagesc`, `imfinfo`, `imread`, `imshow`, `imwrite`, `index`, `info`, `inpolygon`, `inputname`, `interpft`, `interpn`, `intersect`, `invhilb`, `iqr`, `isa`, `isdefinite`, `isdir`, `is_duplicate_entry`, `isequal`, `isequalwithequalnans`, `isfigure`, `ishermitian`, `ishghandle`, `is_leap_year`, `isletter`, `ismac`, `ismember`, `ispc`, `isprime`, `isprop`, `isscalar`, `issquare`, `isstrprop`, `issymmetric`, `isunix`, `is_valid_file_id`, `isvector`, `jet`, `kendall`, `kolmogorov_smirnov_cdf`, `kolmogorov_smirnov_test`, `kruskal_wallis_test`, `krylov`, `kurtosis`, `laplace_cdf`, `laplace_inv`, `laplace_pdf`, `laplace_rnd`, `legend`, `legendre`, `license`, `line`, `linkprop`, `list_primes`, `loadaudio`, `loadobj`, `logistic_cdf`, `logistic_inv`, `logistic_pdf`, `logistic_rnd`, `logit`, `loglog`, `loglogerr`, `logm`, `logncdf`, `logninv`, `lognpdf`, `lognrnd`, `logspace`, `lookfor`, `ls_command`, `lsqnonneg`, `magic`, `mahalanobis`, `manova`, `matlabroot`, `mcnemar_test`, `mean`, `meansq`, `median`, `menu`, `mesh`, `meshc`, `meshgrid`, `meshz`, `mexext`, `mget`, `mkpp`, `mode`, `moment`, `movefile`, `mpoles`, `mput`, `namelengthmax`, `nargchk`, `nargoutchk`, `nbincdf`, `nbininv`, `nbinpdf`, `nbinrnd`, `nchoosek`, `ndgrid`, `newplot`, `news`, `nonzeros`, `normcdf`, `normest`, `norminv`, `normpdf`, `normrnd`, `now`, `nthroot`, `null`, `ocean`, `ols`, `onenormest`, `optimget`, `optimset`, `orderfields`, `orient`, `orth`, `pack`, `pareto`, `parseparams`, `pascal`, `patch`, `pathdef`, `pcg`, `pchip`, `pcolor`, `pcr`, `peaks`, `periodogram`, `perl`, `perms`, `pie`, `pink`, `planerot`, `playaudio`, `plot`, `plotmatrix`, `plotyy`, `poisscdf`, `poissinv`, `poisspdf`, `poissrnd`, `polar`, `poly`, `polyaffine`, `polyarea`, `polyderiv`, `polyfit`, `polygcd`, `polyint`, `polyout`, `polyreduce`, `polyval`, `polyvalm`, `postpad`, `powerset`, `ppder`, `ppint`, `ppjumps`, `ppplot`, `ppval`, `pqpnonneg`, `prepad`, `primes`, `print`, `print_usage`, `prism`, `probit`, `qp`, `qqplot`, `quadcc`, `quadgk`, `quadl`, `quadv`, `quiver`, `qzhess`, `rainbow`, `randi`, `range`, `rank`, `ranks`, `rat`, `reallog`, `realpow`, `realsqrt`, `record`, `rectangle_lw`, `rectangle_sw`, `rectint`, `refresh`, `refreshdata`, `regexptranslate`, `repmat`, `residue`, `ribbon`, `rindex`, `roots`, `rose`, `rosser`, `rotdim`, `rref`, `run`, `run_count`, `rundemos`, `run_test`, `runtests`, `saveas`, `saveaudio`, `saveobj`, `savepath`, `scatter`, `secd`, `semilogx`, `semilogxerr`, `semilogy`, `semilogyerr`, `setaudio`, `setdiff`, `setfield`, `setxor`, `shading`, `shift`, `shiftdim`, `sign_test`, `sinc`, `sind`, `sinetone`, `sinewave`, `skewness`, `slice`, `sombrero`, `sortrows`, `spaugment`, `spconvert`, `spdiags`, `spearman`, `spectral_adf`, `spectral_xdf`, `specular`, `speed`, `spencer`, `speye`, `spfun`, `sphere`, `spinmap`, `spline`, `spones`, `sprand`, `sprandn`, `sprandsym`, `spring`, `spstats`, `spy`, `sqp`, `stairs`, `statistics`, `std`, `stdnormal_cdf`, `stdnormal_inv`, `stdnormal_pdf`, `stdnormal_rnd`, `stem`, `stft`, `strcat`, `strchr`, `strjust`, `strmatch`, `strread`, `strsplit`, `strtok`, `strtrim`, `strtrunc`, `structfun`, `studentize`, `subplot`, `subsindex`, `subspace`, `substr`, `substruct`, 
`summer`, `surf`, `surface`, `surfc`, `surfl`, `surfnorm`, `svds`, `swapbytes`, `sylvester_matrix`, `symvar`, `synthesis`, `table`, `tand`, `tar`, `tcdf`, `tempdir`, `tempname`, `test`, `text`, `textread`, `textscan`, `tinv`, `title`, `toeplitz`, `tpdf`, `trace`, `trapz`, `treelayout`, `treeplot`, `triangle_lw`, `triangle_sw`, `tril`, `trimesh`, `triplequad`, `triplot`, `trisurf`, `triu`, `trnd`, `tsearchn`, `t_test`, `t_test_regression`, `type`, `unidcdf`, `unidinv`, `unidpdf`, `unidrnd`, `unifcdf`, `unifinv`, `unifpdf`, `unifrnd`, `union`, `unique`, `unix`, `unmkpp`, `unpack`, `untabify`, `untar`, `unwrap`, `unzip`, `u_test`, `validatestring`, `vander`, `var`, `var_test`, `vech`, `ver`, `version`, `view`, `voronoi`, `voronoin`, `waitforbuttonpress`, `wavread`, `wavwrite`, `wblcdf`, `wblinv`, `wblpdf`, `wblrnd`, `weekday`, `welch_test`, `what`, `white`, `whitebg`, `wienrnd`, `wilcoxon_test`, `wilkinson`, `winter`, `xlabel`, `xlim`, `ylabel`, `yulewalker`, `zip`, `zlabel`, `z_test`, `airy`, `amd`, `balance`, `besselh`, `besseli`, `besselj`, `besselk`, `bessely`, `bitpack`, `bsxfun`, `builtin`, `ccolamd`, `cellfun`, `cellslices`, `chol`, `choldelete`, `cholinsert`, `cholinv`, `cholshift`, `cholupdate`, `colamd`, `colloc`, `convhulln`, `convn`, `csymamd`, `cummax`, `cummin`, `daspk`, `daspk_options`, `dasrt`, `dasrt_options`, `dassl`, `dassl_options`, `dbclear`, `dbdown`, `dbstack`, `dbstatus`, `dbstop`, `dbtype`, `dbup`, `dbwhere`, `det`, `dlmread`, `dmperm`, `dot`, `eig`, `eigs`, `endgrent`, `endpwent`, `etree`, `fft`, `fftn`, `fftw`, `filter`, `find`, `full`, `gcd`, `getgrent`, `getgrgid`, `getgrnam`, `getpwent`, `getpwnam`, `getpwuid`, `getrusage`, `givens`, `gmtime`, `gnuplot_binary`, `hess`, `ifft`, `ifftn`, `inv`, `isdebugmode`, `issparse`, `kron`, `localtime`, `lookup`, `lsode`, `lsode_options`, `lu`, `luinc`, `luupdate`, `matrix_type`, `max`, `min`, `mktime`, `pinv`, `qr`, `qrdelete`, `qrinsert`, `qrshift`, `qrupdate`, `quad`, `quad_options`, `qz`, `rand`, `rande`, `randg`, `randn`, `randp`, `randperm`, `rcond`, `regexp`, `regexpi`, `regexprep`, `schur`, `setgrent`, `setpwent`, `sort`, `spalloc`, `sparse`, `spparms`, `sprank`, `sqrtm`, `strfind`, `strftime`, `strptime`, `strrep`, `svd`, `svd_driver`, `syl`, `symamd`, `symbfact`, `symrcm`, `time`, `tsearch`, `typecast`, `urlread`, `urlwrite`, `abs`, `acos`, `acosh`, `acot`, `acoth`, `acsc`, `acsch`, `angle`, `arg`, `asec`, `asech`, `asin`, `asinh`, `atan`, `atanh`, `beta`, `betainc`, `betaln`, `bincoeff`, `cbrt`, `ceil`, `conj`, `cos`, `cosh`, `cot`, `coth`, `csc`, `csch`, `erf`, `erfc`, `erfcx`, `erfinv`, `exp`, `finite`, `fix`, `floor`, `fmod`, `gamma`, `gammainc`, `gammaln`, `imag`, `isalnum`, `isalpha`, `isascii`, `iscntrl`, `isdigit`, `isfinite`, `isgraph`, `isinf`, `islower`, `isna`, `isnan`, `isprint`, `ispunct`, `isspace`, `isupper`, `isxdigit`, `lcm`, `lgamma`, `log`, `lower`, `mod`, `real`, `rem`, `round`, `roundb`, `sec`, `sech`, `sign`, `sin`, `sinh`, `sqrt`, `tan`, `tanh`, `toascii`, `tolower`, `xor`), NameBuiltin, nil}, + {Words(``, `\b`, `EDITOR`, `EXEC_PATH`, `I`, `IMAGE_PATH`, `NA`, `OCTAVE_HOME`, `OCTAVE_VERSION`, `PAGER`, `PAGER_FLAGS`, `SEEK_CUR`, `SEEK_END`, `SEEK_SET`, `SIG`, `S_ISBLK`, `S_ISCHR`, `S_ISDIR`, `S_ISFIFO`, `S_ISLNK`, `S_ISREG`, `S_ISSOCK`, `WCONTINUE`, `WCOREDUMP`, `WEXITSTATUS`, `WIFCONTINUED`, `WIFEXITED`, `WIFSIGNALED`, `WIFSTOPPED`, `WNOHANG`, `WSTOPSIG`, `WTERMSIG`, `WUNTRACED`), NameConstant, nil}, + {`-=|!=|!|/=|--`, Operator, nil}, + {`-|==|~=|<|>|<=|>=|&&|&|~|\|\|?`, Operator, nil}, + 
{`\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*`, Operator, nil}, + {`\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\`, Operator, nil}, + {`[\[\](){}:@.,]`, Punctuation, nil}, + {`=|:|;`, Punctuation, nil}, + {`"[^"]*"`, LiteralString, nil}, + {`(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`\d+[eEf][+-]?[0-9]+`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`(?<=[\w)\].])\'+`, Operator, nil}, + {`(?=|>|&&|\|\|`, Operator, nil}, + {`\$(f[asn]|t|vp[rtd]|children)`, NameVariableMagic, nil}, + {Words(``, `\b`, `PI`, `undef`), KeywordConstant, nil}, + {`(use|include)((?:\s|\\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("includes")}, + {`(module)(\s*)([^\s\(]+)`, ByGroups(KeywordNamespace, Text, NameNamespace), nil}, + {`(function)(\s*)([^\s\(]+)`, ByGroups(KeywordDeclaration, Text, NameFunction), nil}, + {`\b(true|false)\b`, Literal, nil}, + {`\b(function|module|include|use|for|intersection_for|if|else|return)\b`, Keyword, nil}, + {`\b(circle|square|polygon|text|sphere|cube|cylinder|polyhedron|translate|rotate|scale|resize|mirror|multmatrix|color|offset|hull|minkowski|union|difference|intersection|abs|sign|sin|cos|tan|acos|asin|atan|atan2|floor|round|ceil|ln|log|pow|sqrt|exp|rands|min|max|concat|lookup|str|chr|search|version|version_num|norm|cross|parent_module|echo|import|import_dxf|dxf_linear_extrude|linear_extrude|rotate_extrude|surface|projection|render|dxf_cross|dxf_dim|let|assign|len)\b`, NameBuiltin, nil}, + {`\bchildren\b`, NameBuiltinPseudo, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`-?\d+(\.\d+)?(e[+-]?\d+)?`, Number, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "includes": { + {"(<)([^>]*)(>)", ByGroups(Punctuation, CommentPreprocFile, Punctuation), nil}, + Default(Pop(1)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/o/org.go b/vendor/github.com/alecthomas/chroma/lexers/o/org.go new file mode 100644 index 00000000..1971602c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/o/org.go @@ -0,0 +1,102 @@ +package o + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Org mode lexer. 
+var Org = internal.Register(MustNewLexer( + &Config{ + Name: "Org Mode", + Aliases: []string{"org", "orgmode"}, + Filenames: []string{"*.org"}, + MimeTypes: []string{"text/org"}, // https://lists.gnu.org/r/emacs-orgmode/2017-09/msg00087.html + }, + Rules{ + "root": { + {`^# .*$`, Comment, nil}, + // Headings + {`^(\*)( COMMENT)( .*)$`, ByGroups(GenericHeading, NameEntity, GenericStrong), nil}, + {`^(\*\*+)( COMMENT)( .*)$`, ByGroups(GenericSubheading, NameEntity, Text), nil}, + {`^(\*)( DONE)( .*)$`, ByGroups(GenericHeading, LiteralStringRegex, GenericStrong), nil}, + {`^(\*\*+)( DONE)( .*)$`, ByGroups(GenericSubheading, LiteralStringRegex, Text), nil}, + {`^(\*)( TODO)( .*)$`, ByGroups(GenericHeading, Error, GenericStrong), nil}, + {`^(\*\*+)( TODO)( .*)$`, ByGroups(GenericSubheading, Error, Text), nil}, + {`^(\*)( .+?)( :[a-zA-Z0-9_@:]+:)$`, ByGroups(GenericHeading, GenericStrong, GenericEmph), nil}, // Level 1 heading with tags + {`^(\*)( .+)$`, ByGroups(GenericHeading, GenericStrong), nil}, // // Level 1 heading with NO tags + {`^(\*\*+)( .+?)( :[a-zA-Z0-9_@:]+:)$`, ByGroups(GenericSubheading, Text, GenericEmph), nil}, // Level 2+ heading with tags + {`^(\*\*+)( .+)$`, ByGroups(GenericSubheading, Text), nil}, // Level 2+ heading with NO tags + // Checkbox lists + {`^( *)([+-] )(\[[ X]\])( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, + {`^( +)(\* )(\[[ X]\])( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, + // Definition lists + {`^( *)([+-] )([^ \n]+ ::)( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, + {`^( +)(\* )([^ \n]+ ::)( .+)$`, ByGroups(Text, Keyword, Keyword, UsingSelf("inline")), nil}, + // Unordered lists + {`^( *)([+-] )(.+)$`, ByGroups(Text, Keyword, UsingSelf("inline")), nil}, + {`^( +)(\* )(.+)$`, ByGroups(Text, Keyword, UsingSelf("inline")), nil}, + // Ordered lists + {`^( *)([0-9]+[.)])( \[@[0-9]+\])( .+)$`, ByGroups(Text, Keyword, GenericEmph, UsingSelf("inline")), nil}, + {`^( *)([0-9]+[.)])( .+)$`, ByGroups(Text, Keyword, UsingSelf("inline")), nil}, + // Dynamic Blocks + {`(?i)^( *#\+begin: )([^ ]+)([\w\W]*?\n)([\w\W]*?)(^ *#\+end: *$)`, ByGroups(Comment, CommentSpecial, Comment, UsingSelf("inline"), Comment), nil}, + // Blocks + // - Comment Blocks + {`(?i)^( *#\+begin_comment *\n)([\w\W]*?)(^ *#\+end_comment *$)`, ByGroups(Comment, Comment, Comment), nil}, + // - Src Blocks + {`(?i)^( *#\+begin_src )([^ \n]+)(.*?\n)([\w\W]*?)(^ *#\+end_src *$)`, + UsingByGroup( + internal.Get, + 2, 4, + Comment, CommentSpecial, Comment, Text, Comment, + ), + nil, + }, + // - Export Blocks + {`(?i)^( *#\+begin_export )(\w+)( *\n)([\w\W]*?)(^ *#\+end_export *$)`, + UsingByGroup( + internal.Get, + 2, 4, + Comment, CommentSpecial, Text, Text, Comment, + ), + nil, + }, + // - Org Special, Example, Verse, etc. 
Blocks + {`(?i)^( *#\+begin_)(\w+)( *\n)([\w\W]*?)(^ *#\+end_\2)( *$)`, ByGroups(Comment, Comment, Text, Text, Comment, Text), nil}, + // Keywords + {`^(#\+\w+)(:.*)$`, ByGroups(CommentSpecial, Comment), nil}, // Other Org keywords like #+title + // Properties and Drawers + {`(?i)^( *:\w+: *\n)([\w\W]*?)(^ *:end: *$)`, ByGroups(Comment, CommentSpecial, Comment), nil}, + // Line break operator + {`^(.*)(\\\\)$`, ByGroups(UsingSelf("inline"), Operator), nil}, + // Deadline/Scheduled + {`(?i)^( *(?:DEADLINE|SCHEDULED): )(<[^<>]+?> *)$`, ByGroups(Comment, CommentSpecial), nil}, // DEADLINE/SCHEDULED: + // DONE state CLOSED + {`(?i)^( *CLOSED: )(\[[^][]+?\] *)$`, ByGroups(Comment, CommentSpecial), nil}, // CLOSED: [datestamp] + // All other lines + Include("inline"), + }, + "inline": { + {`(\s)*(\*[^ \n*][^*]+?[^ \n*]\*)((?=\W|\n|$))`, ByGroups(Text, GenericStrong, Text), nil}, // Bold + {`(\s)*(/[^/]+?/)((?=\W|\n|$))`, ByGroups(Text, GenericEmph, Text), nil}, // Italic + {`(\s)*(=[^\n=]+?=)((?=\W|\n|$))`, ByGroups(Text, NameClass, Text), nil}, // Verbatim + {`(\s)*(~[^\n~]+?~)((?=\W|\n|$))`, ByGroups(Text, NameClass, Text), nil}, // Code + {`(\s)*(\+[^+]+?\+)((?=\W|\n|$))`, ByGroups(Text, GenericDeleted, Text), nil}, // Strikethrough + {`(\s)*(_[^_]+?_)((?=\W|\n|$))`, ByGroups(Text, GenericUnderline, Text), nil}, // Underline + {`(<)([^<>]+?)(>)`, ByGroups(Text, String, Text), nil}, // + {`[{]{3}[^}]+[}]{3}`, NameBuiltin, nil}, // {{{macro(foo,1)}}} + {`([^[])(\[fn:)([^]]+?)(\])([^]])`, ByGroups(Text, NameBuiltinPseudo, LiteralString, NameBuiltinPseudo, Text), nil}, // [fn:1] + // Links + {`(\[\[)([^][]+?)(\]\[)([^][]+)(\]\])`, ByGroups(Text, NameAttribute, Text, NameTag, Text), nil}, // [[link][descr]] + {`(\[\[)([^][]+?)(\]\])`, ByGroups(Text, NameAttribute, Text), nil}, // [[link]] + {`(<<)([^<>]+?)(>>)`, ByGroups(Text, NameAttribute, Text), nil}, // <> + // Tables + {`^( *)(\|[ -].*?[ -]\|)$`, ByGroups(Text, String), nil}, + // Blank lines, newlines + {`\n`, Text, nil}, + // Any other text + {`.`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/pacman.go b/vendor/github.com/alecthomas/chroma/lexers/p/pacman.go new file mode 100644 index 00000000..807b67c5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/pacman.go @@ -0,0 +1,26 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Pacmanconf lexer. +var Pacmanconf = internal.Register(MustNewLexer( + &Config{ + Name: "PacmanConf", + Aliases: []string{"pacmanconf"}, + Filenames: []string{"pacman.conf"}, + MimeTypes: []string{}, + }, + Rules{ + "root": { + {`#.*$`, CommentSingle, nil}, + {`^\s*\[.*?\]\s*$`, Keyword, nil}, + {`(\w+)(\s*)(=)`, ByGroups(NameAttribute, Text, Operator), nil}, + {`^(\s*)(\w+)(\s*)$`, ByGroups(Text, NameAttribute, Text), nil}, + {Words(``, `\b`, `$repo`, `$arch`, `%o`, `%u`), NameVariable, nil}, + {`.`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/perl.go b/vendor/github.com/alecthomas/chroma/lexers/p/perl.go new file mode 100644 index 00000000..0a2b35b9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/perl.go @@ -0,0 +1,138 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Perl lexer. 
+var Perl = internal.Register(MustNewLexer( + &Config{ + Name: "Perl", + Aliases: []string{"perl", "pl"}, + Filenames: []string{"*.pl", "*.pm", "*.t"}, + MimeTypes: []string{"text/x-perl", "application/x-perl"}, + DotAll: true, + }, + Rules{ + "balanced-regex": { + {`/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`!(\\\\|\\[^\\]|[^\\!])*![egimosx]*`, LiteralStringRegex, Pop(1)}, + {`\\(\\\\|[^\\])*\\[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`\{(\\\\|\\[^\\]|[^\\}])*\}[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*`, LiteralStringRegex, Pop(1)}, + {`\((\\\\|\\[^\\]|[^\\)])*\)[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*`, LiteralStringRegex, Pop(1)}, + {`\$(\\\\|\\[^\\]|[^\\$])*\$[egimosx]*`, LiteralStringRegex, Pop(1)}, + }, + "root": { + {`\A\#!.+?$`, CommentHashbang, nil}, + {`\#.*?$`, CommentSingle, nil}, + {`^=[a-zA-Z0-9]+\s+.*?\n=cut`, CommentMultiline, nil}, + {Words(``, `\b`, `case`, `continue`, `do`, `else`, `elsif`, `for`, `foreach`, `if`, `last`, `my`, `next`, `our`, `redo`, `reset`, `then`, `unless`, `until`, `while`, `print`, `new`, `BEGIN`, `CHECK`, `INIT`, `END`, `return`), Keyword, nil}, + {`(format)(\s+)(\w+)(\s*)(=)(\s*\n)`, ByGroups(Keyword, Text, Name, Text, Punctuation, Text), Push("format")}, + {`(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b`, OperatorWord, nil}, + {`s/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*`, LiteralStringRegex, nil}, + {`s!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*`, LiteralStringRegex, nil}, + {`s\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*`, LiteralStringRegex, nil}, + {`s@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*`, LiteralStringRegex, nil}, + {`s%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*`, LiteralStringRegex, nil}, + {`s\{(\\\\|\\[^\\]|[^\\}])*\}\s*`, LiteralStringRegex, Push("balanced-regex")}, + {`s<(\\\\|\\[^\\]|[^\\>])*>\s*`, LiteralStringRegex, Push("balanced-regex")}, + {`s\[(\\\\|\\[^\\]|[^\\\]])*\]\s*`, LiteralStringRegex, Push("balanced-regex")}, + {`s\((\\\\|\\[^\\]|[^\\)])*\)\s*`, LiteralStringRegex, Push("balanced-regex")}, + {`m?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*`, LiteralStringRegex, nil}, + {`m(?=[/!\\{<\[(@%$])`, LiteralStringRegex, Push("balanced-regex")}, + {`((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*`, LiteralStringRegex, nil}, + {`\s+`, Text, nil}, + {Words(``, `\b`, `abs`, `accept`, `alarm`, `atan2`, `bind`, `binmode`, `bless`, `caller`, `chdir`, `chmod`, `chomp`, `chop`, `chown`, `chr`, `chroot`, `close`, `closedir`, `connect`, `continue`, `cos`, `crypt`, `dbmclose`, `dbmopen`, `defined`, `delete`, `die`, `dump`, `each`, `endgrent`, `endhostent`, `endnetent`, `endprotoent`, `endpwent`, `endservent`, `eof`, `eval`, `exec`, `exists`, `exit`, `exp`, `fcntl`, `fileno`, `flock`, `fork`, `format`, `formline`, `getc`, `getgrent`, `getgrgid`, `getgrnam`, `gethostbyaddr`, `gethostbyname`, `gethostent`, `getlogin`, `getnetbyaddr`, `getnetbyname`, `getnetent`, `getpeername`, `getpgrp`, `getppid`, `getpriority`, `getprotobyname`, `getprotobynumber`, `getprotoent`, `getpwent`, `getpwnam`, `getpwuid`, `getservbyname`, `getservbyport`, `getservent`, `getsockname`, `getsockopt`, `glob`, `gmtime`, `goto`, `grep`, `hex`, `import`, `index`, `int`, `ioctl`, `join`, `keys`, `kill`, `last`, `lc`, `lcfirst`, `length`, `link`, `listen`, `local`, `localtime`, `log`, 
`lstat`, `map`, `mkdir`, `msgctl`, `msgget`, `msgrcv`, `msgsnd`, `my`, `next`, `oct`, `open`, `opendir`, `ord`, `our`, `pack`, `pipe`, `pop`, `pos`, `printf`, `prototype`, `push`, `quotemeta`, `rand`, `read`, `readdir`, `readline`, `readlink`, `readpipe`, `recv`, `redo`, `ref`, `rename`, `reverse`, `rewinddir`, `rindex`, `rmdir`, `scalar`, `seek`, `seekdir`, `select`, `semctl`, `semget`, `semop`, `send`, `setgrent`, `sethostent`, `setnetent`, `setpgrp`, `setpriority`, `setprotoent`, `setpwent`, `setservent`, `setsockopt`, `shift`, `shmctl`, `shmget`, `shmread`, `shmwrite`, `shutdown`, `sin`, `sleep`, `socket`, `socketpair`, `sort`, `splice`, `split`, `sprintf`, `sqrt`, `srand`, `stat`, `study`, `substr`, `symlink`, `syscall`, `sysopen`, `sysread`, `sysseek`, `system`, `syswrite`, `tell`, `telldir`, `tie`, `tied`, `time`, `times`, `tr`, `truncate`, `uc`, `ucfirst`, `umask`, `undef`, `unlink`, `unpack`, `unshift`, `untie`, `utime`, `values`, `vec`, `wait`, `waitpid`, `wantarray`, `warn`, `write`), NameBuiltin, nil}, + {`((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b`, NameBuiltinPseudo, nil}, + {`(<<)([\'"]?)([a-zA-Z_]\w*)(\2;?\n.*?\n)(\3)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Text), nil}, + {`__END__`, CommentPreproc, Push("end-part")}, + {`\$\^[ADEFHILMOPSTWX]`, NameVariableGlobal, nil}, + {"\\$[\\\\\\\"\\[\\]'&`+*.,;=%~?@$!<>(^|/-](?!\\w)", NameVariableGlobal, nil}, + {`[$@%#]+`, NameVariable, Push("varname")}, + {`0_?[0-7]+(_[0-7]+)*`, LiteralNumberOct, nil}, + {`0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*`, LiteralNumberHex, nil}, + {`0b[01]+(_[01]+)*`, LiteralNumberBin, nil}, + {`(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?`, LiteralNumberFloat, nil}, + {`(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*`, LiteralNumberFloat, nil}, + {`\d+(_\d+)*`, LiteralNumberInteger, nil}, + {`'(\\\\|\\[^\\]|[^'\\])*'`, LiteralString, nil}, + {`"(\\\\|\\[^\\]|[^"\\])*"`, LiteralString, nil}, + {"`(\\\\\\\\|\\\\[^\\\\]|[^`\\\\])*`", LiteralStringBacktick, nil}, + {`<([^\s>]+)>`, LiteralStringRegex, nil}, + {`(q|qq|qw|qr|qx)\{`, LiteralStringOther, Push("cb-string")}, + {`(q|qq|qw|qr|qx)\(`, LiteralStringOther, Push("rb-string")}, + {`(q|qq|qw|qr|qx)\[`, LiteralStringOther, Push("sb-string")}, + {`(q|qq|qw|qr|qx)\<`, LiteralStringOther, Push("lt-string")}, + {`(q|qq|qw|qr|qx)([\W_])(.|\n)*?\2`, LiteralStringOther, nil}, + {`(package)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)`, ByGroups(Keyword, Text, NameNamespace), nil}, + {`(use|require|no)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)`, ByGroups(Keyword, Text, NameNamespace), nil}, + {`(sub)(\s+)`, ByGroups(Keyword, Text), Push("funcname")}, + {Words(``, `\b`, `no`, `package`, `require`, `use`), Keyword, nil}, + {`(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|!~|&&?|\|\||\.{1,3})`, Operator, nil}, + {`[-+/*%=<>&^|!\\~]=?`, Operator, nil}, + {`[()\[\]:;,<>/?{}]`, Punctuation, nil}, + {`(?=\w)`, Name, Push("name")}, + }, + "format": { + {`\.\n`, LiteralStringInterpol, Pop(1)}, + {`[^\n]*\n`, LiteralStringInterpol, nil}, + }, + "varname": { + {`\s+`, Text, nil}, + {`\{`, Punctuation, Pop(1)}, + {`\)|,`, Punctuation, Pop(1)}, + {`\w+::`, NameNamespace, nil}, + {`[\w:]+`, NameVariable, Pop(1)}, + }, + "name": { + {`[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*(::)?(?=\s*->)`, NameNamespace, Pop(1)}, + {`[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*::`, NameNamespace, Pop(1)}, + {`[\w:]+`, Name, Pop(1)}, + {`[A-Z_]+(?=\W)`, NameConstant, Pop(1)}, + {`(?=\W)`, Text, Pop(1)}, + }, + "funcname": { + {`[a-zA-Z_]\w*[!?]?`, NameFunction, nil}, + {`\s+`, 
Text, nil}, + {`(\([$@%]*\))(\s*)`, ByGroups(Punctuation, Text), nil}, + {`;`, Punctuation, Pop(1)}, + {`.*?\{`, Punctuation, Pop(1)}, + }, + "cb-string": { + {`\\[{}\\]`, LiteralStringOther, nil}, + {`\\`, LiteralStringOther, nil}, + {`\{`, LiteralStringOther, Push("cb-string")}, + {`\}`, LiteralStringOther, Pop(1)}, + {`[^{}\\]+`, LiteralStringOther, nil}, + }, + "rb-string": { + {`\\[()\\]`, LiteralStringOther, nil}, + {`\\`, LiteralStringOther, nil}, + {`\(`, LiteralStringOther, Push("rb-string")}, + {`\)`, LiteralStringOther, Pop(1)}, + {`[^()]+`, LiteralStringOther, nil}, + }, + "sb-string": { + {`\\[\[\]\\]`, LiteralStringOther, nil}, + {`\\`, LiteralStringOther, nil}, + {`\[`, LiteralStringOther, Push("sb-string")}, + {`\]`, LiteralStringOther, Pop(1)}, + {`[^\[\]]+`, LiteralStringOther, nil}, + }, + "lt-string": { + {`\\[<>\\]`, LiteralStringOther, nil}, + {`\\`, LiteralStringOther, nil}, + {`\<`, LiteralStringOther, Push("lt-string")}, + {`\>`, LiteralStringOther, Pop(1)}, + {`[^<>]+`, LiteralStringOther, nil}, + }, + "end-part": { + {`.+`, CommentPreproc, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/pig.go b/vendor/github.com/alecthomas/chroma/lexers/p/pig.go new file mode 100644 index 00000000..0dbc591a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/pig.go @@ -0,0 +1,57 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Pig lexer. +var Pig = internal.Register(MustNewLexer( + &Config{ + Name: "Pig", + Aliases: []string{"pig"}, + Filenames: []string{"*.pig"}, + MimeTypes: []string{"text/x-pig"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`--.*`, Comment, nil}, + {`/\*[\w\W]*?\*/`, CommentMultiline, nil}, + {`\\\n`, Text, nil}, + {`\\`, Text, nil}, + {`\'(?:\\[ntbrf\\\']|\\u[0-9a-f]{4}|[^\'\\\n\r])*\'`, LiteralString, nil}, + Include("keywords"), + Include("types"), + Include("builtins"), + Include("punct"), + Include("operators"), + {`[0-9]*\.[0-9]+(e[0-9]+)?[fd]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-f]+`, LiteralNumberHex, nil}, + {`[0-9]+L?`, LiteralNumberInteger, nil}, + {`\n`, Text, nil}, + {`([a-z_]\w*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, + {`[()#:]`, Text, nil}, + {`[^(:#\'")\s]+`, Text, nil}, + {`\S+\s+`, Text, nil}, + }, + "keywords": { + {`(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|%declare|%default|define|dense|desc|describe|distinct|du|dump|eval|exex|explain|filter|flatten|foreach|full|generate|group|help|if|illustrate|import|inner|input|into|is|join|kill|left|limit|load|ls|map|matches|mkdir|mv|not|null|onschema|or|order|outer|output|parallel|pig|pwd|quit|register|returns|right|rm|rmf|rollup|run|sample|set|ship|split|stderr|stdin|stdout|store|stream|through|union|using|void)\b`, Keyword, nil}, + }, + "builtins": { + {`(AVG|BinStorage|cogroup|CONCAT|copyFromLocal|copyToLocal|COUNT|cross|DIFF|MAX|MIN|PigDump|PigStorage|SIZE|SUM|TextLoader|TOKENIZE)\b`, NameBuiltin, nil}, + }, + "types": { + {`(bytearray|BIGINTEGER|BIGDECIMAL|chararray|datetime|double|float|int|long|tuple)\b`, KeywordType, nil}, + }, + "punct": { + {`[;(){}\[\]]`, Punctuation, nil}, + }, + "operators": { + {`[#=,./%+\-?]`, Operator, nil}, + {`(eq|gt|lt|gte|lte|neq|matches)\b`, Operator, nil}, + {`(==|<=|<|>=|>|!=)`, Operator, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/pkgconfig.go b/vendor/github.com/alecthomas/chroma/lexers/p/pkgconfig.go new file mode 100644 
index 00000000..0a4872e7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/pkgconfig.go @@ -0,0 +1,41 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Pkgconfig lexer. +var Pkgconfig = internal.Register(MustNewLexer( + &Config{ + Name: "PkgConfig", + Aliases: []string{"pkgconfig"}, + Filenames: []string{"*.pc"}, + MimeTypes: []string{}, + }, + Rules{ + "root": { + {`#.*$`, CommentSingle, nil}, + {`^(\w+)(=)`, ByGroups(NameAttribute, Operator), nil}, + {`^([\w.]+)(:)`, ByGroups(NameTag, Punctuation), Push("spvalue")}, + Include("interp"), + {`[^${}#=:\n.]+`, Text, nil}, + {`.`, Text, nil}, + }, + "interp": { + {`\$\$`, Text, nil}, + {`\$\{`, LiteralStringInterpol, Push("curly")}, + }, + "curly": { + {`\}`, LiteralStringInterpol, Pop(1)}, + {`\w+`, NameAttribute, nil}, + }, + "spvalue": { + Include("interp"), + {`#.*$`, CommentSingle, Pop(1)}, + {`\n`, Text, Pop(1)}, + {`[^${}#\n]+`, Text, nil}, + {`.`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/plaintext.go b/vendor/github.com/alecthomas/chroma/lexers/p/plaintext.go new file mode 100644 index 00000000..50b8f35d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/plaintext.go @@ -0,0 +1,16 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var Plaintext = internal.Register(MustNewLexer( + &Config{ + Name: "plaintext", + Aliases: []string{"text", "plain", "no-highlight"}, + Filenames: []string{"*.txt"}, + MimeTypes: []string{"text/plain"}, + }, + internal.PlaintextRules, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/plsql.go b/vendor/github.com/alecthomas/chroma/lexers/p/plsql.go new file mode 100644 index 00000000..a6068fd2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/plsql.go @@ -0,0 +1,58 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Pl/Pgsql lexer. 
+var PLpgSQL = internal.Register(MustNewLexer( + &Config{ + Name: "PL/pgSQL", + Aliases: []string{"plpgsql"}, + Filenames: []string{}, + MimeTypes: []string{"text/x-plpgsql"}, + NotMultiline: true, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`\%[a-z]\w*\b`, NameBuiltin, nil}, + {`:=`, Operator, nil}, + {`\<\<[a-z]\w*\>\>`, NameLabel, nil}, + {`\#[a-z]\w*\b`, KeywordPseudo, nil}, + {`\s+`, Text, nil}, + {`--.*\n?`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`(bigint|bigserial|bit|bit\s+varying|bool|boolean|box|bytea|char|character|character\s+varying|cidr|circle|date|decimal|double\s+precision|float4|float8|inet|int|int2|int4|int8|integer|interval|json|jsonb|line|lseg|macaddr|money|numeric|path|pg_lsn|point|polygon|real|serial|serial2|serial4|serial8|smallint|smallserial|text|time|timestamp|timestamptz|timetz|tsquery|tsvector|txid_snapshot|uuid|varbit|varchar|with\s+time\s+zone|without\s+time\s+zone|xml|anyarray|anyelement|anyenum|anynonarray|anyrange|cstring|fdw_handler|internal|language_handler|opaque|record|void)\b`, NameBuiltin, nil}, + {Words(``, `\b`, `ABORT`, `ABSOLUTE`, `ACCESS`, `ACTION`, `ADD`, `ADMIN`, `AFTER`, `AGGREGATE`, `ALL`, `ALSO`, `ALTER`, `ALWAYS`, `ANALYSE`, `ANALYZE`, `AND`, `ANY`, `ARRAY`, `AS`, `ASC`, `ASSERTION`, `ASSIGNMENT`, `ASYMMETRIC`, `AT`, `ATTRIBUTE`, `AUTHORIZATION`, `BACKWARD`, `BEFORE`, `BEGIN`, `BETWEEN`, `BIGINT`, `BINARY`, `BIT`, `BOOLEAN`, `BOTH`, `BY`, `CACHE`, `CALLED`, `CASCADE`, `CASCADED`, `CASE`, `CAST`, `CATALOG`, `CHAIN`, `CHAR`, `CHARACTER`, `CHARACTERISTICS`, `CHECK`, `CHECKPOINT`, `CLASS`, `CLOSE`, `CLUSTER`, `COALESCE`, `COLLATE`, `COLLATION`, `COLUMN`, `COMMENT`, `COMMENTS`, `COMMIT`, `COMMITTED`, `CONCURRENTLY`, `CONFIGURATION`, `CONNECTION`, `CONSTRAINT`, `CONSTRAINTS`, `CONTENT`, `CONTINUE`, `CONVERSION`, `COPY`, `COST`, `CREATE`, `CROSS`, `CSV`, `CURRENT`, `CURRENT_CATALOG`, `CURRENT_DATE`, `CURRENT_ROLE`, `CURRENT_SCHEMA`, `CURRENT_TIME`, `CURRENT_TIMESTAMP`, `CURRENT_USER`, `CURSOR`, `CYCLE`, `DATA`, `DATABASE`, `DAY`, `DEALLOCATE`, `DEC`, `DECIMAL`, `DECLARE`, `DEFAULT`, `DEFAULTS`, `DEFERRABLE`, `DEFERRED`, `DEFINER`, `DELETE`, `DELIMITER`, `DELIMITERS`, `DESC`, `DICTIONARY`, `DISABLE`, `DISCARD`, `DISTINCT`, `DO`, `DOCUMENT`, `DOMAIN`, `DOUBLE`, `DROP`, `EACH`, `ELSE`, `ENABLE`, `ENCODING`, `ENCRYPTED`, `END`, `ENUM`, `ESCAPE`, `EVENT`, `EXCEPT`, `EXCLUDE`, `EXCLUDING`, `EXCLUSIVE`, `EXECUTE`, `EXISTS`, `EXPLAIN`, `EXTENSION`, `EXTERNAL`, `EXTRACT`, `FALSE`, `FAMILY`, `FETCH`, `FILTER`, `FIRST`, `FLOAT`, `FOLLOWING`, `FOR`, `FORCE`, `FOREIGN`, `FORWARD`, `FREEZE`, `FROM`, `FULL`, `FUNCTION`, `FUNCTIONS`, `GLOBAL`, `GRANT`, `GRANTED`, `GREATEST`, `GROUP`, `HANDLER`, `HAVING`, `HEADER`, `HOLD`, `HOUR`, `IDENTITY`, `IF`, `ILIKE`, `IMMEDIATE`, `IMMUTABLE`, `IMPLICIT`, `IN`, `INCLUDING`, `INCREMENT`, `INDEX`, `INDEXES`, `INHERIT`, `INHERITS`, `INITIALLY`, `INLINE`, `INNER`, `INOUT`, `INPUT`, `INSENSITIVE`, `INSERT`, `INSTEAD`, `INT`, `INTEGER`, `INTERSECT`, `INTERVAL`, `INTO`, `INVOKER`, `IS`, `ISNULL`, `ISOLATION`, `JOIN`, `KEY`, `LABEL`, `LANGUAGE`, `LARGE`, `LAST`, `LATERAL`, `LC_COLLATE`, `LC_CTYPE`, `LEADING`, `LEAKPROOF`, `LEAST`, `LEFT`, `LEVEL`, `LIKE`, `LIMIT`, `LISTEN`, `LOAD`, `LOCAL`, `LOCALTIME`, `LOCALTIMESTAMP`, `LOCATION`, `LOCK`, `MAPPING`, `MATCH`, `MATERIALIZED`, `MAXVALUE`, `MINUTE`, `MINVALUE`, `MODE`, `MONTH`, `MOVE`, `NAME`, `NAMES`, `NATIONAL`, `NATURAL`, `NCHAR`, `NEXT`, `NO`, `NONE`, `NOT`, `NOTHING`, `NOTIFY`, `NOTNULL`, `NOWAIT`, `NULL`, `NULLIF`, `NULLS`, 
`NUMERIC`, `OBJECT`, `OF`, `OFF`, `OFFSET`, `OIDS`, `ON`, `ONLY`, `OPERATOR`, `OPTION`, `OPTIONS`, `OR`, `ORDER`, `ORDINALITY`, `OUT`, `OUTER`, `OVER`, `OVERLAPS`, `OVERLAY`, `OWNED`, `OWNER`, `PARSER`, `PARTIAL`, `PARTITION`, `PASSING`, `PASSWORD`, `PLACING`, `PLANS`, `POLICY`, `POSITION`, `PRECEDING`, `PRECISION`, `PREPARE`, `PREPARED`, `PRESERVE`, `PRIMARY`, `PRIOR`, `PRIVILEGES`, `PROCEDURAL`, `PROCEDURE`, `PROGRAM`, `QUOTE`, `RANGE`, `READ`, `REAL`, `REASSIGN`, `RECHECK`, `RECURSIVE`, `REF`, `REFERENCES`, `REFRESH`, `REINDEX`, `RELATIVE`, `RELEASE`, `RENAME`, `REPEATABLE`, `REPLACE`, `REPLICA`, `RESET`, `RESTART`, `RESTRICT`, `RETURNING`, `RETURNS`, `REVOKE`, `RIGHT`, `ROLE`, `ROLLBACK`, `ROW`, `ROWS`, `RULE`, `SAVEPOINT`, `SCHEMA`, `SCROLL`, `SEARCH`, `SECOND`, `SECURITY`, `SELECT`, `SEQUENCE`, `SEQUENCES`, `SERIALIZABLE`, `SERVER`, `SESSION`, `SESSION_USER`, `SET`, `SETOF`, `SHARE`, `SHOW`, `SIMILAR`, `SIMPLE`, `SMALLINT`, `SNAPSHOT`, `SOME`, `STABLE`, `STANDALONE`, `START`, `STATEMENT`, `STATISTICS`, `STDIN`, `STDOUT`, `STORAGE`, `STRICT`, `STRIP`, `SUBSTRING`, `SYMMETRIC`, `SYSID`, `SYSTEM`, `TABLE`, `TABLES`, `TABLESPACE`, `TEMP`, `TEMPLATE`, `TEMPORARY`, `TEXT`, `THEN`, `TIME`, `TIMESTAMP`, `TO`, `TRAILING`, `TRANSACTION`, `TREAT`, `TRIGGER`, `TRIM`, `TRUE`, `TRUNCATE`, `TRUSTED`, `TYPE`, `TYPES`, `UNBOUNDED`, `UNCOMMITTED`, `UNENCRYPTED`, `UNION`, `UNIQUE`, `UNKNOWN`, `UNLISTEN`, `UNLOGGED`, `UNTIL`, `UPDATE`, `USER`, `USING`, `VACUUM`, `VALID`, `VALIDATE`, `VALIDATOR`, `VALUE`, `VALUES`, `VARCHAR`, `VARIADIC`, `VARYING`, `VERBOSE`, `VERSION`, `VIEW`, `VIEWS`, `VOLATILE`, `WHEN`, `WHERE`, `WHITESPACE`, `WINDOW`, `WITH`, `WITHIN`, `WITHOUT`, `WORK`, `WRAPPER`, `WRITE`, `XML`, `XMLATTRIBUTES`, `XMLCONCAT`, `XMLELEMENT`, `XMLEXISTS`, `XMLFOREST`, `XMLPARSE`, `XMLPI`, `XMLROOT`, `XMLSERIALIZE`, `YEAR`, `YES`, `ZONE`, `ALIAS`, `CONSTANT`, `DIAGNOSTICS`, `ELSIF`, `EXCEPTION`, `EXIT`, `FOREACH`, `GET`, `LOOP`, `NOTICE`, `OPEN`, `PERFORM`, `QUERY`, `RAISE`, `RETURN`, `REVERSE`, `SQLSTATE`, `WHILE`), Keyword, nil}, + {"[+*/<>=~!@#%^&|`?-]+", Operator, nil}, + {`::`, Operator, nil}, + {`\$\d+`, NameVariable, nil}, + {`([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`((?:E|U&)?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("string")}, + {`((?:U&)?)(")`, ByGroups(LiteralStringAffix, LiteralStringName), Push("quoted-ident")}, + // { `(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)`, ?? ??, nil }, + {`[a-z_]\w*`, Name, nil}, + {`:(['"]?)[a-z]\w*\b\1`, NameVariable, nil}, + {`[;:()\[\]{},.]`, Punctuation, nil}, + }, + "multiline-comments": { + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[^/*]+`, CommentMultiline, nil}, + {`[/*]`, CommentMultiline, nil}, + }, + "string": { + {`[^']+`, LiteralStringSingle, nil}, + {`''`, LiteralStringSingle, nil}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "quoted-ident": { + {`[^"]+`, LiteralStringName, nil}, + {`""`, LiteralStringName, nil}, + {`"`, LiteralStringName, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/postgres.go b/vendor/github.com/alecthomas/chroma/lexers/p/postgres.go new file mode 100644 index 00000000..3afa54de --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/postgres.go @@ -0,0 +1,77 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Postgresql Sql Dialect lexer. 
+var PostgreSQL = internal.Register(MustNewLexer( + &Config{ + Name: "PostgreSQL SQL dialect", + Aliases: []string{"postgresql", "postgres"}, + Filenames: []string{}, + MimeTypes: []string{"text/x-postgresql"}, + NotMultiline: true, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`--.*\n?`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`(bigint|bigserial|bit|bit\s+varying|bool|boolean|box|bytea|char|character|character\s+varying|cidr|circle|date|decimal|double\s+precision|float4|float8|inet|int|int2|int4|int8|integer|interval|json|jsonb|line|lseg|macaddr|money|numeric|path|pg_lsn|point|polygon|real|serial|serial2|serial4|serial8|smallint|smallserial|text|time|timestamp|timestamptz|timetz|tsquery|tsvector|txid_snapshot|uuid|varbit|varchar|with\s+time\s+zone|without\s+time\s+zone|xml|anyarray|anyelement|anyenum|anynonarray|anyrange|cstring|fdw_handler|internal|language_handler|opaque|record|void)\b`, NameBuiltin, nil}, + {`(?s)(DO)(\s+)(?:(LANGUAGE)?(\s+)('?)(\w+)?('?)(\s+))?(\$)([^$]*)(\$)(.*?)(\$)(\10)(\$)`, + UsingByGroup( + internal.Get, + 6, 12, + Keyword, Text, Keyword, Text, // DO LANGUAGE + StringSingle, StringSingle, StringSingle, Text, // 'plpgsql' + StringHeredoc, StringHeredoc, StringHeredoc, // $tag$ + StringHeredoc, // (code block) + StringHeredoc, StringHeredoc, StringHeredoc, // $tag$ + ), + nil, + }, + {Words(``, `\b`, `ABORT`, `ABSOLUTE`, `ACCESS`, `ACTION`, `ADD`, `ADMIN`, `AFTER`, `AGGREGATE`, `ALL`, `ALSO`, `ALTER`, `ALWAYS`, `ANALYSE`, `ANALYZE`, `AND`, `ANY`, `ARRAY`, `AS`, `ASC`, `ASSERTION`, `ASSIGNMENT`, `ASYMMETRIC`, `AT`, `ATTRIBUTE`, `AUTHORIZATION`, `BACKWARD`, `BEFORE`, `BEGIN`, `BETWEEN`, `BIGINT`, `BINARY`, `BIT`, `BOOLEAN`, `BOTH`, `BY`, `CACHE`, `CALLED`, `CASCADE`, `CASCADED`, `CASE`, `CAST`, `CATALOG`, `CHAIN`, `CHAR`, `CHARACTER`, `CHARACTERISTICS`, `CHECK`, `CHECKPOINT`, `CLASS`, `CLOSE`, `CLUSTER`, `COALESCE`, `COLLATE`, `COLLATION`, `COLUMN`, `COMMENT`, `COMMENTS`, `COMMIT`, `COMMITTED`, `CONCURRENTLY`, `CONFIGURATION`, `CONNECTION`, `CONSTRAINT`, `CONSTRAINTS`, `CONTENT`, `CONTINUE`, `CONVERSION`, `COPY`, `COST`, `CREATE`, `CROSS`, `CSV`, `CURRENT`, `CURRENT_CATALOG`, `CURRENT_DATE`, `CURRENT_ROLE`, `CURRENT_SCHEMA`, `CURRENT_TIME`, `CURRENT_TIMESTAMP`, `CURRENT_USER`, `CURSOR`, `CYCLE`, `DATA`, `DATABASE`, `DAY`, `DEALLOCATE`, `DEC`, `DECIMAL`, `DECLARE`, `DEFAULT`, `DEFAULTS`, `DEFERRABLE`, `DEFERRED`, `DEFINER`, `DELETE`, `DELIMITER`, `DELIMITERS`, `DESC`, `DICTIONARY`, `DISABLE`, `DISCARD`, `DISTINCT`, `DO`, `DOCUMENT`, `DOMAIN`, `DOUBLE`, `DROP`, `EACH`, `ELSE`, `ENABLE`, `ENCODING`, `ENCRYPTED`, `END`, `ENUM`, `ESCAPE`, `EVENT`, `EXCEPT`, `EXCLUDE`, `EXCLUDING`, `EXCLUSIVE`, `EXECUTE`, `EXISTS`, `EXPLAIN`, `EXTENSION`, `EXTERNAL`, `EXTRACT`, `FALSE`, `FAMILY`, `FETCH`, `FILTER`, `FIRST`, `FLOAT`, `FOLLOWING`, `FOR`, `FORCE`, `FOREIGN`, `FORWARD`, `FREEZE`, `FROM`, `FULL`, `FUNCTION`, `FUNCTIONS`, `GLOBAL`, `GRANT`, `GRANTED`, `GREATEST`, `GROUP`, `HANDLER`, `HAVING`, `HEADER`, `HOLD`, `HOUR`, `IDENTITY`, `IF`, `ILIKE`, `IMMEDIATE`, `IMMUTABLE`, `IMPLICIT`, `IN`, `INCLUDING`, `INCREMENT`, `INDEX`, `INDEXES`, `INHERIT`, `INHERITS`, `INITIALLY`, `INLINE`, `INNER`, `INOUT`, `INPUT`, `INSENSITIVE`, `INSERT`, `INSTEAD`, `INT`, `INTEGER`, `INTERSECT`, `INTERVAL`, `INTO`, `INVOKER`, `IS`, `ISNULL`, `ISOLATION`, `JOIN`, `KEY`, `LABEL`, `LANGUAGE`, `LARGE`, `LAST`, `LATERAL`, `LC_COLLATE`, `LC_CTYPE`, `LEADING`, `LEAKPROOF`, `LEAST`, `LEFT`, `LEVEL`, `LIKE`, `LIMIT`, `LISTEN`, `LOAD`, 
`LOCAL`, `LOCALTIME`, `LOCALTIMESTAMP`, `LOCATION`, `LOCK`, `MAPPING`, `MATCH`, `MATERIALIZED`, `MAXVALUE`, `MINUTE`, `MINVALUE`, `MODE`, `MONTH`, `MOVE`, `NAME`, `NAMES`, `NATIONAL`, `NATURAL`, `NCHAR`, `NEXT`, `NO`, `NONE`, `NOT`, `NOTHING`, `NOTIFY`, `NOTNULL`, `NOWAIT`, `NULL`, `NULLIF`, `NULLS`, `NUMERIC`, `OBJECT`, `OF`, `OFF`, `OFFSET`, `OIDS`, `ON`, `ONLY`, `OPERATOR`, `OPTION`, `OPTIONS`, `OR`, `ORDER`, `ORDINALITY`, `OUT`, `OUTER`, `OVER`, `OVERLAPS`, `OVERLAY`, `OWNED`, `OWNER`, `PARSER`, `PARTIAL`, `PARTITION`, `PASSING`, `PASSWORD`, `PLACING`, `PLANS`, `POLICY`, `POSITION`, `PRECEDING`, `PRECISION`, `PREPARE`, `PREPARED`, `PRESERVE`, `PRIMARY`, `PRIOR`, `PRIVILEGES`, `PROCEDURAL`, `PROCEDURE`, `PROGRAM`, `QUOTE`, `RANGE`, `READ`, `REAL`, `REASSIGN`, `RECHECK`, `RECURSIVE`, `REF`, `REFERENCES`, `REFRESH`, `REINDEX`, `RELATIVE`, `RELEASE`, `RENAME`, `REPEATABLE`, `REPLACE`, `REPLICA`, `RESET`, `RESTART`, `RESTRICT`, `RETURNING`, `RETURNS`, `REVOKE`, `RIGHT`, `ROLE`, `ROLLBACK`, `ROW`, `ROWS`, `RULE`, `SAVEPOINT`, `SCHEMA`, `SCROLL`, `SEARCH`, `SECOND`, `SECURITY`, `SELECT`, `SEQUENCE`, `SEQUENCES`, `SERIALIZABLE`, `SERVER`, `SESSION`, `SESSION_USER`, `SET`, `SETOF`, `SHARE`, `SHOW`, `SIMILAR`, `SIMPLE`, `SMALLINT`, `SNAPSHOT`, `SOME`, `STABLE`, `STANDALONE`, `START`, `STATEMENT`, `STATISTICS`, `STDIN`, `STDOUT`, `STORAGE`, `STRICT`, `STRIP`, `SUBSTRING`, `SYMMETRIC`, `SYSID`, `SYSTEM`, `TABLE`, `TABLES`, `TABLESPACE`, `TEMP`, `TEMPLATE`, `TEMPORARY`, `TEXT`, `THEN`, `TIME`, `TIMESTAMP`, `TO`, `TRAILING`, `TRANSACTION`, `TREAT`, `TRIGGER`, `TRIM`, `TRUE`, `TRUNCATE`, `TRUSTED`, `TYPE`, `TYPES`, `UNBOUNDED`, `UNCOMMITTED`, `UNENCRYPTED`, `UNION`, `UNIQUE`, `UNKNOWN`, `UNLISTEN`, `UNLOGGED`, `UNTIL`, `UPDATE`, `USER`, `USING`, `VACUUM`, `VALID`, `VALIDATE`, `VALIDATOR`, `VALUE`, `VALUES`, `VARCHAR`, `VARIADIC`, `VARYING`, `VERBOSE`, `VERSION`, `VIEW`, `VIEWS`, `VOLATILE`, `WHEN`, `WHERE`, `WHITESPACE`, `WINDOW`, `WITH`, `WITHIN`, `WITHOUT`, `WORK`, `WRAPPER`, `WRITE`, `XML`, `XMLATTRIBUTES`, `XMLCONCAT`, `XMLELEMENT`, `XMLEXISTS`, `XMLFOREST`, `XMLPARSE`, `XMLPI`, `XMLROOT`, `XMLSERIALIZE`, `YEAR`, `YES`, `ZONE`), Keyword, nil}, + {"[+*/<>=~!@#%^&|`?-]+", Operator, nil}, + {`::`, Operator, nil}, + {`\$\d+`, NameVariable, nil}, + {`([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`((?:E|U&)?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("string")}, + {`((?:U&)?)(")`, ByGroups(LiteralStringAffix, LiteralStringName), Push("quoted-ident")}, + {`(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)(\s+)(LANGUAGE)?(\s+)('?)(\w+)?('?)`, + UsingByGroup(internal.Get, + 12, 4, + StringHeredoc, StringHeredoc, StringHeredoc, // $tag$ + StringHeredoc, // (code block) + StringHeredoc, StringHeredoc, StringHeredoc, // $tag$ + Text, Keyword, Text, // LANGUAGE + StringSingle, StringSingle, StringSingle, // 'type' + ), + nil, + }, + {`(?s)(\$)([^$]*)(\$)(.*?)(\$)(\2)(\$)`, LiteralStringHeredoc, nil}, + {`[a-z_]\w*`, Name, nil}, + {`:(['"]?)[a-z]\w*\b\1`, NameVariable, nil}, + {`[;:()\[\]{},.]`, Punctuation, nil}, + }, + "multiline-comments": { + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[^/*]+`, CommentMultiline, nil}, + {`[/*]`, CommentMultiline, nil}, + }, + "string": { + {`[^']+`, LiteralStringSingle, nil}, + {`''`, LiteralStringSingle, nil}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "quoted-ident": { + {`[^"]+`, LiteralStringName, nil}, + {`""`, LiteralStringName, nil}, + {`"`, 
LiteralStringName, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/postscript.go b/vendor/github.com/alecthomas/chroma/lexers/p/postscript.go new file mode 100644 index 00000000..0cfa3a5f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/postscript.go @@ -0,0 +1,46 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Postscript lexer. +var Postscript = internal.Register(MustNewLexer( + &Config{ + Name: "PostScript", + Aliases: []string{"postscript", "postscr"}, + Filenames: []string{"*.ps", "*.eps"}, + MimeTypes: []string{"application/postscript"}, + }, + Rules{ + "root": { + {`^%!.+\n`, CommentPreproc, nil}, + {`%%.*\n`, CommentSpecial, nil}, + {`(^%.*\n){2,}`, CommentMultiline, nil}, + {`%.*\n`, CommentSingle, nil}, + {`\(`, LiteralString, Push("stringliteral")}, + {`[{}<>\[\]]`, Punctuation, nil}, + {`<[0-9A-Fa-f]+>(?=[()<>\[\]{}/%\s])`, LiteralNumberHex, nil}, + {`[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?(?=[()<>\[\]{}/%\s])`, LiteralNumberOct, nil}, + {`(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?(?=[()<>\[\]{}/%\s])`, LiteralNumberFloat, nil}, + {`(\-|\+)?[0-9]+(?=[()<>\[\]{}/%\s])`, LiteralNumberInteger, nil}, + {`\/[^()<>\[\]{}/%\s]+(?=[()<>\[\]{}/%\s])`, NameVariable, nil}, + {`[^()<>\[\]{}/%\s]+(?=[()<>\[\]{}/%\s])`, NameFunction, nil}, + {`(false|true)(?=[()<>\[\]{}/%\s])`, KeywordConstant, nil}, + {`(eq|ne|g[et]|l[et]|and|or|not|if(?:else)?|for(?:all)?)(?=[()<>\[\]{}/%\s])`, KeywordReserved, nil}, + {Words(``, `(?=[()<>\[\]{}/%\s])`, `abs`, `add`, `aload`, `arc`, `arcn`, `array`, `atan`, `begin`, `bind`, `ceiling`, `charpath`, `clip`, `closepath`, `concat`, `concatmatrix`, `copy`, `cos`, `currentlinewidth`, `currentmatrix`, `currentpoint`, `curveto`, `cvi`, `cvs`, `def`, `defaultmatrix`, `dict`, `dictstackoverflow`, `div`, `dtransform`, `dup`, `end`, `exch`, `exec`, `exit`, `exp`, `fill`, `findfont`, `floor`, `get`, `getinterval`, `grestore`, `gsave`, `gt`, `identmatrix`, `idiv`, `idtransform`, `index`, `invertmatrix`, `itransform`, `length`, `lineto`, `ln`, `load`, `log`, `loop`, `matrix`, `mod`, `moveto`, `mul`, `neg`, `newpath`, `pathforall`, `pathbbox`, `pop`, `print`, `pstack`, `put`, `quit`, `rand`, `rangecheck`, `rcurveto`, `repeat`, `restore`, `rlineto`, `rmoveto`, `roll`, `rotate`, `round`, `run`, `save`, `scale`, `scalefont`, `setdash`, `setfont`, `setgray`, `setlinecap`, `setlinejoin`, `setlinewidth`, `setmatrix`, `setrgbcolor`, `shfill`, `show`, `showpage`, `sin`, `sqrt`, `stack`, `stringwidth`, `stroke`, `strokepath`, `sub`, `syntaxerror`, `transform`, `translate`, `truncate`, `typecheck`, `undefined`, `undefinedfilename`, `undefinedresult`), NameBuiltin, nil}, + {`\s+`, Text, nil}, + }, + "stringliteral": { + {`[^()\\]+`, LiteralString, nil}, + {`\\`, LiteralStringEscape, Push("escape")}, + {`\(`, LiteralString, Push()}, + {`\)`, LiteralString, Pop(1)}, + }, + "escape": { + {`[0-8]{3}|n|r|t|b|f|\\|\(|\)`, LiteralStringEscape, Pop(1)}, + Default(Pop(1)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/povray.go b/vendor/github.com/alecthomas/chroma/lexers/p/povray.go new file mode 100644 index 00000000..6f1bb524 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/povray.go @@ -0,0 +1,35 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Povray lexer. 
+var Povray = internal.Register(MustNewLexer( + &Config{ + Name: "POVRay", + Aliases: []string{"pov"}, + Filenames: []string{"*.pov", "*.inc"}, + MimeTypes: []string{"text/x-povray"}, + }, + Rules{ + "root": { + {`/\*[\w\W]*?\*/`, CommentMultiline, nil}, + {`//.*\n`, CommentSingle, nil}, + {`(?s)"(?:\\.|[^"\\])+"`, LiteralStringDouble, nil}, + {Words(`#`, `\b`, `break`, `case`, `debug`, `declare`, `default`, `define`, `else`, `elseif`, `end`, `error`, `fclose`, `fopen`, `for`, `if`, `ifdef`, `ifndef`, `include`, `local`, `macro`, `range`, `read`, `render`, `statistics`, `switch`, `undef`, `version`, `warning`, `while`, `write`), CommentPreproc, nil}, + {Words(`\b`, `\b`, `aa_level`, `aa_threshold`, `abs`, `acos`, `acosh`, `adaptive`, `adc_bailout`, `agate`, `agate_turb`, `all`, `alpha`, `ambient`, `ambient_light`, `angle`, `aperture`, `arc_angle`, `area_light`, `asc`, `asin`, `asinh`, `assumed_gamma`, `atan`, `atan2`, `atanh`, `atmosphere`, `atmospheric_attenuation`, `attenuating`, `average`, `background`, `black_hole`, `blue`, `blur_samples`, `bounded_by`, `box_mapping`, `bozo`, `break`, `brick`, `brick_size`, `brightness`, `brilliance`, `bumps`, `bumpy1`, `bumpy2`, `bumpy3`, `bump_map`, `bump_size`, `case`, `caustics`, `ceil`, `checker`, `chr`, `clipped_by`, `clock`, `color`, `color_map`, `colour`, `colour_map`, `component`, `composite`, `concat`, `confidence`, `conic_sweep`, `constant`, `control0`, `control1`, `cos`, `cosh`, `count`, `crackle`, `crand`, `cube`, `cubic_spline`, `cylindrical_mapping`, `debug`, `declare`, `default`, `degrees`, `dents`, `diffuse`, `direction`, `distance`, `distance_maximum`, `div`, `dust`, `dust_type`, `eccentricity`, `else`, `emitting`, `end`, `error`, `error_bound`, `exp`, `exponent`, `fade_distance`, `fade_power`, `falloff`, `falloff_angle`, `false`, `file_exists`, `filter`, `finish`, `fisheye`, `flatness`, `flip`, `floor`, `focal_point`, `fog`, `fog_alt`, `fog_offset`, `fog_type`, `frequency`, `gif`, `global_settings`, `glowing`, `gradient`, `granite`, `gray_threshold`, `green`, `halo`, `hexagon`, `hf_gray_16`, `hierarchy`, `hollow`, `hypercomplex`, `if`, `ifdef`, `iff`, `image_map`, `incidence`, `include`, `int`, `interpolate`, `inverse`, `ior`, `irid`, `irid_wavelength`, `jitter`, `lambda`, `leopard`, `linear`, `linear_spline`, `linear_sweep`, `location`, `log`, `looks_like`, `look_at`, `low_error_factor`, `mandel`, `map_type`, `marble`, `material_map`, `matrix`, `max`, `max_intersections`, `max_iteration`, `max_trace_level`, `max_value`, `metallic`, `min`, `minimum_reuse`, `mod`, `mortar`, `nearest_count`, `no`, `normal`, `normal_map`, `no_shadow`, `number_of_waves`, `octaves`, `off`, `offset`, `omega`, `omnimax`, `on`, `once`, `onion`, `open`, `orthographic`, `panoramic`, `pattern1`, `pattern2`, `pattern3`, `perspective`, `pgm`, `phase`, `phong`, `phong_size`, `pi`, `pigment`, `pigment_map`, `planar_mapping`, `png`, `point_at`, `pot`, `pow`, `ppm`, `precision`, `pwr`, `quadratic_spline`, `quaternion`, `quick_color`, `quick_colour`, `quilted`, `radial`, `radians`, `radiosity`, `radius`, `rainbow`, `ramp_wave`, `rand`, `range`, `reciprocal`, `recursion_limit`, `red`, `reflection`, `refraction`, `render`, `repeat`, `rgb`, `rgbf`, `rgbft`, `rgbt`, `right`, `ripples`, `rotate`, `roughness`, `samples`, `scale`, `scallop_wave`, `scattering`, `seed`, `shadowless`, `sin`, `sine_wave`, `sinh`, `sky`, `sky_sphere`, `slice`, `slope_map`, `smooth`, `specular`, `spherical_mapping`, `spiral`, `spiral1`, `spiral2`, `spotlight`, `spotted`, `sqr`, `sqrt`, 
`statistics`, `str`, `strcmp`, `strength`, `strlen`, `strlwr`, `strupr`, `sturm`, `substr`, `switch`, `sys`, `t`, `tan`, `tanh`, `test_camera_1`, `test_camera_2`, `test_camera_3`, `test_camera_4`, `texture`, `texture_map`, `tga`, `thickness`, `threshold`, `tightness`, `tile2`, `tiles`, `track`, `transform`, `translate`, `transmit`, `triangle_wave`, `true`, `ttf`, `turbulence`, `turb_depth`, `type`, `ultra_wide_angle`, `up`, `use_color`, `use_colour`, `use_index`, `u_steps`, `val`, `variance`, `vaxis_rotate`, `vcross`, `vdot`, `version`, `vlength`, `vnormalize`, `volume_object`, `volume_rendered`, `vol_with_light`, `vrotate`, `v_steps`, `warning`, `warp`, `water_level`, `waves`, `while`, `width`, `wood`, `wrinkles`, `yes`), Keyword, nil}, + {Words(``, `\b`, `bicubic_patch`, `blob`, `box`, `camera`, `cone`, `cubic`, `cylinder`, `difference`, `disc`, `height_field`, `intersection`, `julia_fractal`, `lathe`, `light_source`, `merge`, `mesh`, `object`, `plane`, `poly`, `polygon`, `prism`, `quadric`, `quartic`, `smooth_triangle`, `sor`, `sphere`, `superellipsoid`, `text`, `torus`, `triangle`, `union`), NameBuiltin, nil}, + {`[\[\](){}<>;,]`, Punctuation, nil}, + {`[-+*/=]`, Operator, nil}, + {`\b(x|y|z|u|v)\b`, NameBuiltinPseudo, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + {`[0-9]+\.[0-9]*`, LiteralNumberFloat, nil}, + {`\.[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`\s+`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/powershell.go b/vendor/github.com/alecthomas/chroma/lexers/p/powershell.go new file mode 100644 index 00000000..cee366e8 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/powershell.go @@ -0,0 +1,64 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Powershell lexer. 
+var Powershell = internal.Register(MustNewLexer( + &Config{ + Name: "PowerShell", + Aliases: []string{"powershell", "posh", "ps1", "psm1"}, + Filenames: []string{"*.ps1", "*.psm1"}, + MimeTypes: []string{"text/x-powershell"}, + DotAll: true, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`\(`, Punctuation, Push("child")}, + {`\s+`, Text, nil}, + {`^(\s*#[#\s]*)(\.(?:component|description|example|externalhelp|forwardhelpcategory|forwardhelptargetname|functionality|inputs|link|notes|outputs|parameter|remotehelprunspace|role|synopsis))([^\n]*$)`, ByGroups(Comment, LiteralStringDoc, Comment), nil}, + {`#[^\n]*?$`, Comment, nil}, + {`(&lt;|<)#`, CommentMultiline, Push("multline")}, + {`@"\n`, LiteralStringHeredoc, Push("heredoc-double")}, + {`@'\n.*?\n'@`, LiteralStringHeredoc, nil}, + {"`[\\'\"$@-]", Punctuation, nil}, + {`"`, LiteralStringDouble, Push("string")}, + {`'([^']|'')*'`, LiteralStringSingle, nil}, + {`(\$|@@|@)((global|script|private|env):)?\w+`, NameVariable, nil}, + {`(while|validateset|validaterange|validatepattern|validatelength|validatecount|until|trap|switch|return|ref|process|param|parameter|in|if|global:|function|foreach|for|finally|filter|end|elseif|else|dynamicparam|do|default|continue|cmdletbinding|break|begin|alias|\?|%|#script|#private|#local|#global|mandatory|parametersetname|position|valuefrompipeline|valuefrompipelinebypropertyname|valuefromremainingarguments|helpmessage|try|catch|throw)\b`, Keyword, nil}, + {`-(and|as|band|bnot|bor|bxor|casesensitive|ccontains|ceq|cge|cgt|cle|clike|clt|cmatch|cne|cnotcontains|cnotlike|cnotmatch|contains|creplace|eq|exact|f|file|ge|gt|icontains|ieq|ige|igt|ile|ilike|ilt|imatch|ine|inotcontains|inotlike|inotmatch|ireplace|is|isnot|le|like|lt|match|ne|not|notcontains|notlike|notmatch|or|regex|replace|wildcard)\b`, Operator, nil}, + {`(write|where|wait|use|update|unregister|undo|trace|test|tee|take|suspend|stop|start|split|sort|skip|show|set|send|select|scroll|resume|restore|restart|resolve|resize|reset|rename|remove|register|receive|read|push|pop|ping|out|new|move|measure|limit|join|invoke|import|group|get|format|foreach|export|expand|exit|enter|enable|disconnect|disable|debug|cxnew|copy|convertto|convertfrom|convert|connect|complete|compare|clear|checkpoint|aggregate|add)-[a-z_]\w*\b`, NameBuiltin, nil}, + {"\\[[a-z_\\[][\\w. 
`,\\[\\]]*\\]", NameConstant, nil}, + {`-[a-z_]\w*`, Name, nil}, + {`\w+`, Name, nil}, + {"[.,;@{}\\[\\]$()=+*/\\\\&%!~?^`|<>-]|::", Punctuation, nil}, + }, + "child": { + {`\)`, Punctuation, Pop(1)}, + Include("root"), + }, + "multline": { + {`[^#&.]+`, CommentMultiline, nil}, + {`#(>|>)`, CommentMultiline, Pop(1)}, + {`\.(component|description|example|externalhelp|forwardhelpcategory|forwardhelptargetname|functionality|inputs|link|notes|outputs|parameter|remotehelprunspace|role|synopsis)`, LiteralStringDoc, nil}, + {`[#&.]`, CommentMultiline, nil}, + }, + "string": { + {"`[0abfnrtv'\\\"$`]", LiteralStringEscape, nil}, + {"[^$`\"]+", LiteralStringDouble, nil}, + {`\$\(`, Punctuation, Push("child")}, + {`""`, LiteralStringDouble, nil}, + {"[`$]", LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "heredoc-double": { + {`\n"@`, LiteralStringHeredoc, Pop(1)}, + {`\$\(`, Punctuation, Push("child")}, + {`[^@\n]+"]`, LiteralStringHeredoc, nil}, + {`.`, LiteralStringHeredoc, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/prolog.go b/vendor/github.com/alecthomas/chroma/lexers/p/prolog.go new file mode 100644 index 00000000..b5d91606 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/prolog.go @@ -0,0 +1,51 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Prolog lexer. +var Prolog = internal.Register(MustNewLexer( + &Config{ + Name: "Prolog", + Aliases: []string{"prolog"}, + Filenames: []string{"*.ecl", "*.prolog", "*.pro", "*.pl"}, + MimeTypes: []string{"text/x-prolog"}, + }, + Rules{ + "root": { + {`^#.*`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("nested-comment")}, + {`%.*`, CommentSingle, nil}, + {`0\'.`, LiteralStringChar, nil}, + {`0b[01]+`, LiteralNumberBin, nil}, + {`0o[0-7]+`, LiteralNumberOct, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`\d\d?\'[a-zA-Z0-9]+`, LiteralNumberInteger, nil}, + {`(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`[\[\](){}|.,;!]`, Punctuation, nil}, + {`:-|-->`, Punctuation, nil}, + {`"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\[0-7]+\\|\\["\nabcefnrstv]|[^\\"])*"`, LiteralStringDouble, nil}, + {`'(?:''|[^'])*'`, LiteralStringAtom, nil}, + {`is\b`, Operator, nil}, + {`(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])`, Operator, nil}, + {`(mod|div|not)\b`, Operator, nil}, + {`_`, Keyword, nil}, + {`([a-z]+)(:)`, ByGroups(NameNamespace, Punctuation), nil}, + {`([a-zÀ-῿぀-퟿-￯][\w$À-῿぀-퟿-￯]*)(\s*)(:-|-->)`, ByGroups(NameFunction, Text, Operator), nil}, + {`([a-zÀ-῿぀-퟿-￯][\w$À-῿぀-퟿-￯]*)(\s*)(\()`, ByGroups(NameFunction, Text, Punctuation), nil}, + {`[a-zÀ-῿぀-퟿-￯][\w$À-῿぀-퟿-￯]*`, LiteralStringAtom, nil}, + {`[#&*+\-./:<=>?@\\^~¡-¿‐-〿]+`, LiteralStringAtom, nil}, + {`[A-Z_]\w*`, NameVariable, nil}, + {`\s+|[ -‏￰-￾￯]`, Text, nil}, + }, + "nested-comment": { + {`\*/`, CommentMultiline, Pop(1)}, + {`/\*`, CommentMultiline, Push()}, + {`[^*/]+`, CommentMultiline, nil}, + {`[*/]`, CommentMultiline, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/protobuf.go b/vendor/github.com/alecthomas/chroma/lexers/p/protobuf.go new file mode 100644 index 00000000..30140131 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/protobuf.go @@ -0,0 +1,53 @@ +package p + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// ProtocolBuffer lexer. +var ProtocolBuffer = internal.Register(MustNewLexer( + &Config{ + Name: "Protocol Buffer", + Aliases: []string{"protobuf", "proto"}, + Filenames: []string{"*.proto"}, + MimeTypes: []string{}, + }, + Rules{ + "root": { + {`[ \t]+`, Text, nil}, + {`[,;{}\[\]()<>]`, Punctuation, nil}, + {`/(\\\n)?/(\n|(.|\n)*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?\*(.|\n)*?\*(\\\n)?/`, CommentMultiline, nil}, + {Words(`\b`, `\b`, `import`, `option`, `optional`, `required`, `repeated`, `default`, `packed`, `ctype`, `extensions`, `to`, `max`, `rpc`, `returns`, `oneof`), Keyword, nil}, + {Words(``, `\b`, `int32`, `int64`, `uint32`, `uint64`, `sint32`, `sint64`, `fixed32`, `fixed64`, `sfixed32`, `sfixed64`, `float`, `double`, `bool`, `string`, `bytes`), KeywordType, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {`(package)(\s+)`, ByGroups(KeywordNamespace, Text), Push("package")}, + {`(message|extend)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("message")}, + {`(enum|group|service)(\s+)`, ByGroups(KeywordDeclaration, Text), Push("type")}, + {`\".*?\"`, LiteralString, nil}, + {`\'.*?\'`, LiteralString, nil}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`(\-?(inf|nan))\b`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+[LlUu]*`, LiteralNumberHex, nil}, + {`0[0-7]+[LlUu]*`, LiteralNumberOct, nil}, + {`\d+[LlUu]*`, LiteralNumberInteger, nil}, + {`[+-=]`, Operator, nil}, + {`([a-zA-Z_][\w.]*)([ \t]*)(=)`, ByGroups(Name, Text, Operator), nil}, + {`[a-zA-Z_][\w.]*`, Name, nil}, + }, + "package": { + {`[a-zA-Z_]\w*`, NameNamespace, Pop(1)}, + Default(Pop(1)), + }, + "message": { + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + Default(Pop(1)), + }, + "type": { + {`[a-zA-Z_]\w*`, Name, Pop(1)}, + Default(Pop(1)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/puppet.go b/vendor/github.com/alecthomas/chroma/lexers/p/puppet.go new file mode 100644 index 00000000..a1db0d27 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/puppet.go @@ -0,0 +1,56 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Puppet lexer. 
+var Puppet = internal.Register(MustNewLexer( + &Config{ + Name: "Puppet", + Aliases: []string{"puppet"}, + Filenames: []string{"*.pp"}, + MimeTypes: []string{}, + }, + Rules{ + "root": { + Include("comments"), + Include("keywords"), + Include("names"), + Include("numbers"), + Include("operators"), + Include("strings"), + {`[]{}:(),;[]`, Punctuation, nil}, + {`[^\S\n]+`, Text, nil}, + }, + "comments": { + {`\s*#.*$`, Comment, nil}, + {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, + }, + "operators": { + {`(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)`, Operator, nil}, + {`(in|and|or|not)\b`, OperatorWord, nil}, + }, + "names": { + {`[a-zA-Z_]\w*`, NameAttribute, nil}, + {`(\$\S+)(\[)(\S+)(\])`, ByGroups(NameVariable, Punctuation, LiteralString, Punctuation), nil}, + {`\$\S+`, NameVariable, nil}, + }, + "numbers": { + {`(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?`, LiteralNumberFloat, nil}, + {`\d+[eE][+-]?[0-9]+j?`, LiteralNumberFloat, nil}, + {`0[0-7]+j?`, LiteralNumberOct, nil}, + {`0[xX][a-fA-F0-9]+`, LiteralNumberHex, nil}, + {`\d+L`, LiteralNumberIntegerLong, nil}, + {`\d+j?`, LiteralNumberInteger, nil}, + }, + "keywords": { + {Words(`(?i)`, `\b`, `absent`, `alert`, `alias`, `audit`, `augeas`, `before`, `case`, `check`, `class`, `computer`, `configured`, `contained`, `create_resources`, `crit`, `cron`, `debug`, `default`, `define`, `defined`, `directory`, `else`, `elsif`, `emerg`, `err`, `exec`, `extlookup`, `fail`, `false`, `file`, `filebucket`, `fqdn_rand`, `generate`, `host`, `if`, `import`, `include`, `info`, `inherits`, `inline_template`, `installed`, `interface`, `k5login`, `latest`, `link`, `loglevel`, `macauthorization`, `mailalias`, `maillist`, `mcx`, `md5`, `mount`, `mounted`, `nagios_command`, `nagios_contact`, `nagios_contactgroup`, `nagios_host`, `nagios_hostdependency`, `nagios_hostescalation`, `nagios_hostextinfo`, `nagios_hostgroup`, `nagios_service`, `nagios_servicedependency`, `nagios_serviceescalation`, `nagios_serviceextinfo`, `nagios_servicegroup`, `nagios_timeperiod`, `node`, `noop`, `notice`, `notify`, `package`, `present`, `purged`, `realize`, `regsubst`, `resources`, `role`, `router`, `running`, `schedule`, `scheduled_task`, `search`, `selboolean`, `selmodule`, `service`, `sha1`, `shellquote`, `split`, `sprintf`, `ssh_authorized_key`, `sshkey`, `stage`, `stopped`, `subscribe`, `tag`, `tagged`, `template`, `tidy`, `true`, `undef`, `unmounted`, `user`, `versioncmp`, `vlan`, `warning`, `yumrepo`, `zfs`, `zone`, `zpool`), Keyword, nil}, + }, + "strings": { + {`"([^"])*"`, LiteralString, nil}, + {`'(\\'|[^'])*'`, LiteralString, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/p/python.go b/vendor/github.com/alecthomas/chroma/lexers/p/python.go new file mode 100644 index 00000000..77f2c8db --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/p/python.go @@ -0,0 +1,137 @@ +package p + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Python lexer. 
+var Python = internal.Register(MustNewLexer( + &Config{ + Name: "Python", + Aliases: []string{"python", "py", "sage"}, + Filenames: []string{"*.py", "*.pyw", "*.sc", "SConstruct", "SConscript", "*.tac", "*.sage"}, + MimeTypes: []string{"text/x-python", "application/x-python"}, + }, + Rules{ + "root": { + {`\n`, Text, nil}, + {`^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, + {`^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')`, ByGroups(Text, LiteralStringAffix, LiteralStringDoc), nil}, + {`[^\S\n]+`, Text, nil}, + {`\A#!.+$`, CommentHashbang, nil}, + {`#.*$`, CommentSingle, nil}, + {`[]{}:(),;[]`, Punctuation, nil}, + {`\\\n`, Text, nil}, + {`\\`, Text, nil}, + {`(in|is|and|or|not)\b`, OperatorWord, nil}, + {`!=|==|<<|>>|[-~+/*%=<>&^|.]`, Operator, nil}, + Include("keywords"), + {`(def)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("funcname")}, + {`(class)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("classname")}, + {`(from)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("fromimport")}, + {`(import)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + Include("builtins"), + Include("magicfuncs"), + Include("magicvars"), + Include("backtick"), + {`([rR]|[uUbB][rR]|[rR][uUbB])(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("tdqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("tsqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("dqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("sqs")}, + {`([uUbB]?)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "tdqs")}, + {`([uUbB]?)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "tsqs")}, + {`([uUbB]?)(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "dqs")}, + {`([uUbB]?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "sqs")}, + Include("name"), + Include("numbers"), + }, + "keywords": { + {Words(``, `\b`, `assert`, `break`, `continue`, `del`, `elif`, `else`, `except`, `exec`, `finally`, `for`, `global`, `if`, `lambda`, `pass`, `print`, `raise`, `return`, `try`, `while`, `yield`, `yield from`, `as`, `with`), Keyword, nil}, + }, + "builtins": { + {Words(`(?>|[-~+/*%=<>&^|.]`, Operator, nil}, + Include("keywords"), + {`(def)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("funcname")}, + {`(class)((?:\s|\\\s)+)`, ByGroups(Keyword, Text), Push("classname")}, + {`(from)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("fromimport")}, + {`(import)((?:\s|\\\s)+)`, ByGroups(KeywordNamespace, Text), Push("import")}, + Include("builtins"), + Include("magicfuncs"), + Include("magicvars"), + Include("backtick"), + {`([rR]|[uUbB][rR]|[rR][uUbB])(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("tdqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("tsqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Push("dqs")}, + {`([rR]|[uUbB][rR]|[rR][uUbB])(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Push("sqs")}, + {`([uUbB]?)(""")`, ByGroups(LiteralStringAffix, LiteralStringDouble), Combined("stringescape", "tdqs")}, + {`([uUbB]?)(''')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "tsqs")}, + {`([uUbB]?)(")`, ByGroups(LiteralStringAffix, 
LiteralStringDouble), Combined("stringescape", "dqs")}, + {`([uUbB]?)(')`, ByGroups(LiteralStringAffix, LiteralStringSingle), Combined("stringescape", "sqs")}, + Include("name"), + Include("numbers"), + }, + "keywords": { + {Words(``, `\b`, `assert`, `async`, `await`, `break`, `continue`, `del`, `elif`, `else`, `except`, `finally`, `for`, `global`, `if`, `lambda`, `pass`, `raise`, `nonlocal`, `return`, `try`, `while`, `yield`, `yield from`, `as`, `with`), Keyword, nil}, + {Words(``, `\b`, `True`, `False`, `None`), KeywordConstant, nil}, + }, + "builtins": { + {Words(`(?=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?\}`, LiteralStringInterpol, nil}, + {`[^\\\'"%{\n]+`, LiteralStringSingle, nil}, + {`[\'"\\]`, LiteralStringSingle, nil}, + {`%|(\{{1,2})`, LiteralStringSingle, nil}, + }, + "strings-double": { + {`%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]`, LiteralStringInterpol, nil}, + {`\{((\w+)((\.\w+)|(\[[^\]]+\]))*)?(\![sra])?(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?\}`, LiteralStringInterpol, nil}, + {`[^\\\'"%{\n]+`, LiteralStringDouble, nil}, + {`[\'"\\]`, LiteralStringDouble, nil}, + {`%|(\{{1,2})`, LiteralStringDouble, nil}, + }, + "dqs": { + {`"`, LiteralStringDouble, Pop(1)}, + {`\\\\|\\"|\\\n`, LiteralStringEscape, nil}, + Include("strings-double"), + }, + "sqs": { + {`'`, LiteralStringSingle, Pop(1)}, + {`\\\\|\\'|\\\n`, LiteralStringEscape, nil}, + Include("strings-single"), + }, + "tdqs": { + {`"""`, LiteralStringDouble, Pop(1)}, + Include("strings-double"), + {`\n`, LiteralStringDouble, nil}, + }, + "tsqs": { + {`'''`, LiteralStringSingle, Pop(1)}, + Include("strings-single"), + {`\n`, LiteralStringSingle, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/q/qbasic.go b/vendor/github.com/alecthomas/chroma/lexers/q/qbasic.go new file mode 100644 index 00000000..fd47c080 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/q/qbasic.go @@ -0,0 +1,67 @@ +package q + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Qbasic lexer. 
+var Qbasic = internal.Register(MustNewLexer( + &Config{ + Name: "QBasic", + Aliases: []string{"qbasic", "basic"}, + Filenames: []string{"*.BAS", "*.bas"}, + MimeTypes: []string{"text/basic"}, + }, + Rules{ + "root": { + {`\n+`, Text, nil}, + {`\s+`, TextWhitespace, nil}, + {`^(\s*)(\d*)(\s*)(REM .*)$`, ByGroups(TextWhitespace, NameLabel, TextWhitespace, CommentSingle), nil}, + {`^(\s*)(\d+)(\s*)`, ByGroups(TextWhitespace, NameLabel, TextWhitespace), nil}, + {`(?=[\s]*)(\w+)(?=[\s]*=)`, NameVariableGlobal, nil}, + {`(?=[^"]*)\'.*$`, CommentSingle, nil}, + {`"[^\n"]*"`, LiteralStringDouble, nil}, + {`(END)(\s+)(FUNCTION|IF|SELECT|SUB)`, ByGroups(KeywordReserved, TextWhitespace, KeywordReserved), nil}, + {`(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)`, ByGroups(KeywordDeclaration, TextWhitespace, NameVariable, TextWhitespace, Name), nil}, + {`(DIM)(\s+)(SHARED)(\s+)([^\s(]+)`, ByGroups(KeywordDeclaration, TextWhitespace, NameVariable, TextWhitespace, NameVariableGlobal), nil}, + {`(DIM)(\s+)([^\s(]+)`, ByGroups(KeywordDeclaration, TextWhitespace, NameVariableGlobal), nil}, + {`^(\s*)([a-zA-Z_]+)(\s*)(\=)`, ByGroups(TextWhitespace, NameVariableGlobal, TextWhitespace, Operator), nil}, + {`(GOTO|GOSUB)(\s+)(\w+\:?)`, ByGroups(KeywordReserved, TextWhitespace, NameLabel), nil}, + {`(SUB)(\s+)(\w+\:?)`, ByGroups(KeywordReserved, TextWhitespace, NameLabel), nil}, + Include("declarations"), + Include("functions"), + Include("metacommands"), + Include("operators"), + Include("statements"), + Include("keywords"), + {`[a-zA-Z_]\w*[$@#&!]`, NameVariableGlobal, nil}, + {`[a-zA-Z_]\w*\:`, NameLabel, nil}, + {`\-?\d*\.\d+[@|#]?`, LiteralNumberFloat, nil}, + {`\-?\d+[@|#]`, LiteralNumberFloat, nil}, + {`\-?\d+#?`, LiteralNumberIntegerLong, nil}, + {`\-?\d+#?`, LiteralNumberInteger, nil}, + {`!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]`, Operator, nil}, + {`[\[\]{}(),;]`, Punctuation, nil}, + {`[\w]+`, NameVariableGlobal, nil}, + }, + "declarations": { + {`\b(DATA|LET)(?=\(|\b)`, KeywordDeclaration, nil}, + }, + "functions": { + {`\b(ABS|ASC|ATN|CDBL|CHR\$|CINT|CLNG|COMMAND\$|COS|CSNG|CSRLIN|CVD|CVDMBF|CVI|CVL|CVS|CVSMBF|DATE\$|ENVIRON\$|EOF|ERDEV|ERDEV\$|ERL|ERR|EXP|FILEATTR|FIX|FRE|FREEFILE|HEX\$|INKEY\$|INP|INPUT\$|INSTR|INT|IOCTL\$|LBOUND|LCASE\$|LEFT\$|LEN|LOC|LOF|LOG|LPOS|LTRIM\$|MID\$|MKD\$|MKDMBF\$|MKI\$|MKL\$|MKS\$|MKSMBF\$|OCT\$|PEEK|PEN|PLAY|PMAP|POINT|POS|RIGHT\$|RND|RTRIM\$|SADD|SCREEN|SEEK|SETMEM|SGN|SIN|SPACE\$|SPC|SQR|STICK|STR\$|STRIG|STRING\$|TAB|TAN|TIME\$|TIMER|UBOUND|UCASE\$|VAL|VARPTR|VARPTR\$|VARSEG)(?=\(|\b)`, KeywordReserved, nil}, + }, + "metacommands": { + {`\b(\$DYNAMIC|\$INCLUDE|\$STATIC)(?=\(|\b)`, KeywordConstant, nil}, + }, + "operators": { + {`\b(AND|EQV|IMP|NOT|OR|XOR)(?=\(|\b)`, OperatorWord, nil}, + }, + "statements": { + {`\b(BEEP|BLOAD|BSAVE|CALL|CALL\ ABSOLUTE|CALL\ INTERRUPT|CALLS|CHAIN|CHDIR|CIRCLE|CLEAR|CLOSE|CLS|COLOR|COM|COMMON|CONST|DATA|DATE\$|DECLARE|DEF\ FN|DEF\ SEG|DEFDBL|DEFINT|DEFLNG|DEFSNG|DEFSTR|DEF|DIM|DO|LOOP|DRAW|END|ENVIRON|ERASE|ERROR|EXIT|FIELD|FILES|FOR|NEXT|FUNCTION|GET|GOSUB|GOTO|IF|THEN|INPUT|INPUT\ \#|IOCTL|KEY|KEY|KILL|LET|LINE|LINE\ INPUT|LINE\ INPUT\ \#|LOCATE|LOCK|UNLOCK|LPRINT|LSET|MID\$|MKDIR|NAME|ON\ COM|ON\ ERROR|ON\ KEY|ON\ PEN|ON\ PLAY|ON\ STRIG|ON\ TIMER|ON\ UEVENT|ON|OPEN|OPEN\ COM|OPTION\ BASE|OUT|PAINT|PALETTE|PCOPY|PEN|PLAY|POKE|PRESET|PRINT|PRINT\ \#|PRINT\ USING|PSET|PUT|PUT|RANDOMIZE|READ|REDIM|REM|RESET|RESTORE|RESUME|RETURN|RMDIR|RSET|RUN|SCREEN|SEEK|SELECT\ 
CASE|SHARED|SHELL|SLEEP|SOUND|STATIC|STOP|STRIG|SUB|SWAP|SYSTEM|TIME\$|TIMER|TROFF|TRON|TYPE|UEVENT|UNLOCK|VIEW|WAIT|WHILE|WEND|WIDTH|WINDOW|WRITE)\b`, KeywordReserved, nil}, + }, + "keywords": { + {`\b(ACCESS|ALIAS|ANY|APPEND|AS|BASE|BINARY|BYVAL|CASE|CDECL|DOUBLE|ELSE|ELSEIF|ENDIF|INTEGER|IS|LIST|LOCAL|LONG|LOOP|MOD|NEXT|OFF|ON|OUTPUT|RANDOM|SIGNAL|SINGLE|STEP|STRING|THEN|TO|UNTIL|USING|WEND)\b`, Keyword, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/r.go b/vendor/github.com/alecthomas/chroma/lexers/r/r.go new file mode 100644 index 00000000..b2e205ed --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/r.go @@ -0,0 +1,70 @@ +package r + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// R/S lexer. +var R = internal.Register(MustNewLexer( + &Config{ + Name: "R", + Aliases: []string{"splus", "s", "r"}, + Filenames: []string{"*.S", "*.R", "*.r", ".Rhistory", ".Rprofile", ".Renviron"}, + MimeTypes: []string{"text/S-plus", "text/S", "text/x-r-source", "text/x-r", "text/x-R", "text/x-r-history", "text/x-r-profile"}, + }, + Rules{ + "comments": { + {`#.*$`, CommentSingle, nil}, + }, + "valid_name": { + {`[a-zA-Z][\w.]*`, Text, nil}, + {`\.[a-zA-Z_][\w.]*`, Text, nil}, + }, + "punctuation": { + {`\[{1,2}|\]{1,2}|\(|\)|;|,`, Punctuation, nil}, + }, + "keywords": { + {Words(``, `(?![\w. =])`, `Arg`, `Conj`, `Cstack_info`, `Encoding`, `FALSE`, `Filter`, `Find`, `I`, `ISOdate`, `ISOdatetime`, `Im`, `Inf`, `La.svd`, `Map`, `Math.Date`, `Math.POSIXt`, `Math.data.frame`, `Math.difftime`, `Math.factor`, `Mod`, `NA_character_`, `NA_complex_`, `NA_real_`, `NCOL`, `NROW`, `NULLNA_integer_`, `NaN`, `Negate`, `NextMethod`, `Ops.Date`, `Ops.POSIXt`, `Ops.data.frame`, `Ops.difftime`, `Ops.factor`, `Ops.numeric_version`, `Ops.ordered`, `Position`, `R.Version`, `R.home`, `R.version`, `R.version.string`, `RNGkind`, `RNGversion`, `R_system_version`, `Re`, `Recall`, `Reduce`, `Summary.Date`, `Summary.POSIXct`, `Summary.POSIXlt`, `Summary.data.frame`, `Summary.difftime`, `Summary.factor`, `Summary.numeric_version`, `Summary.ordered`, `Sys.Date`, `Sys.chmod`, `Sys.getenv`, `Sys.getlocale`, `Sys.getpid`, `Sys.glob`, `Sys.info`, `Sys.localeconv`, `Sys.readlink`, `Sys.setFileTime`, `Sys.setenv`, `Sys.setlocale`, `Sys.sleep`, `Sys.time`, `Sys.timezone`, `Sys.umask`, `Sys.unsetenv`, `Sys.which`, `TRUE`, `UseMethod`, `Vectorize`, `abbreviate`, `abs`, `acos`, `acosh`, `addNA`, `addTaskCallback`, `agrep`, `alist`, `all`, `all.equal`, `all.equal.POSIXct`, `all.equal.character`, `all.equal.default`, `all.equal.factor`, `all.equal.formula`, `all.equal.language`, `all.equal.list`, `all.equal.numeric`, `all.equal.raw`, `all.names`, `all.vars`, `any`, `anyDuplicated`, `anyDuplicated.array`, `anyDuplicated.data.frame`, `anyDuplicated.default`, `anyDuplicated.matrix`, `aperm`, `aperm.default`, `aperm.table`, `append`, `apply`, `args`, `arrayInd`, `as.Date`, `as.Date.POSIXct`, `as.Date.POSIXlt`, `as.Date.character`, `as.Date.date`, `as.Date.dates`, `as.Date.default`, `as.Date.factor`, `as.Date.numeric`, `as.POSIXct`, `as.POSIXct.Date`, `as.POSIXct.POSIXlt`, `as.POSIXct.date`, `as.POSIXct.dates`, `as.POSIXct.default`, `as.POSIXct.numeric`, `as.POSIXlt`, `as.POSIXlt.Date`, `as.POSIXlt.POSIXct`, `as.POSIXlt.character`, `as.POSIXlt.date`, `as.POSIXlt.dates`, `as.POSIXlt.default`, `as.POSIXlt.factor`, `as.POSIXlt.numeric`, `as.array`, `as.array.default`, `as.call`, `as.character`, `as.character.Date`, `as.character.POSIXt`, 
`as.character.condition`, `as.character.default`, `as.character.error`, `as.character.factor`, `as.character.hexmode`, `as.character.numeric_version`, `as.character.octmode`, `as.character.srcref`, `as.complex`, `as.data.frame`, `as.data.frame.AsIs`, `as.data.frame.Date`, `as.data.frame.POSIXct`, `as.data.frame.POSIXlt`, `as.data.frame.array`, `as.data.frame.character`, `as.data.frame.complex`, `as.data.frame.data.frame`, `as.data.frame.default`, `as.data.frame.difftime`, `as.data.frame.factor`, `as.data.frame.integer`, `as.data.frame.list`, `as.data.frame.logical`, `as.data.frame.matrix`, `as.data.frame.model.matrix`, `as.data.frame.numeric`, `as.data.frame.numeric_version`, `as.data.frame.ordered`, `as.data.frame.raw`, `as.data.frame.table`, `as.data.frame.ts`, `as.data.frame.vector`, `as.difftime`, `as.double`, `as.double.POSIXlt`, `as.double.difftime`, `as.environment`, `as.expression`, `as.expression.default`, `as.factor`, `as.function`, `as.function.default`, `as.hexmode`, `as.integer`, `as.list`, `as.list.Date`, `as.list.POSIXct`, `as.list.data.frame`, `as.list.default`, `as.list.environment`, `as.list.factor`, `as.list.function`, `as.list.numeric_version`, `as.logical`, `as.logical.factor`, `as.matrix`, `as.matrix.POSIXlt`, `as.matrix.data.frame`, `as.matrix.default`, `as.matrix.noquote`, `as.name`, `as.null`, `as.null.default`, `as.numeric`, `as.numeric_version`, `as.octmode`, `as.ordered`, `as.package_version`, `as.pairlist`, `as.qr`, `as.raw`, `as.single`, `as.single.default`, `as.symbol`, `as.table`, `as.table.default`, `as.vector`, `as.vector.factor`, `asNamespace`, `asS3`, `asS4`, `asin`, `asinh`, `assign`, `atan`, `atan2`, `atanh`, `attachNamespace`, `attr`, `attr.all.equal`, `attributes`, `autoload`, `autoloader`, `backsolve`, `baseenv`, `basename`, `besselI`, `besselJ`, `besselK`, `besselY`, `beta`, `bindingIsActive`, `bindingIsLocked`, `bindtextdomain`, `bitwAnd`, `bitwNot`, `bitwOr`, `bitwShiftL`, `bitwShiftR`, `bitwXor`, `body`, `bquote`, `browser`, `browserCondition`, `browserSetDebug`, `browserText`, `builtins`, `by`, `by.data.frame`, `by.default`, `bzfile`, `c.Date`, `c.POSIXct`, `c.POSIXlt`, `c.noquote`, `c.numeric_version`, `call`, `callCC`, `capabilities`, `casefold`, `cat`, `category`, `cbind`, `cbind.data.frame`, `ceiling`, `char.expand`, `charToRaw`, `charmatch`, `chartr`, `check_tzones`, `chol`, `chol.default`, `chol2inv`, `choose`, `class`, `clearPushBack`, `close`, `close.connection`, `close.srcfile`, `close.srcfilealias`, `closeAllConnections`, `col`, `colMeans`, `colSums`, `colnames`, `commandArgs`, `comment`, `computeRestarts`, `conditionCall`, `conditionCall.condition`, `conditionMessage`, `conditionMessage.condition`, `conflicts`, `contributors`, `cos`, `cosh`, `crossprod`, `cummax`, `cummin`, `cumprod`, `cumsum`, `cut`, `cut.Date`, `cut.POSIXt`, `cut.default`, `dQuote`, `data.class`, `data.matrix`, `date`, `debug`, `debugonce`, `default.stringsAsFactors`, `delayedAssign`, `deparse`, `det`, `determinant`, `determinant.matrix`, `dget`, `diag`, `diff`, `diff.Date`, `diff.POSIXt`, `diff.default`, `difftime`, `digamma`, `dim`, `dim.data.frame`, `dimnames`, `dimnames.data.frame`, `dir`, `dir.create`, `dirname`, `do.call`, `dput`, `drop`, `droplevels`, `droplevels.data.frame`, `droplevels.factor`, `dump`, `duplicated`, `duplicated.POSIXlt`, `duplicated.array`, `duplicated.data.frame`, `duplicated.default`, `duplicated.matrix`, `duplicated.numeric_version`, `dyn.load`, `dyn.unload`, `eapply`, `eigen`, `else`, `emptyenv`, `enc2native`, `enc2utf8`, 
`encodeString`, `enquote`, `env.profile`, `environment`, `environmentIsLocked`, `environmentName`, `eval`, `eval.parent`, `evalq`, `exists`, `exp`, `expand.grid`, `expm1`, `expression`, `factor`, `factorial`, `fifo`, `file`, `file.access`, `file.append`, `file.choose`, `file.copy`, `file.create`, `file.exists`, `file.info`, `file.link`, `file.path`, `file.remove`, `file.rename`, `file.show`, `file.symlink`, `find.package`, `findInterval`, `findPackageEnv`, `findRestart`, `floor`, `flush`, `flush.connection`, `force`, `formals`, `format`, `format.AsIs`, `format.Date`, `format.POSIXct`, `format.POSIXlt`, `format.data.frame`, `format.default`, `format.difftime`, `format.factor`, `format.hexmode`, `format.info`, `format.libraryIQR`, `format.numeric_version`, `format.octmode`, `format.packageInfo`, `format.pval`, `format.summaryDefault`, `formatC`, `formatDL`, `forwardsolve`, `gamma`, `gc`, `gc.time`, `gcinfo`, `gctorture`, `gctorture2`, `get`, `getAllConnections`, `getCallingDLL`, `getCallingDLLe`, `getConnection`, `getDLLRegisteredRoutines`, `getDLLRegisteredRoutines.DLLInfo`, `getDLLRegisteredRoutines.character`, `getElement`, `getExportedValue`, `getHook`, `getLoadedDLLs`, `getNamespace`, `getNamespaceExports`, `getNamespaceImports`, `getNamespaceInfo`, `getNamespaceName`, `getNamespaceUsers`, `getNamespaceVersion`, `getNativeSymbolInfo`, `getOption`, `getRversion`, `getSrcLines`, `getTaskCallbackNames`, `geterrmessage`, `gettext`, `gettextf`, `getwd`, `gl`, `globalenv`, `gregexpr`, `grep`, `grepRaw`, `grepl`, `gsub`, `gzcon`, `gzfile`, `head`, `iconv`, `iconvlist`, `icuSetCollate`, `identical`, `identity`, `ifelse`, `importIntoEnv`, `in`, `inherits`, `intToBits`, `intToUtf8`, `interaction`, `interactive`, `intersect`, `inverse.rle`, `invisible`, `invokeRestart`, `invokeRestartInteractively`, `is.R`, `is.array`, `is.atomic`, `is.call`, `is.character`, `is.complex`, `is.data.frame`, `is.double`, `is.element`, `is.environment`, `is.expression`, `is.factor`, `is.finite`, `is.function`, `is.infinite`, `is.integer`, `is.language`, `is.list`, `is.loaded`, `is.logical`, `is.matrix`, `is.na`, `is.na.POSIXlt`, `is.na.data.frame`, `is.na.numeric_version`, `is.name`, `is.nan`, `is.null`, `is.numeric`, `is.numeric.Date`, `is.numeric.POSIXt`, `is.numeric.difftime`, `is.numeric_version`, `is.object`, `is.ordered`, `is.package_version`, `is.pairlist`, `is.primitive`, `is.qr`, `is.raw`, `is.recursive`, `is.single`, `is.symbol`, `is.table`, `is.unsorted`, `is.vector`, `isBaseNamespace`, `isIncomplete`, `isNamespace`, `isOpen`, `isRestart`, `isS4`, `isSeekable`, `isSymmetric`, `isSymmetric.matrix`, `isTRUE`, `isatty`, `isdebugged`, `jitter`, `julian`, `julian.Date`, `julian.POSIXt`, `kappa`, `kappa.default`, `kappa.lm`, `kappa.qr`, `kronecker`, `l10n_info`, `labels`, `labels.default`, `lapply`, `lazyLoad`, `lazyLoadDBexec`, `lazyLoadDBfetch`, `lbeta`, `lchoose`, `length`, `length.POSIXlt`, `letters`, `levels`, `levels.default`, `lfactorial`, `lgamma`, `library.dynam`, `library.dynam.unload`, `licence`, `license`, `list.dirs`, `list.files`, `list2env`, `load`, `loadNamespace`, `loadedNamespaces`, `loadingNamespaceInfo`, `local`, `lockBinding`, `lockEnvironment`, `log`, `log10`, `log1p`, `log2`, `logb`, `lower.tri`, `ls`, `make.names`, `make.unique`, `makeActiveBinding`, `mapply`, `margin.table`, `mat.or.vec`, `match`, `match.arg`, `match.call`, `match.fun`, `max`, `max.col`, `mean`, `mean.Date`, `mean.POSIXct`, `mean.POSIXlt`, `mean.default`, `mean.difftime`, `mem.limits`, `memCompress`, `memDecompress`, 
`memory.profile`, `merge`, `merge.data.frame`, `merge.default`, `message`, `mget`, `min`, `missing`, `mode`, `month.abb`, `month.name`, `months`, `months.Date`, `months.POSIXt`, `months.abb`, `months.nameletters`, `names`, `names.POSIXlt`, `namespaceExport`, `namespaceImport`, `namespaceImportClasses`, `namespaceImportFrom`, `namespaceImportMethods`, `nargs`, `nchar`, `ncol`, `new.env`, `ngettext`, `nlevels`, `noquote`, `norm`, `normalizePath`, `nrow`, `numeric_version`, `nzchar`, `objects`, `oldClass`, `on.exit`, `open`, `open.connection`, `open.srcfile`, `open.srcfilealias`, `open.srcfilecopy`, `options`, `order`, `ordered`, `outer`, `packBits`, `packageEvent`, `packageHasNamespace`, `packageStartupMessage`, `package_version`, `pairlist`, `parent.env`, `parent.frame`, `parse`, `parseNamespaceFile`, `paste`, `paste0`, `path.expand`, `path.package`, `pipe`, `pmatch`, `pmax`, `pmax.int`, `pmin`, `pmin.int`, `polyroot`, `pos.to.env`, `pretty`, `pretty.default`, `prettyNum`, `print`, `print.AsIs`, `print.DLLInfo`, `print.DLLInfoList`, `print.DLLRegisteredRoutines`, `print.Date`, `print.NativeRoutineList`, `print.POSIXct`, `print.POSIXlt`, `print.by`, `print.condition`, `print.connection`, `print.data.frame`, `print.default`, `print.difftime`, `print.factor`, `print.function`, `print.hexmode`, `print.libraryIQR`, `print.listof`, `print.noquote`, `print.numeric_version`, `print.octmode`, `print.packageInfo`, `print.proc_time`, `print.restart`, `print.rle`, `print.simple.list`, `print.srcfile`, `print.srcref`, `print.summary.table`, `print.summaryDefault`, `print.table`, `print.warnings`, `prmatrix`, `proc.time`, `prod`, `prop.table`, `provideDimnames`, `psigamma`, `pushBack`, `pushBackLength`, `q`, `qr`, `qr.Q`, `qr.R`, `qr.X`, `qr.coef`, `qr.default`, `qr.fitted`, `qr.qty`, `qr.qy`, `qr.resid`, `qr.solve`, `quarters`, `quarters.Date`, `quarters.POSIXt`, `quit`, `quote`, `range`, `range.default`, `rank`, `rapply`, `raw`, `rawConnection`, `rawConnectionValue`, `rawShift`, `rawToBits`, `rawToChar`, `rbind`, `rbind.data.frame`, `rcond`, `read.dcf`, `readBin`, `readChar`, `readLines`, `readRDS`, `readRenviron`, `readline`, `reg.finalizer`, `regexec`, `regexpr`, `registerS3method`, `registerS3methods`, `regmatches`, `remove`, `removeTaskCallback`, `rep`, `rep.Date`, `rep.POSIXct`, `rep.POSIXlt`, `rep.factor`, `rep.int`, `rep.numeric_version`, `rep_len`, `replace`, `replicate`, `requireNamespace`, `restartDescription`, `restartFormals`, `retracemem`, `rev`, `rev.default`, `rle`, `rm`, `round`, `round.Date`, `round.POSIXt`, `row`, `row.names`, `row.names.data.frame`, `row.names.default`, `rowMeans`, `rowSums`, `rownames`, `rowsum`, `rowsum.data.frame`, `rowsum.default`, `sQuote`, `sample`, `sample.int`, `sapply`, `save`, `save.image`, `saveRDS`, `scale`, `scale.default`, `scan`, `search`, `searchpaths`, `seek`, `seek.connection`, `seq`, `seq.Date`, `seq.POSIXt`, `seq.default`, `seq.int`, `seq_along`, `seq_len`, `sequence`, `serialize`, `set.seed`, `setHook`, `setNamespaceInfo`, `setSessionTimeLimit`, `setTimeLimit`, `setdiff`, `setequal`, `setwd`, `shQuote`, `showConnections`, `sign`, `signalCondition`, `signif`, `simpleCondition`, `simpleError`, `simpleMessage`, `simpleWarning`, `simplify2array`, `sin`, `single`, `sinh`, `sink`, `sink.number`, `slice.index`, `socketConnection`, `socketSelect`, `solve`, `solve.default`, `solve.qr`, `sort`, `sort.POSIXlt`, `sort.default`, `sort.int`, `sort.list`, `split`, `split.Date`, `split.POSIXct`, `split.data.frame`, `split.default`, `sprintf`, `sqrt`, `srcfile`, 
`srcfilealias`, `srcfilecopy`, `srcref`, `standardGeneric`, `stderr`, `stdin`, `stdout`, `stop`, `stopifnot`, `storage.mode`, `strftime`, `strptime`, `strsplit`, `strtoi`, `strtrim`, `structure`, `strwrap`, `sub`, `subset`, `subset.data.frame`, `subset.default`, `subset.matrix`, `substitute`, `substr`, `substring`, `sum`, `summary`, `summary.Date`, `summary.POSIXct`, `summary.POSIXlt`, `summary.connection`, `summary.data.frame`, `summary.default`, `summary.factor`, `summary.matrix`, `summary.proc_time`, `summary.srcfile`, `summary.srcref`, `summary.table`, `suppressMessages`, `suppressPackageStartupMessages`, `suppressWarnings`, `svd`, `sweep`, `sys.call`, `sys.calls`, `sys.frame`, `sys.frames`, `sys.function`, `sys.load.image`, `sys.nframe`, `sys.on.exit`, `sys.parent`, `sys.parents`, `sys.save.image`, `sys.source`, `sys.status`, `system`, `system.file`, `system.time`, `system2`, `t`, `t.data.frame`, `t.default`, `table`, `tabulate`, `tail`, `tan`, `tanh`, `tapply`, `taskCallbackManager`, `tcrossprod`, `tempdir`, `tempfile`, `testPlatformEquivalence`, `textConnection`, `textConnectionValue`, `toString`, `toString.default`, `tolower`, `topenv`, `toupper`, `trace`, `traceback`, `tracemem`, `tracingState`, `transform`, `transform.data.frame`, `transform.default`, `trigamma`, `trunc`, `trunc.Date`, `trunc.POSIXt`, `truncate`, `truncate.connection`, `try`, `tryCatch`, `typeof`, `unclass`, `undebug`, `union`, `unique`, `unique.POSIXlt`, `unique.array`, `unique.data.frame`, `unique.default`, `unique.matrix`, `unique.numeric_version`, `units`, `units.difftime`, `unix.time`, `unlink`, `unlist`, `unloadNamespace`, `unlockBinding`, `unname`, `unserialize`, `unsplit`, `untrace`, `untracemem`, `unz`, `upper.tri`, `url`, `utf8ToInt`, `vapply`, `version`, `warning`, `warnings`, `weekdays`, `weekdays.Date`, `weekdays.POSIXt`, `which`, `which.max`, `which.min`, `with`, `with.default`, `withCallingHandlers`, `withRestarts`, `withVisible`, `within`, `within.data.frame`, `within.list`, `write`, `write.dcf`, `writeBin`, `writeChar`, `writeLines`, `xor`, `xor.hexmode`, `xor.octmode`, `xpdrows.data.frame`, `xtfrm`, `xtfrm.AsIs`, `xtfrm.Date`, `xtfrm.POSIXct`, `xtfrm.POSIXlt`, `xtfrm.Surv`, `xtfrm.default`, `xtfrm.difftime`, `xtfrm.factor`, `xtfrm.numeric_version`, `xzfile`, `zapsmall`), KeywordPseudo, nil}, + {`(if|else|for|while|repeat|in|next|break|return|switch|function)(?![\w.])`, KeywordReserved, nil}, + {`(array|category|character|complex|double|function|integer|list|logical|matrix|numeric|vector|data.frame|c)(?![\w.])`, KeywordType, nil}, + {`(library|require|attach|detach|source)(?![\w.])`, KeywordNamespace, nil}, + }, + "operators": { + {`<>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?`, Operator, nil}, + {`\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}`, Operator, nil}, + }, + "builtin_symbols": { + {`(NULL|NA(_(integer|real|complex|character)_)?|letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))(?![\w.])`, KeywordConstant, nil}, + {`(T|F)\b`, NameBuiltinPseudo, nil}, + }, + "numbers": { + {`0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?`, LiteralNumberHex, nil}, + {`[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?`, LiteralNumber, nil}, + }, + "statements": { + Include("comments"), + {`\s+`, Text, nil}, + {"`.*?`", LiteralStringBacktick, nil}, + {`\'`, LiteralString, Push("string_squote")}, + {`\"`, LiteralString, Push("string_dquote")}, + Include("builtin_symbols"), + Include("numbers"), + Include("keywords"), + Include("punctuation"), + Include("operators"), + Include("valid_name"), + }, + "root": { + 
Include("statements"), + {`\{|\}`, Punctuation, nil}, + {`.`, Text, nil}, + }, + "string_squote": { + {`([^\'\\]|\\.)*\'`, LiteralString, Pop(1)}, + }, + "string_dquote": { + {`([^"\\]|\\.)*"`, LiteralString, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/racket.go b/vendor/github.com/alecthomas/chroma/lexers/r/racket.go new file mode 100644 index 00000000..cf27ea10 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/racket.go @@ -0,0 +1,102 @@ +package r + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Racket lexer. +var Racket = internal.Register(MustNewLexer( + &Config{ + Name: "Racket", + Aliases: []string{"racket", "rkt"}, + Filenames: []string{"*.rkt", "*.rktd", "*.rktl"}, + MimeTypes: []string{"text/x-racket", "application/x-racket"}, + }, + Rules{ + "root": { + {`[)\]}]`, Error, nil}, + {`(?!\Z)`, Text, Push("unquoted-datum")}, + }, + "datum": { + {`(?s)#;|#![ /]([^\\\n]|\\.)*`, Comment, nil}, + {`;[^\n\r…

]*`, CommentSingle, nil}, + {`#\|`, CommentMultiline, Push("block-comment")}, + {`\s+`, Text, nil}, + {"(?i)(?:#e)?(?:#d)?(?:#e)?[-+]?\\d+(?=[()[\\]{}\",\\'`;\\s])", LiteralNumberInteger, Pop(1)}, + {"(?i)(?:#e)?(?:#d)?(?:#e)?[-+]?(\\d+(\\.\\d*)?|\\.\\d+)([deflst][-+]?\\d+)?(?=[()[\\]{}\",\\'`;\\s])", LiteralNumberFloat, Pop(1)}, + {"(?i)(?:#e)?(?:#d)?(?:#e)?[-+]?((?:(?:\\d+(?:/\\d+|\\.\\d*)?|\\.\\d+)(?:[defls][-+]?\\d+)?)([-+](?:(?:\\d+(?:/\\d+|\\.\\d*)?|\\.\\d+)(?:[defls][-+]?\\d+)?)?i)?|[-+](?:(?:\\d+(?:/\\d+|\\.\\d*)?|\\.\\d+)(?:[defls][-+]?\\d+)?)?i)(?=[()[\\]{}\",\\'`;\\s])", LiteralNumber, Pop(1)}, + {"(?i)(#d)?((?:[-+]?(?:(?:(?:\\d+(?:/\\d+|\\.\\d*)?|\\.\\d+)|(?:\\d+#+(?:\\.#*|/\\d+#*)?|\\.\\d+#+|\\d+(?:\\.\\d*#+|/\\d+#+)))(?:[defls][-+]?\\d+)?)|[-+](?:(?:inf|nan)\\.[0f]))([-+](?:(?:(?:(?:\\d+(?:/\\d+|\\.\\d*)?|\\.\\d+)|(?:\\d+#+(?:\\.#*|/\\d+#*)?|\\.\\d+#+|\\d+(?:\\.\\d*#+|/\\d+#+)))(?:[defls][-+]?\\d+)?)|(?:(?:inf|nan)\\.[0f]))?i)?|[-+](?:(?:(?:(?:\\d+(?:/\\d+|\\.\\d*)?|\\.\\d+)|(?:\\d+#+(?:\\.#*|/\\d+#*)?|\\.\\d+#+|\\d+(?:\\.\\d*#+|/\\d+#+)))(?:[defls][-+]?\\d+)?)|(?:(?:inf|nan)\\.[0f]))?i|(?:[-+]?(?:(?:(?:\\d+(?:/\\d+|\\.\\d*)?|\\.\\d+)|(?:\\d+#+(?:\\.#*|/\\d+#*)?|\\.\\d+#+|\\d+(?:\\.\\d*#+|/\\d+#+)))(?:[defls][-+]?\\d+)?)|[-+](?:(?:inf|nan)\\.[0f]))@(?:[-+]?(?:(?:(?:\\d+(?:/\\d+|\\.\\d*)?|\\.\\d+)|(?:\\d+#+(?:\\.#*|/\\d+#*)?|\\.\\d+#+|\\d+(?:\\.\\d*#+|/\\d+#+)))(?:[defls][-+]?\\d+)?)|[-+](?:(?:inf|nan)\\.[0f])))(?=[()[\\]{}\",\\'`;\\s])", LiteralNumberFloat, Pop(1)}, + {"(?i)(([-+]?(?:(?:\\d+(?:/\\d+|\\.\\d*)?|\\.\\d+)|(?:\\d+#+(?:\\.#*|/\\d+#*)?|\\.\\d+#+|\\d+(?:\\.\\d*#+|/\\d+#+)))t[-+]?\\d+)|[-+](inf|nan)\\.t)(?=[()[\\]{}\",\\'`;\\s])", LiteralNumberFloat, Pop(1)}, + {"(?i)(#[ei])?#b(?:\\|[^|]*\\||\\\\[\\w\\W]|[^|\\\\()[\\]{}\",\\'`;\\s]+)+", LiteralNumberBin, Pop(1)}, + {"(?i)(#[ei])?#o(?:\\|[^|]*\\||\\\\[\\w\\W]|[^|\\\\()[\\]{}\",\\'`;\\s]+)+", LiteralNumberOct, Pop(1)}, + {"(?i)(#[ei])?#x(?:\\|[^|]*\\||\\\\[\\w\\W]|[^|\\\\()[\\]{}\",\\'`;\\s]+)+", LiteralNumberHex, Pop(1)}, + {"(?i)(#d)?#i(?:\\|[^|]*\\||\\\\[\\w\\W]|[^|\\\\()[\\]{}\",\\'`;\\s]+)+", LiteralNumberFloat, Pop(1)}, + {`#?"`, LiteralStringDouble, Push("#pop", "string")}, + {`#<<(.+)\n(^(?!\1$).*$\n)*^\1$`, LiteralStringHeredoc, Pop(1)}, + {`#\\(u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8})`, LiteralStringChar, Pop(1)}, + {`(?is)#\\([0-7]{3}|[a-z]+|.)`, LiteralStringChar, Pop(1)}, + {`(?s)#[pr]x#?"(\\?.)*?"`, LiteralStringRegex, Pop(1)}, + {`#(true|false|[tTfF])`, NameConstant, Pop(1)}, + {"#:(?:\\|[^|]*\\||\\\\[\\w\\W]|[^|\\\\()[\\]{}\",\\'`;\\s]+)+", KeywordDeclaration, Pop(1)}, + {`(#lang |#!)(\S+)`, ByGroups(KeywordNamespace, NameNamespace), nil}, + {`#reader`, KeywordNamespace, Push("quoted-datum")}, + {"(?i)\\.(?=[()[\\]{}\",\\'`;\\s])|#c[is]|#['`]|#,@?", Operator, nil}, + {`'|#[s&]|#hash(eqv?)?|#\d*(?=[([{])`, Operator, Push("#pop", "quoted-datum")}, + }, + "datum*": { + {"`|,@?", Operator, nil}, + {"(?:\\|[^|]*\\||\\\\[\\w\\W]|[^|\\\\()[\\]{}\",\\'`;\\s]+)+", LiteralStringSymbol, Pop(1)}, + {`[|\\]`, Error, nil}, + Default(Pop(1)), + }, + "list": { + {`[)\]}]`, Punctuation, Pop(1)}, + }, + "unquoted-datum": { + Include("datum"), + {"quote(?=[()[\\]{}\",\\'`;\\s])", Keyword, Push("#pop", "quoted-datum")}, + {"`", Operator, Push("#pop", "quasiquoted-datum")}, + {"quasiquote(?=[()[\\]{}\",\\'`;\\s])", Keyword, Push("#pop", "quasiquoted-datum")}, + {`[([{]`, Punctuation, Push("#pop", "unquoted-list")}, + {Words(``, "(?=[()[\\]{}\",\\'`;\\s])", `#%app`, `#%datum`, `#%declare`, `#%expression`, `#%module-begin`, 
`#%plain-app`, `#%plain-lambda`, `#%plain-module-begin`, `#%printing-module-begin`, `#%provide`, `#%require`, `#%stratified-body`, `#%top`, `#%top-interaction`, `#%variable-reference`, `->`, `->*`, `->*m`, `->d`, `->dm`, `->i`, `->m`, `...`, `:do-in`, `==`, `=>`, `_`, `absent`, `abstract`, `all-defined-out`, `all-from-out`, `and`, `any`, `augment`, `augment*`, `augment-final`, `augment-final*`, `augride`, `augride*`, `begin`, `begin-for-syntax`, `begin0`, `case`, `case->`, `case->m`, `case-lambda`, `class`, `class*`, `class-field-accessor`, `class-field-mutator`, `class/c`, `class/derived`, `combine-in`, `combine-out`, `command-line`, `compound-unit`, `compound-unit/infer`, `cond`, `cons/dc`, `contract`, `contract-out`, `contract-struct`, `contracted`, `define`, `define-compound-unit`, `define-compound-unit/infer`, `define-contract-struct`, `define-custom-hash-types`, `define-custom-set-types`, `define-for-syntax`, `define-local-member-name`, `define-logger`, `define-match-expander`, `define-member-name`, `define-module-boundary-contract`, `define-namespace-anchor`, `define-opt/c`, `define-sequence-syntax`, `define-serializable-class`, `define-serializable-class*`, `define-signature`, `define-signature-form`, `define-struct`, `define-struct/contract`, `define-struct/derived`, `define-syntax`, `define-syntax-rule`, `define-syntaxes`, `define-unit`, `define-unit-binding`, `define-unit-from-context`, `define-unit/contract`, `define-unit/new-import-export`, `define-unit/s`, `define-values`, `define-values-for-export`, `define-values-for-syntax`, `define-values/invoke-unit`, `define-values/invoke-unit/infer`, `define/augment`, `define/augment-final`, `define/augride`, `define/contract`, `define/final-prop`, `define/match`, `define/overment`, `define/override`, `define/override-final`, `define/private`, `define/public`, `define/public-final`, `define/pubment`, `define/subexpression-pos-prop`, `define/subexpression-pos-prop/name`, `delay`, `delay/idle`, `delay/name`, `delay/strict`, `delay/sync`, `delay/thread`, `do`, `else`, `except`, `except-in`, `except-out`, `export`, `extends`, `failure-cont`, `false`, `false/c`, `field`, `field-bound?`, `file`, `flat-murec-contract`, `flat-rec-contract`, `for`, `for*`, `for*/and`, `for*/async`, `for*/first`, `for*/fold`, `for*/fold/derived`, `for*/hash`, `for*/hasheq`, `for*/hasheqv`, `for*/last`, `for*/list`, `for*/lists`, `for*/mutable-set`, `for*/mutable-seteq`, `for*/mutable-seteqv`, `for*/or`, `for*/product`, `for*/set`, `for*/seteq`, `for*/seteqv`, `for*/stream`, `for*/sum`, `for*/vector`, `for*/weak-set`, `for*/weak-seteq`, `for*/weak-seteqv`, `for-label`, `for-meta`, `for-syntax`, `for-template`, `for/and`, `for/async`, `for/first`, `for/fold`, `for/fold/derived`, `for/hash`, `for/hasheq`, `for/hasheqv`, `for/last`, `for/list`, `for/lists`, `for/mutable-set`, `for/mutable-seteq`, `for/mutable-seteqv`, `for/or`, `for/product`, `for/set`, `for/seteq`, `for/seteqv`, `for/stream`, `for/sum`, `for/vector`, `for/weak-set`, `for/weak-seteq`, `for/weak-seteqv`, `gen:custom-write`, `gen:dict`, `gen:equal+hash`, `gen:set`, `gen:stream`, `generic`, `get-field`, `hash/dc`, `if`, `implies`, `import`, `include`, `include-at/relative-to`, `include-at/relative-to/reader`, `include/reader`, `inherit`, `inherit-field`, `inherit/inner`, `inherit/super`, `init`, `init-depend`, `init-field`, `init-rest`, `inner`, `inspect`, `instantiate`, `interface`, `interface*`, `invariant-assertion`, `invoke-unit`, `invoke-unit/infer`, `lambda`, `lazy`, `let`, `let*`, `let*-values`, 
`let-syntax`, `let-syntaxes`, `let-values`, `let/cc`, `let/ec`, `letrec`, `letrec-syntax`, `letrec-syntaxes`, `letrec-syntaxes+values`, `letrec-values`, `lib`, `link`, `local`, `local-require`, `log-debug`, `log-error`, `log-fatal`, `log-info`, `log-warning`, `match`, `match*`, `match*/derived`, `match-define`, `match-define-values`, `match-lambda`, `match-lambda*`, `match-lambda**`, `match-let`, `match-let*`, `match-let*-values`, `match-let-values`, `match-letrec`, `match-letrec-values`, `match/derived`, `match/values`, `member-name-key`, `mixin`, `module`, `module*`, `module+`, `nand`, `new`, `nor`, `object-contract`, `object/c`, `only`, `only-in`, `only-meta-in`, `open`, `opt/c`, `or`, `overment`, `overment*`, `override`, `override*`, `override-final`, `override-final*`, `parameterize`, `parameterize*`, `parameterize-break`, `parametric->/c`, `place`, `place*`, `place/context`, `planet`, `prefix`, `prefix-in`, `prefix-out`, `private`, `private*`, `prompt-tag/c`, `protect-out`, `provide`, `provide-signature-elements`, `provide/contract`, `public`, `public*`, `public-final`, `public-final*`, `pubment`, `pubment*`, `quasiquote`, `quasisyntax`, `quasisyntax/loc`, `quote`, `quote-syntax`, `quote-syntax/prune`, `recontract-out`, `recursive-contract`, `relative-in`, `rename`, `rename-in`, `rename-inner`, `rename-out`, `rename-super`, `require`, `send`, `send*`, `send+`, `send-generic`, `send/apply`, `send/keyword-apply`, `set!`, `set!-values`, `set-field!`, `shared`, `stream`, `stream*`, `stream-cons`, `struct`, `struct*`, `struct-copy`, `struct-field-index`, `struct-out`, `struct/c`, `struct/ctc`, `struct/dc`, `submod`, `super`, `super-instantiate`, `super-make-object`, `super-new`, `syntax`, `syntax-case`, `syntax-case*`, `syntax-id-rules`, `syntax-rules`, `syntax/loc`, `tag`, `this`, `this%`, `thunk`, `thunk*`, `time`, `unconstrained-domain->`, `unit`, `unit-from-context`, `unit/c`, `unit/new-import-export`, `unit/s`, `unless`, `unquote`, `unquote-splicing`, `unsyntax`, `unsyntax-splicing`, `values/drop`, `when`, `with-continuation-mark`, `with-contract`, `with-contract-continuation-mark`, `with-handlers`, `with-handlers*`, `with-method`, `with-syntax`, `λ`), Keyword, Pop(1)}, + {Words(``, "(?=[()[\\]{}\",\\'`;\\s])", `*`, `*list/c`, `+`, `-`, `/`, `<`, ``, `>/c`, `>=`, `>=/c`, `abort-current-continuation`, `abs`, `absolute-path?`, `acos`, `add-between`, `add1`, `alarm-evt`, `always-evt`, `and/c`, `andmap`, `angle`, `any/c`, `append`, `append*`, `append-map`, `apply`, `argmax`, `argmin`, `arithmetic-shift`, `arity-at-least`, `arity-at-least-value`, `arity-at-least?`, `arity-checking-wrapper`, `arity-includes?`, `arity=?`, `arrow-contract-info`, `arrow-contract-info-accepts-arglist`, `arrow-contract-info-chaperone-procedure`, `arrow-contract-info-check-first-order`, `arrow-contract-info?`, `asin`, `assf`, `assoc`, `assq`, `assv`, `atan`, `bad-number-of-results`, `banner`, `base->-doms/c`, `base->-rngs/c`, `base->?`, `between/c`, `bitwise-and`, `bitwise-bit-field`, `bitwise-bit-set?`, `bitwise-ior`, `bitwise-not`, `bitwise-xor`, `blame-add-car-context`, `blame-add-cdr-context`, `blame-add-context`, `blame-add-missing-party`, `blame-add-nth-arg-context`, `blame-add-range-context`, `blame-add-unknown-context`, `blame-context`, `blame-contract`, `blame-fmt->-string`, `blame-missing-party?`, `blame-negative`, `blame-original?`, `blame-positive`, `blame-replace-negative`, `blame-source`, `blame-swap`, `blame-swapped?`, `blame-update`, `blame-value`, `blame?`, `boolean=?`, `boolean?`, 
`bound-identifier=?`, `box`, `box-cas!`, `box-immutable`, `box-immutable/c`, `box/c`, `box?`, `break-enabled`, `break-parameterization?`, `break-thread`, `build-chaperone-contract-property`, `build-compound-type-name`, `build-contract-property`, `build-flat-contract-property`, `build-list`, `build-path`, `build-path/convention-type`, `build-string`, `build-vector`, `byte-pregexp`, `byte-pregexp?`, `byte-ready?`, `byte-regexp`, `byte-regexp?`, `byte?`, `bytes`, `bytes->immutable-bytes`, `bytes->list`, `bytes->path`, `bytes->path-element`, `bytes->string/latin-1`, `bytes->string/locale`, `bytes->string/utf-8`, `bytes-append`, `bytes-append*`, `bytes-close-converter`, `bytes-convert`, `bytes-convert-end`, `bytes-converter?`, `bytes-copy`, `bytes-copy!`, `bytes-environment-variable-name?`, `bytes-fill!`, `bytes-join`, `bytes-length`, `bytes-no-nuls?`, `bytes-open-converter`, `bytes-ref`, `bytes-set!`, `bytes-utf-8-index`, `bytes-utf-8-length`, `bytes-utf-8-ref`, `bytes?`, `bytes?`, `caaaar`, `caaadr`, `caaar`, `caadar`, `caaddr`, `caadr`, `caar`, `cadaar`, `cadadr`, `cadar`, `caddar`, `cadddr`, `caddr`, `cadr`, `call-in-nested-thread`, `call-with-atomic-output-file`, `call-with-break-parameterization`, `call-with-composable-continuation`, `call-with-continuation-barrier`, `call-with-continuation-prompt`, `call-with-current-continuation`, `call-with-default-reading-parameterization`, `call-with-escape-continuation`, `call-with-exception-handler`, `call-with-file-lock/timeout`, `call-with-immediate-continuation-mark`, `call-with-input-bytes`, `call-with-input-file`, `call-with-input-file*`, `call-with-input-string`, `call-with-output-bytes`, `call-with-output-file`, `call-with-output-file*`, `call-with-output-string`, `call-with-parameterization`, `call-with-semaphore`, `call-with-semaphore/enable-break`, `call-with-values`, `call/cc`, `call/ec`, `car`, `cartesian-product`, `cdaaar`, `cdaadr`, `cdaar`, `cdadar`, `cdaddr`, `cdadr`, `cdar`, `cddaar`, `cddadr`, `cddar`, `cdddar`, `cddddr`, `cdddr`, `cddr`, `cdr`, `ceiling`, `channel-get`, `channel-put`, `channel-put-evt`, `channel-put-evt?`, `channel-try-get`, `channel/c`, `channel?`, `chaperone-box`, `chaperone-channel`, `chaperone-continuation-mark-key`, `chaperone-contract-property?`, `chaperone-contract?`, `chaperone-evt`, `chaperone-hash`, `chaperone-hash-set`, `chaperone-of?`, `chaperone-procedure`, `chaperone-procedure*`, `chaperone-prompt-tag`, `chaperone-struct`, `chaperone-struct-type`, `chaperone-vector`, `chaperone?`, `char->integer`, `char-alphabetic?`, `char-blank?`, `char-ci<=?`, `char-ci=?`, `char-ci>?`, `char-downcase`, `char-foldcase`, `char-general-category`, `char-graphic?`, `char-in`, `char-in/c`, `char-iso-control?`, `char-lower-case?`, `char-numeric?`, `char-punctuation?`, `char-ready?`, `char-symbolic?`, `char-title-case?`, `char-titlecase`, `char-upcase`, `char-upper-case?`, `char-utf-8-length`, `char-whitespace?`, `char<=?`, `char=?`, `char>?`, `char?`, `check-duplicate-identifier`, `check-duplicates`, `checked-procedure-check-and-extract`, `choice-evt`, `class->interface`, `class-info`, `class-seal`, `class-unseal`, `class?`, `cleanse-path`, `close-input-port`, `close-output-port`, `coerce-chaperone-contract`, `coerce-chaperone-contracts`, `coerce-contract`, `coerce-contract/f`, `coerce-contracts`, `coerce-flat-contract`, `coerce-flat-contracts`, `collect-garbage`, `collection-file-path`, `collection-path`, `combinations`, `compile`, `compile-allow-set!-undefined`, `compile-context-preservation-enabled`, 
`compile-enforce-module-constants`, `compile-syntax`, `compiled-expression-recompile`, `compiled-expression?`, `compiled-module-expression?`, `complete-path?`, `complex?`, `compose`, `compose1`, `conjoin`, `conjugate`, `cons`, `cons/c`, `cons?`, `const`, `continuation-mark-key/c`, `continuation-mark-key?`, `continuation-mark-set->context`, `continuation-mark-set->list`, `continuation-mark-set->list*`, `continuation-mark-set-first`, `continuation-mark-set?`, `continuation-marks`, `continuation-prompt-available?`, `continuation-prompt-tag?`, `continuation?`, `contract-continuation-mark-key`, `contract-custom-write-property-proc`, `contract-exercise`, `contract-first-order`, `contract-first-order-passes?`, `contract-late-neg-projection`, `contract-name`, `contract-proc`, `contract-projection`, `contract-property?`, `contract-random-generate`, `contract-random-generate-fail`, `contract-random-generate-fail?`, `contract-random-generate-get-current-environment`, `contract-random-generate-stash`, `contract-random-generate/choose`, `contract-stronger?`, `contract-struct-exercise`, `contract-struct-generate`, `contract-struct-late-neg-projection`, `contract-struct-list-contract?`, `contract-val-first-projection`, `contract?`, `convert-stream`, `copy-directory/files`, `copy-file`, `copy-port`, `cos`, `cosh`, `count`, `current-blame-format`, `current-break-parameterization`, `current-code-inspector`, `current-command-line-arguments`, `current-compile`, `current-compiled-file-roots`, `current-continuation-marks`, `current-contract-region`, `current-custodian`, `current-directory`, `current-directory-for-user`, `current-drive`, `current-environment-variables`, `current-error-port`, `current-eval`, `current-evt-pseudo-random-generator`, `current-force-delete-permissions`, `current-future`, `current-gc-milliseconds`, `current-get-interaction-input-port`, `current-inexact-milliseconds`, `current-input-port`, `current-inspector`, `current-library-collection-links`, `current-library-collection-paths`, `current-load`, `current-load-extension`, `current-load-relative-directory`, `current-load/use-compiled`, `current-locale`, `current-logger`, `current-memory-use`, `current-milliseconds`, `current-module-declare-name`, `current-module-declare-source`, `current-module-name-resolver`, `current-module-path-for-load`, `current-namespace`, `current-output-port`, `current-parameterization`, `current-plumber`, `current-preserved-thread-cell-values`, `current-print`, `current-process-milliseconds`, `current-prompt-read`, `current-pseudo-random-generator`, `current-read-interaction`, `current-reader-guard`, `current-readtable`, `current-seconds`, `current-security-guard`, `current-subprocess-custodian-mode`, `current-thread`, `current-thread-group`, `current-thread-initial-stack-size`, `current-write-relative-directory`, `curry`, `curryr`, `custodian-box-value`, `custodian-box?`, `custodian-limit-memory`, `custodian-managed-list`, `custodian-memory-accounting-available?`, `custodian-require-memory`, `custodian-shutdown-all`, `custodian?`, `custom-print-quotable-accessor`, `custom-print-quotable?`, `custom-write-accessor`, `custom-write-property-proc`, `custom-write?`, `date`, `date*`, `date*-nanosecond`, `date*-time-zone-name`, `date*?`, `date-day`, `date-dst?`, `date-hour`, `date-minute`, `date-month`, `date-second`, `date-time-zone-offset`, `date-week-day`, `date-year`, `date-year-day`, `date?`, `datum->syntax`, `datum-intern-literal`, `default-continuation-prompt-tag`, `degrees->radians`, `delete-directory`, 
`delete-directory/files`, `delete-file`, `denominator`, `dict->list`, `dict-can-functional-set?`, `dict-can-remove-keys?`, `dict-clear`, `dict-clear!`, `dict-copy`, `dict-count`, `dict-empty?`, `dict-for-each`, `dict-has-key?`, `dict-implements/c`, `dict-implements?`, `dict-iter-contract`, `dict-iterate-first`, `dict-iterate-key`, `dict-iterate-next`, `dict-iterate-value`, `dict-key-contract`, `dict-keys`, `dict-map`, `dict-mutable?`, `dict-ref`, `dict-ref!`, `dict-remove`, `dict-remove!`, `dict-set`, `dict-set!`, `dict-set*`, `dict-set*!`, `dict-update`, `dict-update!`, `dict-value-contract`, `dict-values`, `dict?`, `directory-exists?`, `directory-list`, `disjoin`, `display`, `display-lines`, `display-lines-to-file`, `display-to-file`, `displayln`, `double-flonum?`, `drop`, `drop-common-prefix`, `drop-right`, `dropf`, `dropf-right`, `dump-memory-stats`, `dup-input-port`, `dup-output-port`, `dynamic->*`, `dynamic-get-field`, `dynamic-object/c`, `dynamic-place`, `dynamic-place*`, `dynamic-require`, `dynamic-require-for-syntax`, `dynamic-send`, `dynamic-set-field!`, `dynamic-wind`, `eighth`, `empty`, `empty-sequence`, `empty-stream`, `empty?`, `environment-variables-copy`, `environment-variables-names`, `environment-variables-ref`, `environment-variables-set!`, `environment-variables?`, `eof`, `eof-evt`, `eof-object?`, `ephemeron-value`, `ephemeron?`, `eprintf`, `eq-contract-val`, `eq-contract?`, `eq-hash-code`, `eq?`, `equal-contract-val`, `equal-contract?`, `equal-hash-code`, `equal-secondary-hash-code`, `equal<%>`, `equal?`, `equal?/recur`, `eqv-hash-code`, `eqv?`, `error`, `error-display-handler`, `error-escape-handler`, `error-print-context-length`, `error-print-source-location`, `error-print-width`, `error-value->string-handler`, `eval`, `eval-jit-enabled`, `eval-syntax`, `even?`, `evt/c`, `evt?`, `exact->inexact`, `exact-ceiling`, `exact-floor`, `exact-integer?`, `exact-nonnegative-integer?`, `exact-positive-integer?`, `exact-round`, `exact-truncate`, `exact?`, `executable-yield-handler`, `exit`, `exit-handler`, `exn`, `exn-continuation-marks`, `exn-message`, `exn:break`, `exn:break-continuation`, `exn:break:hang-up`, `exn:break:hang-up?`, `exn:break:terminate`, `exn:break:terminate?`, `exn:break?`, `exn:fail`, `exn:fail:contract`, `exn:fail:contract:arity`, `exn:fail:contract:arity?`, `exn:fail:contract:blame`, `exn:fail:contract:blame-object`, `exn:fail:contract:blame?`, `exn:fail:contract:continuation`, `exn:fail:contract:continuation?`, `exn:fail:contract:divide-by-zero`, `exn:fail:contract:divide-by-zero?`, `exn:fail:contract:non-fixnum-result`, `exn:fail:contract:non-fixnum-result?`, `exn:fail:contract:variable`, `exn:fail:contract:variable-id`, `exn:fail:contract:variable?`, `exn:fail:contract?`, `exn:fail:filesystem`, `exn:fail:filesystem:errno`, `exn:fail:filesystem:errno-errno`, `exn:fail:filesystem:errno?`, `exn:fail:filesystem:exists`, `exn:fail:filesystem:exists?`, `exn:fail:filesystem:missing-module`, `exn:fail:filesystem:missing-module-path`, `exn:fail:filesystem:missing-module?`, `exn:fail:filesystem:version`, `exn:fail:filesystem:version?`, `exn:fail:filesystem?`, `exn:fail:network`, `exn:fail:network:errno`, `exn:fail:network:errno-errno`, `exn:fail:network:errno?`, `exn:fail:network?`, `exn:fail:object`, `exn:fail:object?`, `exn:fail:out-of-memory`, `exn:fail:out-of-memory?`, `exn:fail:read`, `exn:fail:read-srclocs`, `exn:fail:read:eof`, `exn:fail:read:eof?`, `exn:fail:read:non-char`, `exn:fail:read:non-char?`, `exn:fail:read?`, `exn:fail:syntax`, 
`exn:fail:syntax-exprs`, `exn:fail:syntax:missing-module`, `exn:fail:syntax:missing-module-path`, `exn:fail:syntax:missing-module?`, `exn:fail:syntax:unbound`, `exn:fail:syntax:unbound?`, `exn:fail:syntax?`, `exn:fail:unsupported`, `exn:fail:unsupported?`, `exn:fail:user`, `exn:fail:user?`, `exn:fail?`, `exn:misc:match?`, `exn:missing-module-accessor`, `exn:missing-module?`, `exn:srclocs-accessor`, `exn:srclocs?`, `exn?`, `exp`, `expand`, `expand-once`, `expand-syntax`, `expand-syntax-once`, `expand-syntax-to-top-form`, `expand-to-top-form`, `expand-user-path`, `explode-path`, `expt`, `externalizable<%>`, `failure-result/c`, `false?`, `field-names`, `fifth`, `file->bytes`, `file->bytes-lines`, `file->lines`, `file->list`, `file->string`, `file->value`, `file-exists?`, `file-name-from-path`, `file-or-directory-identity`, `file-or-directory-modify-seconds`, `file-or-directory-permissions`, `file-position`, `file-position*`, `file-size`, `file-stream-buffer-mode`, `file-stream-port?`, `file-truncate`, `filename-extension`, `filesystem-change-evt`, `filesystem-change-evt-cancel`, `filesystem-change-evt?`, `filesystem-root-list`, `filter`, `filter-map`, `filter-not`, `filter-read-input-port`, `find-executable-path`, `find-files`, `find-library-collection-links`, `find-library-collection-paths`, `find-relative-path`, `find-system-path`, `findf`, `first`, `first-or/c`, `fixnum?`, `flat-contract`, `flat-contract-predicate`, `flat-contract-property?`, `flat-contract?`, `flat-named-contract`, `flatten`, `floating-point-bytes->real`, `flonum?`, `floor`, `flush-output`, `fold-files`, `foldl`, `foldr`, `for-each`, `force`, `format`, `fourth`, `fprintf`, `free-identifier=?`, `free-label-identifier=?`, `free-template-identifier=?`, `free-transformer-identifier=?`, `fsemaphore-count`, `fsemaphore-post`, `fsemaphore-try-wait?`, `fsemaphore-wait`, `fsemaphore?`, `future`, `future?`, `futures-enabled?`, `gcd`, `generate-member-key`, `generate-temporaries`, `generic-set?`, `generic?`, `gensym`, `get-output-bytes`, `get-output-string`, `get-preference`, `get/build-late-neg-projection`, `get/build-val-first-projection`, `getenv`, `global-port-print-handler`, `group-by`, `group-execute-bit`, `group-read-bit`, `group-write-bit`, `guard-evt`, `handle-evt`, `handle-evt?`, `has-blame?`, `has-contract?`, `hash`, `hash->list`, `hash-clear`, `hash-clear!`, `hash-copy`, `hash-copy-clear`, `hash-count`, `hash-empty?`, `hash-eq?`, `hash-equal?`, `hash-eqv?`, `hash-for-each`, `hash-has-key?`, `hash-iterate-first`, `hash-iterate-key`, `hash-iterate-key+value`, `hash-iterate-next`, `hash-iterate-pair`, `hash-iterate-value`, `hash-keys`, `hash-map`, `hash-placeholder?`, `hash-ref`, `hash-ref!`, `hash-remove`, `hash-remove!`, `hash-set`, `hash-set!`, `hash-set*`, `hash-set*!`, `hash-update`, `hash-update!`, `hash-values`, `hash-weak?`, `hash/c`, `hash?`, `hasheq`, `hasheqv`, `identifier-binding`, `identifier-binding-symbol`, `identifier-label-binding`, `identifier-prune-lexical-context`, `identifier-prune-to-source-module`, `identifier-remove-from-definition-context`, `identifier-template-binding`, `identifier-transformer-binding`, `identifier?`, `identity`, `if/c`, `imag-part`, `immutable?`, `impersonate-box`, `impersonate-channel`, `impersonate-continuation-mark-key`, `impersonate-hash`, `impersonate-hash-set`, `impersonate-procedure`, `impersonate-procedure*`, `impersonate-prompt-tag`, `impersonate-struct`, `impersonate-vector`, `impersonator-contract?`, `impersonator-ephemeron`, `impersonator-of?`, 
`impersonator-prop:application-mark`, `impersonator-prop:blame`, `impersonator-prop:contracted`, `impersonator-property-accessor-procedure?`, `impersonator-property?`, `impersonator?`, `implementation?`, `implementation?/c`, `in-bytes`, `in-bytes-lines`, `in-combinations`, `in-cycle`, `in-dict`, `in-dict-keys`, `in-dict-pairs`, `in-dict-values`, `in-directory`, `in-hash`, `in-hash-keys`, `in-hash-pairs`, `in-hash-values`, `in-immutable-hash`, `in-immutable-hash-keys`, `in-immutable-hash-pairs`, `in-immutable-hash-values`, `in-immutable-set`, `in-indexed`, `in-input-port-bytes`, `in-input-port-chars`, `in-lines`, `in-list`, `in-mlist`, `in-mutable-hash`, `in-mutable-hash-keys`, `in-mutable-hash-pairs`, `in-mutable-hash-values`, `in-mutable-set`, `in-naturals`, `in-parallel`, `in-permutations`, `in-port`, `in-producer`, `in-range`, `in-sequences`, `in-set`, `in-slice`, `in-stream`, `in-string`, `in-syntax`, `in-value`, `in-values*-sequence`, `in-values-sequence`, `in-vector`, `in-weak-hash`, `in-weak-hash-keys`, `in-weak-hash-pairs`, `in-weak-hash-values`, `in-weak-set`, `inexact->exact`, `inexact-real?`, `inexact?`, `infinite?`, `input-port-append`, `input-port?`, `inspector?`, `instanceof/c`, `integer->char`, `integer->integer-bytes`, `integer-bytes->integer`, `integer-in`, `integer-length`, `integer-sqrt`, `integer-sqrt/remainder`, `integer?`, `interface->method-names`, `interface-extension?`, `interface?`, `internal-definition-context-binding-identifiers`, `internal-definition-context-introduce`, `internal-definition-context-seal`, `internal-definition-context?`, `is-a?`, `is-a?/c`, `keyword->string`, `keyword-apply`, `keywordbytes`, `list->mutable-set`, `list->mutable-seteq`, `list->mutable-seteqv`, `list->set`, `list->seteq`, `list->seteqv`, `list->string`, `list->vector`, `list->weak-set`, `list->weak-seteq`, `list->weak-seteqv`, `list-contract?`, `list-prefix?`, `list-ref`, `list-set`, `list-tail`, `list-update`, `list/c`, `list?`, `listen-port-number?`, `listof`, `load`, `load-extension`, `load-on-demand-enabled`, `load-relative`, `load-relative-extension`, `load/cd`, `load/use-compiled`, `local-expand`, `local-expand/capture-lifts`, `local-transformer-expand`, `local-transformer-expand/capture-lifts`, `locale-string-encoding`, `log`, `log-all-levels`, `log-level-evt`, `log-level?`, `log-max-level`, `log-message`, `log-receiver?`, `logger-name`, `logger?`, `magnitude`, `make-arity-at-least`, `make-base-empty-namespace`, `make-base-namespace`, `make-bytes`, `make-channel`, `make-chaperone-contract`, `make-continuation-mark-key`, `make-continuation-prompt-tag`, `make-contract`, `make-custodian`, `make-custodian-box`, `make-custom-hash`, `make-custom-hash-types`, `make-custom-set`, `make-custom-set-types`, `make-date`, `make-date*`, `make-derived-parameter`, `make-directory`, `make-directory*`, `make-do-sequence`, `make-empty-namespace`, `make-environment-variables`, `make-ephemeron`, `make-exn`, `make-exn:break`, `make-exn:break:hang-up`, `make-exn:break:terminate`, `make-exn:fail`, `make-exn:fail:contract`, `make-exn:fail:contract:arity`, `make-exn:fail:contract:blame`, `make-exn:fail:contract:continuation`, `make-exn:fail:contract:divide-by-zero`, `make-exn:fail:contract:non-fixnum-result`, `make-exn:fail:contract:variable`, `make-exn:fail:filesystem`, `make-exn:fail:filesystem:errno`, `make-exn:fail:filesystem:exists`, `make-exn:fail:filesystem:missing-module`, `make-exn:fail:filesystem:version`, `make-exn:fail:network`, `make-exn:fail:network:errno`, `make-exn:fail:object`, 
`make-exn:fail:out-of-memory`, `make-exn:fail:read`, `make-exn:fail:read:eof`, `make-exn:fail:read:non-char`, `make-exn:fail:syntax`, `make-exn:fail:syntax:missing-module`, `make-exn:fail:syntax:unbound`, `make-exn:fail:unsupported`, `make-exn:fail:user`, `make-file-or-directory-link`, `make-flat-contract`, `make-fsemaphore`, `make-generic`, `make-handle-get-preference-locked`, `make-hash`, `make-hash-placeholder`, `make-hasheq`, `make-hasheq-placeholder`, `make-hasheqv`, `make-hasheqv-placeholder`, `make-immutable-custom-hash`, `make-immutable-hash`, `make-immutable-hasheq`, `make-immutable-hasheqv`, `make-impersonator-property`, `make-input-port`, `make-input-port/read-to-peek`, `make-inspector`, `make-keyword-procedure`, `make-known-char-range-list`, `make-limited-input-port`, `make-list`, `make-lock-file-name`, `make-log-receiver`, `make-logger`, `make-mixin-contract`, `make-mutable-custom-set`, `make-none/c`, `make-object`, `make-output-port`, `make-parameter`, `make-parent-directory*`, `make-phantom-bytes`, `make-pipe`, `make-pipe-with-specials`, `make-placeholder`, `make-plumber`, `make-polar`, `make-prefab-struct`, `make-primitive-class`, `make-proj-contract`, `make-pseudo-random-generator`, `make-reader-graph`, `make-readtable`, `make-rectangular`, `make-rename-transformer`, `make-resolved-module-path`, `make-security-guard`, `make-semaphore`, `make-set!-transformer`, `make-shared-bytes`, `make-sibling-inspector`, `make-special-comment`, `make-srcloc`, `make-string`, `make-struct-field-accessor`, `make-struct-field-mutator`, `make-struct-type`, `make-struct-type-property`, `make-syntax-delta-introducer`, `make-syntax-introducer`, `make-temporary-file`, `make-tentative-pretty-print-output-port`, `make-thread-cell`, `make-thread-group`, `make-vector`, `make-weak-box`, `make-weak-custom-hash`, `make-weak-custom-set`, `make-weak-hash`, `make-weak-hasheq`, `make-weak-hasheqv`, `make-will-executor`, `map`, `match-equality-test`, `matches-arity-exactly?`, `max`, `mcar`, `mcdr`, `mcons`, `member`, `member-name-key-hash-code`, `member-name-key=?`, `member-name-key?`, `memf`, `memq`, `memv`, `merge-input`, `method-in-interface?`, `min`, `mixin-contract`, `module->exports`, `module->imports`, `module->language-info`, `module->namespace`, `module-compiled-cross-phase-persistent?`, `module-compiled-exports`, `module-compiled-imports`, `module-compiled-language-info`, `module-compiled-name`, `module-compiled-submodules`, `module-declared?`, `module-path-index-join`, `module-path-index-resolve`, `module-path-index-split`, `module-path-index-submodule`, `module-path-index?`, `module-path?`, `module-predefined?`, `module-provide-protected?`, `modulo`, `mpair?`, `mutable-set`, `mutable-seteq`, `mutable-seteqv`, `n->th`, `nack-guard-evt`, `namespace-anchor->empty-namespace`, `namespace-anchor->namespace`, `namespace-anchor?`, `namespace-attach-module`, `namespace-attach-module-declaration`, `namespace-base-phase`, `namespace-mapped-symbols`, `namespace-module-identifier`, `namespace-module-registry`, `namespace-require`, `namespace-require/constant`, `namespace-require/copy`, `namespace-require/expansion-time`, `namespace-set-variable-value!`, `namespace-symbol->identifier`, `namespace-syntax-introduce`, `namespace-undefine-variable!`, `namespace-unprotect-module`, `namespace-variable-value`, `namespace?`, `nan?`, `natural-number/c`, `negate`, `negative?`, `never-evt`, `new-∀/c`, `new-∃/c`, `newline`, `ninth`, `non-empty-listof`, `non-empty-string?`, `none/c`, `normal-case-path`, `normalize-arity`, 
`normalize-path`, `normalized-arity?`, `not`, `not/c`, `null`, `null?`, `number->string`, `number?`, `numerator`, `object%`, `object->vector`, `object-info`, `object-interface`, `object-method-arity-includes?`, `object-name`, `object-or-false=?`, `object=?`, `object?`, `odd?`, `one-of/c`, `open-input-bytes`, `open-input-file`, `open-input-output-file`, `open-input-string`, `open-output-bytes`, `open-output-file`, `open-output-nowhere`, `open-output-string`, `or/c`, `order-of-magnitude`, `ormap`, `other-execute-bit`, `other-read-bit`, `other-write-bit`, `output-port?`, `pair?`, `parameter-procedure=?`, `parameter/c`, `parameter?`, `parameterization?`, `parse-command-line`, `partition`, `path->bytes`, `path->complete-path`, `path->directory-path`, `path->string`, `path-add-suffix`, `path-convention-type`, `path-element->bytes`, `path-element->string`, `path-element?`, `path-for-some-system?`, `path-list-string->path-list`, `path-only`, `path-replace-suffix`, `path-string?`, `pathbytes`, `port->bytes-lines`, `port->lines`, `port->list`, `port->string`, `port-closed-evt`, `port-closed?`, `port-commit-peeked`, `port-count-lines!`, `port-count-lines-enabled`, `port-counts-lines?`, `port-display-handler`, `port-file-identity`, `port-file-unlock`, `port-next-location`, `port-number?`, `port-print-handler`, `port-progress-evt`, `port-provides-progress-evts?`, `port-read-handler`, `port-try-file-lock?`, `port-write-handler`, `port-writes-atomic?`, `port-writes-special?`, `port?`, `positive?`, `predicate/c`, `prefab-key->struct-type`, `prefab-key?`, `prefab-struct-key`, `preferences-lock-file-mode`, `pregexp`, `pregexp?`, `pretty-display`, `pretty-format`, `pretty-print`, `pretty-print-.-symbol-without-bars`, `pretty-print-abbreviate-read-macros`, `pretty-print-columns`, `pretty-print-current-style-table`, `pretty-print-depth`, `pretty-print-exact-as-decimal`, `pretty-print-extend-style-table`, `pretty-print-handler`, `pretty-print-newline`, `pretty-print-post-print-hook`, `pretty-print-pre-print-hook`, `pretty-print-print-hook`, `pretty-print-print-line`, `pretty-print-remap-stylable`, `pretty-print-show-inexactness`, `pretty-print-size-hook`, `pretty-print-style-table?`, `pretty-printing`, `pretty-write`, `primitive-closure?`, `primitive-result-arity`, `primitive?`, `print`, `print-as-expression`, `print-boolean-long-form`, `print-box`, `print-graph`, `print-hash-table`, `print-mpair-curly-braces`, `print-pair-curly-braces`, `print-reader-abbreviations`, `print-struct`, `print-syntax-width`, `print-unreadable`, `print-vector-length`, `printable/c`, `printable<%>`, `printf`, `println`, `procedure->method`, `procedure-arity`, `procedure-arity-includes/c`, `procedure-arity-includes?`, `procedure-arity?`, `procedure-closure-contents-eq?`, `procedure-extract-target`, `procedure-keywords`, `procedure-reduce-arity`, `procedure-reduce-keyword-arity`, `procedure-rename`, `procedure-result-arity`, `procedure-specialize`, `procedure-struct-type?`, `procedure?`, `process`, `process*`, `process*/ports`, `process/ports`, `processor-count`, `progress-evt?`, `promise-forced?`, `promise-running?`, `promise/c`, `promise/name?`, `promise?`, `prop:arity-string`, `prop:arrow-contract`, `prop:arrow-contract-get-info`, `prop:arrow-contract?`, `prop:blame`, `prop:chaperone-contract`, `prop:checked-procedure`, `prop:contract`, `prop:contracted`, `prop:custom-print-quotable`, `prop:custom-write`, `prop:dict`, `prop:dict/contract`, `prop:equal+hash`, `prop:evt`, `prop:exn:missing-module`, `prop:exn:srclocs`, 
`prop:expansion-contexts`, `prop:flat-contract`, `prop:impersonator-of`, `prop:input-port`, `prop:liberal-define-context`, `prop:object-name`, `prop:opt-chaperone-contract`, `prop:opt-chaperone-contract-get-test`, `prop:opt-chaperone-contract?`, `prop:orc-contract`, `prop:orc-contract-get-subcontracts`, `prop:orc-contract?`, `prop:output-port`, `prop:place-location`, `prop:procedure`, `prop:recursive-contract`, `prop:recursive-contract-unroll`, `prop:recursive-contract?`, `prop:rename-transformer`, `prop:sequence`, `prop:set!-transformer`, `prop:stream`, `proper-subset?`, `pseudo-random-generator->vector`, `pseudo-random-generator-vector?`, `pseudo-random-generator?`, `put-preferences`, `putenv`, `quotient`, `quotient/remainder`, `radians->degrees`, `raise`, `raise-argument-error`, `raise-arguments-error`, `raise-arity-error`, `raise-blame-error`, `raise-contract-error`, `raise-mismatch-error`, `raise-not-cons-blame-error`, `raise-range-error`, `raise-result-error`, `raise-syntax-error`, `raise-type-error`, `raise-user-error`, `random`, `random-seed`, `range`, `rational?`, `rationalize`, `read`, `read-accept-bar-quote`, `read-accept-box`, `read-accept-compiled`, `read-accept-dot`, `read-accept-graph`, `read-accept-infix-dot`, `read-accept-lang`, `read-accept-quasiquote`, `read-accept-reader`, `read-byte`, `read-byte-or-special`, `read-bytes`, `read-bytes!`, `read-bytes!-evt`, `read-bytes-avail!`, `read-bytes-avail!*`, `read-bytes-avail!-evt`, `read-bytes-avail!/enable-break`, `read-bytes-evt`, `read-bytes-line`, `read-bytes-line-evt`, `read-case-sensitive`, `read-cdot`, `read-char`, `read-char-or-special`, `read-curly-brace-as-paren`, `read-curly-brace-with-tag`, `read-decimal-as-inexact`, `read-eval-print-loop`, `read-language`, `read-line`, `read-line-evt`, `read-on-demand-source`, `read-square-bracket-as-paren`, `read-square-bracket-with-tag`, `read-string`, `read-string!`, `read-string!-evt`, `read-string-evt`, `read-syntax`, `read-syntax/recursive`, `read/recursive`, `readtable-mapping`, `readtable?`, `real->decimal-string`, `real->double-flonum`, `real->floating-point-bytes`, `real->single-flonum`, `real-in`, `real-part`, `real?`, `reencode-input-port`, `reencode-output-port`, `regexp`, `regexp-match`, `regexp-match*`, `regexp-match-evt`, `regexp-match-exact?`, `regexp-match-peek`, `regexp-match-peek-immediate`, `regexp-match-peek-positions`, `regexp-match-peek-positions*`, `regexp-match-peek-positions-immediate`, `regexp-match-peek-positions-immediate/end`, `regexp-match-peek-positions/end`, `regexp-match-positions`, `regexp-match-positions*`, `regexp-match-positions/end`, `regexp-match/end`, `regexp-match?`, `regexp-max-lookbehind`, `regexp-quote`, `regexp-replace`, `regexp-replace*`, `regexp-replace-quote`, `regexp-replaces`, `regexp-split`, `regexp-try-match`, `regexp?`, `relative-path?`, `relocate-input-port`, `relocate-output-port`, `remainder`, `remf`, `remf*`, `remove`, `remove*`, `remove-duplicates`, `remq`, `remq*`, `remv`, `remv*`, `rename-contract`, `rename-file-or-directory`, `rename-transformer-target`, `rename-transformer?`, `replace-evt`, `reroot-path`, `resolve-path`, `resolved-module-path-name`, `resolved-module-path?`, `rest`, `reverse`, `round`, `second`, `seconds->date`, `security-guard?`, `semaphore-peek-evt`, `semaphore-peek-evt?`, `semaphore-post`, `semaphore-try-wait?`, `semaphore-wait`, `semaphore-wait/enable-break`, `semaphore?`, `sequence->list`, `sequence->stream`, `sequence-add-between`, `sequence-andmap`, `sequence-append`, `sequence-count`, 
`sequence-filter`, `sequence-fold`, `sequence-for-each`, `sequence-generate`, `sequence-generate*`, `sequence-length`, `sequence-map`, `sequence-ormap`, `sequence-ref`, `sequence-tail`, `sequence/c`, `sequence?`, `set`, `set!-transformer-procedure`, `set!-transformer?`, `set->list`, `set->stream`, `set-add`, `set-add!`, `set-box!`, `set-clear`, `set-clear!`, `set-copy`, `set-copy-clear`, `set-count`, `set-empty?`, `set-eq?`, `set-equal?`, `set-eqv?`, `set-first`, `set-for-each`, `set-implements/c`, `set-implements?`, `set-intersect`, `set-intersect!`, `set-map`, `set-mcar!`, `set-mcdr!`, `set-member?`, `set-mutable?`, `set-phantom-bytes!`, `set-port-next-location!`, `set-remove`, `set-remove!`, `set-rest`, `set-some-basic-contracts!`, `set-subtract`, `set-subtract!`, `set-symmetric-difference`, `set-symmetric-difference!`, `set-union`, `set-union!`, `set-weak?`, `set/c`, `set=?`, `set?`, `seteq`, `seteqv`, `seventh`, `sgn`, `shared-bytes`, `shell-execute`, `shrink-path-wrt`, `shuffle`, `simple-form-path`, `simplify-path`, `sin`, `single-flonum?`, `sinh`, `sixth`, `skip-projection-wrapper?`, `sleep`, `some-system-path->string`, `sort`, `special-comment-value`, `special-comment?`, `special-filter-input-port`, `split-at`, `split-at-right`, `split-common-prefix`, `split-path`, `splitf-at`, `splitf-at-right`, `sqr`, `sqrt`, `srcloc`, `srcloc->string`, `srcloc-column`, `srcloc-line`, `srcloc-position`, `srcloc-source`, `srcloc-span`, `srcloc?`, `stop-after`, `stop-before`, `stream->list`, `stream-add-between`, `stream-andmap`, `stream-append`, `stream-count`, `stream-empty?`, `stream-filter`, `stream-first`, `stream-fold`, `stream-for-each`, `stream-length`, `stream-map`, `stream-ormap`, `stream-ref`, `stream-rest`, `stream-tail`, `stream/c`, `stream?`, `string`, `string->bytes/latin-1`, `string->bytes/locale`, `string->bytes/utf-8`, `string->immutable-string`, `string->keyword`, `string->list`, `string->number`, `string->path`, `string->path-element`, `string->some-system-path`, `string->symbol`, `string->uninterned-symbol`, `string->unreadable-symbol`, `string-append`, `string-append*`, `string-ci<=?`, `string-ci=?`, `string-ci>?`, `string-contains?`, `string-copy`, `string-copy!`, `string-downcase`, `string-environment-variable-name?`, `string-fill!`, `string-foldcase`, `string-join`, `string-len/c`, `string-length`, `string-locale-ci?`, `string-locale-downcase`, `string-locale-upcase`, `string-locale?`, `string-no-nuls?`, `string-normalize-nfc`, `string-normalize-nfd`, `string-normalize-nfkc`, `string-normalize-nfkd`, `string-normalize-spaces`, `string-port?`, `string-prefix?`, `string-ref`, `string-replace`, `string-set!`, `string-split`, `string-suffix?`, `string-titlecase`, `string-trim`, `string-upcase`, `string-utf-8-length`, `string<=?`, `string=?`, `string>?`, `string?`, `struct->vector`, `struct-accessor-procedure?`, `struct-constructor-procedure?`, `struct-info`, `struct-mutator-procedure?`, `struct-predicate-procedure?`, `struct-type-info`, `struct-type-make-constructor`, `struct-type-make-predicate`, `struct-type-property-accessor-procedure?`, `struct-type-property/c`, `struct-type-property?`, `struct-type?`, `struct:arity-at-least`, `struct:arrow-contract-info`, `struct:date`, `struct:date*`, `struct:exn`, `struct:exn:break`, `struct:exn:break:hang-up`, `struct:exn:break:terminate`, `struct:exn:fail`, `struct:exn:fail:contract`, `struct:exn:fail:contract:arity`, `struct:exn:fail:contract:blame`, `struct:exn:fail:contract:continuation`, `struct:exn:fail:contract:divide-by-zero`, 
`struct:exn:fail:contract:non-fixnum-result`, `struct:exn:fail:contract:variable`, `struct:exn:fail:filesystem`, `struct:exn:fail:filesystem:errno`, `struct:exn:fail:filesystem:exists`, `struct:exn:fail:filesystem:missing-module`, `struct:exn:fail:filesystem:version`, `struct:exn:fail:network`, `struct:exn:fail:network:errno`, `struct:exn:fail:object`, `struct:exn:fail:out-of-memory`, `struct:exn:fail:read`, `struct:exn:fail:read:eof`, `struct:exn:fail:read:non-char`, `struct:exn:fail:syntax`, `struct:exn:fail:syntax:missing-module`, `struct:exn:fail:syntax:unbound`, `struct:exn:fail:unsupported`, `struct:exn:fail:user`, `struct:srcloc`, `struct:wrapped-extra-arg-arrow`, `struct?`, `sub1`, `subbytes`, `subclass?`, `subclass?/c`, `subprocess`, `subprocess-group-enabled`, `subprocess-kill`, `subprocess-pid`, `subprocess-status`, `subprocess-wait`, `subprocess?`, `subset?`, `substring`, `suggest/c`, `symbol->string`, `symbol-interned?`, `symbol-unreadable?`, `symboldatum`, `syntax->list`, `syntax-arm`, `syntax-column`, `syntax-debug-info`, `syntax-disarm`, `syntax-e`, `syntax-line`, `syntax-local-bind-syntaxes`, `syntax-local-certifier`, `syntax-local-context`, `syntax-local-expand-expression`, `syntax-local-get-shadower`, `syntax-local-identifier-as-binding`, `syntax-local-introduce`, `syntax-local-lift-context`, `syntax-local-lift-expression`, `syntax-local-lift-module`, `syntax-local-lift-module-end-declaration`, `syntax-local-lift-provide`, `syntax-local-lift-require`, `syntax-local-lift-values-expression`, `syntax-local-make-definition-context`, `syntax-local-make-delta-introducer`, `syntax-local-module-defined-identifiers`, `syntax-local-module-exports`, `syntax-local-module-required-identifiers`, `syntax-local-name`, `syntax-local-phase-level`, `syntax-local-submodules`, `syntax-local-transforming-module-provides?`, `syntax-local-value`, `syntax-local-value/immediate`, `syntax-original?`, `syntax-position`, `syntax-property`, `syntax-property-preserved?`, `syntax-property-symbol-keys`, `syntax-protect`, `syntax-rearm`, `syntax-recertify`, `syntax-shift-phase-level`, `syntax-source`, `syntax-source-module`, `syntax-span`, `syntax-taint`, `syntax-tainted?`, `syntax-track-origin`, `syntax-transforming-module-expression?`, `syntax-transforming-with-lifts?`, `syntax-transforming?`, `syntax/c`, `syntax?`, `system`, `system*`, `system*/exit-code`, `system-big-endian?`, `system-idle-evt`, `system-language+country`, `system-library-subpath`, `system-path-convention-type`, `system-type`, `system/exit-code`, `tail-marks-match?`, `take`, `take-common-prefix`, `take-right`, `takef`, `takef-right`, `tan`, `tanh`, `tcp-abandon-port`, `tcp-accept`, `tcp-accept-evt`, `tcp-accept-ready?`, `tcp-accept/enable-break`, `tcp-addresses`, `tcp-close`, `tcp-connect`, `tcp-connect/enable-break`, `tcp-listen`, `tcp-listener?`, `tcp-port?`, `tentative-pretty-print-port-cancel`, `tentative-pretty-print-port-transfer`, `tenth`, `terminal-port?`, `the-unsupplied-arg`, `third`, `thread`, `thread-cell-ref`, `thread-cell-set!`, `thread-cell-values?`, `thread-cell?`, `thread-dead-evt`, `thread-dead?`, `thread-group?`, `thread-receive`, `thread-receive-evt`, `thread-resume`, `thread-resume-evt`, `thread-rewind-receive`, `thread-running?`, `thread-send`, `thread-suspend`, `thread-suspend-evt`, `thread-try-receive`, `thread-wait`, `thread/suspend-to-kill`, `thread?`, `time-apply`, `touch`, `transplant-input-port`, `transplant-output-port`, `true`, `truncate`, `udp-addresses`, `udp-bind!`, `udp-bound?`, `udp-close`, 
`udp-connect!`, `udp-connected?`, `udp-multicast-interface`, `udp-multicast-join-group!`, `udp-multicast-leave-group!`, `udp-multicast-loopback?`, `udp-multicast-set-interface!`, `udp-multicast-set-loopback!`, `udp-multicast-set-ttl!`, `udp-multicast-ttl`, `udp-open-socket`, `udp-receive!`, `udp-receive!*`, `udp-receive!-evt`, `udp-receive!/enable-break`, `udp-receive-ready-evt`, `udp-send`, `udp-send*`, `udp-send-evt`, `udp-send-ready-evt`, `udp-send-to`, `udp-send-to*`, `udp-send-to-evt`, `udp-send-to/enable-break`, `udp-send/enable-break`, `udp?`, `unbox`, `uncaught-exception-handler`, `unit?`, `unspecified-dom`, `unsupplied-arg?`, `use-collection-link-paths`, `use-compiled-file-paths`, `use-user-specific-search-paths`, `user-execute-bit`, `user-read-bit`, `user-write-bit`, `value-blame`, `value-contract`, `values`, `variable-reference->empty-namespace`, `variable-reference->module-base-phase`, `variable-reference->module-declaration-inspector`, `variable-reference->module-path-index`, `variable-reference->module-source`, `variable-reference->namespace`, `variable-reference->phase`, `variable-reference->resolved-module-path`, `variable-reference-constant?`, `variable-reference?`, `vector`, `vector->immutable-vector`, `vector->list`, `vector->pseudo-random-generator`, `vector->pseudo-random-generator!`, `vector->values`, `vector-append`, `vector-argmax`, `vector-argmin`, `vector-copy`, `vector-copy!`, `vector-count`, `vector-drop`, `vector-drop-right`, `vector-fill!`, `vector-filter`, `vector-filter-not`, `vector-immutable`, `vector-immutable/c`, `vector-immutableof`, `vector-length`, `vector-map`, `vector-map!`, `vector-member`, `vector-memq`, `vector-memv`, `vector-ref`, `vector-set!`, `vector-set*!`, `vector-set-performance-stats!`, `vector-split-at`, `vector-split-at-right`, `vector-take`, `vector-take-right`, `vector/c`, `vector?`, `vectorof`, `version`, `void`, `void?`, `weak-box-value`, `weak-box?`, `weak-set`, `weak-seteq`, `weak-seteqv`, `will-execute`, `will-executor?`, `will-register`, `will-try-execute`, `with-input-from-bytes`, `with-input-from-file`, `with-input-from-string`, `with-output-to-bytes`, `with-output-to-file`, `with-output-to-string`, `would-be-future`, `wrap-evt`, `wrapped-extra-arg-arrow`, `wrapped-extra-arg-arrow-extra-neg-party-argument`, `wrapped-extra-arg-arrow-real-func`, `wrapped-extra-arg-arrow?`, `writable<%>`, `write`, `write-byte`, `write-bytes`, `write-bytes-avail`, `write-bytes-avail*`, `write-bytes-avail-evt`, `write-bytes-avail/enable-break`, `write-char`, `write-special`, `write-special-avail*`, `write-special-evt`, `write-string`, `write-to-file`, `writeln`, `xor`, `zero?`, `~.a`, `~.s`, `~.v`, `~a`, `~e`, `~r`, `~s`, `~v`), NameBuiltin, Pop(1)}, + {"(?:\\|[^|]*\\||\\\\[\\w\\W]|[^|\\\\()[\\]{}\",\\'`;\\s]+)+", Name, Pop(1)}, + Include("datum*"), + }, + "unquoted-list": { + Include("list"), + {`(?!\Z)`, Text, Push("unquoted-datum")}, + }, + "quasiquoted-datum": { + Include("datum"), + {`,@?`, Operator, Push("#pop", "unquoted-datum")}, + {"unquote(-splicing)?(?=[()[\\]{}\",\\'`;\\s])", Keyword, Push("#pop", "unquoted-datum")}, + {`[([{]`, Punctuation, Push("#pop", "quasiquoted-list")}, + Include("datum*"), + }, + "quasiquoted-list": { + Include("list"), + {`(?!\Z)`, Text, Push("quasiquoted-datum")}, + }, + "quoted-datum": { + Include("datum"), + {`[([{]`, Punctuation, Push("#pop", "quoted-list")}, + Include("datum*"), + }, + "quoted-list": { + Include("list"), + {`(?!\Z)`, Text, Push("quoted-datum")}, + }, + "block-comment": { + {`#\|`, 
CommentMultiline, Push()}, + {`\|#`, CommentMultiline, Pop(1)}, + {`[^#|]+|.`, CommentMultiline, nil}, + }, + "string": { + {`"`, LiteralStringDouble, Pop(1)}, + {`(?s)\\([0-7]{1,3}|x[\da-fA-F]{1,2}|u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8}|.)`, LiteralStringEscape, nil}, + {`[^\\"]+`, LiteralStringDouble, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/ragel.go b/vendor/github.com/alecthomas/chroma/lexers/r/ragel.go new file mode 100644 index 00000000..3da0ea70 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/ragel.go @@ -0,0 +1,76 @@ +package r + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Ragel lexer. +var Ragel = internal.Register(MustNewLexer( + &Config{ + Name: "Ragel", + Aliases: []string{"ragel"}, + Filenames: []string{}, + MimeTypes: []string{}, + }, + Rules{ + "whitespace": { + {`\s+`, TextWhitespace, nil}, + }, + "comments": { + {`\#.*$`, Comment, nil}, + }, + "keywords": { + {`(access|action|alphtype)\b`, Keyword, nil}, + {`(getkey|write|machine|include)\b`, Keyword, nil}, + {`(any|ascii|extend|alpha|digit|alnum|lower|upper)\b`, Keyword, nil}, + {`(xdigit|cntrl|graph|print|punct|space|zlen|empty)\b`, Keyword, nil}, + }, + "numbers": { + {`0x[0-9A-Fa-f]+`, LiteralNumberHex, nil}, + {`[+-]?[0-9]+`, LiteralNumberInteger, nil}, + }, + "literals": { + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralString, nil}, + {`\[(\\\\|\\\]|[^\]])*\]`, LiteralString, nil}, + {`/(?!\*)(\\\\|\\/|[^/])*/`, LiteralStringRegex, nil}, + }, + "identifiers": { + {`[a-zA-Z_]\w*`, NameVariable, nil}, + }, + "operators": { + {`,`, Operator, nil}, + {`\||&|--?`, Operator, nil}, + {`\.|<:|:>>?`, Operator, nil}, + {`:`, Operator, nil}, + {`->`, Operator, nil}, + {`(>|\$|%|<|@|<>)(/|eof\b)`, Operator, nil}, + {`(>|\$|%|<|@|<>)(!|err\b)`, Operator, nil}, + {`(>|\$|%|<|@|<>)(\^|lerr\b)`, Operator, nil}, + {`(>|\$|%|<|@|<>)(~|to\b)`, Operator, nil}, + {`(>|\$|%|<|@|<>)(\*|from\b)`, Operator, nil}, + {`>|@|\$|%`, Operator, nil}, + {`\*|\?|\+|\{[0-9]*,[0-9]*\}`, Operator, nil}, + {`!|\^`, Operator, nil}, + {`\(|\)`, Operator, nil}, + }, + "root": { + Include("literals"), + Include("whitespace"), + Include("comments"), + Include("keywords"), + Include("numbers"), + Include("identifiers"), + Include("operators"), + {`\{`, Punctuation, Push("host")}, + {`=`, Operator, nil}, + {`;`, Punctuation, nil}, + }, + "host": { + {`([^{}\'"/#]+|[^\\]\\[{}]|"(\\\\|\\"|[^"])*"|'(\\\\|\\'|[^'])*'|//.*$\n?|/\*(.|\n)*?\*/|\#.*$\n?|/(?!\*)(\\\\|\\/|[^/])*/|/)+`, Other, nil}, + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/regedit.go b/vendor/github.com/alecthomas/chroma/lexers/r/regedit.go new file mode 100644 index 00000000..7ee9cb58 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/regedit.go @@ -0,0 +1,32 @@ +package r + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Reg lexer. 
+var Reg = internal.Register(MustNewLexer( + &Config{ + Name: "reg", + Aliases: []string{"registry"}, + Filenames: []string{"*.reg"}, + MimeTypes: []string{"text/x-windows-registry"}, + }, + Rules{ + "root": { + {`Windows Registry Editor.*`, Text, nil}, + {`\s+`, Text, nil}, + {`[;#].*`, CommentSingle, nil}, + {`(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$`, ByGroups(Keyword, Operator, NameBuiltin, Keyword), nil}, + {`("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)`, ByGroups(NameAttribute, Text, Operator, Text), Push("value")}, + {`(.*?)([ \t]*)(=)([ \t]*)`, ByGroups(NameAttribute, Text, Operator, Text), Push("value")}, + }, + "value": { + {`-`, Operator, Pop(1)}, + {`(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)`, ByGroups(NameVariable, Punctuation, LiteralNumber), Pop(1)}, + {`.+`, LiteralString, Pop(1)}, + Default(Pop(1)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/rexx.go b/vendor/github.com/alecthomas/chroma/lexers/r/rexx.go new file mode 100644 index 00000000..1f5550a5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/rexx.go @@ -0,0 +1,59 @@ +package r + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Rexx lexer. +var Rexx = internal.Register(MustNewLexer( + &Config{ + Name: "Rexx", + Aliases: []string{"rexx", "arexx"}, + Filenames: []string{"*.rexx", "*.rex", "*.rx", "*.arexx"}, + MimeTypes: []string{"text/x-rexx"}, + NotMultiline: true, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`\s`, TextWhitespace, nil}, + {`/\*`, CommentMultiline, Push("comment")}, + {`"`, LiteralString, Push("string_double")}, + {`'`, LiteralString, Push("string_single")}, + {`[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?`, LiteralNumber, nil}, + {`([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b`, ByGroups(NameFunction, TextWhitespace, Operator, TextWhitespace, KeywordDeclaration), nil}, + {`([a-z_]\w*)(\s*)(:)`, ByGroups(NameLabel, TextWhitespace, Operator), nil}, + Include("function"), + Include("keyword"), + Include("operator"), + {`[a-z_]\w*`, Text, nil}, + }, + "function": { + {Words(``, `(\s*)(\()`, `abbrev`, `abs`, `address`, `arg`, `b2x`, `bitand`, `bitor`, `bitxor`, `c2d`, `c2x`, `center`, `charin`, `charout`, `chars`, `compare`, `condition`, `copies`, `d2c`, `d2x`, `datatype`, `date`, `delstr`, `delword`, `digits`, `errortext`, `form`, `format`, `fuzz`, `insert`, `lastpos`, `left`, `length`, `linein`, `lineout`, `lines`, `max`, `min`, `overlay`, `pos`, `queued`, `random`, `reverse`, `right`, `sign`, `sourceline`, `space`, `stream`, `strip`, `substr`, `subword`, `symbol`, `time`, `trace`, `translate`, `trunc`, `value`, `verify`, `word`, `wordindex`, `wordlength`, `wordpos`, `words`, `x2b`, `x2c`, `x2d`, `xrange`), ByGroups(NameBuiltin, TextWhitespace, Operator), nil}, + }, + "keyword": { + {`(address|arg|by|call|do|drop|else|end|exit|for|forever|if|interpret|iterate|leave|nop|numeric|off|on|options|parse|pull|push|queue|return|say|select|signal|to|then|trace|until|while)\b`, KeywordReserved, nil}, + }, + "operator": { + {`(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|¬>>|¬>|¬|\.|,)`, Operator, nil}, + }, + "string_double": { + {`[^"\n]+`, LiteralString, nil}, + {`""`, LiteralString, nil}, + {`"`, LiteralString, Pop(1)}, + {`\n`, Text, Pop(1)}, + }, + "string_single": { + {`[^\'\n]`, LiteralString, nil}, + {`\'\'`, LiteralString, nil}, + {`\'`, LiteralString, Pop(1)}, + {`\n`, Text, Pop(1)}, + }, + "comment": { + {`[^*]+`, CommentMultiline, nil}, + 
{`\*/`, CommentMultiline, Pop(1)}, + {`\*`, CommentMultiline, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/rst.go b/vendor/github.com/alecthomas/chroma/lexers/r/rst.go new file mode 100644 index 00000000..d5159d7e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/rst.go @@ -0,0 +1,86 @@ +package r + +import ( + "strings" + + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Restructuredtext lexer. +var Restructuredtext = internal.Register(MustNewLexer( + &Config{ + Name: "reStructuredText", + Aliases: []string{"rst", "rest", "restructuredtext"}, + Filenames: []string{"*.rst", "*.rest"}, + MimeTypes: []string{"text/x-rst", "text/prs.fallenstein.rst"}, + }, + Rules{ + "root": { + {"^(=+|-+|`+|:+|\\.+|\\'+|\"+|~+|\\^+|_+|\\*+|\\++|#+)([ \\t]*\\n)(.+)(\\n)(\\1)(\\n)", ByGroups(GenericHeading, Text, GenericHeading, Text, GenericHeading, Text), nil}, + {"^(\\S.*)(\\n)(={3,}|-{3,}|`{3,}|:{3,}|\\.{3,}|\\'{3,}|\"{3,}|~{3,}|\\^{3,}|_{3,}|\\*{3,}|\\+{3,}|#{3,})(\\n)", ByGroups(GenericHeading, Text, GenericHeading, Text), nil}, + {`^(\s*)([-*+])( .+\n(?:\1 .+\n)*)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)`, ByGroups(Text, LiteralNumber, UsingSelf("inline")), nil}, + {`^(\s*)(\|)( .+\n(?:\| .+\n)*)`, ByGroups(Text, Operator, UsingSelf("inline")), nil}, + {`^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)`, EmitterFunc(rstCodeBlock), nil}, + {`^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))`, ByGroups(Punctuation, Text, OperatorWord, Punctuation, Text, UsingSelf("inline")), nil}, + {`^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$`, ByGroups(Punctuation, Text, NameTag, UsingSelf("inline")), nil}, + {`^( *\.\.)(\s*)(\[.+\])(.*?)$`, ByGroups(Punctuation, Text, NameTag, UsingSelf("inline")), nil}, + {`^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))`, ByGroups(Punctuation, Text, NameTag, Text, OperatorWord, Punctuation, Text, UsingSelf("inline")), nil}, + {`^ *\.\..*(\n( +.*\n|\n)+)?`, CommentPreproc, nil}, + {`^( *)(:[a-zA-Z-]+:)(\s*)$`, ByGroups(Text, NameClass, Text), nil}, + {`^( *)(:.*?:)([ \t]+)(.*?)$`, ByGroups(Text, NameClass, Text, NameFunction), nil}, + {`^(\S.*(?)(`__?)", ByGroups(LiteralString, LiteralStringInterpol, LiteralString), nil}, + {"`.+?`__?", LiteralString, nil}, + {"(`.+?`)(:[a-zA-Z0-9:-]+?:)?", ByGroups(NameVariable, NameAttribute), nil}, + {"(:[a-zA-Z0-9:-]+?:)(`.+?`)", ByGroups(NameAttribute, NameVariable), nil}, + {`\*\*.+?\*\*`, GenericStrong, nil}, + {`\*.+?\*`, GenericEmph, nil}, + {`\[.*?\]_`, LiteralString, nil}, + {`<.+?>`, NameTag, nil}, + {"[^\\\\\\n\\[*`:]+", Text, nil}, + {`.`, Text, nil}, + }, + "literal": { + {"[^`]+", LiteralString, nil}, + {"``((?=$)|(?=[-/:.,; \\n\\x00\\\u2010\\\u2011\\\u2012\\\u2013\\\u2014\\\u00a0\\'\\\"\\)\\]\\}\\>\\\u2019\\\u201d\\\u00bb\\!\\?]))", LiteralString, Pop(1)}, + {"`", LiteralString, nil}, + }, + }, +)) + +func rstCodeBlock(groups []string, lexer Lexer) Iterator { + iterators := []Iterator{} + tokens := []*Token{ + {Punctuation, groups[1]}, + {Text, groups[2]}, + {OperatorWord, groups[3]}, + 
{Punctuation, groups[4]}, + {Text, groups[5]}, + {Keyword, groups[6]}, + {Text, groups[7]}, + } + code := strings.Join(groups[8:], "") + lexer = internal.Get(groups[6]) + if lexer == nil { + tokens = append(tokens, &Token{String, code}) + iterators = append(iterators, Literator(tokens...)) + } else { + sub, err := lexer.Tokenise(nil, code) + if err != nil { + panic(err) + } + iterators = append(iterators, Literator(tokens...), sub) + } + return Concaterator(iterators...) +} diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/ruby.go b/vendor/github.com/alecthomas/chroma/lexers/r/ruby.go new file mode 100644 index 00000000..6af8db20 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/ruby.go @@ -0,0 +1,250 @@ +package r + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Ruby lexer. +var Ruby = internal.Register(MustNewLexer( + &Config{ + Name: "Ruby", + Aliases: []string{"rb", "ruby", "duby"}, + Filenames: []string{"*.rb", "*.rbw", "Rakefile", "*.rake", "*.gemspec", "*.rbx", "*.duby", "Gemfile"}, + MimeTypes: []string{"text/x-ruby", "application/x-ruby"}, + DotAll: true, + }, + Rules{ + "root": { + {`\A#!.+?$`, CommentHashbang, nil}, + {`#.*?$`, CommentSingle, nil}, + {`=begin\s.*?\n=end.*?$`, CommentMultiline, nil}, + {Words(``, `\b`, `BEGIN`, `END`, `alias`, `begin`, `break`, `case`, `defined?`, `do`, `else`, `elsif`, `end`, `ensure`, `for`, `if`, `in`, `next`, `redo`, `rescue`, `raise`, `retry`, `return`, `super`, `then`, `undef`, `unless`, `until`, `when`, `while`, `yield`), Keyword, nil}, + {`(module)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)`, ByGroups(Keyword, Text, NameNamespace), nil}, + {`(def)(\s+)`, ByGroups(Keyword, Text), Push("funcname")}, + {"def(?=[*%&^`~+-/\\[<>=])", Keyword, Push("funcname")}, + {`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")}, + {Words(``, `\b`, `initialize`, `new`, `loop`, `include`, `extend`, `raise`, `attr_reader`, `attr_writer`, `attr_accessor`, `attr`, `catch`, `throw`, `private`, `module_function`, `public`, `protected`, `true`, `false`, `nil`), KeywordPseudo, nil}, + {`(not|and|or)\b`, OperatorWord, nil}, + {Words(``, `\?`, `autoload`, `block_given`, `const_defined`, `eql`, `equal`, `frozen`, `include`, `instance_of`, `is_a`, `iterator`, `kind_of`, `method_defined`, `nil`, `private_method_defined`, `protected_method_defined`, `public_method_defined`, `respond_to`, `tainted`), NameBuiltin, nil}, + {`(chomp|chop|exit|gsub|sub)!`, NameBuiltin, nil}, + {Words(`(?~!:])|(?<=(?:\s|;)when\s)|(?<=(?:\s|;)or\s)|(?<=(?:\s|;)and\s)|(?<=\.index\s)|(?<=\.scan\s)|(?<=\.sub\s)|(?<=\.sub!\s)|(?<=\.gsub\s)|(?<=\.gsub!\s)|(?<=\.match\s)|(?<=(?:\s|;)if\s)|(?<=(?:\s|;)elsif\s)|(?<=^when\s)|(?<=^index\s)|(?<=^scan\s)|(?<=^sub\s)|(?<=^gsub\s)|(?<=^sub!\s)|(?<=^gsub!\s)|(?<=^match\s)|(?<=^if\s)|(?<=^elsif\s))(\s*)(/)`, ByGroups(Text, LiteralStringRegex), Push("multiline-regex")}, + {`(?<=\(|,|\[)/`, LiteralStringRegex, Push("multiline-regex")}, + {`(\s+)(/)(?![\s=])`, ByGroups(Text, LiteralStringRegex), Push("multiline-regex")}, + {`(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?`, ByGroups(LiteralNumberOct, Text, Operator), nil}, + {`(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?`, ByGroups(LiteralNumberHex, Text, Operator), nil}, + {`(0b[01]+(?:_[01]+)*)(\s*)([/?])?`, ByGroups(LiteralNumberBin, Text, Operator), nil}, + {`([\d]+(?:_\d+)*)(\s*)([/?])?`, ByGroups(LiteralNumberInteger, Text, Operator), nil}, + {`@@[a-zA-Z_]\w*`, NameVariableClass, nil}, + {`@[a-zA-Z_]\w*`, NameVariableInstance, 
nil}, + {`\$\w+`, NameVariableGlobal, nil}, + {"\\$[!@&`\\'+~=/\\\\,;.<>_*$?:\"^-]", NameVariableGlobal, nil}, + {`\$-[0adFiIlpvw]`, NameVariableGlobal, nil}, + {`::`, Operator, nil}, + Include("strings"), + {`\?(\\[MC]-)*(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)(?!\w)`, LiteralStringChar, nil}, + {`[A-Z]\w+`, NameConstant, nil}, + {Words(`(\.|::)`, ``, `*`, `**`, `-`, `+`, `-@`, `+@`, `/`, `%`, `&`, `|`, `^`, "`", `~`, `[]`, `[]=`, `<<`, `>>`, `<`, `<>`, `<=>`, `>`, `>=`, `==`, `===`), ByGroups(Operator, NameOperator), nil}, + {"(\\.|::)([a-zA-Z_]\\w*[!?]?|[*%&^`~+\\-/\\[<>=])", ByGroups(Operator, Name), nil}, + {`[a-zA-Z_]\w*[!?]?`, Name, nil}, + {`(\[|\]|\*\*|<>?|>=|<=|<=>|=~|={3}|!~|&&?|\|\||\.{1,3})`, Operator, nil}, + {`[-+/*%=<>&!^|~]=?`, Operator, nil}, + {`[(){};,/?:\\]`, Punctuation, nil}, + {`\s+`, Text, nil}, + }, + "funcname": { + {`\(`, Punctuation, Push("defexpr")}, + {"(?:([a-zA-Z_]\\w*)(\\.))?([a-zA-Z_]\\w*[!?]?|\\*\\*?|[-+]@?|[/%&|^`~]|\\[\\]=?|<<|>>|<=?>|>=?|===?)", ByGroups(NameClass, Operator, NameFunction), Pop(1)}, + Default(Pop(1)), + }, + "classname": { + {`\(`, Punctuation, Push("defexpr")}, + {`<<`, Operator, Pop(1)}, + {`[A-Z_]\w*`, NameClass, Pop(1)}, + Default(Pop(1)), + }, + "defexpr": { + {`(\))(\.|::)?`, ByGroups(Punctuation, Operator), Pop(1)}, + {`\(`, Operator, Push()}, + Include("root"), + }, + "in-intp": { + {`\{`, LiteralStringInterpol, Push()}, + {`\}`, LiteralStringInterpol, Pop(1)}, + Include("root"), + }, + "string-intp": { + {`#\{`, LiteralStringInterpol, Push("in-intp")}, + {`#@@?[a-zA-Z_]\w*`, LiteralStringInterpol, nil}, + {`#\$[a-zA-Z_]\w*`, LiteralStringInterpol, nil}, + }, + "string-intp-escaped": { + Include("string-intp"), + {`\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})`, LiteralStringEscape, nil}, + }, + "interpolated-regex": { + Include("string-intp"), + {`[\\#]`, LiteralStringRegex, nil}, + {`[^\\#]+`, LiteralStringRegex, nil}, + }, + "interpolated-string": { + Include("string-intp"), + {`[\\#]`, LiteralStringOther, nil}, + {`[^\\#]+`, LiteralStringOther, nil}, + }, + "multiline-regex": { + Include("string-intp"), + {`\\\\`, LiteralStringRegex, nil}, + {`\\/`, LiteralStringRegex, nil}, + {`[\\#]`, LiteralStringRegex, nil}, + {`[^\\/#]+`, LiteralStringRegex, nil}, + {`/[mixounse]*`, LiteralStringRegex, Pop(1)}, + }, + "end-part": { + {`.+`, CommentPreproc, Pop(1)}, + }, + "strings": { + {`\:@{0,2}[a-zA-Z_]\w*[!?]?`, LiteralStringSymbol, nil}, + {Words(`\:@{0,2}`, ``, `*`, `**`, `-`, `+`, `-@`, `+@`, `/`, `%`, `&`, `|`, `^`, "`", `~`, `[]`, `[]=`, `<<`, `>>`, `<`, `<>`, `<=>`, `>`, `>=`, `==`, `===`), LiteralStringSymbol, nil}, + {`:'(\\\\|\\'|[^'])*'`, LiteralStringSymbol, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`:"`, LiteralStringSymbol, Push("simple-sym")}, + {`([a-zA-Z_]\w*)(:)(?!:)`, ByGroups(LiteralStringSymbol, Punctuation), nil}, + {`"`, LiteralStringDouble, Push("simple-string")}, + {"(?&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)`, ByGroups(Text, LiteralStringOther, None), nil}, + {`^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)`, ByGroups(Text, LiteralStringOther, None), nil}, + {`(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)`, String, nil}, + }, + "simple-string": { + Include("string-intp-escaped"), + {`[^\\"#]+`, LiteralStringDouble, nil}, + {`[\\#]`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "simple-sym": { + Include("string-intp-escaped"), + {`[^\\"#]+`, LiteralStringSymbol, nil}, + {`[\\#]`, LiteralStringSymbol, nil}, + {`"`, LiteralStringSymbol, Pop(1)}, + 
}, + "simple-backtick": { + Include("string-intp-escaped"), + {"[^\\\\`#]+", LiteralStringBacktick, nil}, + {`[\\#]`, LiteralStringBacktick, nil}, + {"`", LiteralStringBacktick, Pop(1)}, + }, + "cb-intp-string": { + {`\\[\\{}]`, LiteralStringOther, nil}, + {`\{`, LiteralStringOther, Push()}, + {`\}`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#{}]`, LiteralStringOther, nil}, + {`[^\\#{}]+`, LiteralStringOther, nil}, + }, + "cb-string": { + {`\\[\\{}]`, LiteralStringOther, nil}, + {`\{`, LiteralStringOther, Push()}, + {`\}`, LiteralStringOther, Pop(1)}, + {`[\\#{}]`, LiteralStringOther, nil}, + {`[^\\#{}]+`, LiteralStringOther, nil}, + }, + "cb-regex": { + {`\\[\\{}]`, LiteralStringRegex, nil}, + {`\{`, LiteralStringRegex, Push()}, + {`\}[mixounse]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#{}]`, LiteralStringRegex, nil}, + {`[^\\#{}]+`, LiteralStringRegex, nil}, + }, + "sb-intp-string": { + {`\\[\\\[\]]`, LiteralStringOther, nil}, + {`\[`, LiteralStringOther, Push()}, + {`\]`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#\[\]]`, LiteralStringOther, nil}, + {`[^\\#\[\]]+`, LiteralStringOther, nil}, + }, + "sb-string": { + {`\\[\\\[\]]`, LiteralStringOther, nil}, + {`\[`, LiteralStringOther, Push()}, + {`\]`, LiteralStringOther, Pop(1)}, + {`[\\#\[\]]`, LiteralStringOther, nil}, + {`[^\\#\[\]]+`, LiteralStringOther, nil}, + }, + "sb-regex": { + {`\\[\\\[\]]`, LiteralStringRegex, nil}, + {`\[`, LiteralStringRegex, Push()}, + {`\][mixounse]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#\[\]]`, LiteralStringRegex, nil}, + {`[^\\#\[\]]+`, LiteralStringRegex, nil}, + }, + "pa-intp-string": { + {`\\[\\()]`, LiteralStringOther, nil}, + {`\(`, LiteralStringOther, Push()}, + {`\)`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#()]`, LiteralStringOther, nil}, + {`[^\\#()]+`, LiteralStringOther, nil}, + }, + "pa-string": { + {`\\[\\()]`, LiteralStringOther, nil}, + {`\(`, LiteralStringOther, Push()}, + {`\)`, LiteralStringOther, Pop(1)}, + {`[\\#()]`, LiteralStringOther, nil}, + {`[^\\#()]+`, LiteralStringOther, nil}, + }, + "pa-regex": { + {`\\[\\()]`, LiteralStringRegex, nil}, + {`\(`, LiteralStringRegex, Push()}, + {`\)[mixounse]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#()]`, LiteralStringRegex, nil}, + {`[^\\#()]+`, LiteralStringRegex, nil}, + }, + "ab-intp-string": { + {`\\[\\<>]`, LiteralStringOther, nil}, + {`<`, LiteralStringOther, Push()}, + {`>`, LiteralStringOther, Pop(1)}, + Include("string-intp-escaped"), + {`[\\#<>]`, LiteralStringOther, nil}, + {`[^\\#<>]+`, LiteralStringOther, nil}, + }, + "ab-string": { + {`\\[\\<>]`, LiteralStringOther, nil}, + {`<`, LiteralStringOther, Push()}, + {`>`, LiteralStringOther, Pop(1)}, + {`[\\#<>]`, LiteralStringOther, nil}, + {`[^\\#<>]+`, LiteralStringOther, nil}, + }, + "ab-regex": { + {`\\[\\<>]`, LiteralStringRegex, nil}, + {`<`, LiteralStringRegex, Push()}, + {`>[mixounse]*`, LiteralStringRegex, Pop(1)}, + Include("string-intp"), + {`[\\#<>]`, LiteralStringRegex, nil}, + {`[^\\#<>]+`, LiteralStringRegex, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/r/rust.go b/vendor/github.com/alecthomas/chroma/lexers/r/rust.go new file mode 100644 index 00000000..3d05cd53 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/r/rust.go @@ -0,0 +1,134 @@ +package r + +import ( + . 
"github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Rust lexer. +var Rust = internal.Register(MustNewLexer( + &Config{ + Name: "Rust", + Aliases: []string{"rust"}, + Filenames: []string{"*.rs", "*.rs.in"}, + MimeTypes: []string{"text/rust"}, + EnsureNL: true, + }, + Rules{ + "root": { + {`#![^[\r\n].*$`, CommentPreproc, nil}, + Default(Push("base")), + }, + "base": { + {`\n`, TextWhitespace, nil}, + {`\s+`, TextWhitespace, nil}, + {`//!.*?\n`, LiteralStringDoc, nil}, + {`///(\n|[^/].*?\n)`, LiteralStringDoc, nil}, + {`//(.*?)\n`, CommentSingle, nil}, + {`/\*\*(\n|[^/*])`, LiteralStringDoc, Push("doccomment")}, + {`/\*!`, LiteralStringDoc, Push("doccomment")}, + {`/\*`, CommentMultiline, Push("comment")}, + {`\$([a-zA-Z_]\w*|\(,?|\),?|,?)`, CommentPreproc, nil}, + {Words(``, `\b`, `as`, `box`, `const`, `crate`, `else`, `extern`, `for`, `if`, `impl`, `in`, `loop`, `match`, `move`, `mut`, `pub`, `ref`, `return`, `static`, `super`, `trait`, `unsafe`, `use`, `where`, `while`), Keyword, nil}, + {Words(``, `\b`, `abstract`, `alignof`, `become`, `do`, `final`, `macro`, `offsetof`, `override`, `priv`, `proc`, `pure`, `sizeof`, `typeof`, `unsized`, `virtual`, `yield`), KeywordReserved, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {`mod\b`, Keyword, Push("modname")}, + {`let\b`, KeywordDeclaration, nil}, + {`fn\b`, Keyword, Push("funcname")}, + {`(struct|enum|type|union)\b`, Keyword, Push("typename")}, + {`(default)(\s+)(type|fn)\b`, ByGroups(Keyword, Text, Keyword), nil}, + {Words(``, `\b`, `u8`, `u16`, `u32`, `u64`, `i8`, `i16`, `i32`, `i64`, `usize`, `isize`, `f32`, `f64`, `str`, `bool`), KeywordType, nil}, + {`self\b`, NameBuiltinPseudo, nil}, + {Words(``, `\b`, `Copy`, `Send`, `Sized`, `Sync`, `Drop`, `Fn`, `FnMut`, `FnOnce`, `Box`, `ToOwned`, `Clone`, `PartialEq`, `PartialOrd`, `Eq`, `Ord`, `AsRef`, `AsMut`, `Into`, `From`, `Default`, `Iterator`, `Extend`, `IntoIterator`, `DoubleEndedIterator`, `ExactSizeIterator`, `Option`, `Some`, `None`, `Result`, `Ok`, `Err`, `SliceConcatExt`, `String`, `ToString`, `Vec`), NameBuiltin, nil}, + {`::\b`, Text, nil}, + {`(?::|->)`, Text, Push("typename")}, + {`(break|continue)(\s*)(\'[A-Za-z_]\w*)?`, ByGroups(Keyword, TextWhitespace, NameLabel), nil}, + {`'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0|\\u\{[0-9a-fA-F]{1,6}\}|.)'`, LiteralStringChar, nil}, + {`b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0|\\u\{[0-9a-fA-F]{1,6}\}|.)'`, LiteralStringChar, nil}, + {`0b[01_]+`, LiteralNumberBin, Push("number_lit")}, + {`0o[0-7_]+`, LiteralNumberOct, Push("number_lit")}, + {`0[xX][0-9a-fA-F_]+`, LiteralNumberHex, Push("number_lit")}, + {`[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)`, LiteralNumberFloat, Push("number_lit")}, + {`[0-9][0-9_]*`, LiteralNumberInteger, Push("number_lit")}, + {`b"`, LiteralString, Push("bytestring")}, + {`"`, LiteralString, Push("string")}, + {`b?r(#*)".*?"\1`, LiteralString, nil}, + {`'static`, NameBuiltin, nil}, + {`'[a-zA-Z_]\w*`, NameAttribute, nil}, + {`[{}()\[\],.;]`, Punctuation, nil}, + {`[+\-*/%&|<>^!~@=:?]`, Operator, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + {`#!?\[`, CommentPreproc, Push("attribute[")}, + {`([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\s*)(\{)`, ByGroups(CommentPreproc, Punctuation, TextWhitespace, Name, TextWhitespace, Punctuation), Push("macro{")}, + {`([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\()`, ByGroups(CommentPreproc, Punctuation, TextWhitespace, Name, Punctuation), Push("macro(")}, + }, + "comment": { + {`[^*/]+`, CommentMultiline, nil}, + {`/\*`, 
CommentMultiline, Push()}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[*/]`, CommentMultiline, nil}, + }, + "doccomment": { + {`[^*/]+`, LiteralStringDoc, nil}, + {`/\*`, LiteralStringDoc, Push()}, + {`\*/`, LiteralStringDoc, Pop(1)}, + {`[*/]`, LiteralStringDoc, nil}, + }, + "modname": { + {`\s+`, Text, nil}, + {`[a-zA-Z_]\w*`, NameNamespace, Pop(1)}, + Default(Pop(1)), + }, + "funcname": { + {`\s+`, Text, nil}, + {`[a-zA-Z_]\w*`, NameFunction, Pop(1)}, + Default(Pop(1)), + }, + "typename": { + {`\s+`, Text, nil}, + {`&`, KeywordPseudo, nil}, + {Words(``, `\b`, `Copy`, `Send`, `Sized`, `Sync`, `Drop`, `Fn`, `FnMut`, `FnOnce`, `Box`, `ToOwned`, `Clone`, `PartialEq`, `PartialOrd`, `Eq`, `Ord`, `AsRef`, `AsMut`, `Into`, `From`, `Default`, `Iterator`, `Extend`, `IntoIterator`, `DoubleEndedIterator`, `ExactSizeIterator`, `Option`, `Some`, `None`, `Result`, `Ok`, `Err`, `SliceConcatExt`, `String`, `ToString`, `Vec`), NameBuiltin, nil}, + {Words(``, `\b`, `u8`, `u16`, `u32`, `u64`, `i8`, `i16`, `i32`, `i64`, `usize`, `isize`, `f32`, `f64`, `str`, `bool`), KeywordType, nil}, + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + Default(Pop(1)), + }, + "number_lit": { + {`[ui](8|16|32|64|size)`, Keyword, Pop(1)}, + {`f(32|64)`, Keyword, Pop(1)}, + Default(Pop(1)), + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0|\\u\{[0-9a-fA-F]{1,6}\}`, LiteralStringEscape, nil}, + {`[^\\"]+`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + }, + "bytestring": { + {`\\x[89a-fA-F][0-9a-fA-F]`, LiteralStringEscape, nil}, + Include("string"), + }, + "macro{": { + {`\{`, Operator, Push()}, + {`\}`, Operator, Pop(1)}, + }, + "macro(": { + {`\(`, Operator, Push()}, + {`\)`, Operator, Pop(1)}, + }, + "attribute_common": { + {`"`, LiteralString, Push("string")}, + {`\[`, CommentPreproc, Push("attribute[")}, + {`\(`, CommentPreproc, Push("attribute(")}, + }, + "attribute[": { + Include("attribute_common"), + {`\];?`, CommentPreproc, Pop(1)}, + {`[^"\]]+`, CommentPreproc, nil}, + }, + "attribute(": { + Include("attribute_common"), + {`\);?`, CommentPreproc, Pop(1)}, + {`[^")]+`, CommentPreproc, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/sass.go b/vendor/github.com/alecthomas/chroma/lexers/s/sass.go new file mode 100644 index 00000000..c84a9438 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/sass.go @@ -0,0 +1,144 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Sass lexer. +var Sass = internal.Register(MustNewLexer( + &Config{ + Name: "Sass", + Aliases: []string{"sass"}, + Filenames: []string{"*.sass"}, + MimeTypes: []string{"text/x-sass"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`[ \t]*\n`, Text, nil}, + // { `[ \t]*`, ?? ??, nil }, + }, + "content": { + // { `//[^\n]*`, ?? .callback at 0x10fcaf378> ??, Push("root") }, + // { `/\*[^\n]*`, ?? 
.callback at 0x10fcaf400> ??, Push("root") }, + {`@import`, Keyword, Push("import")}, + {`@for`, Keyword, Push("for")}, + {`@(debug|warn|if|while)`, Keyword, Push("value")}, + {`(@mixin)( [\w-]+)`, ByGroups(Keyword, NameFunction), Push("value")}, + {`(@include)( [\w-]+)`, ByGroups(Keyword, NameDecorator), Push("value")}, + {`@extend`, Keyword, Push("selector")}, + {`@[\w-]+`, Keyword, Push("selector")}, + {`=[\w-]+`, NameFunction, Push("value")}, + {`\+[\w-]+`, NameDecorator, Push("value")}, + {`([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))`, ByGroups(NameVariable, Operator), Push("value")}, + {`:`, NameAttribute, Push("old-style-attr")}, + {`(?=.+?[=:]([^a-z]|$))`, NameAttribute, Push("new-style-attr")}, + Default(Push("selector")), + }, + "single-comment": { + {`.+`, CommentSingle, nil}, + {`\n`, Text, Push("root")}, + }, + "multi-comment": { + {`.+`, CommentMultiline, nil}, + {`\n`, Text, Push("root")}, + }, + "import": { + {`[ \t]+`, Text, nil}, + {`\S+`, LiteralString, nil}, + {`\n`, Text, Push("root")}, + }, + "old-style-attr": { + {`[^\s:="\[]+`, NameAttribute, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`[ \t]*=`, Operator, Push("value")}, + Default(Push("value")), + }, + "new-style-attr": { + {`[^\s:="\[]+`, NameAttribute, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`[ \t]*[=:]`, Operator, Push("value")}, + }, + "inline-comment": { + {`(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+`, CommentMultiline, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`\*/`, Comment, Pop(1)}, + }, + "value": { + {`[ \t]+`, Text, nil}, + {`[!$][\w-]+`, NameVariable, nil}, + {`url\(`, LiteralStringOther, Push("string-url")}, + {`[a-z_-][\w-]*(?=\()`, NameFunction, nil}, + {Words(``, `\b`, `align-content`, `align-items`, `align-self`, `alignment-baseline`, `all`, `animation`, `animation-delay`, `animation-direction`, `animation-duration`, `animation-fill-mode`, `animation-iteration-count`, `animation-name`, `animation-play-state`, `animation-timing-function`, `appearance`, `azimuth`, `backface-visibility`, `background`, `background-attachment`, `background-blend-mode`, `background-clip`, `background-color`, `background-image`, `background-origin`, `background-position`, `background-repeat`, `background-size`, `baseline-shift`, `bookmark-label`, `bookmark-level`, `bookmark-state`, `border`, `border-bottom`, `border-bottom-color`, `border-bottom-left-radius`, `border-bottom-right-radius`, `border-bottom-style`, `border-bottom-width`, `border-boundary`, `border-collapse`, `border-color`, `border-image`, `border-image-outset`, `border-image-repeat`, `border-image-slice`, `border-image-source`, `border-image-width`, `border-left`, `border-left-color`, `border-left-style`, `border-left-width`, `border-radius`, `border-right`, `border-right-color`, `border-right-style`, `border-right-width`, `border-spacing`, `border-style`, `border-top`, `border-top-color`, `border-top-left-radius`, `border-top-right-radius`, `border-top-style`, `border-top-width`, `border-width`, `bottom`, `box-decoration-break`, `box-shadow`, `box-sizing`, `box-snap`, `box-suppress`, `break-after`, `break-before`, `break-inside`, `caption-side`, `caret`, `caret-animation`, `caret-color`, `caret-shape`, `chains`, `clear`, `clip`, `clip-path`, `clip-rule`, `color`, `color-interpolation-filters`, `column-count`, `column-fill`, `column-gap`, `column-rule`, `column-rule-color`, `column-rule-style`, `column-rule-width`, `column-span`, `column-width`, `columns`, `content`, `counter-increment`, 
`counter-reset`, `counter-set`, `crop`, `cue`, `cue-after`, `cue-before`, `cursor`, `direction`, `display`, `dominant-baseline`, `elevation`, `empty-cells`, `filter`, `flex`, `flex-basis`, `flex-direction`, `flex-flow`, `flex-grow`, `flex-shrink`, `flex-wrap`, `float`, `float-defer`, `float-offset`, `float-reference`, `flood-color`, `flood-opacity`, `flow`, `flow-from`, `flow-into`, `font`, `font-family`, `font-feature-settings`, `font-kerning`, `font-language-override`, `font-size`, `font-size-adjust`, `font-stretch`, `font-style`, `font-synthesis`, `font-variant`, `font-variant-alternates`, `font-variant-caps`, `font-variant-east-asian`, `font-variant-ligatures`, `font-variant-numeric`, `font-variant-position`, `font-weight`, `footnote-display`, `footnote-policy`, `glyph-orientation-vertical`, `grid`, `grid-area`, `grid-auto-columns`, `grid-auto-flow`, `grid-auto-rows`, `grid-column`, `grid-column-end`, `grid-column-gap`, `grid-column-start`, `grid-gap`, `grid-row`, `grid-row-end`, `grid-row-gap`, `grid-row-start`, `grid-template`, `grid-template-areas`, `grid-template-columns`, `grid-template-rows`, `hanging-punctuation`, `height`, `hyphenate-character`, `hyphenate-limit-chars`, `hyphenate-limit-last`, `hyphenate-limit-lines`, `hyphenate-limit-zone`, `hyphens`, `image-orientation`, `image-resolution`, `initial-letter`, `initial-letter-align`, `initial-letter-wrap`, `isolation`, `justify-content`, `justify-items`, `justify-self`, `left`, `letter-spacing`, `lighting-color`, `line-break`, `line-grid`, `line-height`, `line-snap`, `list-style`, `list-style-image`, `list-style-position`, `list-style-type`, `margin`, `margin-bottom`, `margin-left`, `margin-right`, `margin-top`, `marker-side`, `marquee-direction`, `marquee-loop`, `marquee-speed`, `marquee-style`, `mask`, `mask-border`, `mask-border-mode`, `mask-border-outset`, `mask-border-repeat`, `mask-border-slice`, `mask-border-source`, `mask-border-width`, `mask-clip`, `mask-composite`, `mask-image`, `mask-mode`, `mask-origin`, `mask-position`, `mask-repeat`, `mask-size`, `mask-type`, `max-height`, `max-lines`, `max-width`, `min-height`, `min-width`, `mix-blend-mode`, `motion`, `motion-offset`, `motion-path`, `motion-rotation`, `move-to`, `nav-down`, `nav-left`, `nav-right`, `nav-up`, `object-fit`, `object-position`, `offset-after`, `offset-before`, `offset-end`, `offset-start`, `opacity`, `order`, `orphans`, `outline`, `outline-color`, `outline-offset`, `outline-style`, `outline-width`, `overflow`, `overflow-style`, `overflow-wrap`, `overflow-x`, `overflow-y`, `padding`, `padding-bottom`, `padding-left`, `padding-right`, `padding-top`, `page`, `page-break-after`, `page-break-before`, `page-break-inside`, `page-policy`, `pause`, `pause-after`, `pause-before`, `perspective`, `perspective-origin`, `pitch`, `pitch-range`, `play-during`, `polar-angle`, `polar-distance`, `position`, `presentation-level`, `quotes`, `region-fragment`, `resize`, `rest`, `rest-after`, `rest-before`, `richness`, `right`, `rotation`, `rotation-point`, `ruby-align`, `ruby-merge`, `ruby-position`, `running`, `scroll-snap-coordinate`, `scroll-snap-destination`, `scroll-snap-points-x`, `scroll-snap-points-y`, `scroll-snap-type`, `shape-image-threshold`, `shape-inside`, `shape-margin`, `shape-outside`, `size`, `speak`, `speak-as`, `speak-header`, `speak-numeral`, `speak-punctuation`, `speech-rate`, `stress`, `string-set`, `tab-size`, `table-layout`, `text-align`, `text-align-last`, `text-combine-upright`, `text-decoration`, `text-decoration-color`, `text-decoration-line`, 
`text-decoration-skip`, `text-decoration-style`, `text-emphasis`, `text-emphasis-color`, `text-emphasis-position`, `text-emphasis-style`, `text-indent`, `text-justify`, `text-orientation`, `text-overflow`, `text-shadow`, `text-space-collapse`, `text-space-trim`, `text-spacing`, `text-transform`, `text-underline-position`, `text-wrap`, `top`, `transform`, `transform-origin`, `transform-style`, `transition`, `transition-delay`, `transition-duration`, `transition-property`, `transition-timing-function`, `unicode-bidi`, `user-select`, `vertical-align`, `visibility`, `voice-balance`, `voice-duration`, `voice-family`, `voice-pitch`, `voice-range`, `voice-rate`, `voice-stress`, `voice-volume`, `volume`, `white-space`, `widows`, `width`, `will-change`, `word-break`, `word-spacing`, `word-wrap`, `wrap-after`, `wrap-before`, `wrap-flow`, `wrap-inside`, `wrap-through`, `writing-mode`, `z-index`, `above`, `absolute`, `always`, `armenian`, `aural`, `auto`, `avoid`, `baseline`, `behind`, `below`, `bidi-override`, `blink`, `block`, `bold`, `bolder`, `both`, `capitalize`, `center-left`, `center-right`, `center`, `circle`, `cjk-ideographic`, `close-quote`, `collapse`, `condensed`, `continuous`, `crop`, `crosshair`, `cross`, `cursive`, `dashed`, `decimal-leading-zero`, `decimal`, `default`, `digits`, `disc`, `dotted`, `double`, `e-resize`, `embed`, `extra-condensed`, `extra-expanded`, `expanded`, `fantasy`, `far-left`, `far-right`, `faster`, `fast`, `fixed`, `georgian`, `groove`, `hebrew`, `help`, `hidden`, `hide`, `higher`, `high`, `hiragana-iroha`, `hiragana`, `icon`, `inherit`, `inline-table`, `inline`, `inset`, `inside`, `invert`, `italic`, `justify`, `katakana-iroha`, `katakana`, `landscape`, `larger`, `large`, `left-side`, `leftwards`, `level`, `lighter`, `line-through`, `list-item`, `loud`, `lower-alpha`, `lower-greek`, `lower-roman`, `lowercase`, `ltr`, `lower`, `low`, `medium`, `message-box`, `middle`, `mix`, `monospace`, `n-resize`, `narrower`, `ne-resize`, `no-close-quote`, `no-open-quote`, `no-repeat`, `none`, `normal`, `nowrap`, `nw-resize`, `oblique`, `once`, `open-quote`, `outset`, `outside`, `overline`, `pointer`, `portrait`, `px`, `relative`, `repeat-x`, `repeat-y`, `repeat`, `rgb`, `ridge`, `right-side`, `rightwards`, `s-resize`, `sans-serif`, `scroll`, `se-resize`, `semi-condensed`, `semi-expanded`, `separate`, `serif`, `show`, `silent`, `slow`, `slower`, `small-caps`, `small-caption`, `smaller`, `soft`, `solid`, `spell-out`, `square`, `static`, `status-bar`, `super`, `sw-resize`, `table-caption`, `table-cell`, `table-column`, `table-column-group`, `table-footer-group`, `table-header-group`, `table-row`, `table-row-group`, `text`, `text-bottom`, `text-top`, `thick`, `thin`, `transparent`, `ultra-condensed`, `ultra-expanded`, `underline`, `upper-alpha`, `upper-latin`, `upper-roman`, `uppercase`, `url`, `visible`, `w-resize`, `wait`, `wider`, `x-fast`, `x-high`, `x-large`, `x-loud`, `x-low`, `x-small`, `x-soft`, `xx-large`, `xx-small`, `yes`), NameConstant, nil}, + {Words(``, `\b`, `aliceblue`, `antiquewhite`, `aqua`, `aquamarine`, `azure`, `beige`, `bisque`, `black`, `blanchedalmond`, `blue`, `blueviolet`, `brown`, `burlywood`, `cadetblue`, `chartreuse`, `chocolate`, `coral`, `cornflowerblue`, `cornsilk`, `crimson`, `cyan`, `darkblue`, `darkcyan`, `darkgoldenrod`, `darkgray`, `darkgreen`, `darkgrey`, `darkkhaki`, `darkmagenta`, `darkolivegreen`, `darkorange`, `darkorchid`, `darkred`, `darksalmon`, `darkseagreen`, `darkslateblue`, `darkslategray`, `darkslategrey`, `darkturquoise`, 
`darkviolet`, `deeppink`, `deepskyblue`, `dimgray`, `dimgrey`, `dodgerblue`, `firebrick`, `floralwhite`, `forestgreen`, `fuchsia`, `gainsboro`, `ghostwhite`, `gold`, `goldenrod`, `gray`, `green`, `greenyellow`, `grey`, `honeydew`, `hotpink`, `indianred`, `indigo`, `ivory`, `khaki`, `lavender`, `lavenderblush`, `lawngreen`, `lemonchiffon`, `lightblue`, `lightcoral`, `lightcyan`, `lightgoldenrodyellow`, `lightgray`, `lightgreen`, `lightgrey`, `lightpink`, `lightsalmon`, `lightseagreen`, `lightskyblue`, `lightslategray`, `lightslategrey`, `lightsteelblue`, `lightyellow`, `lime`, `limegreen`, `linen`, `magenta`, `maroon`, `mediumaquamarine`, `mediumblue`, `mediumorchid`, `mediumpurple`, `mediumseagreen`, `mediumslateblue`, `mediumspringgreen`, `mediumturquoise`, `mediumvioletred`, `midnightblue`, `mintcream`, `mistyrose`, `moccasin`, `navajowhite`, `navy`, `oldlace`, `olive`, `olivedrab`, `orange`, `orangered`, `orchid`, `palegoldenrod`, `palegreen`, `paleturquoise`, `palevioletred`, `papayawhip`, `peachpuff`, `peru`, `pink`, `plum`, `powderblue`, `purple`, `rebeccapurple`, `red`, `rosybrown`, `royalblue`, `saddlebrown`, `salmon`, `sandybrown`, `seagreen`, `seashell`, `sienna`, `silver`, `skyblue`, `slateblue`, `slategray`, `slategrey`, `snow`, `springgreen`, `steelblue`, `tan`, `teal`, `thistle`, `tomato`, `turquoise`, `violet`, `wheat`, `white`, `whitesmoke`, `yellow`, `yellowgreen`, `transparent`), NameEntity, nil}, + {Words(``, `\b`, `black`, `silver`, `gray`, `white`, `maroon`, `red`, `purple`, `fuchsia`, `green`, `lime`, `olive`, `yellow`, `navy`, `blue`, `teal`, `aqua`), NameBuiltin, nil}, + {`\!(important|default)`, NameException, nil}, + {`(true|false)`, NamePseudo, nil}, + {`(and|or|not)`, OperatorWord, nil}, + {`/\*`, CommentMultiline, Push("inline-comment")}, + {`//[^\n]*`, CommentSingle, nil}, + {`\#[a-z0-9]{1,6}`, LiteralNumberHex, nil}, + {`(-?\d+)(\%|[a-z]+)?`, ByGroups(LiteralNumberInteger, KeywordType), nil}, + {`(-?\d*\.\d+)(\%|[a-z]+)?`, ByGroups(LiteralNumberFloat, KeywordType), nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`[~^*!&%<>|+=@:,./?-]+`, Operator, nil}, + {`[\[\]()]+`, Punctuation, nil}, + {`"`, LiteralStringDouble, Push("string-double")}, + {`'`, LiteralStringSingle, Push("string-single")}, + {`[a-z_-][\w-]*`, Name, nil}, + {`\n`, Text, Push("root")}, + }, + "interpolation": { + {`\}`, LiteralStringInterpol, Pop(1)}, + Include("value"), + }, + "selector": { + {`[ \t]+`, Text, nil}, + {`\:`, NameDecorator, Push("pseudo-class")}, + {`\.`, NameClass, Push("class")}, + {`\#`, NameNamespace, Push("id")}, + {`[\w-]+`, NameTag, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`&`, Keyword, nil}, + {`[~^*!&\[\]()<>|+=@:;,./?-]`, Operator, nil}, + {`"`, LiteralStringDouble, Push("string-double")}, + {`'`, LiteralStringSingle, Push("string-single")}, + {`\n`, Text, Push("root")}, + }, + "string-double": { + {`(\\.|#(?=[^\n{])|[^\n"#])+`, LiteralStringDouble, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "string-single": { + {`(\\.|#(?=[^\n{])|[^\n'#])+`, LiteralStringDouble, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`'`, LiteralStringDouble, Pop(1)}, + }, + "string-url": { + {`(\\#|#(?=[^\n{])|[^\n#)])+`, LiteralStringOther, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`\)`, LiteralStringOther, Pop(1)}, + }, + "pseudo-class": { + {`[\w-]+`, NameDecorator, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + 
Default(Pop(1)), + }, + "class": { + {`[\w-]+`, NameClass, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + Default(Pop(1)), + }, + "id": { + {`[\w-]+`, NameNamespace, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + Default(Pop(1)), + }, + "for": { + {`(from|to|through)`, OperatorWord, nil}, + Include("value"), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/scala.go b/vendor/github.com/alecthomas/chroma/lexers/s/scala.go new file mode 100644 index 00000000..fbefc560 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/scala.go @@ -0,0 +1,113 @@ +package s + +import ( + "fmt" + + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var ( + scalaOp = "[-~\\^\\*!%&\\\\<>\\|+=:/?@\xa6-\xa7\xa9\xac\xae\xb0-\xb1\xb6\xd7\xf7\u03f6\u0482\u0606-\u0608\u060e-\u060f\u06e9\u06fd-\u06fe\u07f6\u09fa\u0b70\u0bf3-\u0bf8\u0bfa\u0c7f\u0cf1-\u0cf2\u0d79\u0f01-\u0f03\u0f13-\u0f17\u0f1a-\u0f1f\u0f34\u0f36\u0f38\u0fbe-\u0fc5\u0fc7-\u0fcf\u109e-\u109f\u1360\u1390-\u1399\u1940\u19e0-\u19ff\u1b61-\u1b6a\u1b74-\u1b7c\u2044\u2052\u207a-\u207c\u208a-\u208c\u2100-\u2101\u2103-\u2106\u2108-\u2109\u2114\u2116-\u2118\u211e-\u2123\u2125\u2127\u2129\u212e\u213a-\u213b\u2140-\u2144\u214a-\u214d\u214f\u2190-\u2328\u232b-\u244a\u249c-\u24e9\u2500-\u2767\u2794-\u27c4\u27c7-\u27e5\u27f0-\u2982\u2999-\u29d7\u29dc-\u29fb\u29fe-\u2b54\u2ce5-\u2cea\u2e80-\u2ffb\u3004\u3012-\u3013\u3020\u3036-\u3037\u303e-\u303f\u3190-\u3191\u3196-\u319f\u31c0-\u31e3\u3200-\u321e\u322a-\u3250\u3260-\u327f\u328a-\u32b0\u32c0-\u33ff\u4dc0-\u4dff\ua490-\ua4c6\ua828-\ua82b\ufb29\ufdfd\ufe62\ufe64-\ufe66\uff0b\uff1c-\uff1e\uff5c\uff5e\uffe2\uffe4\uffe8-\uffee\ufffc-\ufffd]+" + scalaUpper = "[A-Z\\$_\xc0-\xd6\xd8-\xde\u0100\u0102\u0104\u0106\u0108\u010a\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e\u0130\u0132\u0134\u0136\u0139\u013b\u013d\u013f\u0141\u0143\u0145\u0147\u014a\u014c\u014e\u0150\u0152\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176\u0178-\u0179\u017b\u017d\u0181-\u0182\u0184\u0186-\u0187\u0189-\u018b\u018e-\u0191\u0193-\u0194\u0196-\u0198\u019c-\u019d\u019f-\u01a0\u01a2\u01a4\u01a6-\u01a7\u01a9\u01ac\u01ae-\u01af\u01b1-\u01b3\u01b5\u01b7-\u01b8\u01bc\u01c4\u01c7\u01ca\u01cd\u01cf\u01d1\u01d3\u01d5\u01d7\u01d9\u01db\u01de\u01e0\u01e2\u01e4\u01e6\u01e8\u01ea\u01ec\u01ee\u01f1\u01f4\u01f6-\u01f8\u01fa\u01fc\u01fe\u0200\u0202\u0204\u0206\u0208\u020a\u020c\u020e\u0210\u0212\u0214\u0216\u0218\u021a\u021c\u021e\u0220\u0222\u0224\u0226\u0228\u022a\u022c\u022e\u0230\u0232\u023a-\u023b\u023d-\u023e\u0241\u0243-\u0246\u0248\u024a\u024c\u024e\u0370\u0372\u0376\u0386\u0388-\u038f\u0391-\u03ab\u03cf\u03d2-\u03d4\u03d8\u03da\u03dc\u03de\u03e0\u03e2\u03e4\u03e6\u03e8\u03ea\u03ec\u03ee\u03f4\u03f7\u03f9-\u03fa\u03fd-\u042f\u0460\u0462\u0464\u0466\u0468\u046a\u046c\u046e\u0470\u0472\u0474\u0476\u0478\u047a\u047c\u047e\u0480\u048a\u048c\u048e\u0490\u0492\u0494\u0496\u0498\u049a\u049c\u049e\u04a0\u04a2\u04a4\u04a6\u04a8\u04aa\u04ac\u04ae\u04b0\u04b2\u04b4\u04b6\u04b8\u04ba\u04bc\u04be\u04c0-\u04c1\u04c3\u04c5\u04c7\u04c9\u04cb\u04cd\u04d0\u04d2\u04d4\u04d6\u04d8\u04da\u04dc\u04de\u04e0\u04e2\u04e4\u04e6\u04e8\u04ea\u04ec\u04ee\u04f0\u04f2\u04f4\u04f6\u04f8\u04fa\u04fc\u04fe\u0500\u0502\u0504\u0506\u0508\u050a\u050c\u050e\u0510\u0512\u0514\u0516\u0518\u051a\u051c\u051e\u0520\u0522\u0531-\u0556\u10a0-\u10c5\u1e00\u1e02\u1e04\u1e06\
u1e08\u1e0a\u1e0c\u1e0e\u1e10\u1e12\u1e14\u1e16\u1e18\u1e1a\u1e1c\u1e1e\u1e20\u1e22\u1e24\u1e26\u1e28\u1e2a\u1e2c\u1e2e\u1e30\u1e32\u1e34\u1e36\u1e38\u1e3a\u1e3c\u1e3e\u1e40\u1e42\u1e44\u1e46\u1e48\u1e4a\u1e4c\u1e4e\u1e50\u1e52\u1e54\u1e56\u1e58\u1e5a\u1e5c\u1e5e\u1e60\u1e62\u1e64\u1e66\u1e68\u1e6a\u1e6c\u1e6e\u1e70\u1e72\u1e74\u1e76\u1e78\u1e7a\u1e7c\u1e7e\u1e80\u1e82\u1e84\u1e86\u1e88\u1e8a\u1e8c\u1e8e\u1e90\u1e92\u1e94\u1e9e\u1ea0\u1ea2\u1ea4\u1ea6\u1ea8\u1eaa\u1eac\u1eae\u1eb0\u1eb2\u1eb4\u1eb6\u1eb8\u1eba\u1ebc\u1ebe\u1ec0\u1ec2\u1ec4\u1ec6\u1ec8\u1eca\u1ecc\u1ece\u1ed0\u1ed2\u1ed4\u1ed6\u1ed8\u1eda\u1edc\u1ede\u1ee0\u1ee2\u1ee4\u1ee6\u1ee8\u1eea\u1eec\u1eee\u1ef0\u1ef2\u1ef4\u1ef6\u1ef8\u1efa\u1efc\u1efe\u1f08-\u1f0f\u1f18-\u1f1d\u1f28-\u1f2f\u1f38-\u1f3f\u1f48-\u1f4d\u1f59-\u1f5f\u1f68-\u1f6f\u1fb8-\u1fbb\u1fc8-\u1fcb\u1fd8-\u1fdb\u1fe8-\u1fec\u1ff8-\u1ffb\u2102\u2107\u210b-\u210d\u2110-\u2112\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u2130-\u2133\u213e-\u213f\u2145\u2183\u2c00-\u2c2e\u2c60\u2c62-\u2c64\u2c67\u2c69\u2c6b\u2c6d-\u2c6f\u2c72\u2c75\u2c80\u2c82\u2c84\u2c86\u2c88\u2c8a\u2c8c\u2c8e\u2c90\u2c92\u2c94\u2c96\u2c98\u2c9a\u2c9c\u2c9e\u2ca0\u2ca2\u2ca4\u2ca6\u2ca8\u2caa\u2cac\u2cae\u2cb0\u2cb2\u2cb4\u2cb6\u2cb8\u2cba\u2cbc\u2cbe\u2cc0\u2cc2\u2cc4\u2cc6\u2cc8\u2cca\u2ccc\u2cce\u2cd0\u2cd2\u2cd4\u2cd6\u2cd8\u2cda\u2cdc\u2cde\u2ce0\u2ce2\ua640\ua642\ua644\ua646\ua648\ua64a\ua64c\ua64e\ua650\ua652\ua654\ua656\ua658\ua65a\ua65c\ua65e\ua662\ua664\ua666\ua668\ua66a\ua66c\ua680\ua682\ua684\ua686\ua688\ua68a\ua68c\ua68e\ua690\ua692\ua694\ua696\ua722\ua724\ua726\ua728\ua72a\ua72c\ua72e\ua732\ua734\ua736\ua738\ua73a\ua73c\ua73e\ua740\ua742\ua744\ua746\ua748\ua74a\ua74c\ua74e\ua750\ua752\ua754\ua756\ua758\ua75a\ua75c\ua75e\ua760\ua762\ua764\ua766\ua768\ua76a\ua76c\ua76e\ua779\ua77b\ua77d-\ua77e\ua780\ua782\ua784\ua786\ua78b\uff21-\uff3a]" + scalaLetter = `[a-zA-Z\\$_ªµºÀ-ÖØ-öø-ʯͰ-ͳͶ-ͷͻ-ͽΆΈ-ϵϷ-ҁҊ-Ֆա-ևא-ײء-ؿف-يٮ-ٯٱ-ۓەۮ-ۯۺ-ۼۿܐܒ-ܯݍ-ޥޱߊ-ߪऄ-हऽॐक़-ॡॲ-ॿঅ-হঽৎড়-ৡৰ-ৱਅ-ਹਖ਼-ਫ਼ੲ-ੴઅ-હઽૐ-ૡଅ-ହଽଡ଼-ୡୱஃ-ஹௐఅ-ఽౘ-ౡಅ-ಹಽೞ-ೡഅ-ഽൠ-ൡൺ-ൿඅ-ෆก-ะา-ำเ-ๅກ-ະາ-ຳຽ-ໄໜ-ༀཀ-ཬྈ-ྋက-ဪဿၐ-ၕၚ-ၝၡၥ-ၦၮ-ၰၵ-ႁႎႠ-ჺᄀ-ፚᎀ-ᎏᎠ-ᙬᙯ-ᙶᚁ-ᚚᚠ-ᛪᛮ-ᜑᜠ-ᜱᝀ-ᝑᝠ-ᝰក-ឳៜᠠ-ᡂᡄ-ᢨᢪ-ᤜᥐ-ᦩᧁ-ᧇᨀ-ᨖᬅ-ᬳᭅ-ᭋᮃ-ᮠᮮ-ᮯᰀ-ᰣᱍ-ᱏᱚ-ᱷᴀ-ᴫᵢ-ᵷᵹ-ᶚḀ-ᾼιῂ-ῌῐ-Ίῠ-Ῥῲ-ῼⁱⁿℂℇℊ-ℓℕℙ-ℝℤΩℨK-ℭℯ-ℹℼ-ℿⅅ-ⅉⅎⅠ-ↈⰀ-ⱼⲀ-ⳤⴀ-ⵥⶀ-ⷞ〆-〇〡-〩〸-〺〼ぁ-ゖゟァ-ヺヿ-ㆎㆠ-ㆷㇰ-ㇿ㐀-䶵一-ꀔꀖ-ꒌꔀ-ꘋꘐ-ꘟꘪ-ꙮꚀ-ꚗꜢ-ꝯꝱ-ꞇꞋ-ꠁꠃ-ꠅꠇ-ꠊꠌ-ꠢꡀ-ꡳꢂ-ꢳꤊ-ꤥꤰ-ꥆꨀ-ꨨꩀ-ꩂꩄ-ꩋ가-힣豈-יִײַ-ﬨשׁ-ﴽﵐ-ﷻﹰ-ﻼA-Za-zヲ-ッア-ンᅠ-ᅵ]` + scalaIDRest = fmt.Sprintf(`%s(?:%s|[0-9])*(?:(?<=_)%s)?`, scalaLetter, scalaLetter, scalaOp) + scalaLetterLetterDigit = fmt.Sprintf(`%s(?:%s|\d)*`, scalaLetter, scalaLetter) +) + +// Scala lexer. 
+var Scala = internal.Register(MustNewLexer( + &Config{ + Name: "Scala", + Aliases: []string{"scala"}, + Filenames: []string{"*.scala"}, + MimeTypes: []string{"text/x-scala"}, + DotAll: true, + }, + Rules{ + "root": { + {`(class|trait|object)(\s+)`, ByGroups(Keyword, Text), Push("class")}, + {`[^\S\n]+`, Text, nil}, + {`//.*?\n`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("comment")}, + {`@` + scalaIDRest, NameDecorator, nil}, + {`(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|lazy|match|new|override|pr(?:ivate|otected)|re(?:quires|turn)|s(?:ealed|uper)|t(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\b|(<[%:-]|=>|>:|[#=@_⇒←])(\b|(?=\s)|$)`, Keyword, nil}, + {`:(?!` + scalaOp + `%s)`, Keyword, Push("type")}, + {fmt.Sprintf("%s%s\\b", scalaUpper, scalaIDRest), NameClass, nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`(import|package)(\s+)`, ByGroups(Keyword, Text), Push("import")}, + {`(type)(\s+)`, ByGroups(Keyword, Text), Push("type")}, + {`""".*?"""(?!")`, LiteralString, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'`, LiteralStringChar, nil}, + {"'" + scalaIDRest, TextSymbol, nil}, + {`[fs]"""`, LiteralString, Push("interptriplestring")}, + {`[fs]"`, LiteralString, Push("interpstring")}, + {`raw"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {scalaIDRest, Name, nil}, + {"`[^`]+`", Name, nil}, + {`\[`, Operator, Push("typeparam")}, + {`[(){};,.#]`, Operator, nil}, + {scalaOp, Operator, nil}, + {`([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?`, LiteralNumberFloat, nil}, + {`0x[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`[0-9]+L?`, LiteralNumberInteger, nil}, + {`\n`, Text, nil}, + }, + "class": { + {fmt.Sprintf("(%s|%s|`[^`]+`)(\\s*)(\\[)", scalaIDRest, scalaOp), ByGroups(NameClass, Text, Operator), Push("typeparam")}, + {`\s+`, Text, nil}, + {`\{`, Operator, Pop(1)}, + {`\(`, Operator, Pop(1)}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {fmt.Sprintf("%s|%s|`[^`]+`", scalaIDRest, scalaOp), NameClass, Pop(1)}, + }, + "type": { + {`\s+`, Text, nil}, + {`<[%:]|>:|[#_]|forSome|type`, Keyword, nil}, + {`([,);}]|=>|=|⇒)(\s*)`, ByGroups(Operator, Text), Pop(1)}, + {`[({]`, Operator, Push()}, + {fmt.Sprintf("((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)(\\[)", scalaIDRest, scalaOp, scalaIDRest, scalaOp), ByGroups(KeywordType, Text, Operator), Push("#pop", "typeparam")}, + {fmt.Sprintf("((?:%s|%s|`[^`]+`)(?:\\.(?:%s|%s|`[^`]+`))*)(\\s*)$", scalaIDRest, scalaOp, scalaIDRest, scalaOp), ByGroups(KeywordType, Text), Pop(1)}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {fmt.Sprintf("\\.|%s|%s|`[^`]+`", scalaIDRest, scalaOp), KeywordType, nil}, + }, + "typeparam": { + {`[\s,]+`, Text, nil}, + {`<[%:]|=>|>:|[#_⇒]|forSome|type`, Keyword, nil}, + {`([\])}])`, Operator, Pop(1)}, + {`[(\[{]`, Operator, Push()}, + {fmt.Sprintf("\\.|%s|%s|`[^`]+`", scalaIDRest, scalaOp), KeywordType, nil}, + }, + "comment": { + {`[^/*]+`, CommentMultiline, nil}, + {`/\*`, CommentMultiline, Push()}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[*/]`, CommentMultiline, nil}, + }, + "import": { + {fmt.Sprintf("(%s|\\.)+", scalaIDRest), NameNamespace, Pop(1)}, + }, + "interpstringcommon": { + {`[^"$\\]+`, LiteralString, nil}, + {`\$\$`, LiteralString, nil}, + {`\$` + scalaLetter + `(?:` + scalaLetter + `|\d)*`, LiteralStringInterpol, nil}, + {`\$\{`, LiteralStringInterpol, Push("interpbrace")}, + {`\\.`, LiteralString, nil}, + }, + "interptriplestring": { + {`"""(?!")`, LiteralString, Pop(1)}, + {`"`, LiteralString, nil}, + 
Include("interpstringcommon"), + }, + "interpstring": { + {`"`, LiteralString, Pop(1)}, + Include("interpstringcommon"), + }, + "interpbrace": { + {`\}`, LiteralStringInterpol, Pop(1)}, + {`\{`, LiteralStringInterpol, Push()}, + Include("root"), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/scheme.go b/vendor/github.com/alecthomas/chroma/lexers/s/scheme.go new file mode 100644 index 00000000..b721d68d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/scheme.go @@ -0,0 +1,53 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// nolint + +// Scheme lexer. +var SchemeLang = internal.Register(MustNewLexer( + &Config{ + Name: "Scheme", + Aliases: []string{"scheme", "scm"}, + Filenames: []string{"*.scm", "*.ss"}, + MimeTypes: []string{"text/x-scheme", "application/x-scheme"}, + }, + Rules{ + "root": { + {`;.*$`, CommentSingle, nil}, + {`#\|`, CommentMultiline, Push("multiline-comment")}, + {`#;\s*\(`, Comment, Push("commented-form")}, + {`#!r6rs`, Comment, nil}, + {`\s+`, Text, nil}, + {`-?\d+\.\d+`, LiteralNumberFloat, nil}, + {`-?\d+`, LiteralNumberInteger, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralString, nil}, + {`'[\w!$%&*+,/:<=>?@^~|-]+`, LiteralStringSymbol, nil}, + {`#\\([()/'\"._!§$%& ?=+-]|[a-zA-Z0-9]+)`, LiteralStringChar, nil}, + {`(#t|#f)`, NameConstant, nil}, + {"('|#|`|,@|,|\\.)", Operator, nil}, + {`(lambda |define |if |else |cond |and |or |case |let |let\* |letrec |begin |do |delay |set\! |\=\> |quote |quasiquote |unquote |unquote\-splicing |define\-syntax |let\-syntax |letrec\-syntax |syntax\-rules )`, Keyword, nil}, + {`(?<='\()[\w!$%&*+,/:<=>?@^~|-]+`, NameVariable, nil}, + {`(?<=#\()[\w!$%&*+,/:<=>?@^~|-]+`, NameVariable, nil}, + {`(?<=\()(\* |\+ |\- |\/ |\< |\<\= |\= |\> |\>\= |abs |acos |angle |append |apply |asin |assoc |assq |assv |atan |boolean\? |caaaar |caaadr |caaar |caadar |caaddr |caadr |caar |cadaar |cadadr |cadar |caddar |cadddr |caddr |cadr |call\-with\-current\-continuation |call\-with\-input\-file |call\-with\-output\-file |call\-with\-values |call\/cc |car |cdaaar |cdaadr |cdaar |cdadar |cdaddr |cdadr |cdar |cddaar |cddadr |cddar |cdddar |cddddr |cdddr |cddr |cdr |ceiling |char\-\>integer |char\-alphabetic\? |char\-ci\<\=\? |char\-ci\<\? |char\-ci\=\? |char\-ci\>\=\? |char\-ci\>\? |char\-downcase |char\-lower\-case\? |char\-numeric\? |char\-ready\? |char\-upcase |char\-upper\-case\? |char\-whitespace\? |char\<\=\? |char\<\? |char\=\? |char\>\=\? |char\>\? |char\? |close\-input\-port |close\-output\-port |complex\? |cons |cos |current\-input\-port |current\-output\-port |denominator |display |dynamic\-wind |eof\-object\? |eq\? |equal\? |eqv\? |eval |even\? |exact\-\>inexact |exact\? |exp |expt |floor |for\-each |force |gcd |imag\-part |inexact\-\>exact |inexact\? |input\-port\? |integer\-\>char |integer\? |interaction\-environment |lcm |length |list |list\-\>string |list\-\>vector |list\-ref |list\-tail |list\? |load |log |magnitude |make\-polar |make\-rectangular |make\-string |make\-vector |map |max |member |memq |memv |min |modulo |negative\? |newline |not |null\-environment |null\? |number\-\>string |number\? |numerator |odd\? |open\-input\-file |open\-output\-file |output\-port\? |pair\? |peek\-char |port\? |positive\? |procedure\? |quotient |rational\? |rationalize |read |read\-char |real\-part |real\? |remainder |reverse |round |scheme\-report\-environment |set\-car\! |set\-cdr\! 
|sin |sqrt |string |string\-\>list |string\-\>number |string\-\>symbol |string\-append |string\-ci\<\=\? |string\-ci\<\? |string\-ci\=\? |string\-ci\>\=\? |string\-ci\>\? |string\-copy |string\-fill\! |string\-length |string\-ref |string\-set\! |string\<\=\? |string\<\? |string\=\? |string\>\=\? |string\>\? |string\? |substring |symbol\-\>string |symbol\? |tan |transcript\-off |transcript\-on |truncate |values |vector |vector\-\>list |vector\-fill\! |vector\-length |vector\-ref |vector\-set\! |vector\? |with\-input\-from\-file |with\-output\-to\-file |write |write\-char |zero\? )`, NameBuiltin, nil}, + {`(?<=\()[\w!$%&*+,/:<=>?@^~|-]+`, NameFunction, nil}, + {`[\w!$%&*+,/:<=>?@^~|-]+`, NameVariable, nil}, + {`(\(|\))`, Punctuation, nil}, + {`(\[|\])`, Punctuation, nil}, + }, + "multiline-comment": { + {`#\|`, CommentMultiline, Push()}, + {`\|#`, CommentMultiline, Pop(1)}, + {`[^|#]+`, CommentMultiline, nil}, + {`[|#]`, CommentMultiline, nil}, + }, + "commented-form": { + {`\(`, Comment, Push()}, + {`\)`, Comment, Pop(1)}, + {`[^()]+`, Comment, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/scilab.go b/vendor/github.com/alecthomas/chroma/lexers/s/scilab.go new file mode 100644 index 00000000..87fa66cc --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/scilab.go @@ -0,0 +1,44 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Scilab lexer. +var Scilab = internal.Register(MustNewLexer( + &Config{ + Name: "Scilab", + Aliases: []string{"scilab"}, + Filenames: []string{"*.sci", "*.sce", "*.tst"}, + MimeTypes: []string{"text/scilab"}, + }, + Rules{ + "root": { + {`//.*?$`, CommentSingle, nil}, + {`^\s*function`, Keyword, Push("deffunc")}, + {Words(``, `\b`, `__FILE__`, `__LINE__`, `break`, `case`, `catch`, `classdef`, `continue`, `do`, `else`, `elseif`, `end`, `end_try_catch`, `end_unwind_protect`, `endclassdef`, `endevents`, `endfor`, `endfunction`, `endif`, `endmethods`, `endproperties`, `endswitch`, `endwhile`, `events`, `for`, `function`, `get`, `global`, `if`, `methods`, `otherwise`, `persistent`, `properties`, `return`, `set`, `static`, `switch`, `try`, `until`, `unwind_protect`, `unwind_protect_cleanup`, `while`), Keyword, nil}, + {Words(``, `\b`, `!!_invoke_`, `%H5Object_e`, `%H5Object_fieldnames`, `%H5Object_p`, `%XMLAttr_6`, `%XMLAttr_e`, `%XMLAttr_i_XMLElem`, `%XMLAttr_length`, `%XMLAttr_p`, `%XMLAttr_size`, `%XMLDoc_6`, `%XMLDoc_e`, `%XMLDoc_i_XMLList`, `%XMLDoc_p`, `%XMLElem_6`, `%XMLElem_e`, `%XMLElem_i_XMLDoc`, `%XMLElem_i_XMLElem`, `%XMLElem_i_XMLList`, `%XMLElem_p`, `%XMLList_6`, `%XMLList_e`, `%XMLList_i_XMLElem`, `%XMLList_i_XMLList`, `%XMLList_length`, `%XMLList_p`, `%XMLList_size`, `%XMLNs_6`, `%XMLNs_e`, `%XMLNs_i_XMLElem`, `%XMLNs_p`, `%XMLSet_6`, `%XMLSet_e`, `%XMLSet_length`, `%XMLSet_p`, `%XMLSet_size`, `%XMLValid_p`, `%_EClass_6`, `%_EClass_e`, `%_EClass_p`, `%_EObj_0`, `%_EObj_1__EObj`, `%_EObj_1_b`, `%_EObj_1_c`, `%_EObj_1_i`, `%_EObj_1_s`, `%_EObj_2__EObj`, `%_EObj_2_b`, `%_EObj_2_c`, `%_EObj_2_i`, `%_EObj_2_s`, `%_EObj_3__EObj`, `%_EObj_3_b`, `%_EObj_3_c`, `%_EObj_3_i`, `%_EObj_3_s`, `%_EObj_4__EObj`, `%_EObj_4_b`, `%_EObj_4_c`, `%_EObj_4_i`, `%_EObj_4_s`, `%_EObj_5`, `%_EObj_6`, `%_EObj_a__EObj`, `%_EObj_a_b`, `%_EObj_a_c`, `%_EObj_a_i`, `%_EObj_a_s`, `%_EObj_d__EObj`, `%_EObj_d_b`, `%_EObj_d_c`, `%_EObj_d_i`, `%_EObj_d_s`, `%_EObj_disp`, `%_EObj_e`, `%_EObj_g__EObj`, `%_EObj_g_b`, `%_EObj_g_c`, `%_EObj_g_i`, `%_EObj_g_s`, `%_EObj_h__EObj`, 
`%_EObj_h_b`, `%_EObj_h_c`, `%_EObj_h_i`, `%_EObj_h_s`, `%_EObj_i__EObj`, `%_EObj_j__EObj`, `%_EObj_j_b`, `%_EObj_j_c`, `%_EObj_j_i`, `%_EObj_j_s`, `%_EObj_k__EObj`, `%_EObj_k_b`, `%_EObj_k_c`, `%_EObj_k_i`, `%_EObj_k_s`, `%_EObj_l__EObj`, `%_EObj_l_b`, `%_EObj_l_c`, `%_EObj_l_i`, `%_EObj_l_s`, `%_EObj_m__EObj`, `%_EObj_m_b`, `%_EObj_m_c`, `%_EObj_m_i`, `%_EObj_m_s`, `%_EObj_n__EObj`, `%_EObj_n_b`, `%_EObj_n_c`, `%_EObj_n_i`, `%_EObj_n_s`, `%_EObj_o__EObj`, `%_EObj_o_b`, `%_EObj_o_c`, `%_EObj_o_i`, `%_EObj_o_s`, `%_EObj_p`, `%_EObj_p__EObj`, `%_EObj_p_b`, `%_EObj_p_c`, `%_EObj_p_i`, `%_EObj_p_s`, `%_EObj_q__EObj`, `%_EObj_q_b`, `%_EObj_q_c`, `%_EObj_q_i`, `%_EObj_q_s`, `%_EObj_r__EObj`, `%_EObj_r_b`, `%_EObj_r_c`, `%_EObj_r_i`, `%_EObj_r_s`, `%_EObj_s__EObj`, `%_EObj_s_b`, `%_EObj_s_c`, `%_EObj_s_i`, `%_EObj_s_s`, `%_EObj_t`, `%_EObj_x__EObj`, `%_EObj_x_b`, `%_EObj_x_c`, `%_EObj_x_i`, `%_EObj_x_s`, `%_EObj_y__EObj`, `%_EObj_y_b`, `%_EObj_y_c`, `%_EObj_y_i`, `%_EObj_y_s`, `%_EObj_z__EObj`, `%_EObj_z_b`, `%_EObj_z_c`, `%_EObj_z_i`, `%_EObj_z_s`, `%_eigs`, `%_load`, `%b_1__EObj`, `%b_2__EObj`, `%b_3__EObj`, `%b_4__EObj`, `%b_a__EObj`, `%b_d__EObj`, `%b_g__EObj`, `%b_h__EObj`, `%b_i_XMLList`, `%b_i__EObj`, `%b_j__EObj`, `%b_k__EObj`, `%b_l__EObj`, `%b_m__EObj`, `%b_n__EObj`, `%b_o__EObj`, `%b_p__EObj`, `%b_q__EObj`, `%b_r__EObj`, `%b_s__EObj`, `%b_x__EObj`, `%b_y__EObj`, `%b_z__EObj`, `%c_1__EObj`, `%c_2__EObj`, `%c_3__EObj`, `%c_4__EObj`, `%c_a__EObj`, `%c_d__EObj`, `%c_g__EObj`, `%c_h__EObj`, `%c_i_XMLAttr`, `%c_i_XMLDoc`, `%c_i_XMLElem`, `%c_i_XMLList`, `%c_i__EObj`, `%c_j__EObj`, `%c_k__EObj`, `%c_l__EObj`, `%c_m__EObj`, `%c_n__EObj`, `%c_o__EObj`, `%c_p__EObj`, `%c_q__EObj`, `%c_r__EObj`, `%c_s__EObj`, `%c_x__EObj`, `%c_y__EObj`, `%c_z__EObj`, `%ce_i_XMLList`, `%fptr_i_XMLList`, `%h_i_XMLList`, `%hm_i_XMLList`, `%i_1__EObj`, `%i_2__EObj`, `%i_3__EObj`, `%i_4__EObj`, `%i_a__EObj`, `%i_abs`, `%i_cumprod`, `%i_cumsum`, `%i_d__EObj`, `%i_diag`, `%i_g__EObj`, `%i_h__EObj`, `%i_i_XMLList`, `%i_i__EObj`, `%i_j__EObj`, `%i_k__EObj`, `%i_l__EObj`, `%i_m__EObj`, `%i_matrix`, `%i_max`, `%i_maxi`, `%i_min`, `%i_mini`, `%i_mput`, `%i_n__EObj`, `%i_o__EObj`, `%i_p`, `%i_p__EObj`, `%i_prod`, `%i_q__EObj`, `%i_r__EObj`, `%i_s__EObj`, `%i_sum`, `%i_tril`, `%i_triu`, `%i_x__EObj`, `%i_y__EObj`, `%i_z__EObj`, `%ip_i_XMLList`, `%l_i_XMLList`, `%l_i__EObj`, `%lss_i_XMLList`, `%mc_i_XMLList`, `%msp_full`, `%msp_i_XMLList`, `%msp_spget`, `%p_i_XMLList`, `%ptr_i_XMLList`, `%r_i_XMLList`, `%s_1__EObj`, `%s_2__EObj`, `%s_3__EObj`, `%s_4__EObj`, `%s_a__EObj`, `%s_d__EObj`, `%s_g__EObj`, `%s_h__EObj`, `%s_i_XMLList`, `%s_i__EObj`, `%s_j__EObj`, `%s_k__EObj`, `%s_l__EObj`, `%s_m__EObj`, `%s_n__EObj`, `%s_o__EObj`, `%s_p__EObj`, `%s_q__EObj`, `%s_r__EObj`, `%s_s__EObj`, `%s_x__EObj`, `%s_y__EObj`, `%s_z__EObj`, `%sp_i_XMLList`, `%spb_i_XMLList`, `%st_i_XMLList`, `Calendar`, `ClipBoard`, `Matplot`, `Matplot1`, `PlaySound`, `TCL_DeleteInterp`, `TCL_DoOneEvent`, `TCL_EvalFile`, `TCL_EvalStr`, `TCL_ExistArray`, `TCL_ExistInterp`, `TCL_ExistVar`, `TCL_GetVar`, `TCL_GetVersion`, `TCL_SetVar`, `TCL_UnsetVar`, `TCL_UpVar`, `_`, `_code2str`, `_d`, `_str2code`, `about`, `abs`, `acos`, `addModulePreferences`, `addcolor`, `addf`, `addhistory`, `addinter`, `addlocalizationdomain`, `amell`, `and`, `argn`, `arl2_ius`, `ascii`, `asin`, `atan`, `backslash`, `balanc`, `banner`, `base2dec`, `basename`, `bdiag`, `beep`, `besselh`, `besseli`, `besselj`, `besselk`, `bessely`, `beta`, `bezout`, `bfinit`, `blkfc1i`, `blkslvi`, `bool2s`, 
`browsehistory`, `browsevar`, `bsplin3val`, `buildDoc`, `buildouttb`, `bvode`, `c_link`, `call`, `callblk`, `captions`, `cd`, `cdfbet`, `cdfbin`, `cdfchi`, `cdfchn`, `cdff`, `cdffnc`, `cdfgam`, `cdfnbn`, `cdfnor`, `cdfpoi`, `cdft`, `ceil`, `champ`, `champ1`, `chdir`, `chol`, `clc`, `clean`, `clear`, `clearfun`, `clearglobal`, `closeEditor`, `closeEditvar`, `closeXcos`, `code2str`, `coeff`, `color`, `comp`, `completion`, `conj`, `contour2di`, `contr`, `conv2`, `convstr`, `copy`, `copyfile`, `corr`, `cos`, `coserror`, `createdir`, `cshep2d`, `csvDefault`, `csvIsnum`, `csvRead`, `csvStringToDouble`, `csvTextScan`, `csvWrite`, `ctree2`, `ctree3`, `ctree4`, `cumprod`, `cumsum`, `curblock`, `curblockc`, `daskr`, `dasrt`, `dassl`, `data2sig`, `datatipCreate`, `datatipManagerMode`, `datatipMove`, `datatipRemove`, `datatipSetDisplay`, `datatipSetInterp`, `datatipSetOrientation`, `datatipSetStyle`, `datatipToggle`, `dawson`, `dct`, `debug`, `dec2base`, `deff`, `definedfields`, `degree`, `delbpt`, `delete`, `deletefile`, `delip`, `delmenu`, `det`, `dgettext`, `dhinf`, `diag`, `diary`, `diffobjs`, `disp`, `dispbpt`, `displayhistory`, `disposefftwlibrary`, `dlgamma`, `dnaupd`, `dneupd`, `double`, `drawaxis`, `drawlater`, `drawnow`, `driver`, `dsaupd`, `dsearch`, `dseupd`, `dst`, `duplicate`, `editvar`, `emptystr`, `end_scicosim`, `ereduc`, `erf`, `erfc`, `erfcx`, `erfi`, `errcatch`, `errclear`, `error`, `eval_cshep2d`, `exec`, `execstr`, `exists`, `exit`, `exp`, `expm`, `exportUI`, `export_to_hdf5`, `eye`, `fadj2sp`, `fec`, `feval`, `fft`, `fftw`, `fftw_flags`, `fftw_forget_wisdom`, `fftwlibraryisloaded`, `figure`, `file`, `filebrowser`, `fileext`, `fileinfo`, `fileparts`, `filesep`, `find`, `findBD`, `findfiles`, `fire_closing_finished`, `floor`, `format`, `fort`, `fprintfMat`, `freq`, `frexp`, `fromc`, `fromjava`, `fscanfMat`, `fsolve`, `fstair`, `full`, `fullpath`, `funcprot`, `funptr`, `gamma`, `gammaln`, `geom3d`, `get`, `getURL`, `get_absolute_file_path`, `get_fftw_wisdom`, `getblocklabel`, `getcallbackobject`, `getdate`, `getdebuginfo`, `getdefaultlanguage`, `getdrives`, `getdynlibext`, `getenv`, `getfield`, `gethistory`, `gethistoryfile`, `getinstalledlookandfeels`, `getio`, `getlanguage`, `getlongpathname`, `getlookandfeel`, `getmd5`, `getmemory`, `getmodules`, `getos`, `getpid`, `getrelativefilename`, `getscicosvars`, `getscilabmode`, `getshortpathname`, `gettext`, `getvariablesonstack`, `getversion`, `glist`, `global`, `glue`, `grand`, `graphicfunction`, `grayplot`, `grep`, `gsort`, `gstacksize`, `h5attr`, `h5close`, `h5cp`, `h5dataset`, `h5dump`, `h5exists`, `h5flush`, `h5get`, `h5group`, `h5isArray`, `h5isAttr`, `h5isCompound`, `h5isFile`, `h5isGroup`, `h5isList`, `h5isRef`, `h5isSet`, `h5isSpace`, `h5isType`, `h5isVlen`, `h5label`, `h5ln`, `h5ls`, `h5mount`, `h5mv`, `h5open`, `h5read`, `h5readattr`, `h5rm`, `h5umount`, `h5write`, `h5writeattr`, `havewindow`, `helpbrowser`, `hess`, `hinf`, `historymanager`, `historysize`, `host`, `htmlDump`, `htmlRead`, `htmlReadStr`, `htmlWrite`, `iconvert`, `ieee`, `ilib_verbose`, `imag`, `impl`, `import_from_hdf5`, `imult`, `inpnvi`, `int`, `int16`, `int2d`, `int32`, `int3d`, `int8`, `interp`, `interp2d`, `interp3d`, `intg`, `intppty`, `inttype`, `inv`, `invoke_lu`, `is_handle_valid`, `is_hdf5_file`, `isalphanum`, `isascii`, `isdef`, `isdigit`, `isdir`, `isequal`, `isequalbitwise`, `iserror`, `isfile`, `isglobal`, `isletter`, `isnum`, `isreal`, `iswaitingforinput`, `jallowClassReloading`, `jarray`, `jautoTranspose`, `jautoUnwrap`, `javaclasspath`, 
`javalibrarypath`, `jcast`, `jcompile`, `jconvMatrixMethod`, `jcreatejar`, `jdeff`, `jdisableTrace`, `jenableTrace`, `jexists`, `jgetclassname`, `jgetfield`, `jgetfields`, `jgetinfo`, `jgetmethods`, `jimport`, `jinvoke`, `jinvoke_db`, `jnewInstance`, `jremove`, `jsetfield`, `junwrap`, `junwraprem`, `jwrap`, `jwrapinfloat`, `kron`, `lasterror`, `ldiv`, `ldivf`, `legendre`, `length`, `lib`, `librarieslist`, `libraryinfo`, `light`, `linear_interpn`, `lines`, `link`, `linmeq`, `list`, `listvar_in_hdf5`, `load`, `loadGui`, `loadScicos`, `loadXcos`, `loadfftwlibrary`, `loadhistory`, `log`, `log1p`, `lsq`, `lsq_splin`, `lsqrsolve`, `lsslist`, `lstcat`, `lstsize`, `ltitr`, `lu`, `ludel`, `lufact`, `luget`, `lusolve`, `macr2lst`, `macr2tree`, `matfile_close`, `matfile_listvar`, `matfile_open`, `matfile_varreadnext`, `matfile_varwrite`, `matrix`, `max`, `maxfiles`, `mclearerr`, `mclose`, `meof`, `merror`, `messagebox`, `mfprintf`, `mfscanf`, `mget`, `mgeti`, `mgetl`, `mgetstr`, `min`, `mlist`, `mode`, `model2blk`, `mopen`, `move`, `movefile`, `mprintf`, `mput`, `mputl`, `mputstr`, `mscanf`, `mseek`, `msprintf`, `msscanf`, `mtell`, `mtlb_mode`, `mtlb_sparse`, `mucomp`, `mulf`, `name2rgb`, `nearfloat`, `newaxes`, `newest`, `newfun`, `nnz`, `norm`, `notify`, `number_properties`, `ode`, `odedc`, `ones`, `openged`, `opentk`, `optim`, `or`, `ordmmd`, `parallel_concurrency`, `parallel_run`, `param3d`, `param3d1`, `part`, `pathconvert`, `pathsep`, `phase_simulation`, `plot2d`, `plot2d1`, `plot2d2`, `plot2d3`, `plot2d4`, `plot3d`, `plot3d1`, `plotbrowser`, `pointer_xproperty`, `poly`, `ppol`, `pppdiv`, `predef`, `preferences`, `print`, `printf`, `printfigure`, `printsetupbox`, `prod`, `progressionbar`, `prompt`, `pwd`, `qld`, `qp_solve`, `qr`, `raise_window`, `rand`, `rankqr`, `rat`, `rcond`, `rdivf`, `read`, `read4b`, `read_csv`, `readb`, `readgateway`, `readmps`, `real`, `realtime`, `realtimeinit`, `regexp`, `relocate_handle`, `remez`, `removeModulePreferences`, `removedir`, `removelinehistory`, `res_with_prec`, `resethistory`, `residu`, `resume`, `return`, `ricc`, `rlist`, `roots`, `rotate_axes`, `round`, `rpem`, `rtitr`, `rubberbox`, `save`, `saveGui`, `saveafterncommands`, `saveconsecutivecommands`, `savehistory`, `schur`, `sci_haltscicos`, `sci_tree2`, `sci_tree3`, `sci_tree4`, `sciargs`, `scicos_debug`, `scicos_debug_count`, `scicos_time`, `scicosim`, `scinotes`, `sctree`, `semidef`, `set`, `set_blockerror`, `set_fftw_wisdom`, `set_xproperty`, `setbpt`, `setdefaultlanguage`, `setenv`, `setfield`, `sethistoryfile`, `setlanguage`, `setlookandfeel`, `setmenu`, `sfact`, `sfinit`, `show_window`, `sident`, `sig2data`, `sign`, `simp`, `simp_mode`, `sin`, `size`, `slash`, `sleep`, `sorder`, `sparse`, `spchol`, `spcompack`, `spec`, `spget`, `splin`, `splin2d`, `splin3d`, `splitURL`, `spones`, `sprintf`, `sqrt`, `stacksize`, `str2code`, `strcat`, `strchr`, `strcmp`, `strcspn`, `strindex`, `string`, `stringbox`, `stripblanks`, `strncpy`, `strrchr`, `strrev`, `strsplit`, `strspn`, `strstr`, `strsubst`, `strtod`, `strtok`, `subf`, `sum`, `svd`, `swap_handles`, `symfcti`, `syredi`, `system_getproperty`, `system_setproperty`, `ta2lpd`, `tan`, `taucs_chdel`, `taucs_chfact`, `taucs_chget`, `taucs_chinfo`, `taucs_chsolve`, `tempname`, `testmatrix`, `timer`, `tlist`, `tohome`, `tokens`, `toolbar`, `toprint`, `tr_zer`, `tril`, `triu`, `type`, `typename`, `uiDisplayTree`, `uicontextmenu`, `uicontrol`, `uigetcolor`, `uigetdir`, `uigetfile`, `uigetfont`, `uimenu`, `uint16`, `uint32`, `uint8`, `uipopup`, `uiputfile`, 
`uiwait`, `ulink`, `umf_ludel`, `umf_lufact`, `umf_luget`, `umf_luinfo`, `umf_lusolve`, `umfpack`, `unglue`, `unix`, `unsetmenu`, `unzoom`, `updatebrowsevar`, `usecanvas`, `useeditor`, `user`, `var2vec`, `varn`, `vec2var`, `waitbar`, `warnBlockByUID`, `warning`, `what`, `where`, `whereis`, `who`, `winsid`, `with_module`, `writb`, `write`, `write4b`, `write_csv`, `x_choose`, `x_choose_modeless`, `x_dialog`, `x_mdialog`, `xarc`, `xarcs`, `xarrows`, `xchange`, `xchoicesi`, `xclick`, `xcos`, `xcosAddToolsMenu`, `xcosConfigureXmlFile`, `xcosDiagramToScilab`, `xcosPalCategoryAdd`, `xcosPalDelete`, `xcosPalDisable`, `xcosPalEnable`, `xcosPalGenerateIcon`, `xcosPalGet`, `xcosPalLoad`, `xcosPalMove`, `xcosSimulationStarted`, `xcosUpdateBlock`, `xdel`, `xend`, `xfarc`, `xfarcs`, `xfpoly`, `xfpolys`, `xfrect`, `xget`, `xgetmouse`, `xgraduate`, `xgrid`, `xinit`, `xlfont`, `xls_open`, `xls_read`, `xmlAddNs`, `xmlAppend`, `xmlAsNumber`, `xmlAsText`, `xmlDTD`, `xmlDelete`, `xmlDocument`, `xmlDump`, `xmlElement`, `xmlFormat`, `xmlGetNsByHref`, `xmlGetNsByPrefix`, `xmlGetOpenDocs`, `xmlIsValidObject`, `xmlName`, `xmlNs`, `xmlRead`, `xmlReadStr`, `xmlRelaxNG`, `xmlRemove`, `xmlSchema`, `xmlSetAttributes`, `xmlValidate`, `xmlWrite`, `xmlXPath`, `xname`, `xpause`, `xpoly`, `xpolys`, `xrect`, `xrects`, `xs2bmp`, `xs2emf`, `xs2eps`, `xs2gif`, `xs2jpg`, `xs2pdf`, `xs2png`, `xs2ppm`, `xs2ps`, `xs2svg`, `xsegs`, `xset`, `xstring`, `xstringb`, `xtitle`, `zeros`, `znaupd`, `zneupd`, `zoom_rect`, `abort`, `apropos`, `break`, `case`, `catch`, `continue`, `do`, `else`, `elseif`, `end`, `endfunction`, `for`, `function`, `help`, `if`, `pause`, `quit`, `select`, `then`, `try`, `while`, `!_deff_wrapper`, `%0_i_st`, `%3d_i_h`, `%Block_xcosUpdateBlock`, `%TNELDER_p`, `%TNELDER_string`, `%TNMPLOT_p`, `%TNMPLOT_string`, `%TOPTIM_p`, `%TOPTIM_string`, `%TSIMPLEX_p`, `%TSIMPLEX_string`, `%_EVoid_p`, `%_gsort`, `%_listvarinfile`, `%_rlist`, `%_save`, `%_sodload`, `%_strsplit`, `%_unwrap`, `%ar_p`, `%asn`, `%b_a_b`, `%b_a_s`, `%b_c_s`, `%b_c_spb`, `%b_cumprod`, `%b_cumsum`, `%b_d_s`, `%b_diag`, `%b_e`, `%b_f_s`, `%b_f_spb`, `%b_g_s`, `%b_g_spb`, `%b_grand`, `%b_h_s`, `%b_h_spb`, `%b_i_b`, `%b_i_ce`, `%b_i_h`, `%b_i_hm`, `%b_i_s`, `%b_i_sp`, `%b_i_spb`, `%b_i_st`, `%b_iconvert`, `%b_l_b`, `%b_l_s`, `%b_m_b`, `%b_m_s`, `%b_matrix`, `%b_n_hm`, `%b_o_hm`, `%b_p_s`, `%b_prod`, `%b_r_b`, `%b_r_s`, `%b_s_b`, `%b_s_s`, `%b_string`, `%b_sum`, `%b_tril`, `%b_triu`, `%b_x_b`, `%b_x_s`, `%bicg`, `%bicgstab`, `%c_a_c`, `%c_b_c`, `%c_b_s`, `%c_diag`, `%c_dsearch`, `%c_e`, `%c_eye`, `%c_f_s`, `%c_grand`, `%c_i_c`, `%c_i_ce`, `%c_i_h`, `%c_i_hm`, `%c_i_lss`, `%c_i_r`, `%c_i_s`, `%c_i_st`, `%c_matrix`, `%c_n_l`, `%c_n_st`, `%c_o_l`, `%c_o_st`, `%c_ones`, `%c_rand`, `%c_tril`, `%c_triu`, `%cblock_c_cblock`, `%cblock_c_s`, `%cblock_e`, `%cblock_f_cblock`, `%cblock_p`, `%cblock_size`, `%ce_6`, `%ce_c_ce`, `%ce_e`, `%ce_f_ce`, `%ce_i_ce`, `%ce_i_s`, `%ce_i_st`, `%ce_matrix`, `%ce_p`, `%ce_size`, `%ce_string`, `%ce_t`, `%cgs`, `%champdat_i_h`, `%choose`, `%diagram_xcos`, `%dir_p`, `%fptr_i_st`, `%grand_perm`, `%grayplot_i_h`, `%h_i_st`, `%hmS_k_hmS_generic`, `%hm_1_hm`, `%hm_1_s`, `%hm_2_hm`, `%hm_2_s`, `%hm_3_hm`, `%hm_3_s`, `%hm_4_hm`, `%hm_4_s`, `%hm_5`, `%hm_a_hm`, `%hm_a_r`, `%hm_a_s`, `%hm_abs`, `%hm_and`, `%hm_bool2s`, `%hm_c_hm`, `%hm_ceil`, `%hm_conj`, `%hm_cos`, `%hm_cumprod`, `%hm_cumsum`, `%hm_d_hm`, `%hm_d_s`, `%hm_degree`, `%hm_dsearch`, `%hm_e`, `%hm_exp`, `%hm_eye`, `%hm_f_hm`, `%hm_find`, `%hm_floor`, `%hm_g_hm`, `%hm_grand`, 
`%hm_gsort`, `%hm_h_hm`, `%hm_i_b`, `%hm_i_ce`, `%hm_i_h`, `%hm_i_hm`, `%hm_i_i`, `%hm_i_p`, `%hm_i_r`, `%hm_i_s`, `%hm_i_st`, `%hm_iconvert`, `%hm_imag`, `%hm_int`, `%hm_isnan`, `%hm_isreal`, `%hm_j_hm`, `%hm_j_s`, `%hm_k_hm`, `%hm_k_s`, `%hm_log`, `%hm_m_p`, `%hm_m_r`, `%hm_m_s`, `%hm_matrix`, `%hm_max`, `%hm_mean`, `%hm_median`, `%hm_min`, `%hm_n_b`, `%hm_n_c`, `%hm_n_hm`, `%hm_n_i`, `%hm_n_p`, `%hm_n_s`, `%hm_o_b`, `%hm_o_c`, `%hm_o_hm`, `%hm_o_i`, `%hm_o_p`, `%hm_o_s`, `%hm_ones`, `%hm_or`, `%hm_p`, `%hm_prod`, `%hm_q_hm`, `%hm_r_s`, `%hm_rand`, `%hm_real`, `%hm_round`, `%hm_s`, `%hm_s_hm`, `%hm_s_r`, `%hm_s_s`, `%hm_sign`, `%hm_sin`, `%hm_size`, `%hm_sqrt`, `%hm_stdev`, `%hm_string`, `%hm_sum`, `%hm_x_hm`, `%hm_x_p`, `%hm_x_s`, `%hm_zeros`, `%i_1_s`, `%i_2_s`, `%i_3_s`, `%i_4_s`, `%i_Matplot`, `%i_a_i`, `%i_a_s`, `%i_and`, `%i_ascii`, `%i_b_s`, `%i_bezout`, `%i_champ`, `%i_champ1`, `%i_contour`, `%i_contour2d`, `%i_d_i`, `%i_d_s`, `%i_dsearch`, `%i_e`, `%i_fft`, `%i_g_i`, `%i_gcd`, `%i_grand`, `%i_h_i`, `%i_i_ce`, `%i_i_h`, `%i_i_hm`, `%i_i_i`, `%i_i_s`, `%i_i_st`, `%i_j_i`, `%i_j_s`, `%i_l_s`, `%i_lcm`, `%i_length`, `%i_m_i`, `%i_m_s`, `%i_mfprintf`, `%i_mprintf`, `%i_msprintf`, `%i_n_s`, `%i_o_s`, `%i_or`, `%i_p_i`, `%i_p_s`, `%i_plot2d`, `%i_plot2d1`, `%i_plot2d2`, `%i_q_s`, `%i_r_i`, `%i_r_s`, `%i_round`, `%i_s_i`, `%i_s_s`, `%i_sign`, `%i_string`, `%i_x_i`, `%i_x_s`, `%ip_a_s`, `%ip_i_st`, `%ip_m_s`, `%ip_n_ip`, `%ip_o_ip`, `%ip_p`, `%ip_part`, `%ip_s_s`, `%ip_string`, `%k`, `%l_i_h`, `%l_i_s`, `%l_i_st`, `%l_isequal`, `%l_n_c`, `%l_n_l`, `%l_n_m`, `%l_n_p`, `%l_n_s`, `%l_n_st`, `%l_o_c`, `%l_o_l`, `%l_o_m`, `%l_o_p`, `%l_o_s`, `%l_o_st`, `%lss_a_lss`, `%lss_a_p`, `%lss_a_r`, `%lss_a_s`, `%lss_c_lss`, `%lss_c_p`, `%lss_c_r`, `%lss_c_s`, `%lss_e`, `%lss_eye`, `%lss_f_lss`, `%lss_f_p`, `%lss_f_r`, `%lss_f_s`, `%lss_i_ce`, `%lss_i_lss`, `%lss_i_p`, `%lss_i_r`, `%lss_i_s`, `%lss_i_st`, `%lss_inv`, `%lss_l_lss`, `%lss_l_p`, `%lss_l_r`, `%lss_l_s`, `%lss_m_lss`, `%lss_m_p`, `%lss_m_r`, `%lss_m_s`, `%lss_n_lss`, `%lss_n_p`, `%lss_n_r`, `%lss_n_s`, `%lss_norm`, `%lss_o_lss`, `%lss_o_p`, `%lss_o_r`, `%lss_o_s`, `%lss_ones`, `%lss_r_lss`, `%lss_r_p`, `%lss_r_r`, `%lss_r_s`, `%lss_rand`, `%lss_s`, `%lss_s_lss`, `%lss_s_p`, `%lss_s_r`, `%lss_s_s`, `%lss_size`, `%lss_t`, `%lss_v_lss`, `%lss_v_p`, `%lss_v_r`, `%lss_v_s`, `%lt_i_s`, `%m_n_l`, `%m_o_l`, `%mc_i_h`, `%mc_i_s`, `%mc_i_st`, `%mc_n_st`, `%mc_o_st`, `%mc_string`, `%mps_p`, `%mps_string`, `%msp_a_s`, `%msp_abs`, `%msp_e`, `%msp_find`, `%msp_i_s`, `%msp_i_st`, `%msp_length`, `%msp_m_s`, `%msp_maxi`, `%msp_n_msp`, `%msp_nnz`, `%msp_o_msp`, `%msp_p`, `%msp_sparse`, `%msp_spones`, `%msp_t`, `%p_a_lss`, `%p_a_r`, `%p_c_lss`, `%p_c_r`, `%p_cumprod`, `%p_cumsum`, `%p_d_p`, `%p_d_r`, `%p_d_s`, `%p_det`, `%p_e`, `%p_f_lss`, `%p_f_r`, `%p_grand`, `%p_i_ce`, `%p_i_h`, `%p_i_hm`, `%p_i_lss`, `%p_i_p`, `%p_i_r`, `%p_i_s`, `%p_i_st`, `%p_inv`, `%p_j_s`, `%p_k_p`, `%p_k_r`, `%p_k_s`, `%p_l_lss`, `%p_l_p`, `%p_l_r`, `%p_l_s`, `%p_m_hm`, `%p_m_lss`, `%p_m_r`, `%p_matrix`, `%p_n_l`, `%p_n_lss`, `%p_n_r`, `%p_o_l`, `%p_o_lss`, `%p_o_r`, `%p_o_sp`, `%p_p_s`, `%p_part`, `%p_prod`, `%p_q_p`, `%p_q_r`, `%p_q_s`, `%p_r_lss`, `%p_r_p`, `%p_r_r`, `%p_r_s`, `%p_s_lss`, `%p_s_r`, `%p_simp`, `%p_string`, `%p_sum`, `%p_v_lss`, `%p_v_p`, `%p_v_r`, `%p_v_s`, `%p_x_hm`, `%p_x_r`, `%p_y_p`, `%p_y_r`, `%p_y_s`, `%p_z_p`, `%p_z_r`, `%p_z_s`, `%pcg`, `%plist_p`, `%plist_string`, `%r_0`, `%r_a_hm`, `%r_a_lss`, `%r_a_p`, `%r_a_r`, `%r_a_s`, `%r_c_lss`, `%r_c_p`, `%r_c_r`, 
`%r_c_s`, `%r_clean`, `%r_cumprod`, `%r_cumsum`, `%r_d_p`, `%r_d_r`, `%r_d_s`, `%r_det`, `%r_diag`, `%r_e`, `%r_eye`, `%r_f_lss`, `%r_f_p`, `%r_f_r`, `%r_f_s`, `%r_i_ce`, `%r_i_hm`, `%r_i_lss`, `%r_i_p`, `%r_i_r`, `%r_i_s`, `%r_i_st`, `%r_inv`, `%r_j_s`, `%r_k_p`, `%r_k_r`, `%r_k_s`, `%r_l_lss`, `%r_l_p`, `%r_l_r`, `%r_l_s`, `%r_m_hm`, `%r_m_lss`, `%r_m_p`, `%r_m_r`, `%r_m_s`, `%r_matrix`, `%r_n_lss`, `%r_n_p`, `%r_n_r`, `%r_n_s`, `%r_norm`, `%r_o_lss`, `%r_o_p`, `%r_o_r`, `%r_o_s`, `%r_ones`, `%r_p`, `%r_p_s`, `%r_prod`, `%r_q_p`, `%r_q_r`, `%r_q_s`, `%r_r_lss`, `%r_r_p`, `%r_r_r`, `%r_r_s`, `%r_rand`, `%r_s`, `%r_s_hm`, `%r_s_lss`, `%r_s_p`, `%r_s_r`, `%r_s_s`, `%r_simp`, `%r_size`, `%r_string`, `%r_sum`, `%r_t`, `%r_tril`, `%r_triu`, `%r_v_lss`, `%r_v_p`, `%r_v_r`, `%r_v_s`, `%r_varn`, `%r_x_p`, `%r_x_r`, `%r_x_s`, `%r_y_p`, `%r_y_r`, `%r_y_s`, `%r_z_p`, `%r_z_r`, `%r_z_s`, `%s_1_hm`, `%s_1_i`, `%s_2_hm`, `%s_2_i`, `%s_3_hm`, `%s_3_i`, `%s_4_hm`, `%s_4_i`, `%s_5`, `%s_a_b`, `%s_a_hm`, `%s_a_i`, `%s_a_ip`, `%s_a_lss`, `%s_a_msp`, `%s_a_r`, `%s_a_sp`, `%s_and`, `%s_b_i`, `%s_b_s`, `%s_bezout`, `%s_c_b`, `%s_c_cblock`, `%s_c_lss`, `%s_c_r`, `%s_c_sp`, `%s_d_b`, `%s_d_i`, `%s_d_p`, `%s_d_r`, `%s_d_sp`, `%s_e`, `%s_f_b`, `%s_f_cblock`, `%s_f_lss`, `%s_f_r`, `%s_f_sp`, `%s_g_b`, `%s_g_s`, `%s_gcd`, `%s_grand`, `%s_h_b`, `%s_h_s`, `%s_i_b`, `%s_i_c`, `%s_i_ce`, `%s_i_h`, `%s_i_hm`, `%s_i_i`, `%s_i_lss`, `%s_i_p`, `%s_i_r`, `%s_i_s`, `%s_i_sp`, `%s_i_spb`, `%s_i_st`, `%s_j_i`, `%s_k_hm`, `%s_k_p`, `%s_k_r`, `%s_k_sp`, `%s_l_b`, `%s_l_hm`, `%s_l_i`, `%s_l_lss`, `%s_l_p`, `%s_l_r`, `%s_l_s`, `%s_l_sp`, `%s_lcm`, `%s_m_b`, `%s_m_hm`, `%s_m_i`, `%s_m_ip`, `%s_m_lss`, `%s_m_msp`, `%s_m_r`, `%s_matrix`, `%s_n_hm`, `%s_n_i`, `%s_n_l`, `%s_n_lss`, `%s_n_r`, `%s_n_st`, `%s_o_hm`, `%s_o_i`, `%s_o_l`, `%s_o_lss`, `%s_o_r`, `%s_o_st`, `%s_or`, `%s_p_b`, `%s_p_i`, `%s_pow`, `%s_q_hm`, `%s_q_i`, `%s_q_p`, `%s_q_r`, `%s_q_sp`, `%s_r_b`, `%s_r_i`, `%s_r_lss`, `%s_r_p`, `%s_r_r`, `%s_r_s`, `%s_r_sp`, `%s_s_b`, `%s_s_hm`, `%s_s_i`, `%s_s_ip`, `%s_s_lss`, `%s_s_r`, `%s_s_sp`, `%s_simp`, `%s_v_lss`, `%s_v_p`, `%s_v_r`, `%s_v_s`, `%s_x_b`, `%s_x_hm`, `%s_x_i`, `%s_x_r`, `%s_y_p`, `%s_y_r`, `%s_y_sp`, `%s_z_p`, `%s_z_r`, `%s_z_sp`, `%sn`, `%sp_a_s`, `%sp_a_sp`, `%sp_and`, `%sp_c_s`, `%sp_ceil`, `%sp_conj`, `%sp_cos`, `%sp_cumprod`, `%sp_cumsum`, `%sp_d_s`, `%sp_d_sp`, `%sp_det`, `%sp_diag`, `%sp_e`, `%sp_exp`, `%sp_f_s`, `%sp_floor`, `%sp_grand`, `%sp_gsort`, `%sp_i_ce`, `%sp_i_h`, `%sp_i_s`, `%sp_i_sp`, `%sp_i_st`, `%sp_int`, `%sp_inv`, `%sp_k_s`, `%sp_k_sp`, `%sp_l_s`, `%sp_l_sp`, `%sp_length`, `%sp_max`, `%sp_min`, `%sp_norm`, `%sp_or`, `%sp_p_s`, `%sp_prod`, `%sp_q_s`, `%sp_q_sp`, `%sp_r_s`, `%sp_r_sp`, `%sp_round`, `%sp_s_s`, `%sp_s_sp`, `%sp_sin`, `%sp_sqrt`, `%sp_string`, `%sp_sum`, `%sp_tril`, `%sp_triu`, `%sp_y_s`, `%sp_y_sp`, `%sp_z_s`, `%sp_z_sp`, `%spb_and`, `%spb_c_b`, `%spb_cumprod`, `%spb_cumsum`, `%spb_diag`, `%spb_e`, `%spb_f_b`, `%spb_g_b`, `%spb_g_spb`, `%spb_h_b`, `%spb_h_spb`, `%spb_i_b`, `%spb_i_ce`, `%spb_i_h`, `%spb_i_st`, `%spb_or`, `%spb_prod`, `%spb_sum`, `%spb_tril`, `%spb_triu`, `%st_6`, `%st_c_st`, `%st_e`, `%st_f_st`, `%st_i_b`, `%st_i_c`, `%st_i_fptr`, `%st_i_h`, `%st_i_i`, `%st_i_ip`, `%st_i_lss`, `%st_i_msp`, `%st_i_p`, `%st_i_r`, `%st_i_s`, `%st_i_sp`, `%st_i_spb`, `%st_i_st`, `%st_matrix`, `%st_n_c`, `%st_n_l`, `%st_n_mc`, `%st_n_p`, `%st_n_s`, `%st_o_c`, `%st_o_l`, `%st_o_mc`, `%st_o_p`, `%st_o_s`, `%st_o_tl`, `%st_p`, `%st_size`, `%st_string`, `%st_t`, `%ticks_i_h`, `%xls_e`, 
`%xls_p`, `%xlssheet_e`, `%xlssheet_p`, `%xlssheet_size`, `%xlssheet_string`, `DominationRank`, `G_make`, `IsAScalar`, `NDcost`, `OS_Version`, `PlotSparse`, `ReadHBSparse`, `TCL_CreateSlave`, `abcd`, `abinv`, `accept_func_default`, `accept_func_vfsa`, `acf`, `acosd`, `acosh`, `acoshm`, `acosm`, `acot`, `acotd`, `acoth`, `acsc`, `acscd`, `acsch`, `add_demo`, `add_help_chapter`, `add_module_help_chapter`, `add_param`, `add_profiling`, `adj2sp`, `aff2ab`, `ana_style`, `analpf`, `analyze`, `aplat`, `arhnk`, `arl2`, `arma2p`, `arma2ss`, `armac`, `armax`, `armax1`, `arobasestring2strings`, `arsimul`, `ascii2string`, `asciimat`, `asec`, `asecd`, `asech`, `asind`, `asinh`, `asinhm`, `asinm`, `assert_checkalmostequal`, `assert_checkequal`, `assert_checkerror`, `assert_checkfalse`, `assert_checkfilesequal`, `assert_checktrue`, `assert_comparecomplex`, `assert_computedigits`, `assert_cond2reltol`, `assert_cond2reqdigits`, `assert_generror`, `atand`, `atanh`, `atanhm`, `atanm`, `atomsAutoload`, `atomsAutoloadAdd`, `atomsAutoloadDel`, `atomsAutoloadList`, `atomsCategoryList`, `atomsCheckModule`, `atomsDepTreeShow`, `atomsGetConfig`, `atomsGetInstalled`, `atomsGetInstalledPath`, `atomsGetLoaded`, `atomsGetLoadedPath`, `atomsInstall`, `atomsIsInstalled`, `atomsIsLoaded`, `atomsList`, `atomsLoad`, `atomsQuit`, `atomsRemove`, `atomsRepositoryAdd`, `atomsRepositoryDel`, `atomsRepositoryList`, `atomsRestoreConfig`, `atomsSaveConfig`, `atomsSearch`, `atomsSetConfig`, `atomsShow`, `atomsSystemInit`, `atomsSystemUpdate`, `atomsTest`, `atomsUpdate`, `atomsVersion`, `augment`, `auread`, `auwrite`, `balreal`, `bench_run`, `bilin`, `bilt`, `bin2dec`, `binomial`, `bitand`, `bitcmp`, `bitget`, `bitor`, `bitset`, `bitxor`, `black`, `blanks`, `bloc2exp`, `bloc2ss`, `block_parameter_error`, `bode`, `bode_asymp`, `bstap`, `buttmag`, `bvodeS`, `bytecode`, `bytecodewalk`, `cainv`, `calendar`, `calerf`, `calfrq`, `canon`, `casc`, `cat`, `cat_code`, `cb_m2sci_gui`, `ccontrg`, `cell`, `cell2mat`, `cellstr`, `center`, `cepstrum`, `cfspec`, `char`, `chart`, `cheb1mag`, `cheb2mag`, `check_gateways`, `check_modules_xml`, `check_versions`, `chepol`, `chfact`, `chsolve`, `classmarkov`, `clean_help`, `clock`, `cls2dls`, `cmb_lin`, `cmndred`, `cmoment`, `coding_ga_binary`, `coding_ga_identity`, `coff`, `coffg`, `colcomp`, `colcompr`, `colinout`, `colregul`, `companion`, `complex`, `compute_initial_temp`, `cond`, `cond2sp`, `condestsp`, `configure_msifort`, `configure_msvc`, `conjgrad`, `cont_frm`, `cont_mat`, `contrss`, `conv`, `convert_to_float`, `convertindex`, `convol`, `convol2d`, `copfac`, `correl`, `cosd`, `cosh`, `coshm`, `cosm`, `cotd`, `cotg`, `coth`, `cothm`, `cov`, `covar`, `createXConfiguration`, `createfun`, `createstruct`, `cross`, `crossover_ga_binary`, `crossover_ga_default`, `csc`, `cscd`, `csch`, `csgn`, `csim`, `cspect`, `ctr_gram`, `czt`, `dae`, `daeoptions`, `damp`, `datafit`, `date`, `datenum`, `datevec`, `dbphi`, `dcf`, `ddp`, `dec2bin`, `dec2hex`, `dec2oct`, `del_help_chapter`, `del_module_help_chapter`, `demo_begin`, `demo_choose`, `demo_compiler`, `demo_end`, `demo_file_choice`, `demo_folder_choice`, `demo_function_choice`, `demo_gui`, `demo_run`, `demo_viewCode`, `denom`, `derivat`, `derivative`, `des2ss`, `des2tf`, `detectmsifort64tools`, `detectmsvc64tools`, `determ`, `detr`, `detrend`, `devtools_run_builder`, `dhnorm`, `diff`, `diophant`, `dir`, `dirname`, `dispfiles`, `dllinfo`, `dscr`, `dsimul`, `dt_ility`, `dtsi`, `edit`, `edit_error`, `editor`, `eigenmarkov`, `eigs`, `ell1mag`, `enlarge_shape`, 
`entropy`, `eomday`, `epred`, `eqfir`, `eqiir`, `equil`, `equil1`, `erfinv`, `etime`, `eval`, `evans`, `evstr`, `example_run`, `expression2code`, `extract_help_examples`, `factor`, `factorial`, `factors`, `faurre`, `ffilt`, `fft2`, `fftshift`, `fieldnames`, `filt_sinc`, `filter`, `findABCD`, `findAC`, `findBDK`, `findR`, `find_freq`, `find_links`, `find_scicos_version`, `findm`, `findmsifortcompiler`, `findmsvccompiler`, `findx0BD`, `firstnonsingleton`, `fix`, `fixedpointgcd`, `flipdim`, `flts`, `fminsearch`, `formatBlackTip`, `formatBodeMagTip`, `formatBodePhaseTip`, `formatGainplotTip`, `formatHallModuleTip`, `formatHallPhaseTip`, `formatNicholsGainTip`, `formatNicholsPhaseTip`, `formatNyquistTip`, `formatPhaseplotTip`, `formatSgridDampingTip`, `formatSgridFreqTip`, `formatZgridDampingTip`, `formatZgridFreqTip`, `format_txt`, `fourplan`, `frep2tf`, `freson`, `frfit`, `frmag`, `fseek_origin`, `fsfirlin`, `fspec`, `fspecg`, `fstabst`, `ftest`, `ftuneq`, `fullfile`, `fullrf`, `fullrfk`, `fun2string`, `g_margin`, `gainplot`, `gamitg`, `gcare`, `gcd`, `gencompilationflags_unix`, `generateBlockImage`, `generateBlockImages`, `generic_i_ce`, `generic_i_h`, `generic_i_hm`, `generic_i_s`, `generic_i_st`, `genlib`, `genmarkov`, `geomean`, `getDiagramVersion`, `getModelicaPath`, `getPreferencesValue`, `get_file_path`, `get_function_path`, `get_param`, `get_profile`, `get_scicos_version`, `getd`, `getscilabkeywords`, `getshell`, `gettklib`, `gfare`, `gfrancis`, `givens`, `glever`, `gmres`, `group`, `gschur`, `gspec`, `gtild`, `h2norm`, `h_cl`, `h_inf`, `h_inf_st`, `h_norm`, `hallchart`, `halt`, `hank`, `hankelsv`, `harmean`, `haveacompiler`, `head_comments`, `help_from_sci`, `help_skeleton`, `hermit`, `hex2dec`, `hilb`, `hilbert`, `histc`, `horner`, `householder`, `hrmt`, `htrianr`, `hypermat`, `idct`, `idst`, `ifft`, `ifftshift`, `iir`, `iirgroup`, `iirlp`, `iirmod`, `ilib_build`, `ilib_build_jar`, `ilib_compile`, `ilib_for_link`, `ilib_gen_Make`, `ilib_gen_Make_unix`, `ilib_gen_cleaner`, `ilib_gen_gateway`, `ilib_gen_loader`, `ilib_include_flag`, `ilib_mex_build`, `im_inv`, `importScicosDiagram`, `importScicosPal`, `importXcosDiagram`, `imrep2ss`, `ind2sub`, `inistate`, `init_ga_default`, `init_param`, `initial_scicos_tables`, `input`, `instruction2code`, `intc`, `intdec`, `integrate`, `interp1`, `interpln`, `intersect`, `intl`, `intsplin`, `inttrap`, `inv_coeff`, `invr`, `invrs`, `invsyslin`, `iqr`, `isLeapYear`, `is_absolute_path`, `is_param`, `iscell`, `iscellstr`, `iscolumn`, `isempty`, `isfield`, `isinf`, `ismatrix`, `isnan`, `isrow`, `isscalar`, `issparse`, `issquare`, `isstruct`, `isvector`, `jmat`, `justify`, `kalm`, `karmarkar`, `kernel`, `kpure`, `krac2`, `kroneck`, `lattn`, `lattp`, `launchtest`, `lcf`, `lcm`, `lcmdiag`, `leastsq`, `leqe`, `leqr`, `lev`, `levin`, `lex_sort`, `lft`, `lin`, `lin2mu`, `lincos`, `lindquist`, `linf`, `linfn`, `linsolve`, `linspace`, `list2vec`, `list_param`, `listfiles`, `listfunctions`, `listvarinfile`, `lmisolver`, `lmitool`, `loadXcosLibs`, `loadmatfile`, `loadwave`, `log10`, `log2`, `logm`, `logspace`, `lqe`, `lqg`, `lqg2stan`, `lqg_ltr`, `lqr`, `ls`, `lyap`, `m2sci_gui`, `m_circle`, `macglov`, `macrovar`, `mad`, `makecell`, `manedit`, `mapsound`, `markp2ss`, `matfile2sci`, `mdelete`, `mean`, `meanf`, `median`, `members`, `mese`, `meshgrid`, `mfft`, `mfile2sci`, `minreal`, `minss`, `mkdir`, `modulo`, `moment`, `mrfit`, `msd`, `mstr2sci`, `mtlb`, `mtlb_0`, `mtlb_a`, `mtlb_all`, `mtlb_any`, `mtlb_axes`, `mtlb_axis`, `mtlb_beta`, `mtlb_box`, `mtlb_choices`, 
`mtlb_close`, `mtlb_colordef`, `mtlb_cond`, `mtlb_cov`, `mtlb_cumprod`, `mtlb_cumsum`, `mtlb_dec2hex`, `mtlb_delete`, `mtlb_diag`, `mtlb_diff`, `mtlb_dir`, `mtlb_double`, `mtlb_e`, `mtlb_echo`, `mtlb_error`, `mtlb_eval`, `mtlb_exist`, `mtlb_eye`, `mtlb_false`, `mtlb_fft`, `mtlb_fftshift`, `mtlb_filter`, `mtlb_find`, `mtlb_findstr`, `mtlb_fliplr`, `mtlb_fopen`, `mtlb_format`, `mtlb_fprintf`, `mtlb_fread`, `mtlb_fscanf`, `mtlb_full`, `mtlb_fwrite`, `mtlb_get`, `mtlb_grid`, `mtlb_hold`, `mtlb_i`, `mtlb_ifft`, `mtlb_image`, `mtlb_imp`, `mtlb_int16`, `mtlb_int32`, `mtlb_int8`, `mtlb_is`, `mtlb_isa`, `mtlb_isfield`, `mtlb_isletter`, `mtlb_isspace`, `mtlb_l`, `mtlb_legendre`, `mtlb_linspace`, `mtlb_logic`, `mtlb_logical`, `mtlb_loglog`, `mtlb_lower`, `mtlb_max`, `mtlb_mean`, `mtlb_median`, `mtlb_mesh`, `mtlb_meshdom`, `mtlb_min`, `mtlb_more`, `mtlb_num2str`, `mtlb_ones`, `mtlb_pcolor`, `mtlb_plot`, `mtlb_prod`, `mtlb_qr`, `mtlb_qz`, `mtlb_rand`, `mtlb_randn`, `mtlb_rcond`, `mtlb_realmax`, `mtlb_realmin`, `mtlb_s`, `mtlb_semilogx`, `mtlb_semilogy`, `mtlb_setstr`, `mtlb_size`, `mtlb_sort`, `mtlb_sortrows`, `mtlb_sprintf`, `mtlb_sscanf`, `mtlb_std`, `mtlb_strcmp`, `mtlb_strcmpi`, `mtlb_strfind`, `mtlb_strrep`, `mtlb_subplot`, `mtlb_sum`, `mtlb_t`, `mtlb_toeplitz`, `mtlb_tril`, `mtlb_triu`, `mtlb_true`, `mtlb_type`, `mtlb_uint16`, `mtlb_uint32`, `mtlb_uint8`, `mtlb_upper`, `mtlb_var`, `mtlb_zeros`, `mu2lin`, `mutation_ga_binary`, `mutation_ga_default`, `mvcorrel`, `mvvacov`, `nancumsum`, `nand2mean`, `nanmax`, `nanmean`, `nanmeanf`, `nanmedian`, `nanmin`, `nanreglin`, `nanstdev`, `nansum`, `narsimul`, `ndgrid`, `ndims`, `nehari`, `neigh_func_csa`, `neigh_func_default`, `neigh_func_fsa`, `neigh_func_vfsa`, `neldermead_cget`, `neldermead_configure`, `neldermead_costf`, `neldermead_defaultoutput`, `neldermead_destroy`, `neldermead_function`, `neldermead_get`, `neldermead_log`, `neldermead_new`, `neldermead_restart`, `neldermead_search`, `neldermead_updatesimp`, `nextpow2`, `nfreq`, `nicholschart`, `nlev`, `nmplot_cget`, `nmplot_configure`, `nmplot_contour`, `nmplot_destroy`, `nmplot_function`, `nmplot_get`, `nmplot_historyplot`, `nmplot_log`, `nmplot_new`, `nmplot_outputcmd`, `nmplot_restart`, `nmplot_search`, `nmplot_simplexhistory`, `noisegen`, `nonreg_test_run`, `now`, `nthroot`, `null`, `num2cell`, `numderivative`, `numdiff`, `numer`, `nyquist`, `nyquistfrequencybounds`, `obs_gram`, `obscont`, `observer`, `obsv_mat`, `obsvss`, `oct2dec`, `odeoptions`, `optim_ga`, `optim_moga`, `optim_nsga`, `optim_nsga2`, `optim_sa`, `optimbase_cget`, `optimbase_checkbounds`, `optimbase_checkcostfun`, `optimbase_checkx0`, `optimbase_configure`, `optimbase_destroy`, `optimbase_function`, `optimbase_get`, `optimbase_hasbounds`, `optimbase_hasconstraints`, `optimbase_hasnlcons`, `optimbase_histget`, `optimbase_histset`, `optimbase_incriter`, `optimbase_isfeasible`, `optimbase_isinbounds`, `optimbase_isinnonlincons`, `optimbase_log`, `optimbase_logshutdown`, `optimbase_logstartup`, `optimbase_new`, `optimbase_outputcmd`, `optimbase_outstruct`, `optimbase_proj2bnds`, `optimbase_set`, `optimbase_stoplog`, `optimbase_terminate`, `optimget`, `optimplotfunccount`, `optimplotfval`, `optimplotx`, `optimset`, `optimsimplex_center`, `optimsimplex_check`, `optimsimplex_compsomefv`, `optimsimplex_computefv`, `optimsimplex_deltafv`, `optimsimplex_deltafvmax`, `optimsimplex_destroy`, `optimsimplex_dirmat`, `optimsimplex_fvmean`, `optimsimplex_fvstdev`, `optimsimplex_fvvariance`, `optimsimplex_getall`, `optimsimplex_getallfv`, 
`optimsimplex_getallx`, `optimsimplex_getfv`, `optimsimplex_getn`, `optimsimplex_getnbve`, `optimsimplex_getve`, `optimsimplex_getx`, `optimsimplex_gradientfv`, `optimsimplex_log`, `optimsimplex_new`, `optimsimplex_reflect`, `optimsimplex_setall`, `optimsimplex_setallfv`, `optimsimplex_setallx`, `optimsimplex_setfv`, `optimsimplex_setn`, `optimsimplex_setnbve`, `optimsimplex_setve`, `optimsimplex_setx`, `optimsimplex_shrink`, `optimsimplex_size`, `optimsimplex_sort`, `optimsimplex_xbar`, `orth`, `output_ga_default`, `output_moga_default`, `output_nsga2_default`, `output_nsga_default`, `p_margin`, `pack`, `pareto_filter`, `parrot`, `pbig`, `pca`, `pcg`, `pdiv`, `pen2ea`, `pencan`, `pencost`, `penlaur`, `perctl`, `perl`, `perms`, `permute`, `pertrans`, `pfactors`, `pfss`, `phasemag`, `phaseplot`, `phc`, `pinv`, `playsnd`, `plotprofile`, `plzr`, `pmodulo`, `pol2des`, `pol2str`, `polar`, `polfact`, `prbs_a`, `prettyprint`, `primes`, `princomp`, `profile`, `proj`, `projsl`, `projspec`, `psmall`, `pspect`, `qmr`, `qpsolve`, `quart`, `quaskro`, `rafiter`, `randpencil`, `range`, `rank`, `readxls`, `recompilefunction`, `recons`, `reglin`, `regress`, `remezb`, `remove_param`, `remove_profiling`, `repfreq`, `replace_Ix_by_Fx`, `repmat`, `reset_profiling`, `resize_matrix`, `returntoscilab`, `rhs2code`, `ric_desc`, `riccati`, `rmdir`, `routh_t`, `rowcomp`, `rowcompr`, `rowinout`, `rowregul`, `rowshuff`, `rref`, `sample`, `samplef`, `samwr`, `savematfile`, `savewave`, `scanf`, `sci2exp`, `sciGUI_init`, `sci_sparse`, `scicos_getvalue`, `scicos_simulate`, `scicos_workspace_init`, `scisptdemo`, `scitest`, `sdiff`, `sec`, `secd`, `sech`, `selection_ga_elitist`, `selection_ga_random`, `sensi`, `setPreferencesValue`, `set_param`, `setdiff`, `sgrid`, `show_margins`, `show_pca`, `showprofile`, `signm`, `sinc`, `sincd`, `sind`, `sinh`, `sinhm`, `sinm`, `sm2des`, `sm2ss`, `smga`, `smooth`, `solve`, `sound`, `soundsec`, `sp2adj`, `spaninter`, `spanplus`, `spantwo`, `specfact`, `speye`, `sprand`, `spzeros`, `sqroot`, `sqrtm`, `squarewave`, `squeeze`, `srfaur`, `srkf`, `ss2des`, `ss2ss`, `ss2tf`, `sskf`, `ssprint`, `ssrand`, `st_deviation`, `st_i_generic`, `st_ility`, `stabil`, `statgain`, `stdev`, `stdevf`, `steadycos`, `strange`, `strcmpi`, `struct`, `sub2ind`, `sva`, `svplot`, `sylm`, `sylv`, `sysconv`, `sysdiag`, `sysfact`, `syslin`, `syssize`, `system`, `systmat`, `tabul`, `tand`, `tanh`, `tanhm`, `tanm`, `tbx_build_blocks`, `tbx_build_cleaner`, `tbx_build_gateway`, `tbx_build_gateway_clean`, `tbx_build_gateway_loader`, `tbx_build_help`, `tbx_build_help_loader`, `tbx_build_loader`, `tbx_build_localization`, `tbx_build_macros`, `tbx_build_pal_loader`, `tbx_build_src`, `tbx_builder`, `tbx_builder_gateway`, `tbx_builder_gateway_lang`, `tbx_builder_help`, `tbx_builder_help_lang`, `tbx_builder_macros`, `tbx_builder_src`, `tbx_builder_src_lang`, `tbx_generate_pofile`, `temp_law_csa`, `temp_law_default`, `temp_law_fsa`, `temp_law_huang`, `temp_law_vfsa`, `test_clean`, `test_on_columns`, `test_run`, `test_run_level`, `testexamples`, `tf2des`, `tf2ss`, `thrownan`, `tic`, `time_id`, `toc`, `toeplitz`, `tokenpos`, `toolboxes`, `trace`, `trans`, `translatepaths`, `tree2code`, `trfmod`, `trianfml`, `trimmean`, `trisolve`, `trzeros`, `typeof`, `ui_observer`, `union`, `unique`, `unit_test_run`, `unix_g`, `unix_s`, `unix_w`, `unix_x`, `unobs`, `unpack`, `unwrap`, `variance`, `variancef`, `vec2list`, `vectorfind`, `ver`, `warnobsolete`, `wavread`, `wavwrite`, `wcenter`, `weekday`, `wfir`, `wfir_gui`, `whereami`, `who_user`, 
`whos`, `wiener`, `wigner`, `window`, `winlist`, `with_javasci`, `with_macros_source`, `with_modelica_compiler`, `with_tk`, `xcorr`, `xcosBlockEval`, `xcosBlockInterface`, `xcosCodeGeneration`, `xcosConfigureModelica`, `xcosPal`, `xcosPalAdd`, `xcosPalAddBlock`, `xcosPalExport`, `xcosPalGenerateAllIcons`, `xcosShowBlockWarning`, `xcosValidateBlockSet`, `xcosValidateCompareBlock`, `xcos_compile`, `xcos_debug_gui`, `xcos_run`, `xcos_simulate`, `xcov`, `xmltochm`, `xmltoformat`, `xmltohtml`, `xmltojar`, `xmltopdf`, `xmltops`, `xmltoweb`, `yulewalk`, `zeropen`, `zgrid`, `zpbutt`, `zpch1`, `zpch2`, `zpell`), NameBuiltin, nil}, + {Words(``, `\b`, `$`, `%F`, `%T`, `%e`, `%eps`, `%f`, `%fftw`, `%gui`, `%i`, `%inf`, `%io`, `%modalWarning`, `%nan`, `%pi`, `%s`, `%t`, `%tk`, `%toolboxes`, `%toolboxes_dir`, `%z`, `PWD`, `SCI`, `SCIHOME`, `TMPDIR`, `arnoldilib`, `assertlib`, `atomslib`, `cacsdlib`, `compatibility_functilib`, `corelib`, `data_structureslib`, `demo_toolslib`, `development_toolslib`, `differential_equationlib`, `dynamic_linklib`, `elementary_functionslib`, `enull`, `evoid`, `external_objectslib`, `fd`, `fileiolib`, `functionslib`, `genetic_algorithmslib`, `helptoolslib`, `home`, `integerlib`, `interpolationlib`, `iolib`, `jnull`, `jvoid`, `linear_algebralib`, `m2scilib`, `matiolib`, `modules_managerlib`, `neldermeadlib`, `optimbaselib`, `optimizationlib`, `optimsimplexlib`, `output_streamlib`, `overloadinglib`, `parameterslib`, `polynomialslib`, `preferenceslib`, `randliblib`, `scicos_autolib`, `scicos_utilslib`, `scinoteslib`, `signal_processinglib`, `simulated_annealinglib`, `soundlib`, `sparselib`, `special_functionslib`, `spreadsheetlib`, `statisticslib`, `stringlib`, `tclscilib`, `timelib`, `umfpacklib`, `xcoslib`), NameConstant, nil}, + {`-|==|~=|<|>|<=|>=|&&|&|~|\|\|?`, Operator, nil}, + {`\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\`, Operator, nil}, + {`[\[\](){}@.,=:;]`, Punctuation, nil}, + {`"[^"]*"`, LiteralString, nil}, + {`(?<=[\w)\].])\'+`, Operator, nil}, + {`(?|+=@:,./?-]+`, Operator, nil}, + {`[\[\]()]+`, Punctuation, nil}, + {`"`, LiteralStringDouble, Push("string-double")}, + {`'`, LiteralStringSingle, Push("string-single")}, + {`[a-z_-][\w-]*`, Name, nil}, + {`\n`, Text, nil}, + {`[;{}]`, Punctuation, Pop(1)}, + }, + "interpolation": { + {`\}`, LiteralStringInterpol, Pop(1)}, + Include("value"), + }, + "selector": { + {`[ \t]+`, Text, nil}, + {`\:`, NameDecorator, Push("pseudo-class")}, + {`\.`, NameClass, Push("class")}, + {`\#`, NameNamespace, Push("id")}, + {`[\w-]+`, NameTag, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`&`, Keyword, nil}, + {`[~^*!&\[\]()<>|+=@:;,./?-]`, Operator, nil}, + {`"`, LiteralStringDouble, Push("string-double")}, + {`'`, LiteralStringSingle, Push("string-single")}, + {`\n`, Text, nil}, + {`[;{}]`, Punctuation, Pop(1)}, + }, + "string-double": { + {`(\\.|#(?=[^\n{])|[^\n"#])+`, LiteralStringDouble, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "string-single": { + {`(\\.|#(?=[^\n{])|[^\n'#])+`, LiteralStringDouble, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`'`, LiteralStringDouble, Pop(1)}, + }, + "string-url": { + {`(\\#|#(?=[^\n{])|[^\n#)])+`, LiteralStringOther, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + {`\)`, LiteralStringOther, Pop(1)}, + }, + "pseudo-class": { + {`[\w-]+`, NameDecorator, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + Default(Pop(1)), + }, + "class": { + {`[\w-]+`, NameClass, nil}, + 
{`#\{`, LiteralStringInterpol, Push("interpolation")}, + Default(Pop(1)), + }, + "id": { + {`[\w-]+`, NameNamespace, nil}, + {`#\{`, LiteralStringInterpol, Push("interpolation")}, + Default(Pop(1)), + }, + "for": { + {`(from|to|through)`, OperatorWord, nil}, + Include("value"), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/smalltalk.go b/vendor/github.com/alecthomas/chroma/lexers/s/smalltalk.go new file mode 100644 index 00000000..db64707f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/smalltalk.go @@ -0,0 +1,99 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Smalltalk lexer. +var Smalltalk = internal.Register(MustNewLexer( + &Config{ + Name: "Smalltalk", + Aliases: []string{"smalltalk", "squeak", "st"}, + Filenames: []string{"*.st"}, + MimeTypes: []string{"text/x-smalltalk"}, + }, + Rules{ + "root": { + {`(<)(\w+:)(.*?)(>)`, ByGroups(Text, Keyword, Text, Text), nil}, + Include("squeak fileout"), + Include("whitespaces"), + Include("method definition"), + {`(\|)([\w\s]*)(\|)`, ByGroups(Operator, NameVariable, Operator), nil}, + Include("objects"), + {`\^|:=|_`, Operator, nil}, + {`[\]({}.;!]`, Text, nil}, + }, + "method definition": { + {`([a-zA-Z]+\w*:)(\s*)(\w+)`, ByGroups(NameFunction, Text, NameVariable), nil}, + {`^(\b[a-zA-Z]+\w*\b)(\s*)$`, ByGroups(NameFunction, Text), nil}, + {`^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$`, ByGroups(NameFunction, Text, NameVariable, Text), nil}, + }, + "blockvariables": { + Include("whitespaces"), + {`(:)(\s*)(\w+)`, ByGroups(Operator, Text, NameVariable), nil}, + {`\|`, Operator, Pop(1)}, + Default(Pop(1)), + }, + "literals": { + {`'(''|[^'])*'`, LiteralString, Push("afterobject")}, + {`\$.`, LiteralStringChar, Push("afterobject")}, + {`#\(`, LiteralStringSymbol, Push("parenth")}, + {`\)`, Text, Push("afterobject")}, + {`(\d+r)?-?\d+(\.\d+)?(e-?\d+)?`, LiteralNumber, Push("afterobject")}, + }, + "_parenth_helper": { + Include("whitespaces"), + {`(\d+r)?-?\d+(\.\d+)?(e-?\d+)?`, LiteralNumber, nil}, + {`[-+*/\\~<>=|&#!?,@%\w:]+`, LiteralStringSymbol, nil}, + {`'(''|[^'])*'`, LiteralString, nil}, + {`\$.`, LiteralStringChar, nil}, + {`#*\(`, LiteralStringSymbol, Push("inner_parenth")}, + }, + "parenth": { + {`\)`, LiteralStringSymbol, Push("root", "afterobject")}, + Include("_parenth_helper"), + }, + "inner_parenth": { + {`\)`, LiteralStringSymbol, Pop(1)}, + Include("_parenth_helper"), + }, + "whitespaces": { + {`\s+`, Text, nil}, + {`"(""|[^"])*"`, Comment, nil}, + }, + "objects": { + {`\[`, Text, Push("blockvariables")}, + {`\]`, Text, Push("afterobject")}, + {`\b(self|super|true|false|nil|thisContext)\b`, NameBuiltinPseudo, Push("afterobject")}, + {`\b[A-Z]\w*(?!:)\b`, NameClass, Push("afterobject")}, + {`\b[a-z]\w*(?!:)\b`, NameVariable, Push("afterobject")}, + {`#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)`, LiteralStringSymbol, Push("afterobject")}, + Include("literals"), + }, + "afterobject": { + {`! 
!$`, Keyword, Pop(1)}, + Include("whitespaces"), + {`\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)`, NameBuiltin, Pop(1)}, + {`\b(new\b(?!:))`, NameBuiltin, nil}, + {`:=|_`, Operator, Pop(1)}, + {`\b[a-zA-Z]+\w*:`, NameFunction, Pop(1)}, + {`\b[a-zA-Z]+\w*`, NameFunction, nil}, + {`\w+:?|[-+*/\\~<>=|&!?,@%]+`, NameFunction, Pop(1)}, + {`\.`, Punctuation, Pop(1)}, + {`;`, Punctuation, nil}, + {`[\])}]`, Text, nil}, + {`[\[({]`, Text, Pop(1)}, + }, + "squeak fileout": { + {`^"(""|[^"])*"!`, Keyword, nil}, + {`^'(''|[^'])*'!`, Keyword, nil}, + {`^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)`, ByGroups(Keyword, NameClass, Keyword, LiteralString, Keyword, Text, Keyword), nil}, + {`^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)`, ByGroups(Keyword, NameClass, Keyword, LiteralString, Keyword), nil}, + {`^(\w+)( subclass: )(#\w+)(\s+instanceVariableNames: )(.*?)(\s+classVariableNames: )(.*?)(\s+poolDictionaries: )(.*?)(\s+category: )(.*?)(!)`, ByGroups(NameClass, Keyword, LiteralStringSymbol, Keyword, LiteralString, Keyword, LiteralString, Keyword, LiteralString, Keyword, LiteralString, Keyword), nil}, + {`^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)`, ByGroups(NameClass, Keyword, LiteralString, Keyword), nil}, + {`(!\n)(\].*)(! !)$`, ByGroups(Keyword, Text, Keyword), nil}, + {`! !$`, Keyword, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/smarty.go b/vendor/github.com/alecthomas/chroma/lexers/s/smarty.go new file mode 100644 index 00000000..c364ffa5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/smarty.go @@ -0,0 +1,40 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + . "github.com/alecthomas/chroma/lexers/circular" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Smarty lexer. +var Smarty = internal.Register(MustNewLexer( + &Config{ + Name: "Smarty", + Aliases: []string{"smarty"}, + Filenames: []string{"*.tpl"}, + MimeTypes: []string{"application/x-smarty"}, + DotAll: true, + }, + Rules{ + "root": { + {`[^{]+`, Other, nil}, + {`(\{)(\*.*?\*)(\})`, ByGroups(CommentPreproc, Comment, CommentPreproc), nil}, + {`(\{php\})(.*?)(\{/php\})`, ByGroups(CommentPreproc, Using(PHP), CommentPreproc), nil}, + {`(\{)(/?[a-zA-Z_]\w*)(\s*)`, ByGroups(CommentPreproc, NameFunction, Text), Push("smarty")}, + {`\{`, CommentPreproc, Push("smarty")}, + }, + "smarty": { + {`\s+`, Text, nil}, + {`\{`, CommentPreproc, Push()}, + {`\}`, CommentPreproc, Pop(1)}, + {`#[a-zA-Z_]\w*#`, NameVariable, nil}, + {`\$[a-zA-Z_]\w*(\.\w+)*`, NameVariable, nil}, + {`[~!%^&*()+=|\[\]:;,.<>/?@-]`, Operator, nil}, + {`(true|false|null)\b`, KeywordConstant, nil}, + {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, + {`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`[a-zA-Z_]\w*`, NameAttribute, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/snobol.go b/vendor/github.com/alecthomas/chroma/lexers/s/snobol.go new file mode 100644 index 00000000..c999b50f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/snobol.go @@ -0,0 +1,48 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Snobol lexer. 
+var Snobol = internal.Register(MustNewLexer( + &Config{ + Name: "Snobol", + Aliases: []string{"snobol"}, + Filenames: []string{"*.snobol"}, + MimeTypes: []string{"text/x-snobol"}, + }, + Rules{ + "root": { + {`\*.*\n`, Comment, nil}, + {`[+.] `, Punctuation, Push("statement")}, + {`-.*\n`, Comment, nil}, + {`END\s*\n`, NameLabel, Push("heredoc")}, + {`[A-Za-z$][\w$]*`, NameLabel, Push("statement")}, + {`\s+`, Text, Push("statement")}, + }, + "statement": { + {`\s*\n`, Text, Pop(1)}, + {`\s+`, Text, nil}, + {`(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])`, NameBuiltin, nil}, + {`[A-Za-z][\w.]*`, Name, nil}, + {`\*\*|[?$.!%*/#+\-@|&\\=]`, Operator, nil}, + {`"[^"]*"`, LiteralString, nil}, + {`'[^']*'`, LiteralString, nil}, + {`[0-9]+(?=[^.EeDd])`, LiteralNumberInteger, nil}, + {`[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`:`, Punctuation, Push("goto")}, + {`[()<>,;]`, Punctuation, nil}, + }, + "goto": { + {`\s*\n`, Text, Pop(2)}, + {`\s+`, Text, nil}, + {`F|S`, Keyword, nil}, + {`(\()([A-Za-z][\w.]*)(\))`, ByGroups(Punctuation, NameLabel, Punctuation), nil}, + }, + "heredoc": { + {`.*\n`, LiteralStringHeredoc, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/solidity.go b/vendor/github.com/alecthomas/chroma/lexers/s/solidity.go new file mode 100644 index 00000000..d7cf0f97 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/solidity.go @@ -0,0 +1,110 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Solidity lexer. 
+var Solidity = internal.Register(MustNewLexer( + &Config{ + Name: "Solidity", + Aliases: []string{"sol", "solidity"}, + Filenames: []string{"*.sol"}, + MimeTypes: []string{}, + DotAll: true, + }, + Rules{ + "assembly": { + Include("comments"), + Include("numbers"), + Include("strings"), + Include("whitespace"), + {`\{`, Punctuation, Push()}, + {`\}`, Punctuation, Pop(1)}, + {`[(),]`, Punctuation, nil}, + {`:=|=:`, Operator, nil}, + {`(let)(\s*)(\w*\b)`, ByGroups(OperatorWord, Text, NameVariable), nil}, + {`(\w*\b)(\:[^=])`, ByGroups(NameLabel, Punctuation), nil}, + {`(stop|add|mul|sub|div|sdiv|mod|smod|addmod|mulmod|exp|signextend|lt|gt|slt|sgt|eq|iszero|and|or|xor|not|byte|keccak256|sha3|address|balance|origin|caller|callvalue|calldataload|calldatasize|calldatacopy|codesize|codecopy|gasprice|extcodesize|extcodecopy|blockhash|coinbase|timestamp|number|difficulty|gaslimit|pop|mload|mstore|mstore8|sload|sstore|for|switch|jump|jumpi|pc|msize|gas|jumpdest|push1|push2|push32|dup1|dup2|dup16|swap1|swap2|swap16|log0|log1|log4|create|call|callcode|return|delegatecall|suicide|returndatasize|returndatacopy|staticcall|revert|invalid)\b`, NameFunction, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "comments": { + {`//([\w\W]*?\n)`, CommentSingle, nil}, + {`/[*][\w\W]*?[*]/`, CommentMultiline, nil}, + {`/[*][\w\W]*`, CommentMultiline, nil}, + }, + "keywords-other": { + {Words(``, `\b`, `for`, `in`, `while`, `do`, `break`, `return`, `returns`, `continue`, `if`, `else`, `throw`, `new`, `delete`), Keyword, nil}, + {`assembly\b`, Keyword, Push("assembly")}, + {Words(``, `\b`, `contract`, `interface`, `enum`, `event`, `function`, `library`, `mapping`, `modifier`, `struct`, `var`), KeywordDeclaration, nil}, + {`(import|using)\b`, KeywordNamespace, nil}, + {`pragma (solidity|experimental)\b`, KeywordReserved, nil}, + {`(_|as|constant|default|from|is)\b`, KeywordReserved, nil}, + {`payable\b`, KeywordReserved, nil}, + {`(memory|storage)\b`, KeywordReserved, nil}, + {`(external|internal|private|public)\b`, KeywordReserved, nil}, + {`(anonymous|indexed)\b`, KeywordReserved, nil}, + {`(abstract|pure|static|view)\b`, KeywordReserved, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {`(wei|finney|szabo|ether)\b`, KeywordConstant, nil}, + {`(seconds|minutes|hours|days|weeks|years)\b`, KeywordConstant, nil}, + }, + "keywords-types": { + {Words(``, `\b`, `address`, `bool`, `byte`, `bytes`, `int`, `fixed`, `string`, `ufixed`, `uint`), KeywordType, nil}, + {Words(``, `\b`, `int8`, `int16`, `int24`, `int32`, `int40`, `int48`, `int56`, `int64`, `int72`, `int80`, `int88`, `int96`, `int104`, `int112`, `int120`, `int128`, `int136`, `int144`, `int152`, `int160`, `int168`, `int176`, `int184`, `int192`, `int200`, `int208`, `int216`, `int224`, `int232`, `int240`, `int248`, `int256`), KeywordType, nil}, + {Words(``, `\b`, `uint8`, `uint16`, `uint24`, `uint32`, `uint40`, `uint48`, `uint56`, `uint64`, `uint72`, `uint80`, `uint88`, `uint96`, `uint104`, `uint112`, `uint120`, `uint128`, `uint136`, `uint144`, `uint152`, `uint160`, `uint168`, `uint176`, `uint184`, `uint192`, `uint200`, `uint208`, `uint216`, `uint224`, `uint232`, `uint240`, `uint248`, `uint256`), KeywordType, nil}, + {Words(``, `\b`, `bytes1`, `bytes2`, `bytes3`, `bytes4`, `bytes5`, `bytes6`, `bytes7`, `bytes8`, `bytes9`, `bytes10`, `bytes11`, `bytes12`, `bytes13`, `bytes14`, `bytes15`, `bytes16`, `bytes17`, `bytes18`, `bytes19`, `bytes20`, `bytes21`, `bytes22`, `bytes23`, `bytes24`, `bytes25`, `bytes26`, `bytes27`, `bytes28`, `bytes29`, `bytes30`, `bytes31`, 
`bytes32`), KeywordType, nil}, + {Words(``, `\b`, `fixed8x0`, `fixed8x1`, `fixed8x2`, `fixed8x3`, `fixed8x4`, `fixed8x5`, `fixed8x6`, `fixed8x7`, `fixed8x8`, `fixed8x9`, `fixed8x10`, `fixed8x11`, `fixed8x12`, `fixed8x13`, `fixed8x14`, `fixed8x15`, `fixed8x16`, `fixed8x17`, `fixed8x18`, `fixed8x19`, `fixed8x20`, `fixed8x21`, `fixed8x22`, `fixed8x23`, `fixed8x24`, `fixed8x25`, `fixed8x26`, `fixed8x27`, `fixed8x28`, `fixed8x29`, `fixed8x30`, `fixed8x31`, `fixed8x32`, `fixed8x33`, `fixed8x34`, `fixed8x35`, `fixed8x36`, `fixed8x37`, `fixed8x38`, `fixed8x39`, `fixed8x40`, `fixed8x41`, `fixed8x42`, `fixed8x43`, `fixed8x44`, `fixed8x45`, `fixed8x46`, `fixed8x47`, `fixed8x48`, `fixed8x49`, `fixed8x50`, `fixed8x51`, `fixed8x52`, `fixed8x53`, `fixed8x54`, `fixed8x55`, `fixed8x56`, `fixed8x57`, `fixed8x58`, `fixed8x59`, `fixed8x60`, `fixed8x61`, `fixed8x62`, `fixed8x63`, `fixed8x64`, `fixed8x65`, `fixed8x66`, `fixed8x67`, `fixed8x68`, `fixed8x69`, `fixed8x70`, `fixed8x71`, `fixed8x72`, `fixed8x73`, `fixed8x74`, `fixed8x75`, `fixed8x76`, `fixed8x77`, `fixed8x78`, `fixed8x79`, `fixed8x80`, `fixed16x0`, `fixed16x1`, `fixed16x2`, `fixed16x3`, `fixed16x4`, `fixed16x5`, `fixed16x6`, `fixed16x7`, `fixed16x8`, `fixed16x9`, `fixed16x10`, `fixed16x11`, `fixed16x12`, `fixed16x13`, `fixed16x14`, `fixed16x15`, `fixed16x16`, `fixed16x17`, `fixed16x18`, `fixed16x19`, `fixed16x20`, `fixed16x21`, `fixed16x22`, `fixed16x23`, `fixed16x24`, `fixed16x25`, `fixed16x26`, `fixed16x27`, `fixed16x28`, `fixed16x29`, `fixed16x30`, `fixed16x31`, `fixed16x32`, `fixed16x33`, `fixed16x34`, `fixed16x35`, `fixed16x36`, `fixed16x37`, `fixed16x38`, `fixed16x39`, `fixed16x40`, `fixed16x41`, `fixed16x42`, `fixed16x43`, `fixed16x44`, `fixed16x45`, `fixed16x46`, `fixed16x47`, `fixed16x48`, `fixed16x49`, `fixed16x50`, `fixed16x51`, `fixed16x52`, `fixed16x53`, `fixed16x54`, `fixed16x55`, `fixed16x56`, `fixed16x57`, `fixed16x58`, `fixed16x59`, `fixed16x60`, `fixed16x61`, `fixed16x62`, `fixed16x63`, `fixed16x64`, `fixed16x65`, `fixed16x66`, `fixed16x67`, `fixed16x68`, `fixed16x69`, `fixed16x70`, `fixed16x71`, `fixed16x72`, `fixed16x73`, `fixed16x74`, `fixed16x75`, `fixed16x76`, `fixed16x77`, `fixed16x78`, `fixed16x79`, `fixed16x80`, `fixed24x0`, `fixed24x1`, `fixed24x2`, `fixed24x3`, `fixed24x4`, `fixed24x5`, `fixed24x6`, `fixed24x7`, `fixed24x8`, `fixed24x9`, `fixed24x10`, `fixed24x11`, `fixed24x12`, `fixed24x13`, `fixed24x14`, `fixed24x15`, `fixed24x16`, `fixed24x17`, `fixed24x18`, `fixed24x19`, `fixed24x20`, `fixed24x21`, `fixed24x22`, `fixed24x23`, `fixed24x24`, `fixed24x25`, `fixed24x26`, `fixed24x27`, `fixed24x28`, `fixed24x29`, `fixed24x30`, `fixed24x31`, `fixed24x32`, `fixed24x33`, `fixed24x34`, `fixed24x35`, `fixed24x36`, `fixed24x37`, `fixed24x38`, `fixed24x39`, `fixed24x40`, `fixed24x41`, `fixed24x42`, `fixed24x43`, `fixed24x44`, `fixed24x45`, `fixed24x46`, `fixed24x47`, `fixed24x48`, `fixed24x49`, `fixed24x50`, `fixed24x51`, `fixed24x52`, `fixed24x53`, `fixed24x54`, `fixed24x55`, `fixed24x56`, `fixed24x57`, `fixed24x58`, `fixed24x59`, `fixed24x60`, `fixed24x61`, `fixed24x62`, `fixed24x63`, `fixed24x64`, `fixed24x65`, `fixed24x66`, `fixed24x67`, `fixed24x68`, `fixed24x69`, `fixed24x70`, `fixed24x71`, `fixed24x72`, `fixed24x73`, `fixed24x74`, `fixed24x75`, `fixed24x76`, `fixed24x77`, `fixed24x78`, `fixed24x79`, `fixed24x80`, `fixed32x0`, `fixed32x1`, `fixed32x2`, `fixed32x3`, `fixed32x4`, `fixed32x5`, `fixed32x6`, `fixed32x7`, `fixed32x8`, `fixed32x9`, `fixed32x10`, `fixed32x11`, `fixed32x12`, `fixed32x13`, `fixed32x14`, `fixed32x15`, 
`fixed32x16`, `fixed32x17`, `fixed32x18`, `fixed32x19`, `fixed32x20`, `fixed32x21`, `fixed32x22`, `fixed32x23`, `fixed32x24`, `fixed32x25`, `fixed32x26`, `fixed32x27`, `fixed32x28`, `fixed32x29`, `fixed32x30`, `fixed32x31`, `fixed32x32`, `fixed32x33`, `fixed32x34`, `fixed32x35`, `fixed32x36`, `fixed32x37`, `fixed32x38`, `fixed32x39`, `fixed32x40`, `fixed32x41`, `fixed32x42`, `fixed32x43`, `fixed32x44`, `fixed32x45`, `fixed32x46`, `fixed32x47`, `fixed32x48`, `fixed32x49`, `fixed32x50`, `fixed32x51`, `fixed32x52`, `fixed32x53`, `fixed32x54`, `fixed32x55`, `fixed32x56`, `fixed32x57`, `fixed32x58`, `fixed32x59`, `fixed32x60`, `fixed32x61`, `fixed32x62`, `fixed32x63`, `fixed32x64`, `fixed32x65`, `fixed32x66`, `fixed32x67`, `fixed32x68`, `fixed32x69`, `fixed32x70`, `fixed32x71`, `fixed32x72`, `fixed32x73`, `fixed32x74`, `fixed32x75`, `fixed32x76`, `fixed32x77`, `fixed32x78`, `fixed32x79`, `fixed32x80`, `fixed40x0`, `fixed40x1`, `fixed40x2`, `fixed40x3`, `fixed40x4`, `fixed40x5`, `fixed40x6`, `fixed40x7`, `fixed40x8`, `fixed40x9`, `fixed40x10`, `fixed40x11`, `fixed40x12`, `fixed40x13`, `fixed40x14`, `fixed40x15`, `fixed40x16`, `fixed40x17`, `fixed40x18`, `fixed40x19`, `fixed40x20`, `fixed40x21`, `fixed40x22`, `fixed40x23`, `fixed40x24`, `fixed40x25`, `fixed40x26`, `fixed40x27`, `fixed40x28`, `fixed40x29`, `fixed40x30`, `fixed40x31`, `fixed40x32`, `fixed40x33`, `fixed40x34`, `fixed40x35`, `fixed40x36`, `fixed40x37`, `fixed40x38`, `fixed40x39`, `fixed40x40`, `fixed40x41`, `fixed40x42`, `fixed40x43`, `fixed40x44`, `fixed40x45`, `fixed40x46`, `fixed40x47`, `fixed40x48`, `fixed40x49`, `fixed40x50`, `fixed40x51`, `fixed40x52`, `fixed40x53`, `fixed40x54`, `fixed40x55`, `fixed40x56`, `fixed40x57`, `fixed40x58`, `fixed40x59`, `fixed40x60`, `fixed40x61`, `fixed40x62`, `fixed40x63`, `fixed40x64`, `fixed40x65`, `fixed40x66`, `fixed40x67`, `fixed40x68`, `fixed40x69`, `fixed40x70`, `fixed40x71`, `fixed40x72`, `fixed40x73`, `fixed40x74`, `fixed40x75`, `fixed40x76`, `fixed40x77`, `fixed40x78`, `fixed40x79`, `fixed40x80`, `fixed48x0`, `fixed48x1`, `fixed48x2`, `fixed48x3`, `fixed48x4`, `fixed48x5`, `fixed48x6`, `fixed48x7`, `fixed48x8`, `fixed48x9`, `fixed48x10`, `fixed48x11`, `fixed48x12`, `fixed48x13`, `fixed48x14`, `fixed48x15`, `fixed48x16`, `fixed48x17`, `fixed48x18`, `fixed48x19`, `fixed48x20`, `fixed48x21`, `fixed48x22`, `fixed48x23`, `fixed48x24`, `fixed48x25`, `fixed48x26`, `fixed48x27`, `fixed48x28`, `fixed48x29`, `fixed48x30`, `fixed48x31`, `fixed48x32`, `fixed48x33`, `fixed48x34`, `fixed48x35`, `fixed48x36`, `fixed48x37`, `fixed48x38`, `fixed48x39`, `fixed48x40`, `fixed48x41`, `fixed48x42`, `fixed48x43`, `fixed48x44`, `fixed48x45`, `fixed48x46`, `fixed48x47`, `fixed48x48`, `fixed48x49`, `fixed48x50`, `fixed48x51`, `fixed48x52`, `fixed48x53`, `fixed48x54`, `fixed48x55`, `fixed48x56`, `fixed48x57`, `fixed48x58`, `fixed48x59`, `fixed48x60`, `fixed48x61`, `fixed48x62`, `fixed48x63`, `fixed48x64`, `fixed48x65`, `fixed48x66`, `fixed48x67`, `fixed48x68`, `fixed48x69`, `fixed48x70`, `fixed48x71`, `fixed48x72`, `fixed48x73`, `fixed48x74`, `fixed48x75`, `fixed48x76`, `fixed48x77`, `fixed48x78`, `fixed48x79`, `fixed48x80`, `fixed56x0`, `fixed56x1`, `fixed56x2`, `fixed56x3`, `fixed56x4`, `fixed56x5`, `fixed56x6`, `fixed56x7`, `fixed56x8`, `fixed56x9`, `fixed56x10`, `fixed56x11`, `fixed56x12`, `fixed56x13`, `fixed56x14`, `fixed56x15`, `fixed56x16`, `fixed56x17`, `fixed56x18`, `fixed56x19`, `fixed56x20`, `fixed56x21`, `fixed56x22`, `fixed56x23`, `fixed56x24`, `fixed56x25`, `fixed56x26`, `fixed56x27`, `fixed56x28`, 
`fixed56x29`, `fixed56x30`, `fixed56x31`, `fixed56x32`, `fixed56x33`, `fixed56x34`, `fixed56x35`, `fixed56x36`, `fixed56x37`, `fixed56x38`, `fixed56x39`, `fixed56x40`, `fixed56x41`, `fixed56x42`, `fixed56x43`, `fixed56x44`, `fixed56x45`, `fixed56x46`, `fixed56x47`, `fixed56x48`, `fixed56x49`, `fixed56x50`, `fixed56x51`, `fixed56x52`, `fixed56x53`, `fixed56x54`, `fixed56x55`, `fixed56x56`, `fixed56x57`, `fixed56x58`, `fixed56x59`, `fixed56x60`, `fixed56x61`, `fixed56x62`, `fixed56x63`, `fixed56x64`, `fixed56x65`, `fixed56x66`, `fixed56x67`, `fixed56x68`, `fixed56x69`, `fixed56x70`, `fixed56x71`, `fixed56x72`, `fixed56x73`, `fixed56x74`, `fixed56x75`, `fixed56x76`, `fixed56x77`, `fixed56x78`, `fixed56x79`, `fixed56x80`, `fixed64x0`, `fixed64x1`, `fixed64x2`, `fixed64x3`, `fixed64x4`, `fixed64x5`, `fixed64x6`, `fixed64x7`, `fixed64x8`, `fixed64x9`, `fixed64x10`, `fixed64x11`, `fixed64x12`, `fixed64x13`, `fixed64x14`, `fixed64x15`, `fixed64x16`, `fixed64x17`, `fixed64x18`, `fixed64x19`, `fixed64x20`, `fixed64x21`, `fixed64x22`, `fixed64x23`, `fixed64x24`, `fixed64x25`, `fixed64x26`, `fixed64x27`, `fixed64x28`, `fixed64x29`, `fixed64x30`, `fixed64x31`, `fixed64x32`, `fixed64x33`, `fixed64x34`, `fixed64x35`, `fixed64x36`, `fixed64x37`, `fixed64x38`, `fixed64x39`, `fixed64x40`, `fixed64x41`, `fixed64x42`, `fixed64x43`, `fixed64x44`, `fixed64x45`, `fixed64x46`, `fixed64x47`, `fixed64x48`, `fixed64x49`, `fixed64x50`, `fixed64x51`, `fixed64x52`, `fixed64x53`, `fixed64x54`, `fixed64x55`, `fixed64x56`, `fixed64x57`, `fixed64x58`, `fixed64x59`, `fixed64x60`, `fixed64x61`, `fixed64x62`, `fixed64x63`, `fixed64x64`, `fixed64x65`, `fixed64x66`, `fixed64x67`, `fixed64x68`, `fixed64x69`, `fixed64x70`, `fixed64x71`, `fixed64x72`, `fixed64x73`, `fixed64x74`, `fixed64x75`, `fixed64x76`, `fixed64x77`, `fixed64x78`, `fixed64x79`, `fixed64x80`, `fixed72x0`, `fixed72x1`, `fixed72x2`, `fixed72x3`, `fixed72x4`, `fixed72x5`, `fixed72x6`, `fixed72x7`, `fixed72x8`, `fixed72x9`, `fixed72x10`, `fixed72x11`, `fixed72x12`, `fixed72x13`, `fixed72x14`, `fixed72x15`, `fixed72x16`, `fixed72x17`, `fixed72x18`, `fixed72x19`, `fixed72x20`, `fixed72x21`, `fixed72x22`, `fixed72x23`, `fixed72x24`, `fixed72x25`, `fixed72x26`, `fixed72x27`, `fixed72x28`, `fixed72x29`, `fixed72x30`, `fixed72x31`, `fixed72x32`, `fixed72x33`, `fixed72x34`, `fixed72x35`, `fixed72x36`, `fixed72x37`, `fixed72x38`, `fixed72x39`, `fixed72x40`, `fixed72x41`, `fixed72x42`, `fixed72x43`, `fixed72x44`, `fixed72x45`, `fixed72x46`, `fixed72x47`, `fixed72x48`, `fixed72x49`, `fixed72x50`, `fixed72x51`, `fixed72x52`, `fixed72x53`, `fixed72x54`, `fixed72x55`, `fixed72x56`, `fixed72x57`, `fixed72x58`, `fixed72x59`, `fixed72x60`, `fixed72x61`, `fixed72x62`, `fixed72x63`, `fixed72x64`, `fixed72x65`, `fixed72x66`, `fixed72x67`, `fixed72x68`, `fixed72x69`, `fixed72x70`, `fixed72x71`, `fixed72x72`, `fixed72x73`, `fixed72x74`, `fixed72x75`, `fixed72x76`, `fixed72x77`, `fixed72x78`, `fixed72x79`, `fixed72x80`, `fixed80x0`, `fixed80x1`, `fixed80x2`, `fixed80x3`, `fixed80x4`, `fixed80x5`, `fixed80x6`, `fixed80x7`, `fixed80x8`, `fixed80x9`, `fixed80x10`, `fixed80x11`, `fixed80x12`, `fixed80x13`, `fixed80x14`, `fixed80x15`, `fixed80x16`, `fixed80x17`, `fixed80x18`, `fixed80x19`, `fixed80x20`, `fixed80x21`, `fixed80x22`, `fixed80x23`, `fixed80x24`, `fixed80x25`, `fixed80x26`, `fixed80x27`, `fixed80x28`, `fixed80x29`, `fixed80x30`, `fixed80x31`, `fixed80x32`, `fixed80x33`, `fixed80x34`, `fixed80x35`, `fixed80x36`, `fixed80x37`, `fixed80x38`, `fixed80x39`, `fixed80x40`, `fixed80x41`, 
`fixed80x42`, `fixed80x43`, `fixed80x44`, `fixed80x45`, `fixed80x46`, `fixed80x47`, `fixed80x48`, `fixed80x49`, `fixed80x50`, `fixed80x51`, `fixed80x52`, `fixed80x53`, `fixed80x54`, `fixed80x55`, `fixed80x56`, `fixed80x57`, `fixed80x58`, `fixed80x59`, `fixed80x60`, `fixed80x61`, `fixed80x62`, `fixed80x63`, `fixed80x64`, `fixed80x65`, `fixed80x66`, `fixed80x67`, `fixed80x68`, `fixed80x69`, `fixed80x70`, `fixed80x71`, `fixed80x72`, `fixed80x73`, `fixed80x74`, `fixed80x75`, `fixed80x76`, `fixed80x77`, `fixed80x78`, `fixed80x79`, `fixed80x80`, `fixed88x0`, `fixed88x1`, `fixed88x2`, `fixed88x3`, `fixed88x4`, `fixed88x5`, `fixed88x6`, `fixed88x7`, `fixed88x8`, `fixed88x9`, `fixed88x10`, `fixed88x11`, `fixed88x12`, `fixed88x13`, `fixed88x14`, `fixed88x15`, `fixed88x16`, `fixed88x17`, `fixed88x18`, `fixed88x19`, `fixed88x20`, `fixed88x21`, `fixed88x22`, `fixed88x23`, `fixed88x24`, `fixed88x25`, `fixed88x26`, `fixed88x27`, `fixed88x28`, `fixed88x29`, `fixed88x30`, `fixed88x31`, `fixed88x32`, `fixed88x33`, `fixed88x34`, `fixed88x35`, `fixed88x36`, `fixed88x37`, `fixed88x38`, `fixed88x39`, `fixed88x40`, `fixed88x41`, `fixed88x42`, `fixed88x43`, `fixed88x44`, `fixed88x45`, `fixed88x46`, `fixed88x47`, `fixed88x48`, `fixed88x49`, `fixed88x50`, `fixed88x51`, `fixed88x52`, `fixed88x53`, `fixed88x54`, `fixed88x55`, `fixed88x56`, `fixed88x57`, `fixed88x58`, `fixed88x59`, `fixed88x60`, `fixed88x61`, `fixed88x62`, `fixed88x63`, `fixed88x64`, `fixed88x65`, `fixed88x66`, `fixed88x67`, `fixed88x68`, `fixed88x69`, `fixed88x70`, `fixed88x71`, `fixed88x72`, `fixed88x73`, `fixed88x74`, `fixed88x75`, `fixed88x76`, `fixed88x77`, `fixed88x78`, `fixed88x79`, `fixed88x80`, `fixed96x0`, `fixed96x1`, `fixed96x2`, `fixed96x3`, `fixed96x4`, `fixed96x5`, `fixed96x6`, `fixed96x7`, `fixed96x8`, `fixed96x9`, `fixed96x10`, `fixed96x11`, `fixed96x12`, `fixed96x13`, `fixed96x14`, `fixed96x15`, `fixed96x16`, `fixed96x17`, `fixed96x18`, `fixed96x19`, `fixed96x20`, `fixed96x21`, `fixed96x22`, `fixed96x23`, `fixed96x24`, `fixed96x25`, `fixed96x26`, `fixed96x27`, `fixed96x28`, `fixed96x29`, `fixed96x30`, `fixed96x31`, `fixed96x32`, `fixed96x33`, `fixed96x34`, `fixed96x35`, `fixed96x36`, `fixed96x37`, `fixed96x38`, `fixed96x39`, `fixed96x40`, `fixed96x41`, `fixed96x42`, `fixed96x43`, `fixed96x44`, `fixed96x45`, `fixed96x46`, `fixed96x47`, `fixed96x48`, `fixed96x49`, `fixed96x50`, `fixed96x51`, `fixed96x52`, `fixed96x53`, `fixed96x54`, `fixed96x55`, `fixed96x56`, `fixed96x57`, `fixed96x58`, `fixed96x59`, `fixed96x60`, `fixed96x61`, `fixed96x62`, `fixed96x63`, `fixed96x64`, `fixed96x65`, `fixed96x66`, `fixed96x67`, `fixed96x68`, `fixed96x69`, `fixed96x70`, `fixed96x71`, `fixed96x72`, `fixed96x73`, `fixed96x74`, `fixed96x75`, `fixed96x76`, `fixed96x77`, `fixed96x78`, `fixed96x79`, `fixed96x80`, `fixed104x0`, `fixed104x1`, `fixed104x2`, `fixed104x3`, `fixed104x4`, `fixed104x5`, `fixed104x6`, `fixed104x7`, `fixed104x8`, `fixed104x9`, `fixed104x10`, `fixed104x11`, `fixed104x12`, `fixed104x13`, `fixed104x14`, `fixed104x15`, `fixed104x16`, `fixed104x17`, `fixed104x18`, `fixed104x19`, `fixed104x20`, `fixed104x21`, `fixed104x22`, `fixed104x23`, `fixed104x24`, `fixed104x25`, `fixed104x26`, `fixed104x27`, `fixed104x28`, `fixed104x29`, `fixed104x30`, `fixed104x31`, `fixed104x32`, `fixed104x33`, `fixed104x34`, `fixed104x35`, `fixed104x36`, `fixed104x37`, `fixed104x38`, `fixed104x39`, `fixed104x40`, `fixed104x41`, `fixed104x42`, `fixed104x43`, `fixed104x44`, `fixed104x45`, `fixed104x46`, `fixed104x47`, `fixed104x48`, `fixed104x49`, `fixed104x50`, 
`fixed104x51`, `fixed104x52`, `fixed104x53`, `fixed104x54`, `fixed104x55`, `fixed104x56`, `fixed104x57`, `fixed104x58`, `fixed104x59`, `fixed104x60`, `fixed104x61`, `fixed104x62`, `fixed104x63`, `fixed104x64`, `fixed104x65`, `fixed104x66`, `fixed104x67`, `fixed104x68`, `fixed104x69`, `fixed104x70`, `fixed104x71`, `fixed104x72`, `fixed104x73`, `fixed104x74`, `fixed104x75`, `fixed104x76`, `fixed104x77`, `fixed104x78`, `fixed104x79`, `fixed104x80`, `fixed112x0`, `fixed112x1`, `fixed112x2`, `fixed112x3`, `fixed112x4`, `fixed112x5`, `fixed112x6`, `fixed112x7`, `fixed112x8`, `fixed112x9`, `fixed112x10`, `fixed112x11`, `fixed112x12`, `fixed112x13`, `fixed112x14`, `fixed112x15`, `fixed112x16`, `fixed112x17`, `fixed112x18`, `fixed112x19`, `fixed112x20`, `fixed112x21`, `fixed112x22`, `fixed112x23`, `fixed112x24`, `fixed112x25`, `fixed112x26`, `fixed112x27`, `fixed112x28`, `fixed112x29`, `fixed112x30`, `fixed112x31`, `fixed112x32`, `fixed112x33`, `fixed112x34`, `fixed112x35`, `fixed112x36`, `fixed112x37`, `fixed112x38`, `fixed112x39`, `fixed112x40`, `fixed112x41`, `fixed112x42`, `fixed112x43`, `fixed112x44`, `fixed112x45`, `fixed112x46`, `fixed112x47`, `fixed112x48`, `fixed112x49`, `fixed112x50`, `fixed112x51`, `fixed112x52`, `fixed112x53`, `fixed112x54`, `fixed112x55`, `fixed112x56`, `fixed112x57`, `fixed112x58`, `fixed112x59`, `fixed112x60`, `fixed112x61`, `fixed112x62`, `fixed112x63`, `fixed112x64`, `fixed112x65`, `fixed112x66`, `fixed112x67`, `fixed112x68`, `fixed112x69`, `fixed112x70`, `fixed112x71`, `fixed112x72`, `fixed112x73`, `fixed112x74`, `fixed112x75`, `fixed112x76`, `fixed112x77`, `fixed112x78`, `fixed112x79`, `fixed112x80`, `fixed120x0`, `fixed120x1`, `fixed120x2`, `fixed120x3`, `fixed120x4`, `fixed120x5`, `fixed120x6`, `fixed120x7`, `fixed120x8`, `fixed120x9`, `fixed120x10`, `fixed120x11`, `fixed120x12`, `fixed120x13`, `fixed120x14`, `fixed120x15`, `fixed120x16`, `fixed120x17`, `fixed120x18`, `fixed120x19`, `fixed120x20`, `fixed120x21`, `fixed120x22`, `fixed120x23`, `fixed120x24`, `fixed120x25`, `fixed120x26`, `fixed120x27`, `fixed120x28`, `fixed120x29`, `fixed120x30`, `fixed120x31`, `fixed120x32`, `fixed120x33`, `fixed120x34`, `fixed120x35`, `fixed120x36`, `fixed120x37`, `fixed120x38`, `fixed120x39`, `fixed120x40`, `fixed120x41`, `fixed120x42`, `fixed120x43`, `fixed120x44`, `fixed120x45`, `fixed120x46`, `fixed120x47`, `fixed120x48`, `fixed120x49`, `fixed120x50`, `fixed120x51`, `fixed120x52`, `fixed120x53`, `fixed120x54`, `fixed120x55`, `fixed120x56`, `fixed120x57`, `fixed120x58`, `fixed120x59`, `fixed120x60`, `fixed120x61`, `fixed120x62`, `fixed120x63`, `fixed120x64`, `fixed120x65`, `fixed120x66`, `fixed120x67`, `fixed120x68`, `fixed120x69`, `fixed120x70`, `fixed120x71`, `fixed120x72`, `fixed120x73`, `fixed120x74`, `fixed120x75`, `fixed120x76`, `fixed120x77`, `fixed120x78`, `fixed120x79`, `fixed120x80`, `fixed128x0`, `fixed128x1`, `fixed128x2`, `fixed128x3`, `fixed128x4`, `fixed128x5`, `fixed128x6`, `fixed128x7`, `fixed128x8`, `fixed128x9`, `fixed128x10`, `fixed128x11`, `fixed128x12`, `fixed128x13`, `fixed128x14`, `fixed128x15`, `fixed128x16`, `fixed128x17`, `fixed128x18`, `fixed128x19`, `fixed128x20`, `fixed128x21`, `fixed128x22`, `fixed128x23`, `fixed128x24`, `fixed128x25`, `fixed128x26`, `fixed128x27`, `fixed128x28`, `fixed128x29`, `fixed128x30`, `fixed128x31`, `fixed128x32`, `fixed128x33`, `fixed128x34`, `fixed128x35`, `fixed128x36`, `fixed128x37`, `fixed128x38`, `fixed128x39`, `fixed128x40`, `fixed128x41`, `fixed128x42`, `fixed128x43`, `fixed128x44`, `fixed128x45`, `fixed128x46`, 
`fixed128x47`, `fixed128x48`, `fixed128x49`, `fixed128x50`, `fixed128x51`, `fixed128x52`, `fixed128x53`, `fixed128x54`, `fixed128x55`, `fixed128x56`, `fixed128x57`, `fixed128x58`, `fixed128x59`, `fixed128x60`, `fixed128x61`, `fixed128x62`, `fixed128x63`, `fixed128x64`, `fixed128x65`, `fixed128x66`, `fixed128x67`, `fixed128x68`, `fixed128x69`, `fixed128x70`, `fixed128x71`, `fixed128x72`, `fixed128x73`, `fixed128x74`, `fixed128x75`, `fixed128x76`, `fixed128x77`, `fixed128x78`, `fixed128x79`, `fixed128x80`, `fixed136x0`, `fixed136x1`, `fixed136x2`, `fixed136x3`, `fixed136x4`, `fixed136x5`, `fixed136x6`, `fixed136x7`, `fixed136x8`, `fixed136x9`, `fixed136x10`, `fixed136x11`, `fixed136x12`, `fixed136x13`, `fixed136x14`, `fixed136x15`, `fixed136x16`, `fixed136x17`, `fixed136x18`, `fixed136x19`, `fixed136x20`, `fixed136x21`, `fixed136x22`, `fixed136x23`, `fixed136x24`, `fixed136x25`, `fixed136x26`, `fixed136x27`, `fixed136x28`, `fixed136x29`, `fixed136x30`, `fixed136x31`, `fixed136x32`, `fixed136x33`, `fixed136x34`, `fixed136x35`, `fixed136x36`, `fixed136x37`, `fixed136x38`, `fixed136x39`, `fixed136x40`, `fixed136x41`, `fixed136x42`, `fixed136x43`, `fixed136x44`, `fixed136x45`, `fixed136x46`, `fixed136x47`, `fixed136x48`, `fixed136x49`, `fixed136x50`, `fixed136x51`, `fixed136x52`, `fixed136x53`, `fixed136x54`, `fixed136x55`, `fixed136x56`, `fixed136x57`, `fixed136x58`, `fixed136x59`, `fixed136x60`, `fixed136x61`, `fixed136x62`, `fixed136x63`, `fixed136x64`, `fixed136x65`, `fixed136x66`, `fixed136x67`, `fixed136x68`, `fixed136x69`, `fixed136x70`, `fixed136x71`, `fixed136x72`, `fixed136x73`, `fixed136x74`, `fixed136x75`, `fixed136x76`, `fixed136x77`, `fixed136x78`, `fixed136x79`, `fixed136x80`, `fixed144x0`, `fixed144x1`, `fixed144x2`, `fixed144x3`, `fixed144x4`, `fixed144x5`, `fixed144x6`, `fixed144x7`, `fixed144x8`, `fixed144x9`, `fixed144x10`, `fixed144x11`, `fixed144x12`, `fixed144x13`, `fixed144x14`, `fixed144x15`, `fixed144x16`, `fixed144x17`, `fixed144x18`, `fixed144x19`, `fixed144x20`, `fixed144x21`, `fixed144x22`, `fixed144x23`, `fixed144x24`, `fixed144x25`, `fixed144x26`, `fixed144x27`, `fixed144x28`, `fixed144x29`, `fixed144x30`, `fixed144x31`, `fixed144x32`, `fixed144x33`, `fixed144x34`, `fixed144x35`, `fixed144x36`, `fixed144x37`, `fixed144x38`, `fixed144x39`, `fixed144x40`, `fixed144x41`, `fixed144x42`, `fixed144x43`, `fixed144x44`, `fixed144x45`, `fixed144x46`, `fixed144x47`, `fixed144x48`, `fixed144x49`, `fixed144x50`, `fixed144x51`, `fixed144x52`, `fixed144x53`, `fixed144x54`, `fixed144x55`, `fixed144x56`, `fixed144x57`, `fixed144x58`, `fixed144x59`, `fixed144x60`, `fixed144x61`, `fixed144x62`, `fixed144x63`, `fixed144x64`, `fixed144x65`, `fixed144x66`, `fixed144x67`, `fixed144x68`, `fixed144x69`, `fixed144x70`, `fixed144x71`, `fixed144x72`, `fixed144x73`, `fixed144x74`, `fixed144x75`, `fixed144x76`, `fixed144x77`, `fixed144x78`, `fixed144x79`, `fixed144x80`, `fixed152x0`, `fixed152x1`, `fixed152x2`, `fixed152x3`, `fixed152x4`, `fixed152x5`, `fixed152x6`, `fixed152x7`, `fixed152x8`, `fixed152x9`, `fixed152x10`, `fixed152x11`, `fixed152x12`, `fixed152x13`, `fixed152x14`, `fixed152x15`, `fixed152x16`, `fixed152x17`, `fixed152x18`, `fixed152x19`, `fixed152x20`, `fixed152x21`, `fixed152x22`, `fixed152x23`, `fixed152x24`, `fixed152x25`, `fixed152x26`, `fixed152x27`, `fixed152x28`, `fixed152x29`, `fixed152x30`, `fixed152x31`, `fixed152x32`, `fixed152x33`, `fixed152x34`, `fixed152x35`, `fixed152x36`, `fixed152x37`, `fixed152x38`, `fixed152x39`, `fixed152x40`, `fixed152x41`, `fixed152x42`, 
`fixed152x43`, `fixed152x44`, `fixed152x45`, `fixed152x46`, `fixed152x47`, `fixed152x48`, `fixed152x49`, `fixed152x50`, `fixed152x51`, `fixed152x52`, `fixed152x53`, `fixed152x54`, `fixed152x55`, `fixed152x56`, `fixed152x57`, `fixed152x58`, `fixed152x59`, `fixed152x60`, `fixed152x61`, `fixed152x62`, `fixed152x63`, `fixed152x64`, `fixed152x65`, `fixed152x66`, `fixed152x67`, `fixed152x68`, `fixed152x69`, `fixed152x70`, `fixed152x71`, `fixed152x72`, `fixed152x73`, `fixed152x74`, `fixed152x75`, `fixed152x76`, `fixed152x77`, `fixed152x78`, `fixed152x79`, `fixed152x80`, `fixed160x0`, `fixed160x1`, `fixed160x2`, `fixed160x3`, `fixed160x4`, `fixed160x5`, `fixed160x6`, `fixed160x7`, `fixed160x8`, `fixed160x9`, `fixed160x10`, `fixed160x11`, `fixed160x12`, `fixed160x13`, `fixed160x14`, `fixed160x15`, `fixed160x16`, `fixed160x17`, `fixed160x18`, `fixed160x19`, `fixed160x20`, `fixed160x21`, `fixed160x22`, `fixed160x23`, `fixed160x24`, `fixed160x25`, `fixed160x26`, `fixed160x27`, `fixed160x28`, `fixed160x29`, `fixed160x30`, `fixed160x31`, `fixed160x32`, `fixed160x33`, `fixed160x34`, `fixed160x35`, `fixed160x36`, `fixed160x37`, `fixed160x38`, `fixed160x39`, `fixed160x40`, `fixed160x41`, `fixed160x42`, `fixed160x43`, `fixed160x44`, `fixed160x45`, `fixed160x46`, `fixed160x47`, `fixed160x48`, `fixed160x49`, `fixed160x50`, `fixed160x51`, `fixed160x52`, `fixed160x53`, `fixed160x54`, `fixed160x55`, `fixed160x56`, `fixed160x57`, `fixed160x58`, `fixed160x59`, `fixed160x60`, `fixed160x61`, `fixed160x62`, `fixed160x63`, `fixed160x64`, `fixed160x65`, `fixed160x66`, `fixed160x67`, `fixed160x68`, `fixed160x69`, `fixed160x70`, `fixed160x71`, `fixed160x72`, `fixed160x73`, `fixed160x74`, `fixed160x75`, `fixed160x76`, `fixed160x77`, `fixed160x78`, `fixed160x79`, `fixed160x80`, `fixed168x0`, `fixed168x1`, `fixed168x2`, `fixed168x3`, `fixed168x4`, `fixed168x5`, `fixed168x6`, `fixed168x7`, `fixed168x8`, `fixed168x9`, `fixed168x10`, `fixed168x11`, `fixed168x12`, `fixed168x13`, `fixed168x14`, `fixed168x15`, `fixed168x16`, `fixed168x17`, `fixed168x18`, `fixed168x19`, `fixed168x20`, `fixed168x21`, `fixed168x22`, `fixed168x23`, `fixed168x24`, `fixed168x25`, `fixed168x26`, `fixed168x27`, `fixed168x28`, `fixed168x29`, `fixed168x30`, `fixed168x31`, `fixed168x32`, `fixed168x33`, `fixed168x34`, `fixed168x35`, `fixed168x36`, `fixed168x37`, `fixed168x38`, `fixed168x39`, `fixed168x40`, `fixed168x41`, `fixed168x42`, `fixed168x43`, `fixed168x44`, `fixed168x45`, `fixed168x46`, `fixed168x47`, `fixed168x48`, `fixed168x49`, `fixed168x50`, `fixed168x51`, `fixed168x52`, `fixed168x53`, `fixed168x54`, `fixed168x55`, `fixed168x56`, `fixed168x57`, `fixed168x58`, `fixed168x59`, `fixed168x60`, `fixed168x61`, `fixed168x62`, `fixed168x63`, `fixed168x64`, `fixed168x65`, `fixed168x66`, `fixed168x67`, `fixed168x68`, `fixed168x69`, `fixed168x70`, `fixed168x71`, `fixed168x72`, `fixed168x73`, `fixed168x74`, `fixed168x75`, `fixed168x76`, `fixed168x77`, `fixed168x78`, `fixed168x79`, `fixed168x80`, `fixed176x0`, `fixed176x1`, `fixed176x2`, `fixed176x3`, `fixed176x4`, `fixed176x5`, `fixed176x6`, `fixed176x7`, `fixed176x8`, `fixed176x9`, `fixed176x10`, `fixed176x11`, `fixed176x12`, `fixed176x13`, `fixed176x14`, `fixed176x15`, `fixed176x16`, `fixed176x17`, `fixed176x18`, `fixed176x19`, `fixed176x20`, `fixed176x21`, `fixed176x22`, `fixed176x23`, `fixed176x24`, `fixed176x25`, `fixed176x26`, `fixed176x27`, `fixed176x28`, `fixed176x29`, `fixed176x30`, `fixed176x31`, `fixed176x32`, `fixed176x33`, `fixed176x34`, `fixed176x35`, `fixed176x36`, `fixed176x37`, `fixed176x38`, 
`fixed176x39`, `fixed176x40`, `fixed176x41`, `fixed176x42`, `fixed176x43`, `fixed176x44`, `fixed176x45`, `fixed176x46`, `fixed176x47`, `fixed176x48`, `fixed176x49`, `fixed176x50`, `fixed176x51`, `fixed176x52`, `fixed176x53`, `fixed176x54`, `fixed176x55`, `fixed176x56`, `fixed176x57`, `fixed176x58`, `fixed176x59`, `fixed176x60`, `fixed176x61`, `fixed176x62`, `fixed176x63`, `fixed176x64`, `fixed176x65`, `fixed176x66`, `fixed176x67`, `fixed176x68`, `fixed176x69`, `fixed176x70`, `fixed176x71`, `fixed176x72`, `fixed176x73`, `fixed176x74`, `fixed176x75`, `fixed176x76`, `fixed176x77`, `fixed176x78`, `fixed176x79`, `fixed176x80`, `fixed184x0`, `fixed184x1`, `fixed184x2`, `fixed184x3`, `fixed184x4`, `fixed184x5`, `fixed184x6`, `fixed184x7`, `fixed184x8`, `fixed184x9`, `fixed184x10`, `fixed184x11`, `fixed184x12`, `fixed184x13`, `fixed184x14`, `fixed184x15`, `fixed184x16`, `fixed184x17`, `fixed184x18`, `fixed184x19`, `fixed184x20`, `fixed184x21`, `fixed184x22`, `fixed184x23`, `fixed184x24`, `fixed184x25`, `fixed184x26`, `fixed184x27`, `fixed184x28`, `fixed184x29`, `fixed184x30`, `fixed184x31`, `fixed184x32`, `fixed184x33`, `fixed184x34`, `fixed184x35`, `fixed184x36`, `fixed184x37`, `fixed184x38`, `fixed184x39`, `fixed184x40`, `fixed184x41`, `fixed184x42`, `fixed184x43`, `fixed184x44`, `fixed184x45`, `fixed184x46`, `fixed184x47`, `fixed184x48`, `fixed184x49`, `fixed184x50`, `fixed184x51`, `fixed184x52`, `fixed184x53`, `fixed184x54`, `fixed184x55`, `fixed184x56`, `fixed184x57`, `fixed184x58`, `fixed184x59`, `fixed184x60`, `fixed184x61`, `fixed184x62`, `fixed184x63`, `fixed184x64`, `fixed184x65`, `fixed184x66`, `fixed184x67`, `fixed184x68`, `fixed184x69`, `fixed184x70`, `fixed184x71`, `fixed184x72`, `fixed192x0`, `fixed192x1`, `fixed192x2`, `fixed192x3`, `fixed192x4`, `fixed192x5`, `fixed192x6`, `fixed192x7`, `fixed192x8`, `fixed192x9`, `fixed192x10`, `fixed192x11`, `fixed192x12`, `fixed192x13`, `fixed192x14`, `fixed192x15`, `fixed192x16`, `fixed192x17`, `fixed192x18`, `fixed192x19`, `fixed192x20`, `fixed192x21`, `fixed192x22`, `fixed192x23`, `fixed192x24`, `fixed192x25`, `fixed192x26`, `fixed192x27`, `fixed192x28`, `fixed192x29`, `fixed192x30`, `fixed192x31`, `fixed192x32`, `fixed192x33`, `fixed192x34`, `fixed192x35`, `fixed192x36`, `fixed192x37`, `fixed192x38`, `fixed192x39`, `fixed192x40`, `fixed192x41`, `fixed192x42`, `fixed192x43`, `fixed192x44`, `fixed192x45`, `fixed192x46`, `fixed192x47`, `fixed192x48`, `fixed192x49`, `fixed192x50`, `fixed192x51`, `fixed192x52`, `fixed192x53`, `fixed192x54`, `fixed192x55`, `fixed192x56`, `fixed192x57`, `fixed192x58`, `fixed192x59`, `fixed192x60`, `fixed192x61`, `fixed192x62`, `fixed192x63`, `fixed192x64`, `fixed200x0`, `fixed200x1`, `fixed200x2`, `fixed200x3`, `fixed200x4`, `fixed200x5`, `fixed200x6`, `fixed200x7`, `fixed200x8`, `fixed200x9`, `fixed200x10`, `fixed200x11`, `fixed200x12`, `fixed200x13`, `fixed200x14`, `fixed200x15`, `fixed200x16`, `fixed200x17`, `fixed200x18`, `fixed200x19`, `fixed200x20`, `fixed200x21`, `fixed200x22`, `fixed200x23`, `fixed200x24`, `fixed200x25`, `fixed200x26`, `fixed200x27`, `fixed200x28`, `fixed200x29`, `fixed200x30`, `fixed200x31`, `fixed200x32`, `fixed200x33`, `fixed200x34`, `fixed200x35`, `fixed200x36`, `fixed200x37`, `fixed200x38`, `fixed200x39`, `fixed200x40`, `fixed200x41`, `fixed200x42`, `fixed200x43`, `fixed200x44`, `fixed200x45`, `fixed200x46`, `fixed200x47`, `fixed200x48`, `fixed200x49`, `fixed200x50`, `fixed200x51`, `fixed200x52`, `fixed200x53`, `fixed200x54`, `fixed200x55`, `fixed200x56`, `fixed208x0`, `fixed208x1`, 
`fixed208x2`, `fixed208x3`, `fixed208x4`, `fixed208x5`, `fixed208x6`, `fixed208x7`, `fixed208x8`, `fixed208x9`, `fixed208x10`, `fixed208x11`, `fixed208x12`, `fixed208x13`, `fixed208x14`, `fixed208x15`, `fixed208x16`, `fixed208x17`, `fixed208x18`, `fixed208x19`, `fixed208x20`, `fixed208x21`, `fixed208x22`, `fixed208x23`, `fixed208x24`, `fixed208x25`, `fixed208x26`, `fixed208x27`, `fixed208x28`, `fixed208x29`, `fixed208x30`, `fixed208x31`, `fixed208x32`, `fixed208x33`, `fixed208x34`, `fixed208x35`, `fixed208x36`, `fixed208x37`, `fixed208x38`, `fixed208x39`, `fixed208x40`, `fixed208x41`, `fixed208x42`, `fixed208x43`, `fixed208x44`, `fixed208x45`, `fixed208x46`, `fixed208x47`, `fixed208x48`, `fixed216x0`, `fixed216x1`, `fixed216x2`, `fixed216x3`, `fixed216x4`, `fixed216x5`, `fixed216x6`, `fixed216x7`, `fixed216x8`, `fixed216x9`, `fixed216x10`, `fixed216x11`, `fixed216x12`, `fixed216x13`, `fixed216x14`, `fixed216x15`, `fixed216x16`, `fixed216x17`, `fixed216x18`, `fixed216x19`, `fixed216x20`, `fixed216x21`, `fixed216x22`, `fixed216x23`, `fixed216x24`, `fixed216x25`, `fixed216x26`, `fixed216x27`, `fixed216x28`, `fixed216x29`, `fixed216x30`, `fixed216x31`, `fixed216x32`, `fixed216x33`, `fixed216x34`, `fixed216x35`, `fixed216x36`, `fixed216x37`, `fixed216x38`, `fixed216x39`, `fixed216x40`, `fixed224x0`, `fixed224x1`, `fixed224x2`, `fixed224x3`, `fixed224x4`, `fixed224x5`, `fixed224x6`, `fixed224x7`, `fixed224x8`, `fixed224x9`, `fixed224x10`, `fixed224x11`, `fixed224x12`, `fixed224x13`, `fixed224x14`, `fixed224x15`, `fixed224x16`, `fixed224x17`, `fixed224x18`, `fixed224x19`, `fixed224x20`, `fixed224x21`, `fixed224x22`, `fixed224x23`, `fixed224x24`, `fixed224x25`, `fixed224x26`, `fixed224x27`, `fixed224x28`, `fixed224x29`, `fixed224x30`, `fixed224x31`, `fixed224x32`, `fixed232x0`, `fixed232x1`, `fixed232x2`, `fixed232x3`, `fixed232x4`, `fixed232x5`, `fixed232x6`, `fixed232x7`, `fixed232x8`, `fixed232x9`, `fixed232x10`, `fixed232x11`, `fixed232x12`, `fixed232x13`, `fixed232x14`, `fixed232x15`, `fixed232x16`, `fixed232x17`, `fixed232x18`, `fixed232x19`, `fixed232x20`, `fixed232x21`, `fixed232x22`, `fixed232x23`, `fixed232x24`, `fixed240x0`, `fixed240x1`, `fixed240x2`, `fixed240x3`, `fixed240x4`, `fixed240x5`, `fixed240x6`, `fixed240x7`, `fixed240x8`, `fixed240x9`, `fixed240x10`, `fixed240x11`, `fixed240x12`, `fixed240x13`, `fixed240x14`, `fixed240x15`, `fixed240x16`, `fixed248x0`, `fixed248x1`, `fixed248x2`, `fixed248x3`, `fixed248x4`, `fixed248x5`, `fixed248x6`, `fixed248x7`, `fixed248x8`, `fixed256x0`), KeywordType, nil}, + {Words(``, `\b`, `ufixed8x0`, `ufixed8x1`, `ufixed8x2`, `ufixed8x3`, `ufixed8x4`, `ufixed8x5`, `ufixed8x6`, `ufixed8x7`, `ufixed8x8`, `ufixed8x9`, `ufixed8x10`, `ufixed8x11`, `ufixed8x12`, `ufixed8x13`, `ufixed8x14`, `ufixed8x15`, `ufixed8x16`, `ufixed8x17`, `ufixed8x18`, `ufixed8x19`, `ufixed8x20`, `ufixed8x21`, `ufixed8x22`, `ufixed8x23`, `ufixed8x24`, `ufixed8x25`, `ufixed8x26`, `ufixed8x27`, `ufixed8x28`, `ufixed8x29`, `ufixed8x30`, `ufixed8x31`, `ufixed8x32`, `ufixed8x33`, `ufixed8x34`, `ufixed8x35`, `ufixed8x36`, `ufixed8x37`, `ufixed8x38`, `ufixed8x39`, `ufixed8x40`, `ufixed8x41`, `ufixed8x42`, `ufixed8x43`, `ufixed8x44`, `ufixed8x45`, `ufixed8x46`, `ufixed8x47`, `ufixed8x48`, `ufixed8x49`, `ufixed8x50`, `ufixed8x51`, `ufixed8x52`, `ufixed8x53`, `ufixed8x54`, `ufixed8x55`, `ufixed8x56`, `ufixed8x57`, `ufixed8x58`, `ufixed8x59`, `ufixed8x60`, `ufixed8x61`, `ufixed8x62`, `ufixed8x63`, `ufixed8x64`, `ufixed8x65`, `ufixed8x66`, `ufixed8x67`, `ufixed8x68`, `ufixed8x69`, 
`ufixed8x70`, `ufixed8x71`, `ufixed8x72`, `ufixed8x73`, `ufixed8x74`, `ufixed8x75`, `ufixed8x76`, `ufixed8x77`, `ufixed8x78`, `ufixed8x79`, `ufixed8x80`, `ufixed16x0`, `ufixed16x1`, `ufixed16x2`, `ufixed16x3`, `ufixed16x4`, `ufixed16x5`, `ufixed16x6`, `ufixed16x7`, `ufixed16x8`, `ufixed16x9`, `ufixed16x10`, `ufixed16x11`, `ufixed16x12`, `ufixed16x13`, `ufixed16x14`, `ufixed16x15`, `ufixed16x16`, `ufixed16x17`, `ufixed16x18`, `ufixed16x19`, `ufixed16x20`, `ufixed16x21`, `ufixed16x22`, `ufixed16x23`, `ufixed16x24`, `ufixed16x25`, `ufixed16x26`, `ufixed16x27`, `ufixed16x28`, `ufixed16x29`, `ufixed16x30`, `ufixed16x31`, `ufixed16x32`, `ufixed16x33`, `ufixed16x34`, `ufixed16x35`, `ufixed16x36`, `ufixed16x37`, `ufixed16x38`, `ufixed16x39`, `ufixed16x40`, `ufixed16x41`, `ufixed16x42`, `ufixed16x43`, `ufixed16x44`, `ufixed16x45`, `ufixed16x46`, `ufixed16x47`, `ufixed16x48`, `ufixed16x49`, `ufixed16x50`, `ufixed16x51`, `ufixed16x52`, `ufixed16x53`, `ufixed16x54`, `ufixed16x55`, `ufixed16x56`, `ufixed16x57`, `ufixed16x58`, `ufixed16x59`, `ufixed16x60`, `ufixed16x61`, `ufixed16x62`, `ufixed16x63`, `ufixed16x64`, `ufixed16x65`, `ufixed16x66`, `ufixed16x67`, `ufixed16x68`, `ufixed16x69`, `ufixed16x70`, `ufixed16x71`, `ufixed16x72`, `ufixed16x73`, `ufixed16x74`, `ufixed16x75`, `ufixed16x76`, `ufixed16x77`, `ufixed16x78`, `ufixed16x79`, `ufixed16x80`, `ufixed24x0`, `ufixed24x1`, `ufixed24x2`, `ufixed24x3`, `ufixed24x4`, `ufixed24x5`, `ufixed24x6`, `ufixed24x7`, `ufixed24x8`, `ufixed24x9`, `ufixed24x10`, `ufixed24x11`, `ufixed24x12`, `ufixed24x13`, `ufixed24x14`, `ufixed24x15`, `ufixed24x16`, `ufixed24x17`, `ufixed24x18`, `ufixed24x19`, `ufixed24x20`, `ufixed24x21`, `ufixed24x22`, `ufixed24x23`, `ufixed24x24`, `ufixed24x25`, `ufixed24x26`, `ufixed24x27`, `ufixed24x28`, `ufixed24x29`, `ufixed24x30`, `ufixed24x31`, `ufixed24x32`, `ufixed24x33`, `ufixed24x34`, `ufixed24x35`, `ufixed24x36`, `ufixed24x37`, `ufixed24x38`, `ufixed24x39`, `ufixed24x40`, `ufixed24x41`, `ufixed24x42`, `ufixed24x43`, `ufixed24x44`, `ufixed24x45`, `ufixed24x46`, `ufixed24x47`, `ufixed24x48`, `ufixed24x49`, `ufixed24x50`, `ufixed24x51`, `ufixed24x52`, `ufixed24x53`, `ufixed24x54`, `ufixed24x55`, `ufixed24x56`, `ufixed24x57`, `ufixed24x58`, `ufixed24x59`, `ufixed24x60`, `ufixed24x61`, `ufixed24x62`, `ufixed24x63`, `ufixed24x64`, `ufixed24x65`, `ufixed24x66`, `ufixed24x67`, `ufixed24x68`, `ufixed24x69`, `ufixed24x70`, `ufixed24x71`, `ufixed24x72`, `ufixed24x73`, `ufixed24x74`, `ufixed24x75`, `ufixed24x76`, `ufixed24x77`, `ufixed24x78`, `ufixed24x79`, `ufixed24x80`, `ufixed32x0`, `ufixed32x1`, `ufixed32x2`, `ufixed32x3`, `ufixed32x4`, `ufixed32x5`, `ufixed32x6`, `ufixed32x7`, `ufixed32x8`, `ufixed32x9`, `ufixed32x10`, `ufixed32x11`, `ufixed32x12`, `ufixed32x13`, `ufixed32x14`, `ufixed32x15`, `ufixed32x16`, `ufixed32x17`, `ufixed32x18`, `ufixed32x19`, `ufixed32x20`, `ufixed32x21`, `ufixed32x22`, `ufixed32x23`, `ufixed32x24`, `ufixed32x25`, `ufixed32x26`, `ufixed32x27`, `ufixed32x28`, `ufixed32x29`, `ufixed32x30`, `ufixed32x31`, `ufixed32x32`, `ufixed32x33`, `ufixed32x34`, `ufixed32x35`, `ufixed32x36`, `ufixed32x37`, `ufixed32x38`, `ufixed32x39`, `ufixed32x40`, `ufixed32x41`, `ufixed32x42`, `ufixed32x43`, `ufixed32x44`, `ufixed32x45`, `ufixed32x46`, `ufixed32x47`, `ufixed32x48`, `ufixed32x49`, `ufixed32x50`, `ufixed32x51`, `ufixed32x52`, `ufixed32x53`, `ufixed32x54`, `ufixed32x55`, `ufixed32x56`, `ufixed32x57`, `ufixed32x58`, `ufixed32x59`, `ufixed32x60`, `ufixed32x61`, `ufixed32x62`, `ufixed32x63`, `ufixed32x64`, `ufixed32x65`, 
`ufixed32x66`, `ufixed32x67`, `ufixed32x68`, `ufixed32x69`, `ufixed32x70`, `ufixed32x71`, `ufixed32x72`, `ufixed32x73`, `ufixed32x74`, `ufixed32x75`, `ufixed32x76`, `ufixed32x77`, `ufixed32x78`, `ufixed32x79`, `ufixed32x80`, `ufixed40x0`, `ufixed40x1`, `ufixed40x2`, `ufixed40x3`, `ufixed40x4`, `ufixed40x5`, `ufixed40x6`, `ufixed40x7`, `ufixed40x8`, `ufixed40x9`, `ufixed40x10`, `ufixed40x11`, `ufixed40x12`, `ufixed40x13`, `ufixed40x14`, `ufixed40x15`, `ufixed40x16`, `ufixed40x17`, `ufixed40x18`, `ufixed40x19`, `ufixed40x20`, `ufixed40x21`, `ufixed40x22`, `ufixed40x23`, `ufixed40x24`, `ufixed40x25`, `ufixed40x26`, `ufixed40x27`, `ufixed40x28`, `ufixed40x29`, `ufixed40x30`, `ufixed40x31`, `ufixed40x32`, `ufixed40x33`, `ufixed40x34`, `ufixed40x35`, `ufixed40x36`, `ufixed40x37`, `ufixed40x38`, `ufixed40x39`, `ufixed40x40`, `ufixed40x41`, `ufixed40x42`, `ufixed40x43`, `ufixed40x44`, `ufixed40x45`, `ufixed40x46`, `ufixed40x47`, `ufixed40x48`, `ufixed40x49`, `ufixed40x50`, `ufixed40x51`, `ufixed40x52`, `ufixed40x53`, `ufixed40x54`, `ufixed40x55`, `ufixed40x56`, `ufixed40x57`, `ufixed40x58`, `ufixed40x59`, `ufixed40x60`, `ufixed40x61`, `ufixed40x62`, `ufixed40x63`, `ufixed40x64`, `ufixed40x65`, `ufixed40x66`, `ufixed40x67`, `ufixed40x68`, `ufixed40x69`, `ufixed40x70`, `ufixed40x71`, `ufixed40x72`, `ufixed40x73`, `ufixed40x74`, `ufixed40x75`, `ufixed40x76`, `ufixed40x77`, `ufixed40x78`, `ufixed40x79`, `ufixed40x80`, `ufixed48x0`, `ufixed48x1`, `ufixed48x2`, `ufixed48x3`, `ufixed48x4`, `ufixed48x5`, `ufixed48x6`, `ufixed48x7`, `ufixed48x8`, `ufixed48x9`, `ufixed48x10`, `ufixed48x11`, `ufixed48x12`, `ufixed48x13`, `ufixed48x14`, `ufixed48x15`, `ufixed48x16`, `ufixed48x17`, `ufixed48x18`, `ufixed48x19`, `ufixed48x20`, `ufixed48x21`, `ufixed48x22`, `ufixed48x23`, `ufixed48x24`, `ufixed48x25`, `ufixed48x26`, `ufixed48x27`, `ufixed48x28`, `ufixed48x29`, `ufixed48x30`, `ufixed48x31`, `ufixed48x32`, `ufixed48x33`, `ufixed48x34`, `ufixed48x35`, `ufixed48x36`, `ufixed48x37`, `ufixed48x38`, `ufixed48x39`, `ufixed48x40`, `ufixed48x41`, `ufixed48x42`, `ufixed48x43`, `ufixed48x44`, `ufixed48x45`, `ufixed48x46`, `ufixed48x47`, `ufixed48x48`, `ufixed48x49`, `ufixed48x50`, `ufixed48x51`, `ufixed48x52`, `ufixed48x53`, `ufixed48x54`, `ufixed48x55`, `ufixed48x56`, `ufixed48x57`, `ufixed48x58`, `ufixed48x59`, `ufixed48x60`, `ufixed48x61`, `ufixed48x62`, `ufixed48x63`, `ufixed48x64`, `ufixed48x65`, `ufixed48x66`, `ufixed48x67`, `ufixed48x68`, `ufixed48x69`, `ufixed48x70`, `ufixed48x71`, `ufixed48x72`, `ufixed48x73`, `ufixed48x74`, `ufixed48x75`, `ufixed48x76`, `ufixed48x77`, `ufixed48x78`, `ufixed48x79`, `ufixed48x80`, `ufixed56x0`, `ufixed56x1`, `ufixed56x2`, `ufixed56x3`, `ufixed56x4`, `ufixed56x5`, `ufixed56x6`, `ufixed56x7`, `ufixed56x8`, `ufixed56x9`, `ufixed56x10`, `ufixed56x11`, `ufixed56x12`, `ufixed56x13`, `ufixed56x14`, `ufixed56x15`, `ufixed56x16`, `ufixed56x17`, `ufixed56x18`, `ufixed56x19`, `ufixed56x20`, `ufixed56x21`, `ufixed56x22`, `ufixed56x23`, `ufixed56x24`, `ufixed56x25`, `ufixed56x26`, `ufixed56x27`, `ufixed56x28`, `ufixed56x29`, `ufixed56x30`, `ufixed56x31`, `ufixed56x32`, `ufixed56x33`, `ufixed56x34`, `ufixed56x35`, `ufixed56x36`, `ufixed56x37`, `ufixed56x38`, `ufixed56x39`, `ufixed56x40`, `ufixed56x41`, `ufixed56x42`, `ufixed56x43`, `ufixed56x44`, `ufixed56x45`, `ufixed56x46`, `ufixed56x47`, `ufixed56x48`, `ufixed56x49`, `ufixed56x50`, `ufixed56x51`, `ufixed56x52`, `ufixed56x53`, `ufixed56x54`, `ufixed56x55`, `ufixed56x56`, `ufixed56x57`, `ufixed56x58`, `ufixed56x59`, `ufixed56x60`, `ufixed56x61`, 
`ufixed56x62`, `ufixed56x63`, `ufixed56x64`, `ufixed56x65`, `ufixed56x66`, `ufixed56x67`, `ufixed56x68`, `ufixed56x69`, `ufixed56x70`, `ufixed56x71`, `ufixed56x72`, `ufixed56x73`, `ufixed56x74`, `ufixed56x75`, `ufixed56x76`, `ufixed56x77`, `ufixed56x78`, `ufixed56x79`, `ufixed56x80`, `ufixed64x0`, `ufixed64x1`, `ufixed64x2`, `ufixed64x3`, `ufixed64x4`, `ufixed64x5`, `ufixed64x6`, `ufixed64x7`, `ufixed64x8`, `ufixed64x9`, `ufixed64x10`, `ufixed64x11`, `ufixed64x12`, `ufixed64x13`, `ufixed64x14`, `ufixed64x15`, `ufixed64x16`, `ufixed64x17`, `ufixed64x18`, `ufixed64x19`, `ufixed64x20`, `ufixed64x21`, `ufixed64x22`, `ufixed64x23`, `ufixed64x24`, `ufixed64x25`, `ufixed64x26`, `ufixed64x27`, `ufixed64x28`, `ufixed64x29`, `ufixed64x30`, `ufixed64x31`, `ufixed64x32`, `ufixed64x33`, `ufixed64x34`, `ufixed64x35`, `ufixed64x36`, `ufixed64x37`, `ufixed64x38`, `ufixed64x39`, `ufixed64x40`, `ufixed64x41`, `ufixed64x42`, `ufixed64x43`, `ufixed64x44`, `ufixed64x45`, `ufixed64x46`, `ufixed64x47`, `ufixed64x48`, `ufixed64x49`, `ufixed64x50`, `ufixed64x51`, `ufixed64x52`, `ufixed64x53`, `ufixed64x54`, `ufixed64x55`, `ufixed64x56`, `ufixed64x57`, `ufixed64x58`, `ufixed64x59`, `ufixed64x60`, `ufixed64x61`, `ufixed64x62`, `ufixed64x63`, `ufixed64x64`, `ufixed64x65`, `ufixed64x66`, `ufixed64x67`, `ufixed64x68`, `ufixed64x69`, `ufixed64x70`, `ufixed64x71`, `ufixed64x72`, `ufixed64x73`, `ufixed64x74`, `ufixed64x75`, `ufixed64x76`, `ufixed64x77`, `ufixed64x78`, `ufixed64x79`, `ufixed64x80`, `ufixed72x0`, `ufixed72x1`, `ufixed72x2`, `ufixed72x3`, `ufixed72x4`, `ufixed72x5`, `ufixed72x6`, `ufixed72x7`, `ufixed72x8`, `ufixed72x9`, `ufixed72x10`, `ufixed72x11`, `ufixed72x12`, `ufixed72x13`, `ufixed72x14`, `ufixed72x15`, `ufixed72x16`, `ufixed72x17`, `ufixed72x18`, `ufixed72x19`, `ufixed72x20`, `ufixed72x21`, `ufixed72x22`, `ufixed72x23`, `ufixed72x24`, `ufixed72x25`, `ufixed72x26`, `ufixed72x27`, `ufixed72x28`, `ufixed72x29`, `ufixed72x30`, `ufixed72x31`, `ufixed72x32`, `ufixed72x33`, `ufixed72x34`, `ufixed72x35`, `ufixed72x36`, `ufixed72x37`, `ufixed72x38`, `ufixed72x39`, `ufixed72x40`, `ufixed72x41`, `ufixed72x42`, `ufixed72x43`, `ufixed72x44`, `ufixed72x45`, `ufixed72x46`, `ufixed72x47`, `ufixed72x48`, `ufixed72x49`, `ufixed72x50`, `ufixed72x51`, `ufixed72x52`, `ufixed72x53`, `ufixed72x54`, `ufixed72x55`, `ufixed72x56`, `ufixed72x57`, `ufixed72x58`, `ufixed72x59`, `ufixed72x60`, `ufixed72x61`, `ufixed72x62`, `ufixed72x63`, `ufixed72x64`, `ufixed72x65`, `ufixed72x66`, `ufixed72x67`, `ufixed72x68`, `ufixed72x69`, `ufixed72x70`, `ufixed72x71`, `ufixed72x72`, `ufixed72x73`, `ufixed72x74`, `ufixed72x75`, `ufixed72x76`, `ufixed72x77`, `ufixed72x78`, `ufixed72x79`, `ufixed72x80`, `ufixed80x0`, `ufixed80x1`, `ufixed80x2`, `ufixed80x3`, `ufixed80x4`, `ufixed80x5`, `ufixed80x6`, `ufixed80x7`, `ufixed80x8`, `ufixed80x9`, `ufixed80x10`, `ufixed80x11`, `ufixed80x12`, `ufixed80x13`, `ufixed80x14`, `ufixed80x15`, `ufixed80x16`, `ufixed80x17`, `ufixed80x18`, `ufixed80x19`, `ufixed80x20`, `ufixed80x21`, `ufixed80x22`, `ufixed80x23`, `ufixed80x24`, `ufixed80x25`, `ufixed80x26`, `ufixed80x27`, `ufixed80x28`, `ufixed80x29`, `ufixed80x30`, `ufixed80x31`, `ufixed80x32`, `ufixed80x33`, `ufixed80x34`, `ufixed80x35`, `ufixed80x36`, `ufixed80x37`, `ufixed80x38`, `ufixed80x39`, `ufixed80x40`, `ufixed80x41`, `ufixed80x42`, `ufixed80x43`, `ufixed80x44`, `ufixed80x45`, `ufixed80x46`, `ufixed80x47`, `ufixed80x48`, `ufixed80x49`, `ufixed80x50`, `ufixed80x51`, `ufixed80x52`, `ufixed80x53`, `ufixed80x54`, `ufixed80x55`, `ufixed80x56`, `ufixed80x57`, 
`ufixed80x58`, `ufixed80x59`, `ufixed80x60`, `ufixed80x61`, `ufixed80x62`, `ufixed80x63`, `ufixed80x64`, `ufixed80x65`, `ufixed80x66`, `ufixed80x67`, `ufixed80x68`, `ufixed80x69`, `ufixed80x70`, `ufixed80x71`, `ufixed80x72`, `ufixed80x73`, `ufixed80x74`, `ufixed80x75`, `ufixed80x76`, `ufixed80x77`, `ufixed80x78`, `ufixed80x79`, `ufixed80x80`, `ufixed88x0`, `ufixed88x1`, `ufixed88x2`, `ufixed88x3`, `ufixed88x4`, `ufixed88x5`, `ufixed88x6`, `ufixed88x7`, `ufixed88x8`, `ufixed88x9`, `ufixed88x10`, `ufixed88x11`, `ufixed88x12`, `ufixed88x13`, `ufixed88x14`, `ufixed88x15`, `ufixed88x16`, `ufixed88x17`, `ufixed88x18`, `ufixed88x19`, `ufixed88x20`, `ufixed88x21`, `ufixed88x22`, `ufixed88x23`, `ufixed88x24`, `ufixed88x25`, `ufixed88x26`, `ufixed88x27`, `ufixed88x28`, `ufixed88x29`, `ufixed88x30`, `ufixed88x31`, `ufixed88x32`, `ufixed88x33`, `ufixed88x34`, `ufixed88x35`, `ufixed88x36`, `ufixed88x37`, `ufixed88x38`, `ufixed88x39`, `ufixed88x40`, `ufixed88x41`, `ufixed88x42`, `ufixed88x43`, `ufixed88x44`, `ufixed88x45`, `ufixed88x46`, `ufixed88x47`, `ufixed88x48`, `ufixed88x49`, `ufixed88x50`, `ufixed88x51`, `ufixed88x52`, `ufixed88x53`, `ufixed88x54`, `ufixed88x55`, `ufixed88x56`, `ufixed88x57`, `ufixed88x58`, `ufixed88x59`, `ufixed88x60`, `ufixed88x61`, `ufixed88x62`, `ufixed88x63`, `ufixed88x64`, `ufixed88x65`, `ufixed88x66`, `ufixed88x67`, `ufixed88x68`, `ufixed88x69`, `ufixed88x70`, `ufixed88x71`, `ufixed88x72`, `ufixed88x73`, `ufixed88x74`, `ufixed88x75`, `ufixed88x76`, `ufixed88x77`, `ufixed88x78`, `ufixed88x79`, `ufixed88x80`, `ufixed96x0`, `ufixed96x1`, `ufixed96x2`, `ufixed96x3`, `ufixed96x4`, `ufixed96x5`, `ufixed96x6`, `ufixed96x7`, `ufixed96x8`, `ufixed96x9`, `ufixed96x10`, `ufixed96x11`, `ufixed96x12`, `ufixed96x13`, `ufixed96x14`, `ufixed96x15`, `ufixed96x16`, `ufixed96x17`, `ufixed96x18`, `ufixed96x19`, `ufixed96x20`, `ufixed96x21`, `ufixed96x22`, `ufixed96x23`, `ufixed96x24`, `ufixed96x25`, `ufixed96x26`, `ufixed96x27`, `ufixed96x28`, `ufixed96x29`, `ufixed96x30`, `ufixed96x31`, `ufixed96x32`, `ufixed96x33`, `ufixed96x34`, `ufixed96x35`, `ufixed96x36`, `ufixed96x37`, `ufixed96x38`, `ufixed96x39`, `ufixed96x40`, `ufixed96x41`, `ufixed96x42`, `ufixed96x43`, `ufixed96x44`, `ufixed96x45`, `ufixed96x46`, `ufixed96x47`, `ufixed96x48`, `ufixed96x49`, `ufixed96x50`, `ufixed96x51`, `ufixed96x52`, `ufixed96x53`, `ufixed96x54`, `ufixed96x55`, `ufixed96x56`, `ufixed96x57`, `ufixed96x58`, `ufixed96x59`, `ufixed96x60`, `ufixed96x61`, `ufixed96x62`, `ufixed96x63`, `ufixed96x64`, `ufixed96x65`, `ufixed96x66`, `ufixed96x67`, `ufixed96x68`, `ufixed96x69`, `ufixed96x70`, `ufixed96x71`, `ufixed96x72`, `ufixed96x73`, `ufixed96x74`, `ufixed96x75`, `ufixed96x76`, `ufixed96x77`, `ufixed96x78`, `ufixed96x79`, `ufixed96x80`, `ufixed104x0`, `ufixed104x1`, `ufixed104x2`, `ufixed104x3`, `ufixed104x4`, `ufixed104x5`, `ufixed104x6`, `ufixed104x7`, `ufixed104x8`, `ufixed104x9`, `ufixed104x10`, `ufixed104x11`, `ufixed104x12`, `ufixed104x13`, `ufixed104x14`, `ufixed104x15`, `ufixed104x16`, `ufixed104x17`, `ufixed104x18`, `ufixed104x19`, `ufixed104x20`, `ufixed104x21`, `ufixed104x22`, `ufixed104x23`, `ufixed104x24`, `ufixed104x25`, `ufixed104x26`, `ufixed104x27`, `ufixed104x28`, `ufixed104x29`, `ufixed104x30`, `ufixed104x31`, `ufixed104x32`, `ufixed104x33`, `ufixed104x34`, `ufixed104x35`, `ufixed104x36`, `ufixed104x37`, `ufixed104x38`, `ufixed104x39`, `ufixed104x40`, `ufixed104x41`, `ufixed104x42`, `ufixed104x43`, `ufixed104x44`, `ufixed104x45`, `ufixed104x46`, `ufixed104x47`, `ufixed104x48`, `ufixed104x49`, 
`ufixed104x50`, `ufixed104x51`, `ufixed104x52`, `ufixed104x53`, `ufixed104x54`, `ufixed104x55`, `ufixed104x56`, `ufixed104x57`, `ufixed104x58`, `ufixed104x59`, `ufixed104x60`, `ufixed104x61`, `ufixed104x62`, `ufixed104x63`, `ufixed104x64`, `ufixed104x65`, `ufixed104x66`, `ufixed104x67`, `ufixed104x68`, `ufixed104x69`, `ufixed104x70`, `ufixed104x71`, `ufixed104x72`, `ufixed104x73`, `ufixed104x74`, `ufixed104x75`, `ufixed104x76`, `ufixed104x77`, `ufixed104x78`, `ufixed104x79`, `ufixed104x80`, `ufixed112x0`, `ufixed112x1`, `ufixed112x2`, `ufixed112x3`, `ufixed112x4`, `ufixed112x5`, `ufixed112x6`, `ufixed112x7`, `ufixed112x8`, `ufixed112x9`, `ufixed112x10`, `ufixed112x11`, `ufixed112x12`, `ufixed112x13`, `ufixed112x14`, `ufixed112x15`, `ufixed112x16`, `ufixed112x17`, `ufixed112x18`, `ufixed112x19`, `ufixed112x20`, `ufixed112x21`, `ufixed112x22`, `ufixed112x23`, `ufixed112x24`, `ufixed112x25`, `ufixed112x26`, `ufixed112x27`, `ufixed112x28`, `ufixed112x29`, `ufixed112x30`, `ufixed112x31`, `ufixed112x32`, `ufixed112x33`, `ufixed112x34`, `ufixed112x35`, `ufixed112x36`, `ufixed112x37`, `ufixed112x38`, `ufixed112x39`, `ufixed112x40`, `ufixed112x41`, `ufixed112x42`, `ufixed112x43`, `ufixed112x44`, `ufixed112x45`, `ufixed112x46`, `ufixed112x47`, `ufixed112x48`, `ufixed112x49`, `ufixed112x50`, `ufixed112x51`, `ufixed112x52`, `ufixed112x53`, `ufixed112x54`, `ufixed112x55`, `ufixed112x56`, `ufixed112x57`, `ufixed112x58`, `ufixed112x59`, `ufixed112x60`, `ufixed112x61`, `ufixed112x62`, `ufixed112x63`, `ufixed112x64`, `ufixed112x65`, `ufixed112x66`, `ufixed112x67`, `ufixed112x68`, `ufixed112x69`, `ufixed112x70`, `ufixed112x71`, `ufixed112x72`, `ufixed112x73`, `ufixed112x74`, `ufixed112x75`, `ufixed112x76`, `ufixed112x77`, `ufixed112x78`, `ufixed112x79`, `ufixed112x80`, `ufixed120x0`, `ufixed120x1`, `ufixed120x2`, `ufixed120x3`, `ufixed120x4`, `ufixed120x5`, `ufixed120x6`, `ufixed120x7`, `ufixed120x8`, `ufixed120x9`, `ufixed120x10`, `ufixed120x11`, `ufixed120x12`, `ufixed120x13`, `ufixed120x14`, `ufixed120x15`, `ufixed120x16`, `ufixed120x17`, `ufixed120x18`, `ufixed120x19`, `ufixed120x20`, `ufixed120x21`, `ufixed120x22`, `ufixed120x23`, `ufixed120x24`, `ufixed120x25`, `ufixed120x26`, `ufixed120x27`, `ufixed120x28`, `ufixed120x29`, `ufixed120x30`, `ufixed120x31`, `ufixed120x32`, `ufixed120x33`, `ufixed120x34`, `ufixed120x35`, `ufixed120x36`, `ufixed120x37`, `ufixed120x38`, `ufixed120x39`, `ufixed120x40`, `ufixed120x41`, `ufixed120x42`, `ufixed120x43`, `ufixed120x44`, `ufixed120x45`, `ufixed120x46`, `ufixed120x47`, `ufixed120x48`, `ufixed120x49`, `ufixed120x50`, `ufixed120x51`, `ufixed120x52`, `ufixed120x53`, `ufixed120x54`, `ufixed120x55`, `ufixed120x56`, `ufixed120x57`, `ufixed120x58`, `ufixed120x59`, `ufixed120x60`, `ufixed120x61`, `ufixed120x62`, `ufixed120x63`, `ufixed120x64`, `ufixed120x65`, `ufixed120x66`, `ufixed120x67`, `ufixed120x68`, `ufixed120x69`, `ufixed120x70`, `ufixed120x71`, `ufixed120x72`, `ufixed120x73`, `ufixed120x74`, `ufixed120x75`, `ufixed120x76`, `ufixed120x77`, `ufixed120x78`, `ufixed120x79`, `ufixed120x80`, `ufixed128x0`, `ufixed128x1`, `ufixed128x2`, `ufixed128x3`, `ufixed128x4`, `ufixed128x5`, `ufixed128x6`, `ufixed128x7`, `ufixed128x8`, `ufixed128x9`, `ufixed128x10`, `ufixed128x11`, `ufixed128x12`, `ufixed128x13`, `ufixed128x14`, `ufixed128x15`, `ufixed128x16`, `ufixed128x17`, `ufixed128x18`, `ufixed128x19`, `ufixed128x20`, `ufixed128x21`, `ufixed128x22`, `ufixed128x23`, `ufixed128x24`, `ufixed128x25`, `ufixed128x26`, `ufixed128x27`, `ufixed128x28`, `ufixed128x29`, `ufixed128x30`, 
`ufixed128x31`, `ufixed128x32`, `ufixed128x33`, `ufixed128x34`, `ufixed128x35`, `ufixed128x36`, `ufixed128x37`, `ufixed128x38`, `ufixed128x39`, `ufixed128x40`, `ufixed128x41`, `ufixed128x42`, `ufixed128x43`, `ufixed128x44`, `ufixed128x45`, `ufixed128x46`, `ufixed128x47`, `ufixed128x48`, `ufixed128x49`, `ufixed128x50`, `ufixed128x51`, `ufixed128x52`, `ufixed128x53`, `ufixed128x54`, `ufixed128x55`, `ufixed128x56`, `ufixed128x57`, `ufixed128x58`, `ufixed128x59`, `ufixed128x60`, `ufixed128x61`, `ufixed128x62`, `ufixed128x63`, `ufixed128x64`, `ufixed128x65`, `ufixed128x66`, `ufixed128x67`, `ufixed128x68`, `ufixed128x69`, `ufixed128x70`, `ufixed128x71`, `ufixed128x72`, `ufixed128x73`, `ufixed128x74`, `ufixed128x75`, `ufixed128x76`, `ufixed128x77`, `ufixed128x78`, `ufixed128x79`, `ufixed128x80`, `ufixed136x0`, `ufixed136x1`, `ufixed136x2`, `ufixed136x3`, `ufixed136x4`, `ufixed136x5`, `ufixed136x6`, `ufixed136x7`, `ufixed136x8`, `ufixed136x9`, `ufixed136x10`, `ufixed136x11`, `ufixed136x12`, `ufixed136x13`, `ufixed136x14`, `ufixed136x15`, `ufixed136x16`, `ufixed136x17`, `ufixed136x18`, `ufixed136x19`, `ufixed136x20`, `ufixed136x21`, `ufixed136x22`, `ufixed136x23`, `ufixed136x24`, `ufixed136x25`, `ufixed136x26`, `ufixed136x27`, `ufixed136x28`, `ufixed136x29`, `ufixed136x30`, `ufixed136x31`, `ufixed136x32`, `ufixed136x33`, `ufixed136x34`, `ufixed136x35`, `ufixed136x36`, `ufixed136x37`, `ufixed136x38`, `ufixed136x39`, `ufixed136x40`, `ufixed136x41`, `ufixed136x42`, `ufixed136x43`, `ufixed136x44`, `ufixed136x45`, `ufixed136x46`, `ufixed136x47`, `ufixed136x48`, `ufixed136x49`, `ufixed136x50`, `ufixed136x51`, `ufixed136x52`, `ufixed136x53`, `ufixed136x54`, `ufixed136x55`, `ufixed136x56`, `ufixed136x57`, `ufixed136x58`, `ufixed136x59`, `ufixed136x60`, `ufixed136x61`, `ufixed136x62`, `ufixed136x63`, `ufixed136x64`, `ufixed136x65`, `ufixed136x66`, `ufixed136x67`, `ufixed136x68`, `ufixed136x69`, `ufixed136x70`, `ufixed136x71`, `ufixed136x72`, `ufixed136x73`, `ufixed136x74`, `ufixed136x75`, `ufixed136x76`, `ufixed136x77`, `ufixed136x78`, `ufixed136x79`, `ufixed136x80`, `ufixed144x0`, `ufixed144x1`, `ufixed144x2`, `ufixed144x3`, `ufixed144x4`, `ufixed144x5`, `ufixed144x6`, `ufixed144x7`, `ufixed144x8`, `ufixed144x9`, `ufixed144x10`, `ufixed144x11`, `ufixed144x12`, `ufixed144x13`, `ufixed144x14`, `ufixed144x15`, `ufixed144x16`, `ufixed144x17`, `ufixed144x18`, `ufixed144x19`, `ufixed144x20`, `ufixed144x21`, `ufixed144x22`, `ufixed144x23`, `ufixed144x24`, `ufixed144x25`, `ufixed144x26`, `ufixed144x27`, `ufixed144x28`, `ufixed144x29`, `ufixed144x30`, `ufixed144x31`, `ufixed144x32`, `ufixed144x33`, `ufixed144x34`, `ufixed144x35`, `ufixed144x36`, `ufixed144x37`, `ufixed144x38`, `ufixed144x39`, `ufixed144x40`, `ufixed144x41`, `ufixed144x42`, `ufixed144x43`, `ufixed144x44`, `ufixed144x45`, `ufixed144x46`, `ufixed144x47`, `ufixed144x48`, `ufixed144x49`, `ufixed144x50`, `ufixed144x51`, `ufixed144x52`, `ufixed144x53`, `ufixed144x54`, `ufixed144x55`, `ufixed144x56`, `ufixed144x57`, `ufixed144x58`, `ufixed144x59`, `ufixed144x60`, `ufixed144x61`, `ufixed144x62`, `ufixed144x63`, `ufixed144x64`, `ufixed144x65`, `ufixed144x66`, `ufixed144x67`, `ufixed144x68`, `ufixed144x69`, `ufixed144x70`, `ufixed144x71`, `ufixed144x72`, `ufixed144x73`, `ufixed144x74`, `ufixed144x75`, `ufixed144x76`, `ufixed144x77`, `ufixed144x78`, `ufixed144x79`, `ufixed144x80`, `ufixed152x0`, `ufixed152x1`, `ufixed152x2`, `ufixed152x3`, `ufixed152x4`, `ufixed152x5`, `ufixed152x6`, `ufixed152x7`, `ufixed152x8`, `ufixed152x9`, `ufixed152x10`, `ufixed152x11`, 
`ufixed152x12`, `ufixed152x13`, `ufixed152x14`, `ufixed152x15`, `ufixed152x16`, `ufixed152x17`, `ufixed152x18`, `ufixed152x19`, `ufixed152x20`, `ufixed152x21`, `ufixed152x22`, `ufixed152x23`, `ufixed152x24`, `ufixed152x25`, `ufixed152x26`, `ufixed152x27`, `ufixed152x28`, `ufixed152x29`, `ufixed152x30`, `ufixed152x31`, `ufixed152x32`, `ufixed152x33`, `ufixed152x34`, `ufixed152x35`, `ufixed152x36`, `ufixed152x37`, `ufixed152x38`, `ufixed152x39`, `ufixed152x40`, `ufixed152x41`, `ufixed152x42`, `ufixed152x43`, `ufixed152x44`, `ufixed152x45`, `ufixed152x46`, `ufixed152x47`, `ufixed152x48`, `ufixed152x49`, `ufixed152x50`, `ufixed152x51`, `ufixed152x52`, `ufixed152x53`, `ufixed152x54`, `ufixed152x55`, `ufixed152x56`, `ufixed152x57`, `ufixed152x58`, `ufixed152x59`, `ufixed152x60`, `ufixed152x61`, `ufixed152x62`, `ufixed152x63`, `ufixed152x64`, `ufixed152x65`, `ufixed152x66`, `ufixed152x67`, `ufixed152x68`, `ufixed152x69`, `ufixed152x70`, `ufixed152x71`, `ufixed152x72`, `ufixed152x73`, `ufixed152x74`, `ufixed152x75`, `ufixed152x76`, `ufixed152x77`, `ufixed152x78`, `ufixed152x79`, `ufixed152x80`, `ufixed160x0`, `ufixed160x1`, `ufixed160x2`, `ufixed160x3`, `ufixed160x4`, `ufixed160x5`, `ufixed160x6`, `ufixed160x7`, `ufixed160x8`, `ufixed160x9`, `ufixed160x10`, `ufixed160x11`, `ufixed160x12`, `ufixed160x13`, `ufixed160x14`, `ufixed160x15`, `ufixed160x16`, `ufixed160x17`, `ufixed160x18`, `ufixed160x19`, `ufixed160x20`, `ufixed160x21`, `ufixed160x22`, `ufixed160x23`, `ufixed160x24`, `ufixed160x25`, `ufixed160x26`, `ufixed160x27`, `ufixed160x28`, `ufixed160x29`, `ufixed160x30`, `ufixed160x31`, `ufixed160x32`, `ufixed160x33`, `ufixed160x34`, `ufixed160x35`, `ufixed160x36`, `ufixed160x37`, `ufixed160x38`, `ufixed160x39`, `ufixed160x40`, `ufixed160x41`, `ufixed160x42`, `ufixed160x43`, `ufixed160x44`, `ufixed160x45`, `ufixed160x46`, `ufixed160x47`, `ufixed160x48`, `ufixed160x49`, `ufixed160x50`, `ufixed160x51`, `ufixed160x52`, `ufixed160x53`, `ufixed160x54`, `ufixed160x55`, `ufixed160x56`, `ufixed160x57`, `ufixed160x58`, `ufixed160x59`, `ufixed160x60`, `ufixed160x61`, `ufixed160x62`, `ufixed160x63`, `ufixed160x64`, `ufixed160x65`, `ufixed160x66`, `ufixed160x67`, `ufixed160x68`, `ufixed160x69`, `ufixed160x70`, `ufixed160x71`, `ufixed160x72`, `ufixed160x73`, `ufixed160x74`, `ufixed160x75`, `ufixed160x76`, `ufixed160x77`, `ufixed160x78`, `ufixed160x79`, `ufixed160x80`, `ufixed168x0`, `ufixed168x1`, `ufixed168x2`, `ufixed168x3`, `ufixed168x4`, `ufixed168x5`, `ufixed168x6`, `ufixed168x7`, `ufixed168x8`, `ufixed168x9`, `ufixed168x10`, `ufixed168x11`, `ufixed168x12`, `ufixed168x13`, `ufixed168x14`, `ufixed168x15`, `ufixed168x16`, `ufixed168x17`, `ufixed168x18`, `ufixed168x19`, `ufixed168x20`, `ufixed168x21`, `ufixed168x22`, `ufixed168x23`, `ufixed168x24`, `ufixed168x25`, `ufixed168x26`, `ufixed168x27`, `ufixed168x28`, `ufixed168x29`, `ufixed168x30`, `ufixed168x31`, `ufixed168x32`, `ufixed168x33`, `ufixed168x34`, `ufixed168x35`, `ufixed168x36`, `ufixed168x37`, `ufixed168x38`, `ufixed168x39`, `ufixed168x40`, `ufixed168x41`, `ufixed168x42`, `ufixed168x43`, `ufixed168x44`, `ufixed168x45`, `ufixed168x46`, `ufixed168x47`, `ufixed168x48`, `ufixed168x49`, `ufixed168x50`, `ufixed168x51`, `ufixed168x52`, `ufixed168x53`, `ufixed168x54`, `ufixed168x55`, `ufixed168x56`, `ufixed168x57`, `ufixed168x58`, `ufixed168x59`, `ufixed168x60`, `ufixed168x61`, `ufixed168x62`, `ufixed168x63`, `ufixed168x64`, `ufixed168x65`, `ufixed168x66`, `ufixed168x67`, `ufixed168x68`, `ufixed168x69`, `ufixed168x70`, `ufixed168x71`, `ufixed168x72`, 
`ufixed168x73`, `ufixed168x74`, `ufixed168x75`, `ufixed168x76`, `ufixed168x77`, `ufixed168x78`, `ufixed168x79`, `ufixed168x80`, `ufixed176x0`, `ufixed176x1`, `ufixed176x2`, `ufixed176x3`, `ufixed176x4`, `ufixed176x5`, `ufixed176x6`, `ufixed176x7`, `ufixed176x8`, `ufixed176x9`, `ufixed176x10`, `ufixed176x11`, `ufixed176x12`, `ufixed176x13`, `ufixed176x14`, `ufixed176x15`, `ufixed176x16`, `ufixed176x17`, `ufixed176x18`, `ufixed176x19`, `ufixed176x20`, `ufixed176x21`, `ufixed176x22`, `ufixed176x23`, `ufixed176x24`, `ufixed176x25`, `ufixed176x26`, `ufixed176x27`, `ufixed176x28`, `ufixed176x29`, `ufixed176x30`, `ufixed176x31`, `ufixed176x32`, `ufixed176x33`, `ufixed176x34`, `ufixed176x35`, `ufixed176x36`, `ufixed176x37`, `ufixed176x38`, `ufixed176x39`, `ufixed176x40`, `ufixed176x41`, `ufixed176x42`, `ufixed176x43`, `ufixed176x44`, `ufixed176x45`, `ufixed176x46`, `ufixed176x47`, `ufixed176x48`, `ufixed176x49`, `ufixed176x50`, `ufixed176x51`, `ufixed176x52`, `ufixed176x53`, `ufixed176x54`, `ufixed176x55`, `ufixed176x56`, `ufixed176x57`, `ufixed176x58`, `ufixed176x59`, `ufixed176x60`, `ufixed176x61`, `ufixed176x62`, `ufixed176x63`, `ufixed176x64`, `ufixed176x65`, `ufixed176x66`, `ufixed176x67`, `ufixed176x68`, `ufixed176x69`, `ufixed176x70`, `ufixed176x71`, `ufixed176x72`, `ufixed176x73`, `ufixed176x74`, `ufixed176x75`, `ufixed176x76`, `ufixed176x77`, `ufixed176x78`, `ufixed176x79`, `ufixed176x80`, `ufixed184x0`, `ufixed184x1`, `ufixed184x2`, `ufixed184x3`, `ufixed184x4`, `ufixed184x5`, `ufixed184x6`, `ufixed184x7`, `ufixed184x8`, `ufixed184x9`, `ufixed184x10`, `ufixed184x11`, `ufixed184x12`, `ufixed184x13`, `ufixed184x14`, `ufixed184x15`, `ufixed184x16`, `ufixed184x17`, `ufixed184x18`, `ufixed184x19`, `ufixed184x20`, `ufixed184x21`, `ufixed184x22`, `ufixed184x23`, `ufixed184x24`, `ufixed184x25`, `ufixed184x26`, `ufixed184x27`, `ufixed184x28`, `ufixed184x29`, `ufixed184x30`, `ufixed184x31`, `ufixed184x32`, `ufixed184x33`, `ufixed184x34`, `ufixed184x35`, `ufixed184x36`, `ufixed184x37`, `ufixed184x38`, `ufixed184x39`, `ufixed184x40`, `ufixed184x41`, `ufixed184x42`, `ufixed184x43`, `ufixed184x44`, `ufixed184x45`, `ufixed184x46`, `ufixed184x47`, `ufixed184x48`, `ufixed184x49`, `ufixed184x50`, `ufixed184x51`, `ufixed184x52`, `ufixed184x53`, `ufixed184x54`, `ufixed184x55`, `ufixed184x56`, `ufixed184x57`, `ufixed184x58`, `ufixed184x59`, `ufixed184x60`, `ufixed184x61`, `ufixed184x62`, `ufixed184x63`, `ufixed184x64`, `ufixed184x65`, `ufixed184x66`, `ufixed184x67`, `ufixed184x68`, `ufixed184x69`, `ufixed184x70`, `ufixed184x71`, `ufixed184x72`, `ufixed192x0`, `ufixed192x1`, `ufixed192x2`, `ufixed192x3`, `ufixed192x4`, `ufixed192x5`, `ufixed192x6`, `ufixed192x7`, `ufixed192x8`, `ufixed192x9`, `ufixed192x10`, `ufixed192x11`, `ufixed192x12`, `ufixed192x13`, `ufixed192x14`, `ufixed192x15`, `ufixed192x16`, `ufixed192x17`, `ufixed192x18`, `ufixed192x19`, `ufixed192x20`, `ufixed192x21`, `ufixed192x22`, `ufixed192x23`, `ufixed192x24`, `ufixed192x25`, `ufixed192x26`, `ufixed192x27`, `ufixed192x28`, `ufixed192x29`, `ufixed192x30`, `ufixed192x31`, `ufixed192x32`, `ufixed192x33`, `ufixed192x34`, `ufixed192x35`, `ufixed192x36`, `ufixed192x37`, `ufixed192x38`, `ufixed192x39`, `ufixed192x40`, `ufixed192x41`, `ufixed192x42`, `ufixed192x43`, `ufixed192x44`, `ufixed192x45`, `ufixed192x46`, `ufixed192x47`, `ufixed192x48`, `ufixed192x49`, `ufixed192x50`, `ufixed192x51`, `ufixed192x52`, `ufixed192x53`, `ufixed192x54`, `ufixed192x55`, `ufixed192x56`, `ufixed192x57`, `ufixed192x58`, `ufixed192x59`, `ufixed192x60`, `ufixed192x61`, 
`ufixed192x62`, `ufixed192x63`, `ufixed192x64`, `ufixed200x0`, `ufixed200x1`, `ufixed200x2`, `ufixed200x3`, `ufixed200x4`, `ufixed200x5`, `ufixed200x6`, `ufixed200x7`, `ufixed200x8`, `ufixed200x9`, `ufixed200x10`, `ufixed200x11`, `ufixed200x12`, `ufixed200x13`, `ufixed200x14`, `ufixed200x15`, `ufixed200x16`, `ufixed200x17`, `ufixed200x18`, `ufixed200x19`, `ufixed200x20`, `ufixed200x21`, `ufixed200x22`, `ufixed200x23`, `ufixed200x24`, `ufixed200x25`, `ufixed200x26`, `ufixed200x27`, `ufixed200x28`, `ufixed200x29`, `ufixed200x30`, `ufixed200x31`, `ufixed200x32`, `ufixed200x33`, `ufixed200x34`, `ufixed200x35`, `ufixed200x36`, `ufixed200x37`, `ufixed200x38`, `ufixed200x39`, `ufixed200x40`, `ufixed200x41`, `ufixed200x42`, `ufixed200x43`, `ufixed200x44`, `ufixed200x45`, `ufixed200x46`, `ufixed200x47`, `ufixed200x48`, `ufixed200x49`, `ufixed200x50`, `ufixed200x51`, `ufixed200x52`, `ufixed200x53`, `ufixed200x54`, `ufixed200x55`, `ufixed200x56`, `ufixed208x0`, `ufixed208x1`, `ufixed208x2`, `ufixed208x3`, `ufixed208x4`, `ufixed208x5`, `ufixed208x6`, `ufixed208x7`, `ufixed208x8`, `ufixed208x9`, `ufixed208x10`, `ufixed208x11`, `ufixed208x12`, `ufixed208x13`, `ufixed208x14`, `ufixed208x15`, `ufixed208x16`, `ufixed208x17`, `ufixed208x18`, `ufixed208x19`, `ufixed208x20`, `ufixed208x21`, `ufixed208x22`, `ufixed208x23`, `ufixed208x24`, `ufixed208x25`, `ufixed208x26`, `ufixed208x27`, `ufixed208x28`, `ufixed208x29`, `ufixed208x30`, `ufixed208x31`, `ufixed208x32`, `ufixed208x33`, `ufixed208x34`, `ufixed208x35`, `ufixed208x36`, `ufixed208x37`, `ufixed208x38`, `ufixed208x39`, `ufixed208x40`, `ufixed208x41`, `ufixed208x42`, `ufixed208x43`, `ufixed208x44`, `ufixed208x45`, `ufixed208x46`, `ufixed208x47`, `ufixed208x48`, `ufixed216x0`, `ufixed216x1`, `ufixed216x2`, `ufixed216x3`, `ufixed216x4`, `ufixed216x5`, `ufixed216x6`, `ufixed216x7`, `ufixed216x8`, `ufixed216x9`, `ufixed216x10`, `ufixed216x11`, `ufixed216x12`, `ufixed216x13`, `ufixed216x14`, `ufixed216x15`, `ufixed216x16`, `ufixed216x17`, `ufixed216x18`, `ufixed216x19`, `ufixed216x20`, `ufixed216x21`, `ufixed216x22`, `ufixed216x23`, `ufixed216x24`, `ufixed216x25`, `ufixed216x26`, `ufixed216x27`, `ufixed216x28`, `ufixed216x29`, `ufixed216x30`, `ufixed216x31`, `ufixed216x32`, `ufixed216x33`, `ufixed216x34`, `ufixed216x35`, `ufixed216x36`, `ufixed216x37`, `ufixed216x38`, `ufixed216x39`, `ufixed216x40`, `ufixed224x0`, `ufixed224x1`, `ufixed224x2`, `ufixed224x3`, `ufixed224x4`, `ufixed224x5`, `ufixed224x6`, `ufixed224x7`, `ufixed224x8`, `ufixed224x9`, `ufixed224x10`, `ufixed224x11`, `ufixed224x12`, `ufixed224x13`, `ufixed224x14`, `ufixed224x15`, `ufixed224x16`, `ufixed224x17`, `ufixed224x18`, `ufixed224x19`, `ufixed224x20`, `ufixed224x21`, `ufixed224x22`, `ufixed224x23`, `ufixed224x24`, `ufixed224x25`, `ufixed224x26`, `ufixed224x27`, `ufixed224x28`, `ufixed224x29`, `ufixed224x30`, `ufixed224x31`, `ufixed224x32`, `ufixed232x0`, `ufixed232x1`, `ufixed232x2`, `ufixed232x3`, `ufixed232x4`, `ufixed232x5`, `ufixed232x6`, `ufixed232x7`, `ufixed232x8`, `ufixed232x9`, `ufixed232x10`, `ufixed232x11`, `ufixed232x12`, `ufixed232x13`, `ufixed232x14`, `ufixed232x15`, `ufixed232x16`, `ufixed232x17`, `ufixed232x18`, `ufixed232x19`, `ufixed232x20`, `ufixed232x21`, `ufixed232x22`, `ufixed232x23`, `ufixed232x24`, `ufixed240x0`, `ufixed240x1`, `ufixed240x2`, `ufixed240x3`, `ufixed240x4`, `ufixed240x5`, `ufixed240x6`, `ufixed240x7`, `ufixed240x8`, `ufixed240x9`, `ufixed240x10`, `ufixed240x11`, `ufixed240x12`, `ufixed240x13`, `ufixed240x14`, `ufixed240x15`, `ufixed240x16`, `ufixed248x0`, 
`ufixed248x1`, `ufixed248x2`, `ufixed248x3`, `ufixed248x4`, `ufixed248x5`, `ufixed248x6`, `ufixed248x7`, `ufixed248x8`, `ufixed256x0`), KeywordType, nil}, + }, + "numbers": { + {`0[xX][0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + }, + "string-parse-common": { + {`\\(u[0-9a-fA-F]{4}|x..|[^x])`, LiteralStringEscape, nil}, + {`[^\\"\'\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + }, + "string-parse-double": { + {`"`, LiteralString, Pop(1)}, + {`'`, LiteralString, nil}, + }, + "string-parse-single": { + {`'`, LiteralString, Pop(1)}, + {`"`, LiteralString, nil}, + }, + "strings": { + {`hex'[0-9a-fA-F]+'`, LiteralString, nil}, + {`hex"[0-9a-fA-F]+"`, LiteralString, nil}, + {`"`, LiteralString, Combined("string-parse-common", "string-parse-double")}, + {`'`, LiteralString, Combined("string-parse-common", "string-parse-single")}, + }, + "whitespace": { + {`\s+`, Text, nil}, + }, + "root": { + Include("comments"), + Include("keywords-types"), + Include("keywords-other"), + Include("numbers"), + Include("strings"), + Include("whitespace"), + {`\+\+|--|\*\*|\?|:|~|&&|\|\||=>|==?|!=?|(<<|>>>?|[-<>+*%&|^/])=?`, Operator, nil}, + {`[{(\[;,]`, Punctuation, nil}, + {`[})\].]`, Punctuation, nil}, + {`(block|msg|now|this|super|tx)\b`, NameBuiltin, nil}, + {`(sender|origin)\b`, NameBuiltin, nil}, + {`(gas|value)\b`, NameBuiltin, nil}, + {`(selfdestruct|suicide)\b`, NameBuiltin, nil}, + {`(balance|send|transfer)\b`, NameBuiltin, nil}, + {`(assert|revert|require)\b`, NameBuiltin, nil}, + {`(call|callcode|delegatecall)\b`, NameBuiltin, nil}, + {`selector\b`, NameBuiltin, nil}, + {`(addmod|ecrecover|keccak256|mulmod|ripemd160|sha256|sha3)\b`, NameFunction, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/sparql.go b/vendor/github.com/alecthomas/chroma/lexers/s/sparql.go new file mode 100644 index 00000000..47a1716b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/sparql.go @@ -0,0 +1,69 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Sparql lexer. 
+var Sparql = internal.Register(MustNewLexer( + &Config{ + Name: "SPARQL", + Aliases: []string{"sparql"}, + Filenames: []string{"*.rq", "*.sparql"}, + MimeTypes: []string{"application/sparql-query"}, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`((?i)select|construct|describe|ask|where|filter|group\s+by|minus|distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|offset|bindings|load|clear|drop|create|add|move|copy|insert\s+data|delete\s+data|delete\s+where|delete|insert|using\s+named|using|graph|default|named|all|optional|service|silent|bind|union|not\s+in|in|as|having|to|prefix|base)\b`, Keyword, nil}, + {`(a)\b`, Keyword, nil}, + {"(<(?:[^<>\"{}|^`\\\\\\x00-\\x20])*>)", NameLabel, nil}, + {`(_:[0-9a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-�_](?:[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-�_\-0-9·̀-ͯ‿-⁀.]*[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-�_\-0-9·̀-ͯ‿-⁀])?)`, NameLabel, nil}, + {`[?$][0-9a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-�_][a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-�_0-9·̀-ͯ‿-⁀]*`, NameVariable, nil}, + {`([a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-�](?:[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-�_\-0-9·̀-ͯ‿-⁀.]*[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-�_\-0-9·̀-ͯ‿-⁀])?)?(\:)((?:[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-�_:0-9]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%]))(?:(?:[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-�_\-0-9·̀-ͯ‿-⁀.:]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%]))*(?:[a-zA-ZÀ-ÖØ-öø-˿Ͱ-ͽͿ-῿‌-‍⁰-↏Ⰰ-⿯、-퟿豈-﷏ﷰ-�_\-0-9·̀-ͯ‿-⁀:]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%])))?)?`, ByGroups(NameNamespace, Punctuation, NameTag), nil}, + {`((?i)str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|contains|strstarts|strends|strbefore|strafter|year|month|day|hours|minutes|seconds|timezone|tz|now|md5|sha1|sha256|sha384|sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|count|sum|min|max|avg|sample|group_concat|separator)\b`, NameFunction, nil}, + {`(true|false)`, KeywordConstant, nil}, + {`[+\-]?(\d+\.\d*[eE][+-]?\d+|\.?\d+[eE][+-]?\d+)`, LiteralNumberFloat, nil}, + {`[+\-]?(\d+\.\d*|\.\d+)`, LiteralNumberFloat, nil}, + {`[+\-]?\d+`, LiteralNumberInteger, nil}, + {`(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)`, Operator, nil}, + {`[(){}.;,:^\[\]]`, Punctuation, nil}, + {`#[^\n]*`, Comment, nil}, + {`"""`, LiteralString, Push("triple-double-quoted-string")}, + {`"`, LiteralString, Push("single-double-quoted-string")}, + {`'''`, LiteralString, Push("triple-single-quoted-string")}, + {`'`, LiteralString, Push("single-single-quoted-string")}, + }, + "triple-double-quoted-string": { + {`"""`, LiteralString, Push("end-of-string")}, + {`[^\\]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "single-double-quoted-string": { + {`"`, LiteralString, Push("end-of-string")}, + {`[^"\\\n]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "triple-single-quoted-string": { + {`'''`, LiteralString, Push("end-of-string")}, + {`[^\\]+`, LiteralString, nil}, + {`\\`, LiteralStringEscape, Push("string-escape")}, + }, + "single-single-quoted-string": { + {`'`, LiteralString, Push("end-of-string")}, + {`[^'\\\n]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "string-escape": { + {`u[0-9A-Fa-f]{4}`, LiteralStringEscape, Pop(1)}, + {`U[0-9A-Fa-f]{8}`, LiteralStringEscape, Pop(1)}, + {`.`, LiteralStringEscape, Pop(1)}, + }, + "end-of-string": { + 
{`(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)`, ByGroups(Operator, NameFunction), Pop(2)}, + {`\^\^`, Operator, Pop(2)}, + Default(Pop(2)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/sql.go b/vendor/github.com/alecthomas/chroma/lexers/s/sql.go new file mode 100644 index 00000000..a9c2bbe1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/sql.go @@ -0,0 +1,49 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Sql lexer. +var Sql = internal.Register(MustNewLexer( + &Config{ + Name: "SQL", + Aliases: []string{"sql"}, + Filenames: []string{"*.sql"}, + MimeTypes: []string{"text/x-sql"}, + NotMultiline: true, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`--.*\n?`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`'`, LiteralStringSingle, Push("string")}, + {`"`, LiteralStringDouble, Push("double-string")}, + {Words(``, `\b`, `ABORT`, `ABS`, `ABSOLUTE`, `ACCESS`, `ADA`, `ADD`, `ADMIN`, `AFTER`, `AGGREGATE`, `ALIAS`, `ALL`, `ALLOCATE`, `ALTER`, `ANALYSE`, `ANALYZE`, `AND`, `ANY`, `ARE`, `AS`, `ASC`, `ASENSITIVE`, `ASSERTION`, `ASSIGNMENT`, `ASYMMETRIC`, `AT`, `ATOMIC`, `AUTHORIZATION`, `AVG`, `BACKWARD`, `BEFORE`, `BEGIN`, `BETWEEN`, `BITVAR`, `BIT_LENGTH`, `BOTH`, `BREADTH`, `BY`, `C`, `CACHE`, `CALL`, `CALLED`, `CARDINALITY`, `CASCADE`, `CASCADED`, `CASE`, `CAST`, `CATALOG`, `CATALOG_NAME`, `CHAIN`, `CHARACTERISTICS`, `CHARACTER_LENGTH`, `CHARACTER_SET_CATALOG`, `CHARACTER_SET_NAME`, `CHARACTER_SET_SCHEMA`, `CHAR_LENGTH`, `CHECK`, `CHECKED`, `CHECKPOINT`, `CLASS`, `CLASS_ORIGIN`, `CLOB`, `CLOSE`, `CLUSTER`, `COALSECE`, `COBOL`, `COLLATE`, `COLLATION`, `COLLATION_CATALOG`, `COLLATION_NAME`, `COLLATION_SCHEMA`, `COLUMN`, `COLUMN_NAME`, `COMMAND_FUNCTION`, `COMMAND_FUNCTION_CODE`, `COMMENT`, `COMMIT`, `COMMITTED`, `COMPLETION`, `CONDITION_NUMBER`, `CONNECT`, `CONNECTION`, `CONNECTION_NAME`, `CONSTRAINT`, `CONSTRAINTS`, `CONSTRAINT_CATALOG`, `CONSTRAINT_NAME`, `CONSTRAINT_SCHEMA`, `CONSTRUCTOR`, `CONTAINS`, `CONTINUE`, `CONVERSION`, `CONVERT`, `COPY`, `CORRESPONTING`, `COUNT`, `CREATE`, `CREATEDB`, `CREATEUSER`, `CROSS`, `CUBE`, `CURRENT`, `CURRENT_DATE`, `CURRENT_PATH`, `CURRENT_ROLE`, `CURRENT_TIME`, `CURRENT_TIMESTAMP`, `CURRENT_USER`, `CURSOR`, `CURSOR_NAME`, `CYCLE`, `DATA`, `DATABASE`, `DATETIME_INTERVAL_CODE`, `DATETIME_INTERVAL_PRECISION`, `DAY`, `DEALLOCATE`, `DECLARE`, `DEFAULT`, `DEFAULTS`, `DEFERRABLE`, `DEFERRED`, `DEFINED`, `DEFINER`, `DELETE`, `DELIMITER`, `DELIMITERS`, `DEREF`, `DESC`, `DESCRIBE`, `DESCRIPTOR`, `DESTROY`, `DESTRUCTOR`, `DETERMINISTIC`, `DIAGNOSTICS`, `DICTIONARY`, `DISCONNECT`, `DISPATCH`, `DISTINCT`, `DO`, `DOMAIN`, `DROP`, `DYNAMIC`, `DYNAMIC_FUNCTION`, `DYNAMIC_FUNCTION_CODE`, `EACH`, `ELSE`, `ELSIF`, `ENCODING`, `ENCRYPTED`, `END`, `END-EXEC`, `EQUALS`, `ESCAPE`, `EVERY`, `EXCEPTION`, `EXCEPT`, `EXCLUDING`, `EXCLUSIVE`, `EXEC`, `EXECUTE`, `EXISTING`, `EXISTS`, `EXPLAIN`, `EXTERNAL`, `EXTRACT`, `FALSE`, `FETCH`, `FINAL`, `FIRST`, `FOR`, `FORCE`, `FOREIGN`, `FORTRAN`, `FORWARD`, `FOUND`, `FREE`, `FREEZE`, `FROM`, `FULL`, `FUNCTION`, `G`, `GENERAL`, `GENERATED`, `GET`, `GLOBAL`, `GO`, `GOTO`, `GRANT`, `GRANTED`, `GROUP`, `GROUPING`, `HANDLER`, `HAVING`, `HIERARCHY`, `HOLD`, `HOST`, `IDENTITY`, `IF`, `IGNORE`, `ILIKE`, `IMMEDIATE`, `IMMUTABLE`, `IMPLEMENTATION`, `IMPLICIT`, `IN`, `INCLUDING`, `INCREMENT`, `INDEX`, `INDITCATOR`, `INFIX`, `INHERITS`, `INITIALIZE`, `INITIALLY`, `INNER`, `INOUT`, 
`INPUT`, `INSENSITIVE`, `INSERT`, `INSTANTIABLE`, `INSTEAD`, `INTERSECT`, `INTO`, `INVOKER`, `IS`, `ISNULL`, `ISOLATION`, `ITERATE`, `JOIN`, `KEY`, `KEY_MEMBER`, `KEY_TYPE`, `LANCOMPILER`, `LANGUAGE`, `LARGE`, `LAST`, `LATERAL`, `LEADING`, `LEFT`, `LENGTH`, `LESS`, `LEVEL`, `LIKE`, `LIMIT`, `LISTEN`, `LOAD`, `LOCAL`, `LOCALTIME`, `LOCALTIMESTAMP`, `LOCATION`, `LOCATOR`, `LOCK`, `LOWER`, `MAP`, `MATCH`, `MAX`, `MAXVALUE`, `MESSAGE_LENGTH`, `MESSAGE_OCTET_LENGTH`, `MESSAGE_TEXT`, `METHOD`, `MIN`, `MINUTE`, `MINVALUE`, `MOD`, `MODE`, `MODIFIES`, `MODIFY`, `MONTH`, `MORE`, `MOVE`, `MUMPS`, `NAMES`, `NATIONAL`, `NATURAL`, `NCHAR`, `NCLOB`, `NEW`, `NEXT`, `NO`, `NOCREATEDB`, `NOCREATEUSER`, `NONE`, `NOT`, `NOTHING`, `NOTIFY`, `NOTNULL`, `NULL`, `NULLABLE`, `NULLIF`, `OBJECT`, `OCTET_LENGTH`, `OF`, `OFF`, `OFFSET`, `OIDS`, `OLD`, `ON`, `ONLY`, `OPEN`, `OPERATION`, `OPERATOR`, `OPTION`, `OPTIONS`, `OR`, `ORDER`, `ORDINALITY`, `OUT`, `OUTER`, `OUTPUT`, `OVERLAPS`, `OVERLAY`, `OVERRIDING`, `OWNER`, `PAD`, `PARAMETER`, `PARAMETERS`, `PARAMETER_MODE`, `PARAMATER_NAME`, `PARAMATER_ORDINAL_POSITION`, `PARAMETER_SPECIFIC_CATALOG`, `PARAMETER_SPECIFIC_NAME`, `PARAMATER_SPECIFIC_SCHEMA`, `PARTIAL`, `PASCAL`, `PENDANT`, `PLACING`, `PLI`, `POSITION`, `POSTFIX`, `PRECISION`, `PREFIX`, `PREORDER`, `PREPARE`, `PRESERVE`, `PRIMARY`, `PRIOR`, `PRIVILEGES`, `PROCEDURAL`, `PROCEDURE`, `PUBLIC`, `READ`, `READS`, `RECHECK`, `RECURSIVE`, `REF`, `REFERENCES`, `REFERENCING`, `REINDEX`, `RELATIVE`, `RENAME`, `REPEATABLE`, `REPLACE`, `RESET`, `RESTART`, `RESTRICT`, `RESULT`, `RETURN`, `RETURNED_LENGTH`, `RETURNED_OCTET_LENGTH`, `RETURNED_SQLSTATE`, `RETURNS`, `REVOKE`, `RIGHT`, `ROLE`, `ROLLBACK`, `ROLLUP`, `ROUTINE`, `ROUTINE_CATALOG`, `ROUTINE_NAME`, `ROUTINE_SCHEMA`, `ROW`, `ROWS`, `ROW_COUNT`, `RULE`, `SAVE_POINT`, `SCALE`, `SCHEMA`, `SCHEMA_NAME`, `SCOPE`, `SCROLL`, `SEARCH`, `SECOND`, `SECURITY`, `SELECT`, `SELF`, `SENSITIVE`, `SERIALIZABLE`, `SERVER_NAME`, `SESSION`, `SESSION_USER`, `SET`, `SETOF`, `SETS`, `SHARE`, `SHOW`, `SIMILAR`, `SIMPLE`, `SIZE`, `SOME`, `SOURCE`, `SPACE`, `SPECIFIC`, `SPECIFICTYPE`, `SPECIFIC_NAME`, `SQL`, `SQLCODE`, `SQLERROR`, `SQLEXCEPTION`, `SQLSTATE`, `SQLWARNINIG`, `STABLE`, `START`, `STATE`, `STATEMENT`, `STATIC`, `STATISTICS`, `STDIN`, `STDOUT`, `STORAGE`, `STRICT`, `STRUCTURE`, `STYPE`, `SUBCLASS_ORIGIN`, `SUBLIST`, `SUBSTRING`, `SUM`, `SYMMETRIC`, `SYSID`, `SYSTEM`, `SYSTEM_USER`, `TABLE`, `TABLE_NAME`, ` TEMP`, `TEMPLATE`, `TEMPORARY`, `TERMINATE`, `THAN`, `THEN`, `TIMESTAMP`, `TIMEZONE_HOUR`, `TIMEZONE_MINUTE`, `TO`, `TOAST`, `TRAILING`, `TRANSATION`, `TRANSACTIONS_COMMITTED`, `TRANSACTIONS_ROLLED_BACK`, `TRANSATION_ACTIVE`, `TRANSFORM`, `TRANSFORMS`, `TRANSLATE`, `TRANSLATION`, `TREAT`, `TRIGGER`, `TRIGGER_CATALOG`, `TRIGGER_NAME`, `TRIGGER_SCHEMA`, `TRIM`, `TRUE`, `TRUNCATE`, `TRUSTED`, `TYPE`, `UNCOMMITTED`, `UNDER`, `UNENCRYPTED`, `UNION`, `UNIQUE`, `UNKNOWN`, `UNLISTEN`, `UNNAMED`, `UNNEST`, `UNTIL`, `UPDATE`, `UPPER`, `USAGE`, `USER`, `USER_DEFINED_TYPE_CATALOG`, `USER_DEFINED_TYPE_NAME`, `USER_DEFINED_TYPE_SCHEMA`, `USING`, `VACUUM`, `VALID`, `VALIDATOR`, `VALUES`, `VARIABLE`, `VERBOSE`, `VERSION`, `VIEW`, `VOLATILE`, `WHEN`, `WHENEVER`, `WHERE`, `WITH`, `WITHOUT`, `WORK`, `WRITE`, `YEAR`, `ZONE`), Keyword, nil}, + {Words(``, `\b`, `ARRAY`, `BIGINT`, `BINARY`, `BIT`, `BLOB`, `BOOLEAN`, `CHAR`, `CHARACTER`, `DATE`, `DEC`, `DECIMAL`, `FLOAT`, `INT`, `INTEGER`, `INTERVAL`, `NUMBER`, `NUMERIC`, `REAL`, `SERIAL`, `SMALLINT`, `VARCHAR`, `VARYING`, `INT8`, `SERIAL8`, `TEXT`), 
NameBuiltin, nil}, + {"[+*/<>=~!@#%^&|`?-]", Operator, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`[a-z_][\w$]*`, Name, nil}, + {`[;:()\[\],.]`, Punctuation, nil}, + }, + "multiline-comments": { + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[^/*]+`, CommentMultiline, nil}, + {`[/*]`, CommentMultiline, nil}, + }, + "string": { + {`[^']+`, LiteralStringSingle, nil}, + {`''`, LiteralStringSingle, nil}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "double-string": { + {`[^"]+`, LiteralStringDouble, nil}, + {`""`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/squid.go b/vendor/github.com/alecthomas/chroma/lexers/s/squid.go new file mode 100644 index 00000000..1f161901 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/squid.go @@ -0,0 +1,38 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Squidconf lexer. +var Squidconf = internal.Register(MustNewLexer( + &Config{ + Name: "SquidConf", + Aliases: []string{"squidconf", "squid.conf", "squid"}, + Filenames: []string{"squid.conf"}, + MimeTypes: []string{"text/x-squidconf"}, + NotMultiline: true, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`\s+`, TextWhitespace, nil}, + {`#`, Comment, Push("comment")}, + {Words(`\b`, `\b`, `access_log`, `acl`, `always_direct`, `announce_host`, `announce_period`, `announce_port`, `announce_to`, `anonymize_headers`, `append_domain`, `as_whois_server`, `auth_param_basic`, `authenticate_children`, `authenticate_program`, `authenticate_ttl`, `broken_posts`, `buffered_logs`, `cache_access_log`, `cache_announce`, `cache_dir`, `cache_dns_program`, `cache_effective_group`, `cache_effective_user`, `cache_host`, `cache_host_acl`, `cache_host_domain`, `cache_log`, `cache_mem`, `cache_mem_high`, `cache_mem_low`, `cache_mgr`, `cachemgr_passwd`, `cache_peer`, `cache_peer_access`, `cahce_replacement_policy`, `cache_stoplist`, `cache_stoplist_pattern`, `cache_store_log`, `cache_swap`, `cache_swap_high`, `cache_swap_log`, `cache_swap_low`, `client_db`, `client_lifetime`, `client_netmask`, `connect_timeout`, `coredump_dir`, `dead_peer_timeout`, `debug_options`, `delay_access`, `delay_class`, `delay_initial_bucket_level`, `delay_parameters`, `delay_pools`, `deny_info`, `dns_children`, `dns_defnames`, `dns_nameservers`, `dns_testnames`, `emulate_httpd_log`, `err_html_text`, `fake_user_agent`, `firewall_ip`, `forwarded_for`, `forward_snmpd_port`, `fqdncache_size`, `ftpget_options`, `ftpget_program`, `ftp_list_width`, `ftp_passive`, `ftp_user`, `half_closed_clients`, `header_access`, `header_replace`, `hierarchy_stoplist`, `high_response_time_warning`, `high_page_fault_warning`, `hosts_file`, `htcp_port`, `http_access`, `http_anonymizer`, `httpd_accel`, `httpd_accel_host`, `httpd_accel_port`, `httpd_accel_uses_host_header`, `httpd_accel_with_proxy`, `http_port`, `http_reply_access`, `icp_access`, `icp_hit_stale`, `icp_port`, `icp_query_timeout`, `ident_lookup`, `ident_lookup_access`, `ident_timeout`, `incoming_http_average`, `incoming_icp_average`, `inside_firewall`, `ipcache_high`, `ipcache_low`, `ipcache_size`, `local_domain`, `local_ip`, `logfile_rotate`, `log_fqdn`, `log_icp_queries`, `log_mime_hdrs`, `maximum_object_size`, `maximum_single_addr_tries`, `mcast_groups`, `mcast_icp_query_timeout`, `mcast_miss_addr`, `mcast_miss_encode_key`, `mcast_miss_port`, `memory_pools`, 
`memory_pools_limit`, `memory_replacement_policy`, `mime_table`, `min_http_poll_cnt`, `min_icp_poll_cnt`, `minimum_direct_hops`, `minimum_object_size`, `minimum_retry_timeout`, `miss_access`, `negative_dns_ttl`, `negative_ttl`, `neighbor_timeout`, `neighbor_type_domain`, `netdb_high`, `netdb_low`, `netdb_ping_period`, `netdb_ping_rate`, `never_direct`, `no_cache`, `passthrough_proxy`, `pconn_timeout`, `pid_filename`, `pinger_program`, `positive_dns_ttl`, `prefer_direct`, `proxy_auth`, `proxy_auth_realm`, `query_icmp`, `quick_abort`, `quick_abort_max`, `quick_abort_min`, `quick_abort_pct`, `range_offset_limit`, `read_timeout`, `redirect_children`, `redirect_program`, `redirect_rewrites_host_header`, `reference_age`, `refresh_pattern`, `reload_into_ims`, `request_body_max_size`, `request_size`, `request_timeout`, `shutdown_lifetime`, `single_parent_bypass`, `siteselect_timeout`, `snmp_access`, `snmp_incoming_address`, `snmp_port`, `source_ping`, `ssl_proxy`, `store_avg_object_size`, `store_objects_per_bucket`, `strip_query_terms`, `swap_level1_dirs`, `swap_level2_dirs`, `tcp_incoming_address`, `tcp_outgoing_address`, `tcp_recv_bufsize`, `test_reachability`, `udp_hit_obj`, `udp_hit_obj_size`, `udp_incoming_address`, `udp_outgoing_address`, `unique_hostname`, `unlinkd_program`, `uri_whitespace`, `useragent_log`, `visible_hostname`, `wais_relay`, `wais_relay_host`, `wais_relay_port`), Keyword, nil}, + {Words(`\b`, `\b`, `proxy-only`, `weight`, `ttl`, `no-query`, `default`, `round-robin`, `multicast-responder`, `on`, `off`, `all`, `deny`, `allow`, `via`, `parent`, `no-digest`, `heap`, `lru`, `realm`, `children`, `q1`, `q2`, `credentialsttl`, `none`, `disable`, `offline_toggle`, `diskd`), NameConstant, nil}, + {Words(`\b`, `\b`, `shutdown`, `info`, `parameter`, `server_list`, `client_list`, `squid.conf`), LiteralString, nil}, + {Words(`stats/`, `\b`, `objects`, `vm_objects`, `utilization`, `ipcache`, `fqdncache`, `dns`, `redirector`, `io`, `reply_headers`, `filedescriptors`, `netdb`), LiteralString, nil}, + {Words(`log/`, `=`, `status`, `enable`, `disable`, `clear`), LiteralString, nil}, + {Words(`\b`, `\b`, `url_regex`, `urlpath_regex`, `referer_regex`, `port`, `proto`, `req_mime_type`, `rep_mime_type`, `method`, `browser`, `user`, `src`, `dst`, `time`, `dstdomain`, `ident`, `snmp_community`), Keyword, nil}, + {`(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|:(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?|[<&?](?=\\w)|(?<=\\w)[>!?]", Punctuation, nil}, + {`[/=\-+!*%<>&|^?~]+`, Operator, nil}, + {`[a-zA-Z_]\w*`, Name, nil}, + }, + "keywords": { + {Words(``, `\b`, `as`, `break`, `case`, `catch`, `continue`, `default`, `defer`, `do`, `else`, `fallthrough`, `for`, `guard`, `if`, `in`, `is`, `repeat`, `return`, `#selector`, `switch`, `throw`, `try`, `where`, `while`), Keyword, nil}, + {`@availability\([^)]+\)`, KeywordReserved, nil}, + {Words(``, `\b`, `associativity`, `convenience`, `dynamic`, `didSet`, `final`, `get`, `indirect`, `infix`, `inout`, `lazy`, `left`, `mutating`, `none`, `nonmutating`, `optional`, `override`, `postfix`, `precedence`, `prefix`, `Protocol`, `required`, `rethrows`, `right`, `set`, `throws`, `Type`, `unowned`, `weak`, `willSet`, `@availability`, `@autoclosure`, `@noreturn`, `@NSApplicationMain`, `@NSCopying`, `@NSManaged`, `@objc`, `@UIApplicationMain`, `@IBAction`, `@IBDesignable`, `@IBInspectable`, `@IBOutlet`), KeywordReserved, nil}, 
+ {`(as|dynamicType|false|is|nil|self|Self|super|true|__COLUMN__|__FILE__|__FUNCTION__|__LINE__|_|#(?:file|line|column|function))\b`, KeywordConstant, nil}, + {`import\b`, KeywordDeclaration, Push("module")}, + {`(class|enum|extension|struct|protocol)(\s+)([a-zA-Z_]\w*)`, ByGroups(KeywordDeclaration, Text, NameClass), nil}, + {`(func)(\s+)([a-zA-Z_]\w*)`, ByGroups(KeywordDeclaration, Text, NameFunction), nil}, + {`(var|let)(\s+)([a-zA-Z_]\w*)`, ByGroups(KeywordDeclaration, Text, NameVariable), nil}, + {Words(``, `\b`, `class`, `deinit`, `enum`, `extension`, `func`, `import`, `init`, `internal`, `let`, `operator`, `private`, `protocol`, `public`, `static`, `struct`, `subscript`, `typealias`, `var`), KeywordDeclaration, nil}, + }, + "comment": { + {`:param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):`, CommentSpecial, nil}, + }, + "comment-single": { + {`\n`, Text, Pop(1)}, + Include("comment"), + {`[^\n]`, CommentSingle, nil}, + }, + "comment-multi": { + Include("comment"), + {`[^*/]`, CommentMultiline, nil}, + {`/\*`, CommentMultiline, Push()}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[*/]`, CommentMultiline, nil}, + }, + "module": { + {`\n`, Text, Pop(1)}, + {`[a-zA-Z_]\w*`, NameClass, nil}, + Include("root"), + }, + "preproc": { + {`\n`, Text, Pop(1)}, + Include("keywords"), + {`[A-Za-z]\w*`, CommentPreproc, nil}, + Include("root"), + }, + "string": { + {`\\\(`, LiteralStringInterpol, Push("string-intp")}, + {`"`, LiteralString, Pop(1)}, + {`\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}`, LiteralStringEscape, nil}, + {`[^\\"]+`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + }, + "string-intp": { + {`\(`, LiteralStringInterpol, Push()}, + {`\)`, LiteralStringInterpol, Pop(1)}, + Include("root"), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/s/systemverilog.go b/vendor/github.com/alecthomas/chroma/lexers/s/systemverilog.go new file mode 100644 index 00000000..21021a29 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/s/systemverilog.go @@ -0,0 +1,72 @@ +package s + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Systemverilog lexer. 
+var Systemverilog = internal.Register(MustNewLexer( + &Config{ + Name: "systemverilog", + Aliases: []string{"systemverilog", "sv"}, + Filenames: []string{"*.sv", "*.svh"}, + MimeTypes: []string{"text/x-systemverilog"}, + EnsureNL: true, + }, + Rules{ + "root": { + {"^\\s*`define", CommentPreproc, Push("macro")}, + {`^(\s*)(package)(\s+)`, ByGroups(Text, KeywordNamespace, Text), nil}, + {`^(\s*)(import)(\s+)`, ByGroups(Text, KeywordNamespace, Text), Push("import")}, + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + {`/(\\\n)?/(\n|(.|\n)*?[^\\]\n)`, CommentSingle, nil}, + {`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil}, + {`[{}#@]`, Punctuation, nil}, + {`L?"`, LiteralString, Push("string")}, + {`L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'`, LiteralStringChar, nil}, + {`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?`, LiteralNumberFloat, nil}, + {`(\d+\.\d*|\.\d+|\d+[fF])[fF]?`, LiteralNumberFloat, nil}, + {`([0-9]+)|(\'h)[0-9a-fA-F]+`, LiteralNumberHex, nil}, + {`([0-9]+)|(\'b)[01]+`, LiteralNumberBin, nil}, + {`([0-9]+)|(\'d)[0-9]+`, LiteralNumberInteger, nil}, + {`([0-9]+)|(\'o)[0-7]+`, LiteralNumberOct, nil}, + {`\'[01xz]`, LiteralNumber, nil}, + {`\d+[Ll]?`, LiteralNumberInteger, nil}, + {`\*/`, Error, nil}, + {`[~!%^&*+=|?:<>/-]`, Operator, nil}, + {`[()\[\],.;\']`, Punctuation, nil}, + {"`[a-zA-Z_]\\w*", NameConstant, nil}, + {Words(``, `\b`, `accept_on`, `alias`, `always`, `always_comb`, `always_ff`, `always_latch`, `and`, `assert`, `assign`, `assume`, `automatic`, `before`, `begin`, `bind`, `bins`, `binsof`, `bit`, `break`, `buf`, `bufif0`, `bufif1`, `byte`, `case`, `casex`, `casez`, `cell`, `chandle`, `checker`, `class`, `clocking`, `cmos`, `config`, `const`, `constraint`, `context`, `continue`, `cover`, `covergroup`, `coverpoint`, `cross`, `deassign`, `default`, `defparam`, `design`, `disable`, `dist`, `do`, `edge`, `else`, `end`, `endcase`, `endchecker`, `endclass`, `endclocking`, `endconfig`, `endfunction`, `endgenerate`, `endgroup`, `endinterface`, `endmodule`, `endpackage`, `endprimitive`, `endprogram`, `endproperty`, `endsequence`, `endspecify`, `endtable`, `endtask`, `enum`, `event`, `eventually`, `expect`, `export`, `extends`, `extern`, `final`, `first_match`, `for`, `force`, `foreach`, `forever`, `fork`, `forkjoin`, `function`, `generate`, `genvar`, `global`, `highz0`, `highz1`, `if`, `iff`, `ifnone`, `ignore_bins`, `illegal_bins`, `implies`, `import`, `incdir`, `include`, `initial`, `inout`, `input`, `inside`, `instance`, `int`, `integer`, `interface`, `intersect`, `join`, `join_any`, `join_none`, `large`, `let`, `liblist`, `library`, `local`, `localparam`, `logic`, `longint`, `macromodule`, `matches`, `medium`, `modport`, `module`, `nand`, `negedge`, `new`, `nexttime`, `nmos`, `nor`, `noshowcancelled`, `not`, `notif0`, `notif1`, `null`, `or`, `output`, `package`, `packed`, `parameter`, `pmos`, `posedge`, `primitive`, `priority`, `program`, `property`, `protected`, `pull0`, `pull1`, `pulldown`, `pullup`, `pulsestyle_ondetect`, `pulsestyle_onevent`, `pure`, `rand`, `randc`, `randcase`, `randsequence`, `rcmos`, `real`, `realtime`, `ref`, `reg`, `reject_on`, `release`, `repeat`, `restrict`, `return`, `rnmos`, `rpmos`, `rtran`, `rtranif0`, `rtranif1`, `s_always`, `s_eventually`, `s_nexttime`, `s_until`, `s_until_with`, `scalared`, `sequence`, `shortint`, `shortreal`, `showcancelled`, `signed`, `small`, `solve`, `specify`, `specparam`, `static`, `string`, `strong`, `strong0`, `strong1`, `struct`, `super`, `supply0`, `supply1`, `sync_accept_on`, 
`sync_reject_on`, `table`, `tagged`, `task`, `this`, `throughout`, `time`, `timeprecision`, `timeunit`, `tran`, `tranif0`, `tranif1`, `tri`, `tri0`, `tri1`, `triand`, `trior`, `trireg`, `type`, `typedef`, `union`, `unique`, `unique0`, `unsigned`, `until`, `until_with`, `untyped`, `use`, `uwire`, `var`, `vectored`, `virtual`, `void`, `wait`, `wait_order`, `wand`, `weak`, `weak0`, `weak1`, `while`, `wildcard`, `wire`, `with`, `within`, `wor`, `xnor`, `xor`), Keyword, nil}, + {Words(``, `\b`, "`__FILE__", "`__LINE__", "`begin_keywords", "`celldefine", "`default_nettype", "`define", "`else", "`elsif", "`end_keywords", "`endcelldefine", "`endif", "`ifdef", "`ifndef", "`include", "`line", "`nounconnected_drive", "`pragma", "`resetall", "`timescale", "`unconnected_drive", "`undef", "`undefineall"), CommentPreproc, nil}, + {Words(``, `\b`, `$display`, `$displayb`, `$displayh`, `$displayo`, `$dumpall`, `$dumpfile`, `$dumpflush`, `$dumplimit`, `$dumpoff`, `$dumpon`, `$dumpports`, `$dumpportsall`, `$dumpportsflush`, `$dumpportslimit`, `$dumpportsoff`, `$dumpportson`, `$dumpvars`, `$fclose`, `$fdisplay`, `$fdisplayb`, `$fdisplayh`, `$fdisplayo`, `$feof`, `$ferror`, `$fflush`, `$fgetc`, `$fgets`, `$finish`, `$fmonitor`, `$fmonitorb`, `$fmonitorh`, `$fmonitoro`, `$fopen`, `$fread`, `$fscanf`, `$fseek`, `$fstrobe`, `$fstrobeb`, `$fstrobeh`, `$fstrobeo`, `$ftell`, `$fwrite`, `$fwriteb`, `$fwriteh`, `$fwriteo`, `$monitor`, `$monitorb`, `$monitorh`, `$monitoro`, `$monitoroff`, `$monitoron`, `$plusargs`, `$random`, `$readmemb`, `$readmemh`, `$rewind`, `$sformat`, `$sformatf`, `$sscanf`, `$strobe`, `$strobeb`, `$strobeh`, `$strobeo`, `$swrite`, `$swriteb`, `$swriteh`, `$swriteo`, `$test`, `$ungetc`, `$value$plusargs`, `$write`, `$writeb`, `$writeh`, `$writememb`, `$writememh`, `$writeo`), NameBuiltin, nil}, + {`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")}, + {Words(``, `\b`, `byte`, `shortint`, `int`, `longint`, `integer`, `time`, `bit`, `logic`, `reg`, `supply0`, `supply1`, `tri`, `triand`, `trior`, `tri0`, `tri1`, `trireg`, `uwire`, `wire`, `wand`, `woshortreal`, `real`, `realtime`), KeywordType, nil}, + {`[a-zA-Z_]\w*:(?!:)`, NameLabel, nil}, + {`\$?[a-zA-Z_]\w*`, Name, nil}, + }, + "classname": { + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + }, + "string": { + {`"`, LiteralString, Pop(1)}, + {`\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})`, LiteralStringEscape, nil}, + {`[^\\"\n]+`, LiteralString, nil}, + {`\\\n`, LiteralString, nil}, + {`\\`, LiteralString, nil}, + }, + "macro": { + {`[^/\n]+`, CommentPreproc, nil}, + {`/[*](.|\n)*?[*]/`, CommentMultiline, nil}, + {`//.*?\n`, CommentSingle, Pop(1)}, + {`/`, CommentPreproc, nil}, + {`(?<=\\)\n`, CommentPreproc, nil}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "import": { + {`[\w:]+\*?`, NameNamespace, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/tasm.go b/vendor/github.com/alecthomas/chroma/lexers/t/tasm.go new file mode 100644 index 00000000..960399ca --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/tasm.go @@ -0,0 +1,61 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Tasm lexer. 
+var Tasm = internal.Register(MustNewLexer( + &Config{ + Name: "TASM", + Aliases: []string{"tasm"}, + Filenames: []string{"*.asm", "*.ASM", "*.tasm"}, + MimeTypes: []string{"text/x-tasm"}, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`^\s*%`, CommentPreproc, Push("preproc")}, + Include("whitespace"), + {`[@a-z$._?][\w$.?#@~]*:`, NameLabel, nil}, + {`BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|ORG|ALIGN|STRUC|ENDSTRUC|ENDS|COMMON|CPU|GROUP|UPPERCASE|INCLUDE|EXPORT|LIBRARY|MODULE|PROC|ENDP|USES|ARG|DATASEG|UDATASEG|END|IDEAL|P386|MODEL|ASSUME|CODESEG|SIZE`, Keyword, Push("instruction-args")}, + {`([@a-z$._?][\w$.?#@~]*)(\s+)(db|dd|dw|T[A-Z][a-z]+)`, ByGroups(NameConstant, KeywordDeclaration, KeywordDeclaration), Push("instruction-args")}, + {`(?:res|d)[bwdqt]|times`, KeywordDeclaration, Push("instruction-args")}, + {`[@a-z$._?][\w$.?#@~]*`, NameFunction, Push("instruction-args")}, + {`[\r\n]+`, Text, nil}, + }, + "instruction-args": { + {"\"(\\\\\"|[^\"\\n])*\"|'(\\\\'|[^'\\n])*'|`(\\\\`|[^`\\n])*`", LiteralString, nil}, + {`(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)`, LiteralNumberHex, nil}, + {`[0-7]+q`, LiteralNumberOct, nil}, + {`[01]+b`, LiteralNumberBin, nil}, + {`[0-9]+\.e?[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + Include("punctuation"), + {`r[0-9][0-5]?[bwd]|[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]`, NameBuiltin, nil}, + {`[@a-z$._?][\w$.?#@~]*`, NameVariable, nil}, + {`(\\\s*)(;.*)([\r\n])`, ByGroups(Text, CommentSingle, Text), nil}, + {`[\r\n]+`, Text, Pop(1)}, + Include("whitespace"), + }, + "preproc": { + {`[^;\n]+`, CommentPreproc, nil}, + {`;.*?\n`, CommentSingle, Pop(1)}, + {`\n`, CommentPreproc, Pop(1)}, + }, + "whitespace": { + {`[\n\r]`, Text, nil}, + {`\\[\n\r]`, Text, nil}, + {`[ \t]+`, Text, nil}, + {`;.*`, CommentSingle, nil}, + }, + "punctuation": { + {`[,():\[\]]+`, Punctuation, nil}, + {`[&|^<>+*=/%~-]+`, Operator, nil}, + {`[$]+`, KeywordConstant, nil}, + {`seg|wrt|strict`, OperatorWord, nil}, + {`byte|[dq]?word`, KeywordType, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/tcl.go b/vendor/github.com/alecthomas/chroma/lexers/t/tcl.go new file mode 100644 index 00000000..77951b40 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/tcl.go @@ -0,0 +1,116 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Tcl lexer. 
+var Tcl = internal.Register(MustNewLexer( + &Config{ + Name: "Tcl", + Aliases: []string{"tcl"}, + Filenames: []string{"*.tcl", "*.rvt"}, + MimeTypes: []string{"text/x-tcl", "text/x-script.tcl", "application/x-tcl"}, + }, + Rules{ + "root": { + Include("command"), + Include("basic"), + Include("data"), + {`\}`, Keyword, nil}, + }, + "command": { + {Words(`\b`, `\b`, `after`, `apply`, `array`, `break`, `catch`, `continue`, `elseif`, `else`, `error`, `eval`, `expr`, `for`, `foreach`, `global`, `if`, `namespace`, `proc`, `rename`, `return`, `set`, `switch`, `then`, `trace`, `unset`, `update`, `uplevel`, `upvar`, `variable`, `vwait`, `while`), Keyword, Push("params")}, + {Words(`\b`, `\b`, `append`, `bgerror`, `binary`, `cd`, `chan`, `clock`, `close`, `concat`, `dde`, `dict`, `encoding`, `eof`, `exec`, `exit`, `fblocked`, `fconfigure`, `fcopy`, `file`, `fileevent`, `flush`, `format`, `gets`, `glob`, `history`, `http`, `incr`, `info`, `interp`, `join`, `lappend`, `lassign`, `lindex`, `linsert`, `list`, `llength`, `load`, `loadTk`, `lrange`, `lrepeat`, `lreplace`, `lreverse`, `lsearch`, `lset`, `lsort`, `mathfunc`, `mathop`, `memory`, `msgcat`, `open`, `package`, `pid`, `pkg::create`, `pkg_mkIndex`, `platform`, `platform::shell`, `puts`, `pwd`, `re_syntax`, `read`, `refchan`, `regexp`, `registry`, `regsub`, `scan`, `seek`, `socket`, `source`, `split`, `string`, `subst`, `tell`, `time`, `tm`, `unknown`, `unload`), NameBuiltin, Push("params")}, + {`([\w.-]+)`, NameVariable, Push("params")}, + {`#`, Comment, Push("comment")}, + }, + "command-in-brace": { + {Words(`\b`, `\b`, `after`, `apply`, `array`, `break`, `catch`, `continue`, `elseif`, `else`, `error`, `eval`, `expr`, `for`, `foreach`, `global`, `if`, `namespace`, `proc`, `rename`, `return`, `set`, `switch`, `then`, `trace`, `unset`, `update`, `uplevel`, `upvar`, `variable`, `vwait`, `while`), Keyword, Push("params-in-brace")}, + {Words(`\b`, `\b`, `append`, `bgerror`, `binary`, `cd`, `chan`, `clock`, `close`, `concat`, `dde`, `dict`, `encoding`, `eof`, `exec`, `exit`, `fblocked`, `fconfigure`, `fcopy`, `file`, `fileevent`, `flush`, `format`, `gets`, `glob`, `history`, `http`, `incr`, `info`, `interp`, `join`, `lappend`, `lassign`, `lindex`, `linsert`, `list`, `llength`, `load`, `loadTk`, `lrange`, `lrepeat`, `lreplace`, `lreverse`, `lsearch`, `lset`, `lsort`, `mathfunc`, `mathop`, `memory`, `msgcat`, `open`, `package`, `pid`, `pkg::create`, `pkg_mkIndex`, `platform`, `platform::shell`, `puts`, `pwd`, `re_syntax`, `read`, `refchan`, `regexp`, `registry`, `regsub`, `scan`, `seek`, `socket`, `source`, `split`, `string`, `subst`, `tell`, `time`, `tm`, `unknown`, `unload`), NameBuiltin, Push("params-in-brace")}, + {`([\w.-]+)`, NameVariable, Push("params-in-brace")}, + {`#`, Comment, Push("comment")}, + }, + "command-in-bracket": { + {Words(`\b`, `\b`, `after`, `apply`, `array`, `break`, `catch`, `continue`, `elseif`, `else`, `error`, `eval`, `expr`, `for`, `foreach`, `global`, `if`, `namespace`, `proc`, `rename`, `return`, `set`, `switch`, `then`, `trace`, `unset`, `update`, `uplevel`, `upvar`, `variable`, `vwait`, `while`), Keyword, Push("params-in-bracket")}, + {Words(`\b`, `\b`, `append`, `bgerror`, `binary`, `cd`, `chan`, `clock`, `close`, `concat`, `dde`, `dict`, `encoding`, `eof`, `exec`, `exit`, `fblocked`, `fconfigure`, `fcopy`, `file`, `fileevent`, `flush`, `format`, `gets`, `glob`, `history`, `http`, `incr`, `info`, `interp`, `join`, `lappend`, `lassign`, `lindex`, `linsert`, `list`, `llength`, `load`, `loadTk`, `lrange`, `lrepeat`, 
`lreplace`, `lreverse`, `lsearch`, `lset`, `lsort`, `mathfunc`, `mathop`, `memory`, `msgcat`, `open`, `package`, `pid`, `pkg::create`, `pkg_mkIndex`, `platform`, `platform::shell`, `puts`, `pwd`, `re_syntax`, `read`, `refchan`, `regexp`, `registry`, `regsub`, `scan`, `seek`, `socket`, `source`, `split`, `string`, `subst`, `tell`, `time`, `tm`, `unknown`, `unload`), NameBuiltin, Push("params-in-bracket")}, + {`([\w.-]+)`, NameVariable, Push("params-in-bracket")}, + {`#`, Comment, Push("comment")}, + }, + "command-in-paren": { + {Words(`\b`, `\b`, `after`, `apply`, `array`, `break`, `catch`, `continue`, `elseif`, `else`, `error`, `eval`, `expr`, `for`, `foreach`, `global`, `if`, `namespace`, `proc`, `rename`, `return`, `set`, `switch`, `then`, `trace`, `unset`, `update`, `uplevel`, `upvar`, `variable`, `vwait`, `while`), Keyword, Push("params-in-paren")}, + {Words(`\b`, `\b`, `append`, `bgerror`, `binary`, `cd`, `chan`, `clock`, `close`, `concat`, `dde`, `dict`, `encoding`, `eof`, `exec`, `exit`, `fblocked`, `fconfigure`, `fcopy`, `file`, `fileevent`, `flush`, `format`, `gets`, `glob`, `history`, `http`, `incr`, `info`, `interp`, `join`, `lappend`, `lassign`, `lindex`, `linsert`, `list`, `llength`, `load`, `loadTk`, `lrange`, `lrepeat`, `lreplace`, `lreverse`, `lsearch`, `lset`, `lsort`, `mathfunc`, `mathop`, `memory`, `msgcat`, `open`, `package`, `pid`, `pkg::create`, `pkg_mkIndex`, `platform`, `platform::shell`, `puts`, `pwd`, `re_syntax`, `read`, `refchan`, `regexp`, `registry`, `regsub`, `scan`, `seek`, `socket`, `source`, `split`, `string`, `subst`, `tell`, `time`, `tm`, `unknown`, `unload`), NameBuiltin, Push("params-in-paren")}, + {`([\w.-]+)`, NameVariable, Push("params-in-paren")}, + {`#`, Comment, Push("comment")}, + }, + "basic": { + {`\(`, Keyword, Push("paren")}, + {`\[`, Keyword, Push("bracket")}, + {`\{`, Keyword, Push("brace")}, + {`"`, LiteralStringDouble, Push("string")}, + {`(eq|ne|in|ni)\b`, OperatorWord, nil}, + {`!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]`, Operator, nil}, + }, + "data": { + {`\s+`, Text, nil}, + {`0x[a-fA-F0-9]+`, LiteralNumberHex, nil}, + {`0[0-7]+`, LiteralNumberOct, nil}, + {`\d+\.\d+`, LiteralNumberFloat, nil}, + {`\d+`, LiteralNumberInteger, nil}, + {`\$([\w.:-]+)`, NameVariable, nil}, + {`([\w.:-]+)`, Text, nil}, + }, + "params": { + {`;`, Keyword, Pop(1)}, + {`\n`, Text, Pop(1)}, + {`(else|elseif|then)\b`, Keyword, nil}, + Include("basic"), + Include("data"), + }, + "params-in-brace": { + {`\}`, Keyword, Push("#pop", "#pop")}, + Include("params"), + }, + "params-in-paren": { + {`\)`, Keyword, Push("#pop", "#pop")}, + Include("params"), + }, + "params-in-bracket": { + {`\]`, Keyword, Push("#pop", "#pop")}, + Include("params"), + }, + "string": { + {`\[`, LiteralStringDouble, Push("string-square")}, + {`(?s)(\\\\|\\[0-7]+|\\.|[^"\\])`, LiteralStringDouble, nil}, + {`"`, LiteralStringDouble, Pop(1)}, + }, + "string-square": { + {`\[`, LiteralStringDouble, Push("string-square")}, + {`(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])`, LiteralStringDouble, nil}, + {`\]`, LiteralStringDouble, Pop(1)}, + }, + "brace": { + {`\}`, Keyword, Pop(1)}, + Include("command-in-brace"), + Include("basic"), + Include("data"), + }, + "paren": { + {`\)`, Keyword, Pop(1)}, + Include("command-in-paren"), + Include("basic"), + Include("data"), + }, + "bracket": { + {`\]`, Keyword, Pop(1)}, + Include("command-in-bracket"), + Include("basic"), + Include("data"), + }, + "comment": { + {`.*[^\\]\n`, Comment, Pop(1)}, + {`.*\\\n`, Comment, nil}, + }, + }, +)) diff --git 
a/vendor/github.com/alecthomas/chroma/lexers/t/tcsh.go b/vendor/github.com/alecthomas/chroma/lexers/t/tcsh.go new file mode 100644 index 00000000..e36bdfd8 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/tcsh.go @@ -0,0 +1,59 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Tcsh lexer. +var Tcsh = internal.Register(MustNewLexer( + &Config{ + Name: "Tcsh", + Aliases: []string{"tcsh", "csh"}, + Filenames: []string{"*.tcsh", "*.csh"}, + MimeTypes: []string{"application/x-csh"}, + }, + Rules{ + "root": { + Include("basic"), + {`\$\(`, Keyword, Push("paren")}, + {`\$\{#?`, Keyword, Push("curly")}, + {"`", LiteralStringBacktick, Push("backticks")}, + Include("data"), + }, + "basic": { + {`\b(if|endif|else|while|then|foreach|case|default|continue|goto|breaksw|end|switch|endsw)\s*\b`, Keyword, nil}, + {`\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|glob|getspath|hashstat|history|hup|inlib|jobs|kill|limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|source|stop|suspend|source|suspend|telltc|time|umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|ver|wait|warp|watchlog|where|which)\s*\b`, NameBuiltin, nil}, + {`#.*`, Comment, nil}, + {`\\[\w\W]`, LiteralStringEscape, nil}, + {`(\b\w+)(\s*)(=)`, ByGroups(NameVariable, Text, Operator), nil}, + {`[\[\]{}()=]+`, Operator, nil}, + {`<<\s*(\'?)\\?(\w+)[\w\W]+?\2`, LiteralString, nil}, + {`;`, Punctuation, nil}, + }, + "data": { + {`(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"`, LiteralStringDouble, nil}, + {`(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'`, LiteralStringSingle, nil}, + {`\s+`, Text, nil}, + {"[^=\\s\\[\\]{}()$\"\\'`\\\\;#]+", Text, nil}, + {`\d+(?= |\Z)`, LiteralNumber, nil}, + {`\$#?(\w+|.)`, NameVariable, nil}, + }, + "curly": { + {`\}`, Keyword, Pop(1)}, + {`:-`, Keyword, nil}, + {`\w+`, NameVariable, nil}, + {"[^}:\"\\'`$]+", Punctuation, nil}, + {`:`, Punctuation, nil}, + Include("root"), + }, + "paren": { + {`\)`, Keyword, Pop(1)}, + Include("root"), + }, + "backticks": { + {"`", LiteralStringBacktick, Pop(1)}, + Include("root"), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/termcap.go b/vendor/github.com/alecthomas/chroma/lexers/t/termcap.go new file mode 100644 index 00000000..21b7d15c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/termcap.go @@ -0,0 +1,42 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Termcap lexer. 
+var Termcap = internal.Register(MustNewLexer( + &Config{ + Name: "Termcap", + Aliases: []string{"termcap"}, + Filenames: []string{"termcap", "termcap.src"}, + MimeTypes: []string{}, + }, + Rules{ + "root": { + {`^#.*$`, Comment, nil}, + {`^[^\s#:|]+`, NameTag, Push("names")}, + }, + "names": { + {`\n`, Text, Pop(1)}, + {`:`, Punctuation, Push("defs")}, + {`\|`, Punctuation, nil}, + {`[^:|]+`, NameAttribute, nil}, + }, + "defs": { + {`\\\n[ \t]*`, Text, nil}, + {`\n[ \t]*`, Text, Pop(2)}, + {`(#)([0-9]+)`, ByGroups(Operator, LiteralNumber), nil}, + {`=`, Operator, Push("data")}, + {`:`, Punctuation, nil}, + {`[^\s:=#]+`, NameClass, nil}, + }, + "data": { + {`\\072`, Literal, nil}, + {`:`, Punctuation, Pop(1)}, + {`[^:\\]+`, Literal, nil}, + {`.`, Literal, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/terminfo.go b/vendor/github.com/alecthomas/chroma/lexers/t/terminfo.go new file mode 100644 index 00000000..79749e12 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/terminfo.go @@ -0,0 +1,42 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Terminfo lexer. +var Terminfo = internal.Register(MustNewLexer( + &Config{ + Name: "Terminfo", + Aliases: []string{"terminfo"}, + Filenames: []string{"terminfo", "terminfo.src"}, + MimeTypes: []string{}, + }, + Rules{ + "root": { + {`^#.*$`, Comment, nil}, + {`^[^\s#,|]+`, NameTag, Push("names")}, + }, + "names": { + {`\n`, Text, Pop(1)}, + {`(,)([ \t]*)`, ByGroups(Punctuation, Text), Push("defs")}, + {`\|`, Punctuation, nil}, + {`[^,|]+`, NameAttribute, nil}, + }, + "defs": { + {`\n[ \t]+`, Text, nil}, + {`\n`, Text, Pop(2)}, + {`(#)([0-9]+)`, ByGroups(Operator, LiteralNumber), nil}, + {`=`, Operator, Push("data")}, + {`(,)([ \t]*)`, ByGroups(Punctuation, Text), nil}, + {`[^\s,=#]+`, NameClass, nil}, + }, + "data": { + {`\\[,\\]`, Literal, nil}, + {`(,)([ \t]*)`, ByGroups(Punctuation, Text), Pop(1)}, + {`[^\\,]+`, Literal, nil}, + {`.`, Literal, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/terraform.go b/vendor/github.com/alecthomas/chroma/lexers/t/terraform.go new file mode 100644 index 00000000..05125f70 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/terraform.go @@ -0,0 +1,69 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Terraform lexer. 
+var Terraform = internal.Register(MustNewLexer( + &Config{ + Name: "Terraform", + Aliases: []string{"terraform", "tf"}, + Filenames: []string{"*.tf"}, + MimeTypes: []string{"application/x-tf", "application/x-terraform"}, + }, + Rules{ + "root": { + Include("string"), + Include("punctuation"), + Include("curly"), + Include("basic"), + Include("whitespace"), + {`[0-9]+`, LiteralNumber, nil}, + }, + "basic": { + {Words(`\b`, `\b`, `true`, `false`), KeywordType, nil}, + {`\s*/\*`, CommentMultiline, Push("comment")}, + {`\s*#.*\n`, CommentSingle, nil}, + {`(.*?)(\s*)(=)`, ByGroups(NameAttribute, Text, Operator), nil}, + {Words(`\b`, `\b`, `variable`, `resource`, `provider`, `provisioner`, `module`), KeywordReserved, Push("function")}, + {Words(`\b`, `\b`, `ingress`, `egress`, `listener`, `default`, `connection`, `alias`), KeywordDeclaration, nil}, + {`\$\{`, LiteralStringInterpol, Push("var_builtin")}, + }, + "function": { + {`(\s+)(".*")(\s+)`, ByGroups(Text, LiteralString, Text), nil}, + Include("punctuation"), + Include("curly"), + }, + "var_builtin": { + {`\$\{`, LiteralStringInterpol, Push()}, + {Words(`\b`, `\b`, `concat`, `file`, `join`, `lookup`, `element`), NameBuiltin, nil}, + Include("string"), + Include("punctuation"), + {`\s+`, Text, nil}, + {`\}`, LiteralStringInterpol, Pop(1)}, + }, + "string": { + {`(".*")`, ByGroups(LiteralStringDouble), nil}, + }, + "punctuation": { + {`[\[\](),.]`, Punctuation, nil}, + }, + "curly": { + {`\{`, TextPunctuation, nil}, + {`\}`, TextPunctuation, nil}, + }, + "comment": { + {`[^*/]`, CommentMultiline, nil}, + {`/\*`, CommentMultiline, Push()}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[*/]`, CommentMultiline, nil}, + }, + "whitespace": { + {`\n`, Text, nil}, + {`\s+`, Text, nil}, + {`\\\n`, Text, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/tex.go b/vendor/github.com/alecthomas/chroma/lexers/t/tex.go new file mode 100644 index 00000000..f1010c4a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/tex.go @@ -0,0 +1,56 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Tex lexer. 
+var TeX = internal.Register(MustNewLexer( + &Config{ + Name: "TeX", + Aliases: []string{"tex", "latex"}, + Filenames: []string{"*.tex", "*.aux", "*.toc"}, + MimeTypes: []string{"text/x-tex", "text/x-latex"}, + }, + Rules{ + "general": { + {`%.*?\n`, Comment, nil}, + {`[{}]`, NameBuiltin, nil}, + {`[&_^]`, NameBuiltin, nil}, + }, + "root": { + {`\\\[`, LiteralStringBacktick, Push("displaymath")}, + {`\\\(`, LiteralString, Push("inlinemath")}, + {`\$\$`, LiteralStringBacktick, Push("displaymath")}, + {`\$`, LiteralString, Push("inlinemath")}, + {`\\([a-zA-Z]+|.)`, Keyword, Push("command")}, + {`\\$`, Keyword, nil}, + Include("general"), + {`[^\\$%&_^{}]+`, Text, nil}, + }, + "math": { + {`\\([a-zA-Z]+|.)`, NameVariable, nil}, + Include("general"), + {`[0-9]+`, LiteralNumber, nil}, + {`[-=!+*/()\[\]]`, Operator, nil}, + {`[^=!+*/()\[\]\\$%&_^{}0-9-]+`, NameBuiltin, nil}, + }, + "inlinemath": { + {`\\\)`, LiteralString, Pop(1)}, + {`\$`, LiteralString, Pop(1)}, + Include("math"), + }, + "displaymath": { + {`\\\]`, LiteralString, Pop(1)}, + {`\$\$`, LiteralString, Pop(1)}, + {`\$`, NameBuiltin, nil}, + Include("math"), + }, + "command": { + {`\[.*?\]`, NameAttribute, nil}, + {`\*`, Keyword, nil}, + Default(Pop(1)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/thrift.go b/vendor/github.com/alecthomas/chroma/lexers/t/thrift.go new file mode 100644 index 00000000..5cbd0af9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/thrift.go @@ -0,0 +1,73 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Thrift lexer. +var Thrift = internal.Register(MustNewLexer( + &Config{ + Name: "Thrift", + Aliases: []string{"thrift"}, + Filenames: []string{"*.thrift"}, + MimeTypes: []string{"application/x-thrift"}, + }, + Rules{ + "root": { + Include("whitespace"), + Include("comments"), + {`"`, LiteralStringDouble, Combined("stringescape", "dqs")}, + {`\'`, LiteralStringSingle, Combined("stringescape", "sqs")}, + {`(namespace)(\s+)`, ByGroups(KeywordNamespace, TextWhitespace), Push("namespace")}, + {`(enum|union|struct|service|exception)(\s+)`, ByGroups(KeywordDeclaration, TextWhitespace), Push("class")}, + {`((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil}, + Include("keywords"), + Include("numbers"), + {`[&=]`, Operator, nil}, + {`[:;,{}()<>\[\]]`, Punctuation, nil}, + {`[a-zA-Z_](\.\w|\w)*`, Name, nil}, + }, + "whitespace": { + {`\n`, TextWhitespace, nil}, + {`\s+`, TextWhitespace, nil}, + }, + "comments": { + {`#.*$`, Comment, nil}, + {`//.*?\n`, Comment, nil}, + {`/\*[\w\W]*?\*/`, CommentMultiline, nil}, + }, + "stringescape": { + {`\\([\\nrt"\'])`, LiteralStringEscape, nil}, + }, + "dqs": { + {`"`, LiteralStringDouble, Pop(1)}, + {`[^\\"\n]+`, LiteralStringDouble, nil}, + }, + "sqs": { + {`'`, LiteralStringSingle, Pop(1)}, + {`[^\\\'\n]+`, LiteralStringSingle, nil}, + }, + "namespace": { + {`[a-z*](\.\w|\w)*`, NameNamespace, Pop(1)}, + Default(Pop(1)), + }, + "class": { + {`[a-zA-Z_]\w*`, NameClass, Pop(1)}, + Default(Pop(1)), + }, + "keywords": { + {`(async|oneway|extends|throws|required|optional)\b`, Keyword, nil}, + {`(true|false)\b`, KeywordConstant, nil}, + {`(const|typedef)\b`, KeywordDeclaration, nil}, + {Words(``, `\b`, `cpp_namespace`, `cpp_include`, `cpp_type`, `java_package`, `cocoa_prefix`, `csharp_namespace`, `delphi_namespace`, `php_namespace`, `py_module`, `perl_package`, `ruby_namespace`, 
`smalltalk_category`, `smalltalk_prefix`, `xsd_all`, `xsd_optional`, `xsd_nillable`, `xsd_namespace`, `xsd_attrs`, `include`), KeywordNamespace, nil}, + {Words(``, `\b`, `void`, `bool`, `byte`, `i16`, `i32`, `i64`, `double`, `string`, `binary`, `map`, `list`, `set`, `slist`, `senum`), KeywordType, nil}, + {Words(`\b`, `\b`, `BEGIN`, `END`, `__CLASS__`, `__DIR__`, `__FILE__`, `__FUNCTION__`, `__LINE__`, `__METHOD__`, `__NAMESPACE__`, `abstract`, `alias`, `and`, `args`, `as`, `assert`, `begin`, `break`, `case`, `catch`, `class`, `clone`, `continue`, `declare`, `def`, `default`, `del`, `delete`, `do`, `dynamic`, `elif`, `else`, `elseif`, `elsif`, `end`, `enddeclare`, `endfor`, `endforeach`, `endif`, `endswitch`, `endwhile`, `ensure`, `except`, `exec`, `finally`, `float`, `for`, `foreach`, `function`, `global`, `goto`, `if`, `implements`, `import`, `in`, `inline`, `instanceof`, `interface`, `is`, `lambda`, `module`, `native`, `new`, `next`, `nil`, `not`, `or`, `pass`, `public`, `print`, `private`, `protected`, `raise`, `redo`, `rescue`, `retry`, `register`, `return`, `self`, `sizeof`, `static`, `super`, `switch`, `synchronized`, `then`, `this`, `throw`, `transient`, `try`, `undef`, `unless`, `unsigned`, `until`, `use`, `var`, `virtual`, `volatile`, `when`, `while`, `with`, `xor`, `yield`), KeywordReserved, nil}, + }, + "numbers": { + {`[+-]?(\d+\.\d+([eE][+-]?\d+)?|\.?\d+[eE][+-]?\d+)`, LiteralNumberFloat, nil}, + {`[+-]?0x[0-9A-Fa-f]+`, LiteralNumberHex, nil}, + {`[+-]?[0-9]+`, LiteralNumberInteger, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/toml.go b/vendor/github.com/alecthomas/chroma/lexers/t/toml.go new file mode 100644 index 00000000..68bfc905 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/toml.go @@ -0,0 +1,29 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var TOML = internal.Register(MustNewLexer( + &Config{ + Name: "TOML", + Aliases: []string{"toml"}, + Filenames: []string{"*.toml"}, + MimeTypes: []string{"text/x-toml"}, + }, + Rules{ + "root": { + {`\s+`, Text, nil}, + {`#.*`, Comment, nil}, + {Words(``, `\b`, `true`, `false`), KeywordConstant, nil}, + {`\d\d\d\d-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d\+)?(Z|[+-]\d{2}:\d{2})`, LiteralDate, nil}, + {`[+-]?[0-9](_?\d)*\.\d+`, LiteralNumberFloat, nil}, + {`[+-]?[0-9](_?\d)*`, LiteralNumberInteger, nil}, + {`"(\\\\|\\"|[^"])*"`, StringDouble, nil}, + {`'(\\\\|\\'|[^'])*'`, StringSingle, nil}, + {`[.,=\[\]]`, Punctuation, nil}, + {`[^\W\d]\w*`, NameOther, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/transactsql.go b/vendor/github.com/alecthomas/chroma/lexers/t/transactsql.go new file mode 100644 index 00000000..769a2f91 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/transactsql.go @@ -0,0 +1,60 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// TransactSQL lexer. 
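The Transact-SQL rules below are built almost entirely from the Words helper that appears later in this diff (regexp.go): it quote-metas each literal and joins them into a single alternation between the given prefix and suffix. A small sketch, not part of this diff, of what one of those calls expands to:

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

func main() {
	// Prints: \b(all|and|any|between)\b
	fmt.Println(chroma.Words(`\b`, `\b`, "all", "and", "any", "between"))
}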
+var TransactSQL = internal.Register(MustNewLexer( + &Config{ + Name: "Transact-SQL", + Aliases: []string{"tsql", "t-sql"}, + MimeTypes: []string{"text/x-tsql"}, + NotMultiline: true, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`\s+`, TextWhitespace, nil}, + {`--(?m).*?$\n?`, CommentSingle, nil}, + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`'`, LiteralStringSingle, Push("string")}, + {`"`, LiteralStringName, Push("quoted-ident")}, + {Words(``, ``, `!<`, `!=`, `!>`, `<`, `<=`, `<>`, `=`, `>`, `>=`, `+`, `+=`, `-`, `-=`, `*`, `*=`, `/`, `/=`, `%`, `%=`, `&`, `&=`, `|`, `|=`, `^`, `^=`, `~`, `::`), Operator, nil}, + {Words(``, `\b`, `all`, `and`, `any`, `between`, `except`, `exists`, `in`, `intersect`, `like`, `not`, `or`, `some`, `union`), OperatorWord, nil}, + {Words(``, `\b`, `bigint`, `binary`, `bit`, `char`, `cursor`, `date`, `datetime`, `datetime2`, `datetimeoffset`, `decimal`, `float`, `hierarchyid`, `image`, `int`, `money`, `nchar`, `ntext`, `numeric`, `nvarchar`, `real`, `smalldatetime`, `smallint`, `smallmoney`, `sql_variant`, `table`, `text`, `time`, `timestamp`, `tinyint`, `uniqueidentifier`, `varbinary`, `varchar`, `xml`), NameClass, nil}, + {Words(``, `\b`, `$partition`, `abs`, `acos`, `app_name`, `applock_mode`, `applock_test`, `ascii`, `asin`, `assemblyproperty`, `atan`, `atn2`, `avg`, `binary_checksum`, `cast`, `ceiling`, `certencoded`, `certprivatekey`, `char`, `charindex`, `checksum`, `checksum_agg`, `choose`, `col_length`, `col_name`, `columnproperty`, `compress`, `concat`, `connectionproperty`, `context_info`, `convert`, `cos`, `cot`, `count`, `count_big`, `current_request_id`, `current_timestamp`, `current_transaction_id`, `current_user`, `cursor_status`, `database_principal_id`, `databasepropertyex`, `dateadd`, `datediff`, `datediff_big`, `datefromparts`, `datename`, `datepart`, `datetime2fromparts`, `datetimefromparts`, `datetimeoffsetfromparts`, `day`, `db_id`, `db_name`, `decompress`, `degrees`, `dense_rank`, `difference`, `eomonth`, `error_line`, `error_message`, `error_number`, `error_procedure`, `error_severity`, `error_state`, `exp`, `file_id`, `file_idex`, `file_name`, `filegroup_id`, `filegroup_name`, `filegroupproperty`, `fileproperty`, `floor`, `format`, `formatmessage`, `fulltextcatalogproperty`, `fulltextserviceproperty`, `get_filestream_transaction_context`, `getansinull`, `getdate`, `getutcdate`, `grouping`, `grouping_id`, `has_perms_by_name`, `host_id`, `host_name`, `iif`, `index_col`, `indexkey_property`, `indexproperty`, `is_member`, `is_rolemember`, `is_srvrolemember`, `isdate`, `isjson`, `isnull`, `isnumeric`, `json_modify`, `json_query`, `json_value`, `left`, `len`, `log`, `log10`, `lower`, `ltrim`, `max`, `min`, `min_active_rowversion`, `month`, `nchar`, `newid`, `newsequentialid`, `ntile`, `object_definition`, `object_id`, `object_name`, `object_schema_name`, `objectproperty`, `objectpropertyex`, `opendatasource`, `openjson`, `openquery`, `openrowset`, `openxml`, `original_db_name`, `original_login`, `parse`, `parsename`, `patindex`, `permissions`, `pi`, `power`, `pwdcompare`, `pwdencrypt`, `quotename`, `radians`, `rand`, `rank`, `replace`, `replicate`, `reverse`, `right`, `round`, `row_number`, `rowcount_big`, `rtrim`, `schema_id`, `schema_name`, `scope_identity`, `serverproperty`, `session_context`, `session_user`, `sign`, `sin`, `smalldatetimefromparts`, `soundex`, `sp_helplanguage`, `space`, `sqrt`, `square`, `stats_date`, `stdev`, `stdevp`, `str`, `string_escape`, `string_split`, `stuff`, `substring`, `sum`, 
`suser_id`, `suser_name`, `suser_sid`, `suser_sname`, `switchoffset`, `sysdatetime`, `sysdatetimeoffset`, `system_user`, `sysutcdatetime`, `tan`, `textptr`, `textvalid`, `timefromparts`, `todatetimeoffset`, `try_cast`, `try_convert`, `try_parse`, `type_id`, `type_name`, `typeproperty`, `unicode`, `upper`, `user_id`, `user_name`, `var`, `varp`, `xact_state`, `year`), NameFunction, nil}, + {`(goto)(\s+)(\w+\b)`, ByGroups(Keyword, TextWhitespace, NameLabel), nil}, + {Words(``, `\b`, `absolute`, `action`, `ada`, `add`, `admin`, `after`, `aggregate`, `alias`, `all`, `allocate`, `alter`, `and`, `any`, `are`, `array`, `as`, `asc`, `asensitive`, `assertion`, `asymmetric`, `at`, `atomic`, `authorization`, `avg`, `backup`, `before`, `begin`, `between`, `binary`, `bit`, `bit_length`, `blob`, `boolean`, `both`, `breadth`, `break`, `browse`, `bulk`, `by`, `call`, `called`, `cardinality`, `cascade`, `cascaded`, `case`, `cast`, `catalog`, `catch`, `char`, `char_length`, `character`, `character_length`, `check`, `checkpoint`, `class`, `clob`, `close`, `clustered`, `coalesce`, `collate`, `collation`, `collect`, `column`, `commit`, `completion`, `compute`, `condition`, `connect`, `connection`, `constraint`, `constraints`, `constructor`, `contains`, `containstable`, `continue`, `convert`, `corr`, `corresponding`, `count`, `covar_pop`, `covar_samp`, `create`, `cross`, `cube`, `cume_dist`, `current`, `current_catalog`, `current_date`, `current_default_transform_group`, `current_path`, `current_role`, `current_schema`, `current_time`, `current_timestamp`, `current_transform_group_for_type`, `current_user`, `cursor`, `cycle`, `data`, `database`, `date`, `day`, `dbcc`, `deallocate`, `dec`, `decimal`, `declare`, `default`, `deferrable`, `deferred`, `delete`, `deny`, `depth`, `deref`, `desc`, `describe`, `descriptor`, `destroy`, `destructor`, `deterministic`, `diagnostics`, `dictionary`, `disconnect`, `disk`, `distinct`, `distributed`, `domain`, `double`, `drop`, `dump`, `dynamic`, `each`, `element`, `else`, `end`, `end-exec`, `equals`, `errlvl`, `escape`, `every`, `except`, `exception`, `exec`, `execute`, `exists`, `exit`, `external`, `extract`, `false`, `fetch`, `file`, `fillfactor`, `filter`, `first`, `float`, `for`, `foreign`, `fortran`, `found`, `free`, `freetext`, `freetexttable`, `from`, `full`, `fulltexttable`, `function`, `fusion`, `general`, `get`, `global`, `go`, `goto`, `grant`, `group`, `grouping`, `having`, `hold`, `holdlock`, `host`, `hour`, `identity`, `identity_insert`, `identitycol`, `if`, `ignore`, `immediate`, `in`, `include`, `index`, `indicator`, `initialize`, `initially`, `inner`, `inout`, `input`, `insensitive`, `insert`, `int`, `integer`, `intersect`, `intersection`, `interval`, `into`, `is`, `isolation`, `iterate`, `join`, `key`, `kill`, `language`, `large`, `last`, `lateral`, `leading`, `left`, `less`, `level`, `like`, `like_regex`, `limit`, `lineno`, `ln`, `load`, `local`, `localtime`, `localtimestamp`, `locator`, `lower`, `map`, `match`, `max`, `member`, `merge`, `method`, `min`, `minute`, `mod`, `modifies`, `modify`, `module`, `month`, `multiset`, `names`, `national`, `natural`, `nchar`, `nclob`, `new`, `next`, `no`, `nocheck`, `nonclustered`, `none`, `normalize`, `not`, `null`, `nullif`, `numeric`, `object`, `occurrences_regex`, `octet_length`, `of`, `off`, `offsets`, `old`, `on`, `only`, `open`, `opendatasource`, `openquery`, `openrowset`, `openxml`, `operation`, `option`, `or`, `order`, `ordinality`, `out`, `outer`, `output`, `over`, `overlaps`, `overlay`, `pad`, `parameter`, 
`parameters`, `partial`, `partition`, `pascal`, `path`, `percent`, `percent_rank`, `percentile_cont`, `percentile_disc`, `pivot`, `plan`, `position`, `position_regex`, `postfix`, `precision`, `prefix`, `preorder`, `prepare`, `preserve`, `primary`, `print`, `prior`, `privileges`, `proc`, `procedure`, `public`, `raiserror`, `range`, `read`, `reads`, `readtext`, `real`, `reconfigure`, `recursive`, `ref`, `references`, `referencing`, `regr_avgx`, `regr_avgy`, `regr_count`, `regr_intercept`, `regr_r2`, `regr_slope`, `regr_sxx`, `regr_sxy`, `regr_syy`, `relative`, `release`, `replication`, `restore`, `restrict`, `result`, `return`, `returns`, `revert`, `revoke`, `right`, `role`, `rollback`, `rollup`, `routine`, `row`, `rowcount`, `rowguidcol`, `rows`, `rule`, `save`, `savepoint`, `schema`, `scope`, `scroll`, `search`, `second`, `section`, `securityaudit`, `select`, `semantickeyphrasetable`, `semanticsimilaritydetailstable`, `semanticsimilaritytable`, `sensitive`, `sequence`, `session`, `session_user`, `set`, `sets`, `setuser`, `shutdown`, `similar`, `size`, `smallint`, `some`, `space`, `specific`, `specifictype`, `sql`, `sqlca`, `sqlcode`, `sqlerror`, `sqlexception`, `sqlstate`, `sqlwarning`, `start`, `state`, `statement`, `static`, `statistics`, `stddev_pop`, `stddev_samp`, `structure`, `submultiset`, `substring`, `substring_regex`, `sum`, `symmetric`, `system`, `system_user`, `table`, `tablesample`, `temporary`, `terminate`, `textsize`, `than`, `then`, `throw`, `time`, `timestamp`, `timezone_hour`, `timezone_minute`, `to`, `top`, `trailing`, `tran`, `transaction`, `translate`, `translate_regex`, `translation`, `treat`, `trigger`, `trim`, `true`, `truncate`, `try`, `try_convert`, `tsequal`, `uescape`, `under`, `union`, `unique`, `unknown`, `unnest`, `unpivot`, `update`, `updatetext`, `upper`, `usage`, `use`, `user`, `using`, `value`, `values`, `var_pop`, `var_samp`, `varchar`, `variable`, `varying`, `view`, `waitfor`, `when`, `whenever`, `where`, `while`, `width_bucket`, `window`, `with`, `within`, `without`, `work`, `write`, `writetext`, `xmlagg`, `xmlattributes`, `xmlbinary`, `xmlcast`, `xmlcomment`, `xmlconcat`, `xmldocument`, `xmlelement`, `xmlexists`, `xmlforest`, `xmliterate`, `xmlnamespaces`, `xmlparse`, `xmlpi`, `xmlquery`, `xmlserialize`, `xmltable`, `xmltext`, `xmlvalidate`, `year`, `zone`), Keyword, nil}, + {`(\[)([^]]+)(\])`, ByGroups(Operator, Name, Operator), nil}, + {`0x[0-9a-f]+`, LiteralNumberHex, nil}, + {`[0-9]+\.[0-9]*(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`\.[0-9]+(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil}, + {`[0-9]+e[+-]?[0-9]+`, LiteralNumberFloat, nil}, + {`[0-9]+`, LiteralNumberInteger, nil}, + {`[;(),.]`, Punctuation, nil}, + {`@@\w+`, NameBuiltin, nil}, + {`@\w+`, NameVariable, nil}, + {`(\w+)(:)`, ByGroups(NameLabel, Punctuation), nil}, + {`#?#?\w+`, Name, nil}, + {`\?`, NameVariableMagic, nil}, + }, + "multiline-comments": { + {`/\*`, CommentMultiline, Push("multiline-comments")}, + {`\*/`, CommentMultiline, Pop(1)}, + {`[^/*]+`, CommentMultiline, nil}, + {`[/*]`, CommentMultiline, nil}, + }, + "string": { + {`[^']+`, LiteralStringSingle, nil}, + {`''`, LiteralStringSingle, nil}, + {`'`, LiteralStringSingle, Pop(1)}, + }, + "quoted-ident": { + {`[^"]+`, LiteralStringName, nil}, + {`""`, LiteralStringName, nil}, + {`"`, LiteralStringName, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/turtle.go b/vendor/github.com/alecthomas/chroma/lexers/t/turtle.go new file mode 100644 index 00000000..71d3c47b --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/lexers/t/turtle.go @@ -0,0 +1,67 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Turtle lexer. +var Turtle = internal.Register(MustNewLexer( + &Config{ + Name: "Turtle", + Aliases: []string{"turtle"}, + Filenames: []string{"*.ttl"}, + MimeTypes: []string{"text/turtle", "application/x-turtle"}, + NotMultiline: true, + CaseInsensitive: true, + }, + Rules{ + "root": { + {`\s+`, TextWhitespace, nil}, + {"(@base|BASE)(\\s+)(<[^<>\"{}|^`\\\\\\x00-\\x20]*>)(\\s*)(\\.?)", ByGroups(Keyword, TextWhitespace, NameVariable, TextWhitespace, Punctuation), nil}, + {"(@prefix|PREFIX)(\\s+)((?:[a-z][\\w-]*)?\\:)(\\s+)(<[^<>\"{}|^`\\\\\\x00-\\x20]*>)(\\s*)(\\.?)", ByGroups(Keyword, TextWhitespace, NameNamespace, TextWhitespace, NameVariable, TextWhitespace, Punctuation), nil}, + {`(?<=\s)a(?=\s)`, KeywordType, nil}, + {"(<[^<>\"{}|^`\\\\\\x00-\\x20]*>)", NameVariable, nil}, + {`((?:[a-z][\w-]*)?\:)([a-z][\w-]*)`, ByGroups(NameNamespace, NameTag), nil}, + {`#[^\n]+`, Comment, nil}, + {`\b(true|false)\b`, Literal, nil}, + {`[+\-]?\d*\.\d+`, LiteralNumberFloat, nil}, + {`[+\-]?\d*(:?\.\d+)?E[+\-]?\d+`, LiteralNumberFloat, nil}, + {`[+\-]?\d+`, LiteralNumberInteger, nil}, + {`[\[\](){}.;,:^]`, Punctuation, nil}, + {`"""`, LiteralString, Push("triple-double-quoted-string")}, + {`"`, LiteralString, Push("single-double-quoted-string")}, + {`'''`, LiteralString, Push("triple-single-quoted-string")}, + {`'`, LiteralString, Push("single-single-quoted-string")}, + }, + "triple-double-quoted-string": { + {`"""`, LiteralString, Push("end-of-string")}, + {`[^\\]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "single-double-quoted-string": { + {`"`, LiteralString, Push("end-of-string")}, + {`[^"\\\n]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "triple-single-quoted-string": { + {`'''`, LiteralString, Push("end-of-string")}, + {`[^\\]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "single-single-quoted-string": { + {`'`, LiteralString, Push("end-of-string")}, + {`[^'\\\n]+`, LiteralString, nil}, + {`\\`, LiteralString, Push("string-escape")}, + }, + "string-escape": { + {`.`, LiteralString, Pop(1)}, + }, + "end-of-string": { + {`(@)([a-z]+(:?-[a-z0-9]+)*)`, ByGroups(Operator, GenericEmph, GenericEmph), Pop(2)}, + {"(\\^\\^)(<[^<>\"{}|^`\\\\\\x00-\\x20]*>)", ByGroups(Operator, GenericEmph), Pop(2)}, + {`(\^\^)((?:[a-z][\w-]*)?\:)([a-z][\w-]*)`, ByGroups(Operator, GenericEmph, GenericEmph), Pop(2)}, + Default(Pop(2)), + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/twig.go b/vendor/github.com/alecthomas/chroma/lexers/t/twig.go new file mode 100644 index 00000000..56aa9b9a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/twig.go @@ -0,0 +1,54 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Twig lexer. 
+var Twig = internal.Register(MustNewLexer( + &Config{ + Name: "Twig", + Aliases: []string{"twig"}, + Filenames: []string{}, + MimeTypes: []string{"application/x-twig"}, + DotAll: true, + }, + Rules{ + "root": { + {`[^{]+`, Other, nil}, + {`\{\{`, CommentPreproc, Push("var")}, + {`\{\#.*?\#\}`, Comment, nil}, + {`(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endraw)(\s*-?)(%\})`, ByGroups(CommentPreproc, Text, Keyword, Text, CommentPreproc, Other, CommentPreproc, Text, Keyword, Text, CommentPreproc), nil}, + {`(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})`, ByGroups(CommentPreproc, Text, Keyword, Text, CommentPreproc, Other, CommentPreproc, Text, Keyword, Text, CommentPreproc), nil}, + {`(\{%)(-?\s*)(filter)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*)`, ByGroups(CommentPreproc, Text, Keyword, Text, NameFunction), Push("tag")}, + {`(\{%)(-?\s*)([a-zA-Z_]\w*)`, ByGroups(CommentPreproc, Text, Keyword), Push("tag")}, + {`\{`, Other, nil}, + }, + "varnames": { + {`(\|)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameFunction), nil}, + {`(is)(\s+)(not)?(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, Keyword, Text, NameFunction), nil}, + {`(?i)(true|false|none|null)\b`, KeywordPseudo, nil}, + {`(in|not|and|b-and|or|b-or|b-xor|isif|elseif|else|importconstant|defined|divisibleby|empty|even|iterable|odd|sameasmatches|starts\s+with|ends\s+with)\b`, Keyword, nil}, + {`(loop|block|parent)\b`, NameBuiltin, nil}, + {`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*`, NameVariable, nil}, + {`\.(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*`, NameVariable, nil}, + {`\.[0-9]+`, LiteralNumber, nil}, + {`:?"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil}, + {`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil}, + {`([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)`, Operator, nil}, + {`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil}, + }, + "var": { + {`\s+`, Text, nil}, + {`(-?)(\}\})`, ByGroups(Text, CommentPreproc), Pop(1)}, + Include("varnames"), + }, + "tag": { + {`\s+`, Text, nil}, + {`(-?)(%\})`, ByGroups(Text, CommentPreproc), Pop(1)}, + Include("varnames"), + {`.`, Punctuation, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/t/typescript.go b/vendor/github.com/alecthomas/chroma/lexers/t/typescript.go new file mode 100644 index 00000000..07bae024 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/t/typescript.go @@ -0,0 +1,72 @@ +package t + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// TypeScript lexer. 
+var TypeScript = internal.Register(MustNewLexer( + &Config{ + Name: "TypeScript", + Aliases: []string{"ts", "typescript"}, + Filenames: []string{"*.ts", "*.tsx"}, + MimeTypes: []string{"text/x-typescript"}, + DotAll: true, + }, + Rules{ + "commentsandwhitespace": { + {`\s+`, Text, nil}, + {``, Comment, Pop(1)}, + {`-`, Comment, nil}, + }, + "tag": { + {`\s+`, Text, nil}, + {`[\w.:-]+\s*=`, NameAttribute, Push("attr")}, + {`/?\s*>`, NameTag, Pop(1)}, + }, + "attr": { + {`\s+`, Text, nil}, + {`".*?"`, LiteralString, Pop(1)}, + {`'.*?'`, LiteralString, Pop(1)}, + {`[^\s>]+`, LiteralString, Pop(1)}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/x/xorg.go b/vendor/github.com/alecthomas/chroma/lexers/x/xorg.go new file mode 100644 index 00000000..6e6cbec6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/x/xorg.go @@ -0,0 +1,25 @@ +package x + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +// Xorg lexer. +var Xorg = internal.Register(MustNewLexer( + &Config{ + Name: "Xorg", + Aliases: []string{"xorg.conf"}, + Filenames: []string{"xorg.conf"}, + MimeTypes: []string{}, + }, + Rules{ + "root": { + {`\s+`, TextWhitespace, nil}, + {`#.*$`, Comment, nil}, + {`((|Sub)Section)(\s+)("\w+")`, ByGroups(KeywordNamespace, LiteralStringEscape, TextWhitespace, LiteralStringEscape), nil}, + {`(End(|Sub)Section)`, KeywordNamespace, nil}, + {`(\w+)(\s+)([^\n#]+)`, ByGroups(NameKeyword, TextWhitespace, LiteralString), nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/lexers/y/yaml.go b/vendor/github.com/alecthomas/chroma/lexers/y/yaml.go new file mode 100644 index 00000000..0d73bace --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/lexers/y/yaml.go @@ -0,0 +1,40 @@ +package y + +import ( + . "github.com/alecthomas/chroma" // nolint + "github.com/alecthomas/chroma/lexers/internal" +) + +var YAML = internal.Register(MustNewLexer( + &Config{ + Name: "YAML", + Aliases: []string{"yaml"}, + Filenames: []string{"*.yaml", "*.yml"}, + MimeTypes: []string{"text/x-yaml"}, + }, + Rules{ + "root": { + Include("whitespace"), + {`#.*`, Comment, nil}, + {`!![^\s]+`, CommentPreproc, nil}, + {`&[^\s]+`, CommentPreproc, nil}, + {`\*[^\s]+`, CommentPreproc, nil}, + {`^%include\s+[^\n\r]+`, CommentPreproc, nil}, + {`([>|])(\s+)((?:(?:.*?$)(?:[\n\r]*?\2)?)*)`, ByGroups(StringDoc, StringDoc, StringDoc), nil}, + Include("value"), + {`[?:,\[\]]`, Punctuation, nil}, + {`.`, Text, nil}, + }, + "value": { + {Words(``, `\b`, "true", "false", "null"), KeywordConstant, nil}, + {`"(?:\\.|[^"])*"`, StringDouble, nil}, + {`'(?:\\.|[^'])*'`, StringSingle, nil}, + {`\d\d\d\d-\d\d-\d\d([T ]\d\d:\d\d:\d\d(\.\d+)?(Z|\s+[-+]\d+)?)?`, LiteralDate, nil}, + {`\b[+\-]?(0x[\da-f]+|0o[0-7]+|(\d+\.?\d*|\.?\d+)(e[\+\-]?\d+)?|\.inf|\.nan)\b`, Number, nil}, + {`\b[\w]+\b`, Text, nil}, + }, + "whitespace": { + {`\s+`, Whitespace, nil}, + }, + }, +)) diff --git a/vendor/github.com/alecthomas/chroma/mutators.go b/vendor/github.com/alecthomas/chroma/mutators.go new file mode 100644 index 00000000..30f1eedb --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/mutators.go @@ -0,0 +1,131 @@ +package chroma + +import ( + "fmt" + "strings" +) + +// A Mutator modifies the behaviour of the lexer. +type Mutator interface { + // Mutate the lexer state machine as it is processing. + Mutate(state *LexerState) error +} + +// A LexerMutator is an additional interface that a Mutator can implement +// to modify the lexer when it is compiled. 
+type LexerMutator interface { + // Rules are the lexer rules, state is the state key for the rule the mutator is associated with. + MutateLexer(rules CompiledRules, state string, rule int) error +} + +// A MutatorFunc is a Mutator that mutates the lexer state machine as it is processing. +type MutatorFunc func(state *LexerState) error + +func (m MutatorFunc) Mutate(state *LexerState) error { return m(state) } + +// Mutators applies a set of Mutators in order. +func Mutators(modifiers ...Mutator) MutatorFunc { + return func(state *LexerState) error { + for _, modifier := range modifiers { + if err := modifier.Mutate(state); err != nil { + return err + } + } + return nil + } +} + +type includeMutator struct { + state string +} + +// Include the given state. +func Include(state string) Rule { + return Rule{Mutator: &includeMutator{state}} +} + +func (i *includeMutator) Mutate(s *LexerState) error { + return fmt.Errorf("should never reach here Include(%q)", i.state) +} + +func (i *includeMutator) MutateLexer(rules CompiledRules, state string, rule int) error { + includedRules, ok := rules[i.state] + if !ok { + return fmt.Errorf("invalid include state %q", i.state) + } + rules[state] = append(rules[state][:rule], append(includedRules, rules[state][rule+1:]...)...) + return nil +} + +type combinedMutator struct { + states []string +} + +// Combined creates a new anonymous state from the given states, and pushes that state. +func Combined(states ...string) Mutator { + return &combinedMutator{states} +} + +func (c *combinedMutator) Mutate(s *LexerState) error { + return fmt.Errorf("should never reach here Combined(%v)", c.states) +} + +func (c *combinedMutator) MutateLexer(rules CompiledRules, state string, rule int) error { + name := "__combined_" + strings.Join(c.states, "__") + if _, ok := rules[name]; !ok { + combined := []*CompiledRule{} + for _, state := range c.states { + rules, ok := rules[state] + if !ok { + return fmt.Errorf("invalid combine state %q", state) + } + combined = append(combined, rules...) + } + rules[name] = combined + } + rules[state][rule].Mutator = Push(name) + return nil +} + +// Push states onto the stack. +func Push(states ...string) MutatorFunc { + return func(s *LexerState) error { + if len(states) == 0 { + s.Stack = append(s.Stack, s.State) + } else { + for _, state := range states { + if state == "#pop" { + s.Stack = s.Stack[:len(s.Stack)-1] + } else { + s.Stack = append(s.Stack, state) + } + } + } + return nil + } +} + +// Pop state from the stack when rule matches. +func Pop(n int) MutatorFunc { + return func(state *LexerState) error { + if len(state.Stack) == 0 { + return fmt.Errorf("nothing to pop") + } + state.Stack = state.Stack[:len(state.Stack)-n] + return nil + } +} + +// Default returns a Rule that applies a set of Mutators. +func Default(mutators ...Mutator) Rule { + return Rule{Mutator: Mutators(mutators...)} +} + +// Stringify returns the raw string for a set of tokens. 
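mutators.go above is the machinery behind every Push, Pop, Include and Default reference in the lexer tables earlier in this diff: Include and Combined rewrite the rule table once, when the lexer is compiled (via MutateLexer), while Push and Pop manipulate the state stack as text is scanned. A toy sketch, not part of this diff, that exercises them together with the package-level Tokenise helper and Stringify (defined next); the lexer name, rules and input are invented for illustration.

package main

import (
	"fmt"

	. "github.com/alecthomas/chroma" // nolint
)

// A toy lexer: Push enters the "string" state on an opening quote, Pop(1)
// leaves it on the closing quote, and Include splices the shared
// "whitespace" rules into "root" when the lexer is compiled.
var toy = MustNewLexer(
	&Config{Name: "toy"},
	Rules{
		"root": {
			Include("whitespace"),
			{`"`, LiteralString, Push("string")},
			{`\w+`, Name, nil},
		},
		"string": {
			{`"`, LiteralString, Pop(1)},
			{`[^"\n]+`, LiteralString, nil},
		},
		"whitespace": {
			{`\s+`, Text, nil},
		},
	},
)

func main() {
	toks, err := Tokenise(toy, nil, `greet "hello world"`)
	if err != nil {
		panic(err)
	}
	for _, tok := range toks {
		fmt.Printf("%v %q\n", tok.Type, tok.Value)
	}
	// Stringify glues the token values back together, round-tripping the input.
	fmt.Println(Stringify(toks...))
}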
+func Stringify(tokens ...*Token) string { + out := []string{} + for _, t := range tokens { + out = append(out, t.Value) + } + return strings.Join(out, "") +} diff --git a/vendor/github.com/alecthomas/chroma/pygments-lexers.txt b/vendor/github.com/alecthomas/chroma/pygments-lexers.txt new file mode 100644 index 00000000..9c9ced2e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/pygments-lexers.txt @@ -0,0 +1,325 @@ +Generated with: + + g 'class.*RegexLexer' | pawk --strict -F: '"pygments.lexers.%s.%s" % (f[0].split(".")[0], f[2].split()[1].split("(")[0])' > lexers.txt + +perl6: + Requires a bunch of helpers that I do not have the time to convert. + +kotlin: + invalid unicode escape sequences + FIXED: Have to disable wide Unicode characters in unistring.py + +pygments.lexers.ambient.AmbientTalkLexer +pygments.lexers.ampl.AmplLexer +pygments.lexers.actionscript.ActionScriptLexer +pygments.lexers.actionscript.ActionScript3Lexer +pygments.lexers.actionscript.MxmlLexer +pygments.lexers.algebra.GAPLexer +pygments.lexers.algebra.MathematicaLexer +pygments.lexers.algebra.MuPADLexer +pygments.lexers.algebra.BCLexer +pygments.lexers.apl.APLLexer +pygments.lexers.bibtex.BibTeXLexer +pygments.lexers.bibtex.BSTLexer +pygments.lexers.basic.BlitzMaxLexer +pygments.lexers.basic.BlitzBasicLexer +pygments.lexers.basic.MonkeyLexer +pygments.lexers.basic.CbmBasicV2Lexer +pygments.lexers.basic.QBasicLexer +pygments.lexers.automation.AutohotkeyLexer +pygments.lexers.automation.AutoItLexer +pygments.lexers.archetype.AtomsLexer +pygments.lexers.c_like.ClayLexer +pygments.lexers.c_like.ValaLexer +pygments.lexers.asm.GasLexer +pygments.lexers.asm.ObjdumpLexer +pygments.lexers.asm.HsailLexer +pygments.lexers.asm.LlvmLexer +pygments.lexers.asm.NasmLexer +pygments.lexers.asm.TasmLexer +pygments.lexers.asm.Ca65Lexer +pygments.lexers.business.CobolLexer +pygments.lexers.business.ABAPLexer +pygments.lexers.business.OpenEdgeLexer +pygments.lexers.business.GoodDataCLLexer +pygments.lexers.business.MaqlLexer +pygments.lexers.capnproto.CapnProtoLexer +pygments.lexers.chapel.ChapelLexer +pygments.lexers.clean.CleanLexer +pygments.lexers.c_cpp.CFamilyLexer +pygments.lexers.console.VCTreeStatusLexer +pygments.lexers.console.PyPyLogLexer +pygments.lexers.csound.CsoundLexer +pygments.lexers.csound.CsoundDocumentLexer +pygments.lexers.csound.CsoundDocumentLexer +pygments.lexers.crystal.CrystalLexer +pygments.lexers.dalvik.SmaliLexer +pygments.lexers.css.CssLexer +pygments.lexers.css.SassLexer +pygments.lexers.css.ScssLexer +pygments.lexers.configs.IniLexer +pygments.lexers.configs.RegeditLexer +pygments.lexers.configs.PropertiesLexer +pygments.lexers.configs.KconfigLexer +pygments.lexers.configs.Cfengine3Lexer +pygments.lexers.configs.ApacheConfLexer +pygments.lexers.configs.SquidConfLexer +pygments.lexers.configs.NginxConfLexer +pygments.lexers.configs.LighttpdConfLexer +pygments.lexers.configs.DockerLexer +pygments.lexers.configs.TerraformLexer +pygments.lexers.configs.TermcapLexer +pygments.lexers.configs.TerminfoLexer +pygments.lexers.configs.PkgConfigLexer +pygments.lexers.configs.PacmanConfLexer +pygments.lexers.data.YamlLexer +pygments.lexers.data.JsonLexer +pygments.lexers.diff.DiffLexer +pygments.lexers.diff.DarcsPatchLexer +pygments.lexers.diff.WDiffLexer +pygments.lexers.dotnet.CSharpLexer +pygments.lexers.dotnet.NemerleLexer +pygments.lexers.dotnet.BooLexer +pygments.lexers.dotnet.VbNetLexer +pygments.lexers.dotnet.GenericAspxLexer +pygments.lexers.dotnet.FSharpLexer +pygments.lexers.dylan.DylanLexer 
+pygments.lexers.dylan.DylanLidLexer +pygments.lexers.ecl.ECLLexer +pygments.lexers.eiffel.EiffelLexer +pygments.lexers.dsls.ProtoBufLexer +pygments.lexers.dsls.ThriftLexer +pygments.lexers.dsls.BroLexer +pygments.lexers.dsls.PuppetLexer +pygments.lexers.dsls.RslLexer +pygments.lexers.dsls.MscgenLexer +pygments.lexers.dsls.VGLLexer +pygments.lexers.dsls.AlloyLexer +pygments.lexers.dsls.PanLexer +pygments.lexers.dsls.CrmshLexer +pygments.lexers.dsls.FlatlineLexer +pygments.lexers.dsls.SnowballLexer +pygments.lexers.elm.ElmLexer +pygments.lexers.erlang.ErlangLexer +pygments.lexers.erlang.ElixirLexer +pygments.lexers.ezhil.EzhilLexer +pygments.lexers.esoteric.BrainfuckLexer +pygments.lexers.esoteric.BefungeLexer +pygments.lexers.esoteric.CAmkESLexer +pygments.lexers.esoteric.CapDLLexer +pygments.lexers.esoteric.RedcodeLexer +pygments.lexers.esoteric.AheuiLexer +pygments.lexers.factor.FactorLexer +pygments.lexers.fantom.FantomLexer +pygments.lexers.felix.FelixLexer +pygments.lexers.forth.ForthLexer +pygments.lexers.fortran.FortranLexer +pygments.lexers.fortran.FortranFixedLexer +pygments.lexers.go.GoLexer +pygments.lexers.foxpro.FoxProLexer +pygments.lexers.graph.CypherLexer +pygments.lexers.grammar_notation.BnfLexer +pygments.lexers.grammar_notation.AbnfLexer +pygments.lexers.grammar_notation.JsgfLexer +pygments.lexers.graphics.GLShaderLexer +pygments.lexers.graphics.PostScriptLexer +pygments.lexers.graphics.AsymptoteLexer +pygments.lexers.graphics.GnuplotLexer +pygments.lexers.graphics.PovrayLexer +pygments.lexers.hexdump.HexdumpLexer +pygments.lexers.haskell.HaskellLexer +pygments.lexers.haskell.IdrisLexer +pygments.lexers.haskell.AgdaLexer +pygments.lexers.haskell.CryptolLexer +pygments.lexers.haskell.KokaLexer +pygments.lexers.haxe.HaxeLexer +pygments.lexers.haxe.HxmlLexer +pygments.lexers.hdl.VerilogLexer +pygments.lexers.hdl.SystemVerilogLexer +pygments.lexers.hdl.VhdlLexer +pygments.lexers.idl.IDLLexer +pygments.lexers.inferno.LimboLexer +pygments.lexers.igor.IgorLexer +pygments.lexers.html.HtmlLexer +pygments.lexers.html.DtdLexer +pygments.lexers.html.XmlLexer +pygments.lexers.html.HamlLexer +pygments.lexers.html.ScamlLexer +pygments.lexers.html.PugLexer +pygments.lexers.installers.NSISLexer +pygments.lexers.installers.RPMSpecLexer +pygments.lexers.installers.SourcesListLexer +pygments.lexers.installers.DebianControlLexer +pygments.lexers.iolang.IoLexer +pygments.lexers.julia.JuliaLexer +pygments.lexers.int_fiction.Inform6Lexer +pygments.lexers.int_fiction.Inform7Lexer +pygments.lexers.int_fiction.Tads3Lexer +pygments.lexers.make.BaseMakefileLexer +pygments.lexers.make.CMakeLexer +pygments.lexers.javascript.JavascriptLexer +pygments.lexers.javascript.KalLexer +pygments.lexers.javascript.LiveScriptLexer +pygments.lexers.javascript.DartLexer +pygments.lexers.javascript.TypeScriptLexer +pygments.lexers.javascript.LassoLexer +pygments.lexers.javascript.ObjectiveJLexer +pygments.lexers.javascript.CoffeeScriptLexer +pygments.lexers.javascript.MaskLexer +pygments.lexers.javascript.EarlGreyLexer +pygments.lexers.javascript.JuttleLexer +pygments.lexers.jvm.JavaLexer +pygments.lexers.jvm.ScalaLexer +pygments.lexers.jvm.GosuLexer +pygments.lexers.jvm.GroovyLexer +pygments.lexers.jvm.IokeLexer +pygments.lexers.jvm.ClojureLexer +pygments.lexers.jvm.TeaLangLexer +pygments.lexers.jvm.CeylonLexer +pygments.lexers.jvm.KotlinLexer +pygments.lexers.jvm.XtendLexer +pygments.lexers.jvm.PigLexer +pygments.lexers.jvm.GoloLexer +pygments.lexers.jvm.JasminLexer +pygments.lexers.markup.BBCodeLexer 
+pygments.lexers.markup.MoinWikiLexer +pygments.lexers.markup.RstLexer +pygments.lexers.markup.TexLexer +pygments.lexers.markup.GroffLexer +pygments.lexers.markup.MozPreprocHashLexer +pygments.lexers.markup.MarkdownLexer +pygments.lexers.ml.SMLLexer +pygments.lexers.ml.OcamlLexer +pygments.lexers.ml.OpaLexer +pygments.lexers.modeling.ModelicaLexer +pygments.lexers.modeling.BugsLexer +pygments.lexers.modeling.JagsLexer +pygments.lexers.modeling.StanLexer +pygments.lexers.matlab.MatlabLexer +pygments.lexers.matlab.OctaveLexer +pygments.lexers.matlab.ScilabLexer +pygments.lexers.monte.MonteLexer +pygments.lexers.lisp.SchemeLexer +pygments.lexers.lisp.CommonLispLexer +pygments.lexers.lisp.HyLexer +pygments.lexers.lisp.RacketLexer +pygments.lexers.lisp.NewLispLexer +pygments.lexers.lisp.EmacsLispLexer +pygments.lexers.lisp.ShenLexer +pygments.lexers.lisp.XtlangLexer +pygments.lexers.modula2.Modula2Lexer +pygments.lexers.ncl.NCLLexer +pygments.lexers.nim.NimLexer +pygments.lexers.nit.NitLexer +pygments.lexers.nix.NixLexer +pygments.lexers.oberon.ComponentPascalLexer +pygments.lexers.ooc.OocLexer +pygments.lexers.objective.SwiftLexer +pygments.lexers.parasail.ParaSailLexer +pygments.lexers.pawn.SourcePawnLexer +pygments.lexers.pawn.PawnLexer +pygments.lexers.pascal.AdaLexer +pygments.lexers.parsers.RagelLexer +pygments.lexers.parsers.RagelEmbeddedLexer +pygments.lexers.parsers.AntlrLexer +pygments.lexers.parsers.TreetopBaseLexer +pygments.lexers.parsers.EbnfLexer +pygments.lexers.php.ZephirLexer +pygments.lexers.php.PhpLexer +pygments.lexers.perl.PerlLexer +pygments.lexers.perl.Perl6Lexer +pygments.lexers.praat.PraatLexer +pygments.lexers.prolog.PrologLexer +pygments.lexers.prolog.LogtalkLexer +pygments.lexers.qvt.QVToLexer +pygments.lexers.rdf.SparqlLexer +pygments.lexers.rdf.TurtleLexer +pygments.lexers.python.PythonLexer +pygments.lexers.python.Python3Lexer +pygments.lexers.python.PythonTracebackLexer +pygments.lexers.python.Python3TracebackLexer +pygments.lexers.python.CythonLexer +pygments.lexers.python.DgLexer +pygments.lexers.rebol.RebolLexer +pygments.lexers.rebol.RedLexer +pygments.lexers.resource.ResourceLexer +pygments.lexers.rnc.RNCCompactLexer +pygments.lexers.roboconf.RoboconfGraphLexer +pygments.lexers.roboconf.RoboconfInstancesLexer +pygments.lexers.rust.RustLexer +pygments.lexers.ruby.RubyLexer +pygments.lexers.ruby.FancyLexer +pygments.lexers.sas.SASLexer +pygments.lexers.smalltalk.SmalltalkLexer +pygments.lexers.smalltalk.NewspeakLexer +pygments.lexers.smv.NuSMVLexer +pygments.lexers.shell.BashLexer +pygments.lexers.shell.BatchLexer +pygments.lexers.shell.TcshLexer +pygments.lexers.shell.PowerShellLexer +pygments.lexers.shell.FishShellLexer +pygments.lexers.snobol.SnobolLexer +pygments.lexers.scripting.LuaLexer +pygments.lexers.scripting.ChaiscriptLexer +pygments.lexers.scripting.LSLLexer +pygments.lexers.scripting.AppleScriptLexer +pygments.lexers.scripting.RexxLexer +pygments.lexers.scripting.MOOCodeLexer +pygments.lexers.scripting.HybrisLexer +pygments.lexers.scripting.EasytrieveLexer +pygments.lexers.scripting.JclLexer +pygments.lexers.supercollider.SuperColliderLexer +pygments.lexers.stata.StataLexer +pygments.lexers.tcl.TclLexer +pygments.lexers.sql.PostgresLexer +pygments.lexers.sql.PlPgsqlLexer +pygments.lexers.sql.PsqlRegexLexer +pygments.lexers.sql.SqlLexer +pygments.lexers.sql.TransactSqlLexer +pygments.lexers.sql.MySqlLexer +pygments.lexers.sql.RqlLexer +pygments.lexers.testing.GherkinLexer +pygments.lexers.testing.TAPLexer +pygments.lexers.textedit.AwkLexer 
+pygments.lexers.textedit.VimLexer +pygments.lexers.textfmts.IrcLogsLexer +pygments.lexers.textfmts.GettextLexer +pygments.lexers.textfmts.HttpLexer +pygments.lexers.textfmts.TodotxtLexer +pygments.lexers.trafficscript.RtsLexer +pygments.lexers.theorem.CoqLexer +pygments.lexers.theorem.IsabelleLexer +pygments.lexers.theorem.LeanLexer +pygments.lexers.templates.SmartyLexer +pygments.lexers.templates.VelocityLexer +pygments.lexers.templates.DjangoLexer +pygments.lexers.templates.MyghtyLexer +pygments.lexers.templates.MasonLexer +pygments.lexers.templates.MakoLexer +pygments.lexers.templates.CheetahLexer +pygments.lexers.templates.GenshiTextLexer +pygments.lexers.templates.GenshiMarkupLexer +pygments.lexers.templates.JspRootLexer +pygments.lexers.templates.EvoqueLexer +pygments.lexers.templates.ColdfusionLexer +pygments.lexers.templates.ColdfusionMarkupLexer +pygments.lexers.templates.TeaTemplateRootLexer +pygments.lexers.templates.HandlebarsLexer +pygments.lexers.templates.LiquidLexer +pygments.lexers.templates.TwigLexer +pygments.lexers.templates.Angular2Lexer +pygments.lexers.urbi.UrbiscriptLexer +pygments.lexers.typoscript.TypoScriptCssDataLexer +pygments.lexers.typoscript.TypoScriptHtmlDataLexer +pygments.lexers.typoscript.TypoScriptLexer +pygments.lexers.varnish.VCLLexer +pygments.lexers.verification.BoogieLexer +pygments.lexers.verification.SilverLexer +pygments.lexers.x10.X10Lexer +pygments.lexers.whiley.WhileyLexer +pygments.lexers.xorg.XorgLexer +pygments.lexers.webmisc.DuelLexer +pygments.lexers.webmisc.XQueryLexer +pygments.lexers.webmisc.QmlLexer +pygments.lexers.webmisc.CirruLexer +pygments.lexers.webmisc.SlimLexer diff --git a/vendor/github.com/alecthomas/chroma/regexp.go b/vendor/github.com/alecthomas/chroma/regexp.go new file mode 100644 index 00000000..7c0586b6 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/regexp.go @@ -0,0 +1,409 @@ +package chroma + +import ( + "fmt" + "os" + "regexp" + "strings" + "sync" + "unicode/utf8" + + "github.com/dlclark/regexp2" +) + +type Rule struct { + Pattern string + Type Emitter + Mutator Mutator +} + +// An Emitter takes group matches and returns tokens. +type Emitter interface { + // Emit tokens for the given regex groups. + Emit(groups []string, lexer Lexer) Iterator +} + +// EmitterFunc is a function that is an Emitter. +type EmitterFunc func(groups []string, lexer Lexer) Iterator + +// Emit tokens for groups. +func (e EmitterFunc) Emit(groups []string, lexer Lexer) Iterator { return e(groups, lexer) } + +// ByGroups emits a token for each matching group in the rule's regex. +func ByGroups(emitters ...Emitter) Emitter { + return EmitterFunc(func(groups []string, lexer Lexer) Iterator { + iterators := make([]Iterator, 0, len(groups)-1) + // NOTE: If this panics, there is a mismatch with groups + for i, group := range groups[1:] { + iterators = append(iterators, emitters[i].Emit([]string{group}, lexer)) + } + return Concaterator(iterators...) + }) +} + +// UsingByGroup emits tokens for the matched groups in the regex using a +// "sublexer". Used when lexing code blocks where the name of a sublexer is +// contained within the block, for example on a Markdown text block or SQL +// language block. +// +// The sublexer will be retrieved using sublexerGetFunc (typically +// internal.Get), using the captured value from the matched sublexerNameGroup. +// +// If sublexerGetFunc returns a non-nil lexer for the captured sublexerNameGroup, +// then tokens for the matched codeGroup will be emitted using the retrieved +// lexer. 
Otherwise, if the sublexer is nil, then tokens will be emitted from +// the passed emitter. +// +// Example: +// +// var Markdown = internal.Register(MustNewLexer( +// &Config{ +// Name: "markdown", +// Aliases: []string{"md", "mkd"}, +// Filenames: []string{"*.md", "*.mkd", "*.markdown"}, +// MimeTypes: []string{"text/x-markdown"}, +// }, +// Rules{ +// "root": { +// {"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)", +// UsingByGroup( +// internal.Get, +// 2, 4, +// String, String, String, Text, String, +// ), +// nil, +// }, +// }, +// }, +// )) +// +// See the lexers/m/markdown.go for the complete example. +// +// Note: panic's if the number emitters does not equal the number of matched +// groups in the regex. +func UsingByGroup(sublexerGetFunc func(string) Lexer, sublexerNameGroup, codeGroup int, emitters ...Emitter) Emitter { + return EmitterFunc(func(groups []string, lexer Lexer) Iterator { + // bounds check + if len(emitters) != len(groups)-1 { + panic("UsingByGroup expects number of emitters to be the same as len(groups)-1") + } + + // grab sublexer + sublexer := sublexerGetFunc(groups[sublexerNameGroup]) + + // build iterators + iterators := make([]Iterator, len(groups)-1) + for i, group := range groups[1:] { + if i == codeGroup-1 && sublexer != nil { + var err error + iterators[i], err = sublexer.Tokenise(nil, groups[codeGroup]) + if err != nil { + panic(err) + } + } else { + iterators[i] = emitters[i].Emit([]string{group}, lexer) + } + } + + return Concaterator(iterators...) + }) +} + +// Using returns an Emitter that uses a given Lexer for parsing and emitting. +func Using(lexer Lexer) Emitter { + return EmitterFunc(func(groups []string, _ Lexer) Iterator { + it, err := lexer.Tokenise(&TokeniseOptions{State: "root", Nested: true}, groups[0]) + if err != nil { + panic(err) + } + return it + }) +} + +// UsingSelf is like Using, but uses the current Lexer. +func UsingSelf(state string) Emitter { + return EmitterFunc(func(groups []string, lexer Lexer) Iterator { + it, err := lexer.Tokenise(&TokeniseOptions{State: state, Nested: true}, groups[0]) + if err != nil { + panic(err) + } + return it + }) +} + +// Words creates a regex that matches any of the given literal words. +func Words(prefix, suffix string, words ...string) string { + for i, word := range words { + words[i] = regexp.QuoteMeta(word) + } + return prefix + `(` + strings.Join(words, `|`) + `)` + suffix +} + +// Tokenise text using lexer, returning tokens as a slice. +func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]*Token, error) { + out := []*Token{} + it, err := lexer.Tokenise(options, text) + if err != nil { + return nil, err + } + for t := it(); t != nil; t = it() { + out = append(out, t) + } + return out, nil +} + +// Rules maps from state to a sequence of Rules. +type Rules map[string][]Rule + +func (r Rules) Clone() Rules { + out := map[string][]Rule{} + for key, rules := range r { + out[key] = make([]Rule, len(rules)) + copy(out[key], rules) + } + return out +} + +// MustNewLexer creates a new Lexer or panics. +func MustNewLexer(config *Config, rules Rules) *RegexLexer { + lexer, err := NewLexer(config, rules) + if err != nil { + panic(err) + } + return lexer +} + +// NewLexer creates a new regex-based Lexer. +// +// "rules" is a state machine transitition map. Each key is a state. Values are sets of rules +// that match input, optionally modify lexer state, and output tokens. 
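NewLexer, defined next, is where the Config flags used by the lexers above (NotMultiline, CaseInsensitive, DotAll) become the m, i and s inline regex flags that maybeCompile later prepends to every pattern, so each rule compiles roughly as ^(?mis)(?:pattern). A small sketch, not part of this diff; the lexer name and rules are invented for illustration.

package example

import (
	. "github.com/alecthomas/chroma" // nolint
)

// caseless leaves NotMultiline false and turns on CaseInsensitive and DotAll,
// so every rule pattern is lazily compiled with the m, i and s flags applied.
var caseless = MustNewLexer(
	&Config{Name: "Caseless", CaseInsensitive: true, DotAll: true},
	Rules{"root": {
		{`select`, Keyword, nil},
		{`.`, Text, nil},
	}},
)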
+func NewLexer(config *Config, rules Rules) (*RegexLexer, error) { + if config == nil { + config = &Config{} + } + if _, ok := rules["root"]; !ok { + return nil, fmt.Errorf("no \"root\" state") + } + compiledRules := map[string][]*CompiledRule{} + for state, rules := range rules { + compiledRules[state] = nil + for _, rule := range rules { + flags := "" + if !config.NotMultiline { + flags += "m" + } + if config.CaseInsensitive { + flags += "i" + } + if config.DotAll { + flags += "s" + } + compiledRules[state] = append(compiledRules[state], &CompiledRule{Rule: rule, flags: flags}) + } + } + return &RegexLexer{ + config: config, + rules: compiledRules, + }, nil +} + +func (r *RegexLexer) Trace(trace bool) *RegexLexer { + r.trace = trace + return r +} + +// A CompiledRule is a Rule with a pre-compiled regex. +// +// Note that regular expressions are lazily compiled on first use of the lexer. +type CompiledRule struct { + Rule + Regexp *regexp2.Regexp + flags string +} + +type CompiledRules map[string][]*CompiledRule + +type LexerState struct { + Lexer *RegexLexer + Text []rune + Pos int + Rules CompiledRules + Stack []string + State string + Rule int + // Group matches. + Groups []string + // Custum context for mutators. + MutatorContext map[interface{}]interface{} + iteratorStack []Iterator +} + +func (l *LexerState) Set(key interface{}, value interface{}) { + l.MutatorContext[key] = value +} + +func (l *LexerState) Get(key interface{}) interface{} { + return l.MutatorContext[key] +} + +func (l *LexerState) Iterator() *Token { + for l.Pos < len(l.Text) && len(l.Stack) > 0 { + // Exhaust the iterator stack, if any. + for len(l.iteratorStack) > 0 { + n := len(l.iteratorStack) - 1 + t := l.iteratorStack[n]() + if t == nil { + l.iteratorStack = l.iteratorStack[:n] + continue + } + return t + } + + l.State = l.Stack[len(l.Stack)-1] + if l.Lexer.trace { + fmt.Fprintf(os.Stderr, "%s: pos=%d, text=%q\n", l.State, l.Pos, string(l.Text[l.Pos:])) + } + selectedRule, ok := l.Rules[l.State] + if !ok { + panic("unknown state " + l.State) + } + ruleIndex, rule, groups := matchRules(l.Text[l.Pos:], selectedRule) + // No match. + if groups == nil { + l.Pos++ + return &Token{Error, string(l.Text[l.Pos-1 : l.Pos])} + } + l.Rule = ruleIndex + l.Groups = groups + l.Pos += utf8.RuneCountInString(groups[0]) + if rule.Mutator != nil { + if err := rule.Mutator.Mutate(l); err != nil { + panic(err) + } + } + if rule.Type != nil { + l.iteratorStack = append(l.iteratorStack, rule.Type.Emit(l.Groups, l.Lexer)) + } + } + // Exhaust the IteratorStack, if any. + // Duplicate code, but eh. + for len(l.iteratorStack) > 0 { + n := len(l.iteratorStack) - 1 + t := l.iteratorStack[n]() + if t == nil { + l.iteratorStack = l.iteratorStack[:n] + continue + } + return t + } + + // If we get to here and we still have text, return it as an error. + if l.Pos != len(l.Text) && len(l.Stack) == 0 { + value := string(l.Text[l.Pos:]) + l.Pos = len(l.Text) + return &Token{Type: Error, Value: value} + } + return nil +} + +type RegexLexer struct { + config *Config + analyser func(text string) float32 + trace bool + + mu sync.Mutex + compiled bool + rules map[string][]*CompiledRule +} + +// SetAnalyser sets the analyser function used to perform content inspection. 
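A hedged sketch of how that analyser hook might be used for content-based lexer detection; the shebang heuristic and the "shellish" single-rule lexer below are invented, while SetAnalyser and AnalyseText are the methods defined next:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/alecthomas/chroma"
)

func main() {
	// Throwaway single-rule lexer, purely to have something to attach the analyser to.
	lexer := chroma.MustNewLexer(&chroma.Config{Name: "shellish"}, chroma.Rules{
		"root": {{`.+`, chroma.Text, nil}},
	})
	// Score shebang-prefixed input highly; everything else scores zero.
	lexer.SetAnalyser(func(text string) float32 {
		if strings.HasPrefix(text, "#!") {
			return 1.0
		}
		return 0
	})
	fmt.Println(lexer.AnalyseText("#!/bin/sh\necho hi")) // 1
	fmt.Println(lexer.AnalyseText("plain prose"))        // 0
}
```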
+func (r *RegexLexer) SetAnalyser(analyser func(text string) float32) *RegexLexer { + r.analyser = analyser + return r +} + +func (r *RegexLexer) AnalyseText(text string) float32 { + if r.analyser != nil { + return r.analyser(text) + } + return 0.0 +} + +func (r *RegexLexer) Config() *Config { + return r.config +} + +// Regex compilation is deferred until the lexer is used. This is to avoid significant init() time costs. +func (r *RegexLexer) maybeCompile() (err error) { + r.mu.Lock() + defer r.mu.Unlock() + if r.compiled { + return nil + } + for state, rules := range r.rules { + for i, rule := range rules { + if rule.Regexp == nil { + rule.Regexp, err = regexp2.Compile("^(?"+rule.flags+")(?:"+rule.Pattern+")", 0) + if err != nil { + return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err) + } + } + } + } +restart: + seen := map[LexerMutator]bool{} + for state := range r.rules { + for i := 0; i < len(r.rules[state]); i++ { + rule := r.rules[state][i] + if compile, ok := rule.Mutator.(LexerMutator); ok { + if seen[compile] { + return fmt.Errorf("saw mutator %T twice; this should not happen", compile) + } + seen[compile] = true + if err := compile.MutateLexer(r.rules, state, i); err != nil { + return err + } + // Process the rules again in case the mutator added/removed rules. + // + // This sounds bad, but shouldn't be significant in practice. + goto restart + } + } + } + r.compiled = true + return nil +} + +func (r *RegexLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { + if err := r.maybeCompile(); err != nil { + return nil, err + } + if options == nil { + options = defaultOptions + } + if !options.Nested && r.config.EnsureNL && !strings.HasSuffix(text, "\n") { + text += "\n" + } + state := &LexerState{ + Lexer: r, + Text: []rune(text), + Stack: []string{options.State}, + Rules: r.rules, + MutatorContext: map[interface{}]interface{}{}, + } + return state.Iterator, nil +} + +func matchRules(text []rune, rules []*CompiledRule) (int, *CompiledRule, []string) { + for i, rule := range rules { + match, err := rule.Regexp.FindRunesMatch(text) + if match != nil && err == nil { + groups := []string{} + for _, g := range match.Groups() { + groups = append(groups, g.String()) + } + return i, rule, groups + } + } + return 0, &CompiledRule{}, nil +} diff --git a/vendor/github.com/alecthomas/chroma/remap.go b/vendor/github.com/alecthomas/chroma/remap.go new file mode 100644 index 00000000..223b5c60 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/remap.go @@ -0,0 +1,80 @@ +package chroma + +type remappingLexer struct { + lexer Lexer + mapper func(*Token) []*Token +} + +// RemappingLexer remaps a token to a set of, potentially empty, tokens. +func RemappingLexer(lexer Lexer, mapper func(*Token) []*Token) Lexer { + return &remappingLexer{lexer, mapper} +} + +func (r *remappingLexer) Config() *Config { + return r.lexer.Config() +} + +func (r *remappingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { + it, err := r.lexer.Tokenise(options, text) + if err != nil { + return nil, err + } + buffer := []*Token{} + return func() *Token { + for { + if len(buffer) > 0 { + t := buffer[0] + buffer = buffer[1:] + return t + } + t := it() + if t == nil { + return t + } + buffer = r.mapper(t) + } + }, nil +} + +type TypeMapping []struct { + From, To TokenType + Words []string +} + +// TypeRemappingLexer remaps types of tokens coming from a parent Lexer. +// +// eg. 
Map "defvaralias" tokens of type NameVariable to NameFunction: +// +// mapping := TypeMapping{ +// {NameVariable, NameFunction, []string{"defvaralias"}, +// } +// lexer = TypeRemappingLexer(lexer, mapping) +func TypeRemappingLexer(lexer Lexer, mapping TypeMapping) Lexer { + // Lookup table for fast remapping. + lut := map[TokenType]map[string]TokenType{} + for _, rt := range mapping { + km, ok := lut[rt.From] + if !ok { + km = map[string]TokenType{} + lut[rt.From] = km + } + if len(rt.Words) == 0 { + km[""] = rt.To + } else { + for _, k := range rt.Words { + km[k] = rt.To + } + } + + } + return RemappingLexer(lexer, func(t *Token) []*Token { + if k, ok := lut[t.Type]; ok { + if tt, ok := k[t.Value]; ok { + t.Type = tt + } else if tt, ok := k[""]; ok { + t.Type = tt + } + } + return []*Token{t} + }) +} diff --git a/vendor/github.com/alecthomas/chroma/style.go b/vendor/github.com/alecthomas/chroma/style.go new file mode 100644 index 00000000..c768293b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/style.go @@ -0,0 +1,315 @@ +package chroma + +import ( + "fmt" + "strings" +) + +// Trilean value for StyleEntry value inheritance. +type Trilean uint8 + +const ( + Pass Trilean = iota + Yes + No +) + +func (t Trilean) String() string { + switch t { + case Yes: + return "Yes" + case No: + return "No" + default: + return "Pass" + } +} + +func (t Trilean) Prefix(s string) string { + if t == Yes { + return s + } else if t == No { + return "no" + s + } + return "" +} + +// A StyleEntry in the Style map. +type StyleEntry struct { + // Hex colours. + Colour Colour + Background Colour + Border Colour + + Bold Trilean + Italic Trilean + Underline Trilean + NoInherit bool +} + +func (s StyleEntry) String() string { + out := []string{} + if s.Bold != Pass { + out = append(out, s.Bold.Prefix("bold")) + } + if s.Italic != Pass { + out = append(out, s.Italic.Prefix("italic")) + } + if s.Underline != Pass { + out = append(out, s.Underline.Prefix("underline")) + } + if s.NoInherit { + out = append(out, "noinherit") + } + if s.Colour.IsSet() { + out = append(out, s.Colour.String()) + } + if s.Background.IsSet() { + out = append(out, "bg:"+s.Background.String()) + } + if s.Border.IsSet() { + out = append(out, "border:"+s.Border.String()) + } + return strings.Join(out, " ") +} + +func (s StyleEntry) Sub(e StyleEntry) StyleEntry { + out := StyleEntry{} + if e.Colour != s.Colour { + out.Colour = s.Colour + } + if e.Background != s.Background { + out.Background = s.Background + } + if e.Bold != s.Bold { + out.Bold = s.Bold + } + if e.Italic != s.Italic { + out.Italic = s.Italic + } + if e.Underline != s.Underline { + out.Underline = s.Underline + } + if e.Border != s.Border { + out.Border = s.Border + } + return out +} + +// Inherit styles from ancestors. +// +// Ancestors should be provided from oldest to newest. 
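The merge rule is easiest to see with a small invented example (not from the vendored sources): fields the child entry leaves unset are filled in from the ancestors, while anything it sets explicitly wins.

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

func main() {
	parent := chroma.StyleEntry{
		Colour: chroma.ParseColour("#ff0000"),
		Italic: chroma.Yes,
	}
	child := chroma.StyleEntry{
		Colour: chroma.ParseColour("#0000ff"),
		Bold:   chroma.Yes,
	}
	// Ancestors go oldest to newest; the child keeps its own colour and bold
	// flag and picks up italic from the parent.
	fmt.Println(child.Inherit(parent)) // prints something like: bold italic #0000ff
}
```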
+func (s StyleEntry) Inherit(ancestors ...StyleEntry) StyleEntry { + out := s + for i := len(ancestors) - 1; i >= 0; i-- { + if out.NoInherit { + return out + } + ancestor := ancestors[i] + if !out.Colour.IsSet() { + out.Colour = ancestor.Colour + } + if !out.Background.IsSet() { + out.Background = ancestor.Background + } + if !out.Border.IsSet() { + out.Border = ancestor.Border + } + if out.Bold == Pass { + out.Bold = ancestor.Bold + } + if out.Italic == Pass { + out.Italic = ancestor.Italic + } + if out.Underline == Pass { + out.Underline = ancestor.Underline + } + } + return out +} + +func (s StyleEntry) IsZero() bool { + return s.Colour == 0 && s.Background == 0 && s.Border == 0 && s.Bold == Pass && s.Italic == Pass && + s.Underline == Pass && !s.NoInherit +} + +// A StyleBuilder is a mutable structure for building styles. +// +// Once built, a Style is immutable. +type StyleBuilder struct { + entries map[TokenType]string + name string + parent *Style +} + +func NewStyleBuilder(name string) *StyleBuilder { + return &StyleBuilder{name: name, entries: map[TokenType]string{}} +} + +func (s *StyleBuilder) AddAll(entries StyleEntries) *StyleBuilder { + for ttype, entry := range entries { + s.entries[ttype] = entry + } + return s +} + +func (s *StyleBuilder) Get(ttype TokenType) StyleEntry { + // This is less than ideal, but it's the price for having to check errors on each Add(). + entry, _ := ParseStyleEntry(s.entries[ttype]) + return entry.Inherit(s.parent.Get(ttype)) +} + +// Add an entry to the Style map. +// +// See http://pygments.org/docs/styles/#style-rules for details. +func (s *StyleBuilder) Add(ttype TokenType, entry string) *StyleBuilder { // nolint: gocyclo + s.entries[ttype] = entry + return s +} + +func (s *StyleBuilder) AddEntry(ttype TokenType, entry StyleEntry) *StyleBuilder { + s.entries[ttype] = entry.String() + return s +} + +func (s *StyleBuilder) Build() (*Style, error) { + style := &Style{ + Name: s.name, + entries: map[TokenType]StyleEntry{}, + parent: s.parent, + } + for ttype, descriptor := range s.entries { + entry, err := ParseStyleEntry(descriptor) + if err != nil { + return nil, fmt.Errorf("invalid entry for %s: %s", ttype, err) + } + style.entries[ttype] = entry + } + return style, nil +} + +// StyleEntries mapping TokenType to colour definition. +type StyleEntries map[TokenType]string + +// NewStyle creates a new style definition. +func NewStyle(name string, entries StyleEntries) (*Style, error) { + return NewStyleBuilder(name).AddAll(entries).Build() +} + +// MustNewStyle creates a new style or panics. +func MustNewStyle(name string, entries StyleEntries) *Style { + style, err := NewStyle(name, entries) + if err != nil { + panic(err) + } + return style +} + +// A Style definition. +// +// See http://pygments.org/docs/styles/ for details. Semantics are intended to be identical. +type Style struct { + Name string + entries map[TokenType]StyleEntry + parent *Style +} + +// Types that are styled. +func (s *Style) Types() []TokenType { + dedupe := map[TokenType]bool{} + for tt := range s.entries { + dedupe[tt] = true + } + if s.parent != nil { + for _, tt := range s.parent.Types() { + dedupe[tt] = true + } + } + out := make([]TokenType, 0, len(dedupe)) + for tt := range dedupe { + out = append(out, tt) + } + return out +} + +// Builder creates a mutable builder from this Style. +// +// The builder can then be safely modified. This is a cheap operation. 
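For instance, deriving a variant of an existing style looks roughly like the sketch below, leaning only on MustNewStyle, Builder, Add and Build as declared in this file; the "base" style and its two entries are made up for illustration:

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

func main() {
	base := chroma.MustNewStyle("base", chroma.StyleEntries{
		chroma.Comment: "italic #888888",
		chroma.Keyword: "bold #000080",
	})
	// Recolour keywords; everything else resolves through the parent style.
	variant, err := base.Builder().Add(chroma.Keyword, "bold #007020").Build()
	if err != nil {
		panic(err)
	}
	fmt.Println(variant.Get(chroma.Keyword)) // bold #007020
	fmt.Println(variant.Get(chroma.Comment)) // italic #888888, via the parent
}
```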
+func (s *Style) Builder() *StyleBuilder { + return &StyleBuilder{ + name: s.Name, + entries: map[TokenType]string{}, + parent: s, + } +} + +// Has checks if an exact style entry match exists for a token type. +// +// This is distinct from Get() which will merge parent tokens. +func (s *Style) Has(ttype TokenType) bool { + return !s.get(ttype).IsZero() +} + +func (s *Style) get(ttype TokenType) StyleEntry { + out := s.entries[ttype] + if out.IsZero() && s.parent != nil { + return s.parent.get(ttype) + } + return out +} + +// Get a style entry. Will try sub-category or category if an exact match is not found, and +// finally return the Background. +func (s *Style) Get(ttype TokenType) StyleEntry { + return s.get(ttype).Inherit( + s.get(Background), + s.get(Text), + s.get(ttype.Category()), + s.get(ttype.SubCategory())) +} + +// ParseStyleEntry parses a Pygments style entry. +func ParseStyleEntry(entry string) (StyleEntry, error) { // nolint: gocyclo + out := StyleEntry{} + parts := strings.Fields(entry) + for _, part := range parts { + switch { + case part == "italic": + out.Italic = Yes + case part == "noitalic": + out.Italic = No + case part == "bold": + out.Bold = Yes + case part == "nobold": + out.Bold = No + case part == "underline": + out.Underline = Yes + case part == "nounderline": + out.Underline = No + case part == "inherit": + out.NoInherit = false + case part == "noinherit": + out.NoInherit = true + case part == "bg:": + out.Background = 0 + case strings.HasPrefix(part, "bg:#"): + out.Background = ParseColour(part[3:]) + if !out.Background.IsSet() { + return StyleEntry{}, fmt.Errorf("invalid background colour %q", part) + } + case strings.HasPrefix(part, "border:#"): + out.Border = ParseColour(part[7:]) + if !out.Border.IsSet() { + return StyleEntry{}, fmt.Errorf("invalid border colour %q", part) + } + case strings.HasPrefix(part, "#"): + out.Colour = ParseColour(part) + if !out.Colour.IsSet() { + return StyleEntry{}, fmt.Errorf("invalid colour %q", part) + } + default: + return StyleEntry{}, fmt.Errorf("unknown style element %q", part) + } + } + return out, nil +} diff --git a/vendor/github.com/alecthomas/chroma/styles/abap.go b/vendor/github.com/alecthomas/chroma/styles/abap.go new file mode 100644 index 00000000..b6d07fb2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/abap.go @@ -0,0 +1,18 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Abap style. +var Abap = Register(chroma.MustNewStyle("abap", chroma.StyleEntries{ + chroma.Comment: "italic #888", + chroma.CommentSpecial: "#888", + chroma.Keyword: "#00f", + chroma.OperatorWord: "#00f", + chroma.Name: "#000", + chroma.LiteralNumber: "#3af", + chroma.LiteralString: "#5a2", + chroma.Error: "#F00", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/algol.go b/vendor/github.com/alecthomas/chroma/styles/algol.go new file mode 100644 index 00000000..1e8a7b44 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/algol.go @@ -0,0 +1,25 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Algol style. 
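Each of these style definitions, including the Algol one that follows, is written in the entry mini-language handled by ParseStyleEntry above; a hedged sketch of parsing one such entry directly (the entry string itself is invented):

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
)

func main() {
	// Flags, a foreground colour, and a bg:-prefixed background colour.
	entry, err := chroma.ParseStyleEntry("bold italic #888 bg:#ffffff")
	if err != nil {
		panic(err)
	}
	fmt.Println(entry.Bold == chroma.Yes, entry.Background.IsSet()) // true true
	fmt.Println(entry) // re-rendered through StyleEntry.String
}
```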
+var Algol = Register(chroma.MustNewStyle("algol", chroma.StyleEntries{ + chroma.Comment: "italic #888", + chroma.CommentPreproc: "bold noitalic #888", + chroma.CommentSpecial: "bold noitalic #888", + chroma.Keyword: "underline bold", + chroma.KeywordDeclaration: "italic", + chroma.NameBuiltin: "bold italic", + chroma.NameBuiltinPseudo: "bold italic", + chroma.NameNamespace: "bold italic #666", + chroma.NameClass: "bold italic #666", + chroma.NameFunction: "bold italic #666", + chroma.NameVariable: "bold italic #666", + chroma.NameConstant: "bold italic #666", + chroma.OperatorWord: "bold", + chroma.LiteralString: "italic #666", + chroma.Error: "border:#FF0000", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/algol_nu.go b/vendor/github.com/alecthomas/chroma/styles/algol_nu.go new file mode 100644 index 00000000..3d278735 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/algol_nu.go @@ -0,0 +1,25 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Algol_Nu style. +var Algol_Nu = Register(chroma.MustNewStyle("algol_nu", chroma.StyleEntries{ + chroma.Comment: "italic #888", + chroma.CommentPreproc: "bold noitalic #888", + chroma.CommentSpecial: "bold noitalic #888", + chroma.Keyword: "bold", + chroma.KeywordDeclaration: "italic", + chroma.NameBuiltin: "bold italic", + chroma.NameBuiltinPseudo: "bold italic", + chroma.NameNamespace: "bold italic #666", + chroma.NameClass: "bold italic #666", + chroma.NameFunction: "bold italic #666", + chroma.NameVariable: "bold italic #666", + chroma.NameConstant: "bold italic #666", + chroma.OperatorWord: "bold", + chroma.LiteralString: "italic #666", + chroma.Error: "border:#FF0000", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/api.go b/vendor/github.com/alecthomas/chroma/styles/api.go new file mode 100644 index 00000000..f3ce6739 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/api.go @@ -0,0 +1,37 @@ +package styles + +import ( + "sort" + + "github.com/alecthomas/chroma" +) + +// Registry of Styles. +var Registry = map[string]*chroma.Style{} + +// Fallback style. Reassign to change the default fallback style. +var Fallback = SwapOff + +// Register a chroma.Style. +func Register(style *chroma.Style) *chroma.Style { + Registry[style.Name] = style + return style +} + +// Names of all available styles. +func Names() []string { + out := []string{} + for name := range Registry { + out = append(out, name) + } + sort.Strings(out) + return out +} + +// Get named style, or Fallback. +func Get(name string) *chroma.Style { + if style, ok := Registry[name]; ok { + return style + } + return Fallback +} diff --git a/vendor/github.com/alecthomas/chroma/styles/arduino.go b/vendor/github.com/alecthomas/chroma/styles/arduino.go new file mode 100644 index 00000000..9e48fb4a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/arduino.go @@ -0,0 +1,25 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Arduino style. 
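The registry helpers in styles/api.go above are the lookup surface for the definitions in this package, including the Arduino style that follows; a small hypothetical sketch of going through them (the "no-such-style" name is deliberately unregistered):

```go
package main

import (
	"fmt"

	"github.com/alecthomas/chroma/styles"
)

func main() {
	// All registered style names, sorted, as returned by Names().
	for _, name := range styles.Names() {
		fmt.Println(name)
	}
	// Get never returns nil: unknown names resolve to styles.Fallback.
	s := styles.Get("no-such-style")
	fmt.Println(s.Name)
}
```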
+var Arduino = Register(chroma.MustNewStyle("arduino", chroma.StyleEntries{ + chroma.Error: "#a61717", + chroma.Comment: "#95a5a6", + chroma.CommentPreproc: "#728E00", + chroma.Keyword: "#728E00", + chroma.KeywordConstant: "#00979D", + chroma.KeywordPseudo: "#00979D", + chroma.KeywordReserved: "#00979D", + chroma.KeywordType: "#00979D", + chroma.Operator: "#728E00", + chroma.Name: "#434f54", + chroma.NameBuiltin: "#728E00", + chroma.NameFunction: "#D35400", + chroma.NameOther: "#728E00", + chroma.LiteralNumber: "#8A7B52", + chroma.LiteralString: "#7F8C8D", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/autumn.go b/vendor/github.com/alecthomas/chroma/styles/autumn.go new file mode 100644 index 00000000..3966372b --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/autumn.go @@ -0,0 +1,43 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Autumn style. +var Autumn = Register(chroma.MustNewStyle("autumn", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "italic #aaaaaa", + chroma.CommentPreproc: "noitalic #4c8317", + chroma.CommentSpecial: "italic #0000aa", + chroma.Keyword: "#0000aa", + chroma.KeywordType: "#00aaaa", + chroma.OperatorWord: "#0000aa", + chroma.NameBuiltin: "#00aaaa", + chroma.NameFunction: "#00aa00", + chroma.NameClass: "underline #00aa00", + chroma.NameNamespace: "underline #00aaaa", + chroma.NameVariable: "#aa0000", + chroma.NameConstant: "#aa0000", + chroma.NameEntity: "bold #800", + chroma.NameAttribute: "#1e90ff", + chroma.NameTag: "bold #1e90ff", + chroma.NameDecorator: "#888888", + chroma.LiteralString: "#aa5500", + chroma.LiteralStringSymbol: "#0000aa", + chroma.LiteralStringRegex: "#009999", + chroma.LiteralNumber: "#009999", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#aa0000", + chroma.GenericInserted: "#00aa00", + chroma.GenericError: "#aa0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "#555555", + chroma.GenericOutput: "#888888", + chroma.GenericTraceback: "#aa0000", + chroma.GenericUnderline: "underline", + chroma.Error: "#F00 bg:#FAA", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/borland.go b/vendor/github.com/alecthomas/chroma/styles/borland.go new file mode 100644 index 00000000..9c0fff6f --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/borland.go @@ -0,0 +1,33 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Borland style. 
+var Borland = Register(chroma.MustNewStyle("borland", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "italic #008800", + chroma.CommentPreproc: "noitalic #008080", + chroma.CommentSpecial: "noitalic bold", + chroma.LiteralString: "#0000FF", + chroma.LiteralStringChar: "#800080", + chroma.LiteralNumber: "#0000FF", + chroma.Keyword: "bold #000080", + chroma.OperatorWord: "bold", + chroma.NameTag: "bold #000080", + chroma.NameAttribute: "#FF0000", + chroma.GenericHeading: "#999999", + chroma.GenericSubheading: "#aaaaaa", + chroma.GenericDeleted: "bg:#ffdddd #000000", + chroma.GenericInserted: "bg:#ddffdd #000000", + chroma.GenericError: "#aa0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "#555555", + chroma.GenericOutput: "#888888", + chroma.GenericTraceback: "#aa0000", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#e3d2d2 #a61717", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/bw.go b/vendor/github.com/alecthomas/chroma/styles/bw.go new file mode 100644 index 00000000..3e800d5a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/bw.go @@ -0,0 +1,30 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// BlackWhite style. +var BlackWhite = Register(chroma.MustNewStyle("bw", chroma.StyleEntries{ + chroma.Comment: "italic", + chroma.CommentPreproc: "noitalic", + chroma.Keyword: "bold", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "nobold", + chroma.OperatorWord: "bold", + chroma.NameClass: "bold", + chroma.NameNamespace: "bold", + chroma.NameException: "bold", + chroma.NameEntity: "bold", + chroma.NameTag: "bold", + chroma.LiteralString: "italic", + chroma.LiteralStringInterpol: "bold", + chroma.LiteralStringEscape: "bold", + chroma.GenericHeading: "bold", + chroma.GenericSubheading: "bold", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold", + chroma.Error: "border:#FF0000", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/colorful.go b/vendor/github.com/alecthomas/chroma/styles/colorful.go new file mode 100644 index 00000000..dc77c5bf --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/colorful.go @@ -0,0 +1,59 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Colorful style. 
+var Colorful = Register(chroma.MustNewStyle("colorful", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "#888", + chroma.CommentPreproc: "#579", + chroma.CommentSpecial: "bold #cc0000", + chroma.Keyword: "bold #080", + chroma.KeywordPseudo: "#038", + chroma.KeywordType: "#339", + chroma.Operator: "#333", + chroma.OperatorWord: "bold #000", + chroma.NameBuiltin: "#007020", + chroma.NameFunction: "bold #06B", + chroma.NameClass: "bold #B06", + chroma.NameNamespace: "bold #0e84b5", + chroma.NameException: "bold #F00", + chroma.NameVariable: "#963", + chroma.NameVariableInstance: "#33B", + chroma.NameVariableClass: "#369", + chroma.NameVariableGlobal: "bold #d70", + chroma.NameConstant: "bold #036", + chroma.NameLabel: "bold #970", + chroma.NameEntity: "bold #800", + chroma.NameAttribute: "#00C", + chroma.NameTag: "#070", + chroma.NameDecorator: "bold #555", + chroma.LiteralString: "bg:#fff0f0", + chroma.LiteralStringChar: "#04D bg:", + chroma.LiteralStringDoc: "#D42 bg:", + chroma.LiteralStringInterpol: "bg:#eee", + chroma.LiteralStringEscape: "bold #666", + chroma.LiteralStringRegex: "bg:#fff0ff #000", + chroma.LiteralStringSymbol: "#A60 bg:", + chroma.LiteralStringOther: "#D20", + chroma.LiteralNumber: "bold #60E", + chroma.LiteralNumberInteger: "bold #00D", + chroma.LiteralNumberFloat: "bold #60E", + chroma.LiteralNumberHex: "bold #058", + chroma.LiteralNumberOct: "bold #40E", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#A00000", + chroma.GenericInserted: "#00A000", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #c65d09", + chroma.GenericOutput: "#888", + chroma.GenericTraceback: "#04D", + chroma.GenericUnderline: "underline", + chroma.Error: "#F00 bg:#FAA", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/dracula.go b/vendor/github.com/alecthomas/chroma/styles/dracula.go new file mode 100644 index 00000000..46e9d5b5 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/dracula.go @@ -0,0 +1,81 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Dracula Style +var Dracula = Register(chroma.MustNewStyle("dracula", chroma.StyleEntries{ + chroma.Comment: "#6272a4", + chroma.CommentHashbang: "#6272a4", + chroma.CommentMultiline: "#6272a4", + chroma.CommentPreproc: "#ff79c6", + chroma.CommentSingle: "#6272a4", + chroma.CommentSpecial: "#6272a4", + chroma.Generic: "#f8f8f2", + chroma.GenericDeleted: "#8b080b", + chroma.GenericEmph: "#f8f8f2 underline", + chroma.GenericError: "#f8f8f2", + chroma.GenericHeading: "#f8f8f2 bold", + chroma.GenericInserted: "#f8f8f2 bold", + chroma.GenericOutput: "#44475a", + chroma.GenericPrompt: "#f8f8f2", + chroma.GenericStrong: "#f8f8f2", + chroma.GenericSubheading: "#f8f8f2 bold", + chroma.GenericTraceback: "#f8f8f2", + chroma.GenericUnderline: "underline", + chroma.Error: "#f8f8f2", + chroma.Keyword: "#ff79c6", + chroma.KeywordConstant: "#ff79c6", + chroma.KeywordDeclaration: "#8be9fd italic", + chroma.KeywordNamespace: "#ff79c6", + chroma.KeywordPseudo: "#ff79c6", + chroma.KeywordReserved: "#ff79c6", + chroma.KeywordType: "#8be9fd", + chroma.Literal: "#f8f8f2", + chroma.LiteralDate: "#f8f8f2", + chroma.Name: "#f8f8f2", + chroma.NameAttribute: "#50fa7b", + chroma.NameBuiltin: "#8be9fd italic", + chroma.NameBuiltinPseudo: "#f8f8f2", + chroma.NameClass: "#50fa7b", + chroma.NameConstant: "#f8f8f2", + 
chroma.NameDecorator: "#f8f8f2", + chroma.NameEntity: "#f8f8f2", + chroma.NameException: "#f8f8f2", + chroma.NameFunction: "#50fa7b", + chroma.NameLabel: "#8be9fd italic", + chroma.NameNamespace: "#f8f8f2", + chroma.NameOther: "#f8f8f2", + chroma.NameTag: "#ff79c6", + chroma.NameVariable: "#8be9fd italic", + chroma.NameVariableClass: "#8be9fd italic", + chroma.NameVariableGlobal: "#8be9fd italic", + chroma.NameVariableInstance: "#8be9fd italic", + chroma.LiteralNumber: "#bd93f9", + chroma.LiteralNumberBin: "#bd93f9", + chroma.LiteralNumberFloat: "#bd93f9", + chroma.LiteralNumberHex: "#bd93f9", + chroma.LiteralNumberInteger: "#bd93f9", + chroma.LiteralNumberIntegerLong: "#bd93f9", + chroma.LiteralNumberOct: "#bd93f9", + chroma.Operator: "#ff79c6", + chroma.OperatorWord: "#ff79c6", + chroma.Other: "#f8f8f2", + chroma.Punctuation: "#f8f8f2", + chroma.LiteralString: "#f1fa8c", + chroma.LiteralStringBacktick: "#f1fa8c", + chroma.LiteralStringChar: "#f1fa8c", + chroma.LiteralStringDoc: "#f1fa8c", + chroma.LiteralStringDouble: "#f1fa8c", + chroma.LiteralStringEscape: "#f1fa8c", + chroma.LiteralStringHeredoc: "#f1fa8c", + chroma.LiteralStringInterpol: "#f1fa8c", + chroma.LiteralStringOther: "#f1fa8c", + chroma.LiteralStringRegex: "#f1fa8c", + chroma.LiteralStringSingle: "#f1fa8c", + chroma.LiteralStringSymbol: "#f1fa8c", + chroma.Text: "#f8f8f2", + chroma.TextWhitespace: "#f8f8f2", + chroma.Background: " bg:#282a36", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/emacs.go b/vendor/github.com/alecthomas/chroma/styles/emacs.go new file mode 100644 index 00000000..4835abd7 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/emacs.go @@ -0,0 +1,51 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Emacs style. +var Emacs = Register(chroma.MustNewStyle("emacs", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "italic #008800", + chroma.CommentPreproc: "noitalic", + chroma.CommentSpecial: "noitalic bold", + chroma.Keyword: "bold #AA22FF", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "bold #00BB00", + chroma.Operator: "#666666", + chroma.OperatorWord: "bold #AA22FF", + chroma.NameBuiltin: "#AA22FF", + chroma.NameFunction: "#00A000", + chroma.NameClass: "#0000FF", + chroma.NameNamespace: "bold #0000FF", + chroma.NameException: "bold #D2413A", + chroma.NameVariable: "#B8860B", + chroma.NameConstant: "#880000", + chroma.NameLabel: "#A0A000", + chroma.NameEntity: "bold #999999", + chroma.NameAttribute: "#BB4444", + chroma.NameTag: "bold #008000", + chroma.NameDecorator: "#AA22FF", + chroma.LiteralString: "#BB4444", + chroma.LiteralStringDoc: "italic", + chroma.LiteralStringInterpol: "bold #BB6688", + chroma.LiteralStringEscape: "bold #BB6622", + chroma.LiteralStringRegex: "#BB6688", + chroma.LiteralStringSymbol: "#B8860B", + chroma.LiteralStringOther: "#008000", + chroma.LiteralNumber: "#666666", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#A00000", + chroma.GenericInserted: "#00A000", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #000080", + chroma.GenericOutput: "#888", + chroma.GenericTraceback: "#04D", + chroma.GenericUnderline: "underline", + chroma.Error: "border:#FF0000", + chroma.Background: " bg:#f8f8f8", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/friendly.go b/vendor/github.com/alecthomas/chroma/styles/friendly.go new file mode 100644 index 
00000000..ad02341d --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/friendly.go @@ -0,0 +1,51 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Friendly style. +var Friendly = Register(chroma.MustNewStyle("friendly", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "italic #60a0b0", + chroma.CommentPreproc: "noitalic #007020", + chroma.CommentSpecial: "noitalic bg:#fff0f0", + chroma.Keyword: "bold #007020", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "nobold #902000", + chroma.Operator: "#666666", + chroma.OperatorWord: "bold #007020", + chroma.NameBuiltin: "#007020", + chroma.NameFunction: "#06287e", + chroma.NameClass: "bold #0e84b5", + chroma.NameNamespace: "bold #0e84b5", + chroma.NameException: "#007020", + chroma.NameVariable: "#bb60d5", + chroma.NameConstant: "#60add5", + chroma.NameLabel: "bold #002070", + chroma.NameEntity: "bold #d55537", + chroma.NameAttribute: "#4070a0", + chroma.NameTag: "bold #062873", + chroma.NameDecorator: "bold #555555", + chroma.LiteralString: "#4070a0", + chroma.LiteralStringDoc: "italic", + chroma.LiteralStringInterpol: "italic #70a0d0", + chroma.LiteralStringEscape: "bold #4070a0", + chroma.LiteralStringRegex: "#235388", + chroma.LiteralStringSymbol: "#517918", + chroma.LiteralStringOther: "#c65d09", + chroma.LiteralNumber: "#40a070", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#A00000", + chroma.GenericInserted: "#00A000", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #c65d09", + chroma.GenericOutput: "#888", + chroma.GenericTraceback: "#04D", + chroma.GenericUnderline: "underline", + chroma.Error: "border:#FF0000", + chroma.Background: " bg:#f0f0f0", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/fruity.go b/vendor/github.com/alecthomas/chroma/styles/fruity.go new file mode 100644 index 00000000..c2577fa2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/fruity.go @@ -0,0 +1,26 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Fruity style. +var Fruity = Register(chroma.MustNewStyle("fruity", chroma.StyleEntries{ + chroma.TextWhitespace: "#888888", + chroma.Background: "#ffffff bg:#111111", + chroma.GenericOutput: "#444444 bg:#222222", + chroma.Keyword: "#fb660a bold", + chroma.KeywordPseudo: "nobold", + chroma.LiteralNumber: "#0086f7 bold", + chroma.NameTag: "#fb660a bold", + chroma.NameVariable: "#fb660a", + chroma.Comment: "#008800 bg:#0f140f italic", + chroma.NameAttribute: "#ff0086 bold", + chroma.LiteralString: "#0086d2", + chroma.NameFunction: "#ff0086 bold", + chroma.GenericHeading: "#ffffff bold", + chroma.KeywordType: "#cdcaa9 bold", + chroma.GenericSubheading: "#ffffff bold", + chroma.NameConstant: "#0086d2", + chroma.CommentPreproc: "#ff0007 bold", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/github.go b/vendor/github.com/alecthomas/chroma/styles/github.go new file mode 100644 index 00000000..7ef2481c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/github.go @@ -0,0 +1,51 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// GitHub style. 
+var GitHub = Register(chroma.MustNewStyle("github", chroma.StyleEntries{ + chroma.CommentMultiline: "italic #999988", + chroma.CommentPreproc: "bold #999999", + chroma.CommentSingle: "italic #999988", + chroma.CommentSpecial: "bold italic #999999", + chroma.Comment: "italic #999988", + chroma.Error: "bg:#e3d2d2 #a61717", + chroma.GenericDeleted: "bg:#ffdddd #000000", + chroma.GenericEmph: "italic #000000", + chroma.GenericError: "#aa0000", + chroma.GenericHeading: "#999999", + chroma.GenericInserted: "bg:#ddffdd #000000", + chroma.GenericOutput: "#888888", + chroma.GenericPrompt: "#555555", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "#aaaaaa", + chroma.GenericTraceback: "#aa0000", + chroma.GenericUnderline: "underline", + chroma.KeywordType: "bold #445588", + chroma.Keyword: "bold #000000", + chroma.LiteralNumber: "#009999", + chroma.LiteralStringRegex: "#009926", + chroma.LiteralStringSymbol: "#990073", + chroma.LiteralString: "#d14", + chroma.NameAttribute: "#008080", + chroma.NameBuiltinPseudo: "#999999", + chroma.NameBuiltin: "#0086B3", + chroma.NameClass: "bold #445588", + chroma.NameConstant: "#008080", + chroma.NameDecorator: "bold #3c5d5d", + chroma.NameEntity: "#800080", + chroma.NameException: "bold #990000", + chroma.NameFunction: "bold #990000", + chroma.NameLabel: "bold #990000", + chroma.NameNamespace: "#555555", + chroma.NameTag: "#000080", + chroma.NameVariableClass: "#008080", + chroma.NameVariableGlobal: "#008080", + chroma.NameVariableInstance: "#008080", + chroma.NameVariable: "#008080", + chroma.Operator: "bold #000000", + chroma.TextWhitespace: "#bbbbbb", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/igor.go b/vendor/github.com/alecthomas/chroma/styles/igor.go new file mode 100644 index 00000000..6a6d4cd0 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/igor.go @@ -0,0 +1,16 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Igor style. +var Igor = Register(chroma.MustNewStyle("igor", chroma.StyleEntries{ + chroma.Comment: "italic #FF0000", + chroma.Keyword: "#0000FF", + chroma.NameFunction: "#C34E00", + chroma.NameDecorator: "#CC00A3", + chroma.NameClass: "#007575", + chroma.LiteralString: "#009C00", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/lovelace.go b/vendor/github.com/alecthomas/chroma/styles/lovelace.go new file mode 100644 index 00000000..074cc089 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/lovelace.go @@ -0,0 +1,60 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Lovelace style. 
+var Lovelace = Register(chroma.MustNewStyle("lovelace", chroma.StyleEntries{ + chroma.TextWhitespace: "#a89028", + chroma.Comment: "italic #888888", + chroma.CommentHashbang: "#287088", + chroma.CommentMultiline: "#888888", + chroma.CommentPreproc: "noitalic #289870", + chroma.Keyword: "#2838b0", + chroma.KeywordConstant: "italic #444444", + chroma.KeywordDeclaration: "italic", + chroma.KeywordType: "italic", + chroma.Operator: "#666666", + chroma.OperatorWord: "#a848a8", + chroma.Punctuation: "#888888", + chroma.NameAttribute: "#388038", + chroma.NameBuiltin: "#388038", + chroma.NameBuiltinPseudo: "italic", + chroma.NameClass: "#287088", + chroma.NameConstant: "#b85820", + chroma.NameDecorator: "#287088", + chroma.NameEntity: "#709030", + chroma.NameException: "#908828", + chroma.NameFunction: "#785840", + chroma.NameFunctionMagic: "#b85820", + chroma.NameLabel: "#289870", + chroma.NameNamespace: "#289870", + chroma.NameTag: "#2838b0", + chroma.NameVariable: "#b04040", + chroma.NameVariableGlobal: "#908828", + chroma.NameVariableMagic: "#b85820", + chroma.LiteralString: "#b83838", + chroma.LiteralStringAffix: "#444444", + chroma.LiteralStringChar: "#a848a8", + chroma.LiteralStringDelimiter: "#b85820", + chroma.LiteralStringDoc: "italic #b85820", + chroma.LiteralStringEscape: "#709030", + chroma.LiteralStringInterpol: "underline", + chroma.LiteralStringOther: "#a848a8", + chroma.LiteralStringRegex: "#a848a8", + chroma.LiteralNumber: "#444444", + chroma.GenericDeleted: "#c02828", + chroma.GenericEmph: "italic", + chroma.GenericError: "#c02828", + chroma.GenericHeading: "#666666", + chroma.GenericSubheading: "#444444", + chroma.GenericInserted: "#388038", + chroma.GenericOutput: "#666666", + chroma.GenericPrompt: "#444444", + chroma.GenericStrong: "bold", + chroma.GenericTraceback: "#2838b0", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#a848a8", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/manni.go b/vendor/github.com/alecthomas/chroma/styles/manni.go new file mode 100644 index 00000000..9942e7d0 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/manni.go @@ -0,0 +1,51 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Manni style. 
+var Manni = Register(chroma.MustNewStyle("manni", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "italic #0099FF", + chroma.CommentPreproc: "noitalic #009999", + chroma.CommentSpecial: "bold", + chroma.Keyword: "bold #006699", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "#007788", + chroma.Operator: "#555555", + chroma.OperatorWord: "bold #000000", + chroma.NameBuiltin: "#336666", + chroma.NameFunction: "#CC00FF", + chroma.NameClass: "bold #00AA88", + chroma.NameNamespace: "bold #00CCFF", + chroma.NameException: "bold #CC0000", + chroma.NameVariable: "#003333", + chroma.NameConstant: "#336600", + chroma.NameLabel: "#9999FF", + chroma.NameEntity: "bold #999999", + chroma.NameAttribute: "#330099", + chroma.NameTag: "bold #330099", + chroma.NameDecorator: "#9999FF", + chroma.LiteralString: "#CC3300", + chroma.LiteralStringDoc: "italic", + chroma.LiteralStringInterpol: "#AA0000", + chroma.LiteralStringEscape: "bold #CC3300", + chroma.LiteralStringRegex: "#33AAAA", + chroma.LiteralStringSymbol: "#FFCC33", + chroma.LiteralStringOther: "#CC3300", + chroma.LiteralNumber: "#FF6600", + chroma.GenericHeading: "bold #003300", + chroma.GenericSubheading: "bold #003300", + chroma.GenericDeleted: "border:#CC0000 bg:#FFCCCC", + chroma.GenericInserted: "border:#00CC00 bg:#CCFFCC", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #000099", + chroma.GenericOutput: "#AAAAAA", + chroma.GenericTraceback: "#99CC66", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#FFAAAA #AA0000", + chroma.Background: " bg:#f0f3f3", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/monokai.go b/vendor/github.com/alecthomas/chroma/styles/monokai.go new file mode 100644 index 00000000..2586795a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/monokai.go @@ -0,0 +1,36 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Monokai style. +var Monokai = Register(chroma.MustNewStyle("monokai", chroma.StyleEntries{ + chroma.Text: "#f8f8f2", + chroma.Error: "#960050 bg:#1e0010", + chroma.Comment: "#75715e", + chroma.Keyword: "#66d9ef", + chroma.KeywordNamespace: "#f92672", + chroma.Operator: "#f92672", + chroma.Punctuation: "#f8f8f2", + chroma.Name: "#f8f8f2", + chroma.NameAttribute: "#a6e22e", + chroma.NameClass: "#a6e22e", + chroma.NameConstant: "#66d9ef", + chroma.NameDecorator: "#a6e22e", + chroma.NameException: "#a6e22e", + chroma.NameFunction: "#a6e22e", + chroma.NameOther: "#a6e22e", + chroma.NameTag: "#f92672", + chroma.LiteralNumber: "#ae81ff", + chroma.Literal: "#ae81ff", + chroma.LiteralDate: "#e6db74", + chroma.LiteralString: "#e6db74", + chroma.LiteralStringEscape: "#ae81ff", + chroma.GenericDeleted: "#f92672", + chroma.GenericEmph: "italic", + chroma.GenericInserted: "#a6e22e", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "#75715e", + chroma.Background: "bg:#272822", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/monokailight.go b/vendor/github.com/alecthomas/chroma/styles/monokailight.go new file mode 100644 index 00000000..61818a68 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/monokailight.go @@ -0,0 +1,33 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// MonokaiLight style. 
+var MonokaiLight = Register(chroma.MustNewStyle("monokailight", chroma.StyleEntries{ + chroma.Text: "#272822", + chroma.Error: "#960050 bg:#1e0010", + chroma.Comment: "#75715e", + chroma.Keyword: "#00a8c8", + chroma.KeywordNamespace: "#f92672", + chroma.Operator: "#f92672", + chroma.Punctuation: "#111111", + chroma.Name: "#111111", + chroma.NameAttribute: "#75af00", + chroma.NameClass: "#75af00", + chroma.NameConstant: "#00a8c8", + chroma.NameDecorator: "#75af00", + chroma.NameException: "#75af00", + chroma.NameFunction: "#75af00", + chroma.NameOther: "#75af00", + chroma.NameTag: "#f92672", + chroma.LiteralNumber: "#ae81ff", + chroma.Literal: "#ae81ff", + chroma.LiteralDate: "#d88200", + chroma.LiteralString: "#d88200", + chroma.LiteralStringEscape: "#8045FF", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.Background: " bg:#fafafa", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/murphy.go b/vendor/github.com/alecthomas/chroma/styles/murphy.go new file mode 100644 index 00000000..90e83c76 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/murphy.go @@ -0,0 +1,59 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Murphy style. +var Murphy = Register(chroma.MustNewStyle("murphy", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "#666 italic", + chroma.CommentPreproc: "#579 noitalic", + chroma.CommentSpecial: "#c00 bold", + chroma.Keyword: "bold #289", + chroma.KeywordPseudo: "#08f", + chroma.KeywordType: "#66f", + chroma.Operator: "#333", + chroma.OperatorWord: "bold #000", + chroma.NameBuiltin: "#072", + chroma.NameFunction: "bold #5ed", + chroma.NameClass: "bold #e9e", + chroma.NameNamespace: "bold #0e84b5", + chroma.NameException: "bold #F00", + chroma.NameVariable: "#036", + chroma.NameVariableInstance: "#aaf", + chroma.NameVariableClass: "#ccf", + chroma.NameVariableGlobal: "#f84", + chroma.NameConstant: "bold #5ed", + chroma.NameLabel: "bold #970", + chroma.NameEntity: "#800", + chroma.NameAttribute: "#007", + chroma.NameTag: "#070", + chroma.NameDecorator: "bold #555", + chroma.LiteralString: "bg:#e0e0ff", + chroma.LiteralStringChar: "#88F bg:", + chroma.LiteralStringDoc: "#D42 bg:", + chroma.LiteralStringInterpol: "bg:#eee", + chroma.LiteralStringEscape: "bold #666", + chroma.LiteralStringRegex: "bg:#e0e0ff #000", + chroma.LiteralStringSymbol: "#fc8 bg:", + chroma.LiteralStringOther: "#f88", + chroma.LiteralNumber: "bold #60E", + chroma.LiteralNumberInteger: "bold #66f", + chroma.LiteralNumberFloat: "bold #60E", + chroma.LiteralNumberHex: "bold #058", + chroma.LiteralNumberOct: "bold #40E", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#A00000", + chroma.GenericInserted: "#00A000", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #c65d09", + chroma.GenericOutput: "#888", + chroma.GenericTraceback: "#04D", + chroma.GenericUnderline: "underline", + chroma.Error: "#F00 bg:#FAA", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/native.go b/vendor/github.com/alecthomas/chroma/styles/native.go new file mode 100644 index 00000000..9fae09ac --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/native.go @@ -0,0 +1,42 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Native style. 
+var Native = Register(chroma.MustNewStyle("native", chroma.StyleEntries{ + chroma.Background: "#d0d0d0 bg:#202020", + chroma.TextWhitespace: "#666666", + chroma.Comment: "italic #999999", + chroma.CommentPreproc: "noitalic bold #cd2828", + chroma.CommentSpecial: "noitalic bold #e50808 bg:#520000", + chroma.Keyword: "bold #6ab825", + chroma.KeywordPseudo: "nobold", + chroma.OperatorWord: "bold #6ab825", + chroma.LiteralString: "#ed9d13", + chroma.LiteralStringOther: "#ffa500", + chroma.LiteralNumber: "#3677a9", + chroma.NameBuiltin: "#24909d", + chroma.NameVariable: "#40ffff", + chroma.NameConstant: "#40ffff", + chroma.NameClass: "underline #447fcf", + chroma.NameFunction: "#447fcf", + chroma.NameNamespace: "underline #447fcf", + chroma.NameException: "#bbbbbb", + chroma.NameTag: "bold #6ab825", + chroma.NameAttribute: "#bbbbbb", + chroma.NameDecorator: "#ffa500", + chroma.GenericHeading: "bold #ffffff", + chroma.GenericSubheading: "underline #ffffff", + chroma.GenericDeleted: "#d22323", + chroma.GenericInserted: "#589819", + chroma.GenericError: "#d22323", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "#aaaaaa", + chroma.GenericOutput: "#cccccc", + chroma.GenericTraceback: "#d22323", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#e3d2d2 #a61717", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/paraiso-dark.go b/vendor/github.com/alecthomas/chroma/styles/paraiso-dark.go new file mode 100644 index 00000000..c8cf4731 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/paraiso-dark.go @@ -0,0 +1,44 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// ParaisoDark style. +var ParaisoDark = Register(chroma.MustNewStyle("paraiso-dark", chroma.StyleEntries{ + chroma.Text: "#e7e9db", + chroma.Error: "#ef6155", + chroma.Comment: "#776e71", + chroma.Keyword: "#815ba4", + chroma.KeywordNamespace: "#5bc4bf", + chroma.KeywordType: "#fec418", + chroma.Operator: "#5bc4bf", + chroma.Punctuation: "#e7e9db", + chroma.Name: "#e7e9db", + chroma.NameAttribute: "#06b6ef", + chroma.NameClass: "#fec418", + chroma.NameConstant: "#ef6155", + chroma.NameDecorator: "#5bc4bf", + chroma.NameException: "#ef6155", + chroma.NameFunction: "#06b6ef", + chroma.NameNamespace: "#fec418", + chroma.NameOther: "#06b6ef", + chroma.NameTag: "#5bc4bf", + chroma.NameVariable: "#ef6155", + chroma.LiteralNumber: "#f99b15", + chroma.Literal: "#f99b15", + chroma.LiteralDate: "#48b685", + chroma.LiteralString: "#48b685", + chroma.LiteralStringChar: "#e7e9db", + chroma.LiteralStringDoc: "#776e71", + chroma.LiteralStringEscape: "#f99b15", + chroma.LiteralStringInterpol: "#f99b15", + chroma.GenericDeleted: "#ef6155", + chroma.GenericEmph: "italic", + chroma.GenericHeading: "bold #e7e9db", + chroma.GenericInserted: "#48b685", + chroma.GenericPrompt: "bold #776e71", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "bold #5bc4bf", + chroma.Background: "bg:#2f1e2e", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/paraiso-light.go b/vendor/github.com/alecthomas/chroma/styles/paraiso-light.go new file mode 100644 index 00000000..b514dfa1 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/paraiso-light.go @@ -0,0 +1,44 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// ParaisoLight style. 
+var ParaisoLight = Register(chroma.MustNewStyle("paraiso-light", chroma.StyleEntries{ + chroma.Text: "#2f1e2e", + chroma.Error: "#ef6155", + chroma.Comment: "#8d8687", + chroma.Keyword: "#815ba4", + chroma.KeywordNamespace: "#5bc4bf", + chroma.KeywordType: "#fec418", + chroma.Operator: "#5bc4bf", + chroma.Punctuation: "#2f1e2e", + chroma.Name: "#2f1e2e", + chroma.NameAttribute: "#06b6ef", + chroma.NameClass: "#fec418", + chroma.NameConstant: "#ef6155", + chroma.NameDecorator: "#5bc4bf", + chroma.NameException: "#ef6155", + chroma.NameFunction: "#06b6ef", + chroma.NameNamespace: "#fec418", + chroma.NameOther: "#06b6ef", + chroma.NameTag: "#5bc4bf", + chroma.NameVariable: "#ef6155", + chroma.LiteralNumber: "#f99b15", + chroma.Literal: "#f99b15", + chroma.LiteralDate: "#48b685", + chroma.LiteralString: "#48b685", + chroma.LiteralStringChar: "#2f1e2e", + chroma.LiteralStringDoc: "#8d8687", + chroma.LiteralStringEscape: "#f99b15", + chroma.LiteralStringInterpol: "#f99b15", + chroma.GenericDeleted: "#ef6155", + chroma.GenericEmph: "italic", + chroma.GenericHeading: "bold #2f1e2e", + chroma.GenericInserted: "#48b685", + chroma.GenericPrompt: "bold #8d8687", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "bold #5bc4bf", + chroma.Background: "bg:#e7e9db", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/pastie.go b/vendor/github.com/alecthomas/chroma/styles/pastie.go new file mode 100644 index 00000000..9a685443 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/pastie.go @@ -0,0 +1,52 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Pastie style. +var Pastie = Register(chroma.MustNewStyle("pastie", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "#888888", + chroma.CommentPreproc: "bold #cc0000", + chroma.CommentSpecial: "bg:#fff0f0 bold #cc0000", + chroma.LiteralString: "bg:#fff0f0 #dd2200", + chroma.LiteralStringRegex: "bg:#fff0ff #008800", + chroma.LiteralStringOther: "bg:#f0fff0 #22bb22", + chroma.LiteralStringSymbol: "#aa6600", + chroma.LiteralStringInterpol: "#3333bb", + chroma.LiteralStringEscape: "#0044dd", + chroma.OperatorWord: "#008800", + chroma.Keyword: "bold #008800", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "#888888", + chroma.NameClass: "bold #bb0066", + chroma.NameException: "bold #bb0066", + chroma.NameFunction: "bold #0066bb", + chroma.NameProperty: "bold #336699", + chroma.NameNamespace: "bold #bb0066", + chroma.NameBuiltin: "#003388", + chroma.NameVariable: "#336699", + chroma.NameVariableClass: "#336699", + chroma.NameVariableInstance: "#3333bb", + chroma.NameVariableGlobal: "#dd7700", + chroma.NameConstant: "bold #003366", + chroma.NameTag: "bold #bb0066", + chroma.NameAttribute: "#336699", + chroma.NameDecorator: "#555555", + chroma.NameLabel: "italic #336699", + chroma.LiteralNumber: "bold #0000DD", + chroma.GenericHeading: "#333", + chroma.GenericSubheading: "#666", + chroma.GenericDeleted: "bg:#ffdddd #000000", + chroma.GenericInserted: "bg:#ddffdd #000000", + chroma.GenericError: "#aa0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "#555555", + chroma.GenericOutput: "#888888", + chroma.GenericTraceback: "#aa0000", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#e3d2d2 #a61717", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/perldoc.go b/vendor/github.com/alecthomas/chroma/styles/perldoc.go new file mode 100644 index 00000000..e1372fdf --- /dev/null +++ 
b/vendor/github.com/alecthomas/chroma/styles/perldoc.go @@ -0,0 +1,44 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Perldoc style. +var Perldoc = Register(chroma.MustNewStyle("perldoc", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "#228B22", + chroma.CommentPreproc: "#1e889b", + chroma.CommentSpecial: "#8B008B bold", + chroma.LiteralString: "#CD5555", + chroma.LiteralStringHeredoc: "#1c7e71 italic", + chroma.LiteralStringRegex: "#1c7e71", + chroma.LiteralStringOther: "#cb6c20", + chroma.LiteralNumber: "#B452CD", + chroma.OperatorWord: "#8B008B", + chroma.Keyword: "#8B008B bold", + chroma.KeywordType: "#00688B", + chroma.NameClass: "#008b45 bold", + chroma.NameException: "#008b45 bold", + chroma.NameFunction: "#008b45", + chroma.NameNamespace: "#008b45 underline", + chroma.NameVariable: "#00688B", + chroma.NameConstant: "#00688B", + chroma.NameDecorator: "#707a7c", + chroma.NameTag: "#8B008B bold", + chroma.NameAttribute: "#658b00", + chroma.NameBuiltin: "#658b00", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#aa0000", + chroma.GenericInserted: "#00aa00", + chroma.GenericError: "#aa0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "#555555", + chroma.GenericOutput: "#888888", + chroma.GenericTraceback: "#aa0000", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#e3d2d2 #a61717", + chroma.Background: " bg:#eeeedd", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/pygments.go b/vendor/github.com/alecthomas/chroma/styles/pygments.go new file mode 100644 index 00000000..70d1f2ad --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/pygments.go @@ -0,0 +1,55 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Pygments default theme. 
+var Pygments = Register(chroma.MustNewStyle("pygments", map[chroma.TokenType]string{ + chroma.Whitespace: "#bbbbbb", + chroma.Comment: "italic #408080", + chroma.CommentPreproc: "noitalic #BC7A00", + + chroma.Keyword: "bold #008000", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "nobold #B00040", + + chroma.Operator: "#666666", + chroma.OperatorWord: "bold #AA22FF", + + chroma.NameBuiltin: "#008000", + chroma.NameFunction: "#0000FF", + chroma.NameClass: "bold #0000FF", + chroma.NameNamespace: "bold #0000FF", + chroma.NameException: "bold #D2413A", + chroma.NameVariable: "#19177C", + chroma.NameConstant: "#880000", + chroma.NameLabel: "#A0A000", + chroma.NameEntity: "bold #999999", + chroma.NameAttribute: "#7D9029", + chroma.NameTag: "bold #008000", + chroma.NameDecorator: "#AA22FF", + + chroma.String: "#BA2121", + chroma.StringDoc: "italic", + chroma.StringInterpol: "bold #BB6688", + chroma.StringEscape: "bold #BB6622", + chroma.StringRegex: "#BB6688", + chroma.StringSymbol: "#19177C", + chroma.StringOther: "#008000", + chroma.Number: "#666666", + + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#A00000", + chroma.GenericInserted: "#00A000", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #000080", + chroma.GenericOutput: "#888", + chroma.GenericTraceback: "#04D", + chroma.GenericUnderline: "underline", + + chroma.Error: "border:#FF0000", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/rainbow_dash.go b/vendor/github.com/alecthomas/chroma/styles/rainbow_dash.go new file mode 100644 index 00000000..37d66ca2 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/rainbow_dash.go @@ -0,0 +1,47 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// RainbowDash style. 
+var RainbowDash = Register(chroma.MustNewStyle("rainbow_dash", chroma.StyleEntries{ + chroma.Comment: "italic #0080ff", + chroma.CommentPreproc: "noitalic", + chroma.CommentSpecial: "bold", + chroma.Error: "bg:#cc0000 #ffffff", + chroma.GenericDeleted: "border:#c5060b bg:#ffcccc", + chroma.GenericEmph: "italic", + chroma.GenericError: "#ff0000", + chroma.GenericHeading: "bold #2c5dcd", + chroma.GenericInserted: "border:#00cc00 bg:#ccffcc", + chroma.GenericOutput: "#aaaaaa", + chroma.GenericPrompt: "bold #2c5dcd", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "bold #2c5dcd", + chroma.GenericTraceback: "#c5060b", + chroma.GenericUnderline: "underline", + chroma.Keyword: "bold #2c5dcd", + chroma.KeywordPseudo: "nobold", + chroma.KeywordType: "#5918bb", + chroma.NameAttribute: "italic #2c5dcd", + chroma.NameBuiltin: "bold #5918bb", + chroma.NameClass: "underline", + chroma.NameConstant: "#318495", + chroma.NameDecorator: "bold #ff8000", + chroma.NameEntity: "bold #5918bb", + chroma.NameException: "bold #5918bb", + chroma.NameFunction: "bold #ff8000", + chroma.NameTag: "bold #2c5dcd", + chroma.LiteralNumber: "bold #5918bb", + chroma.Operator: "#2c5dcd", + chroma.OperatorWord: "bold", + chroma.LiteralString: "#00cc66", + chroma.LiteralStringDoc: "italic", + chroma.LiteralStringEscape: "bold #c5060b", + chroma.LiteralStringOther: "#318495", + chroma.LiteralStringSymbol: "bold #c5060b", + chroma.Text: "#4d4d4d", + chroma.TextWhitespace: "#cbcbcb", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/rrt.go b/vendor/github.com/alecthomas/chroma/styles/rrt.go new file mode 100644 index 00000000..7c76961e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/rrt.go @@ -0,0 +1,18 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Rrt style. +var Rrt = Register(chroma.MustNewStyle("rrt", chroma.StyleEntries{ + chroma.Comment: "#00ff00", + chroma.NameFunction: "#ffff00", + chroma.NameVariable: "#eedd82", + chroma.NameConstant: "#7fffd4", + chroma.Keyword: "#ff0000", + chroma.CommentPreproc: "#e5e5e5", + chroma.LiteralString: "#87ceeb", + chroma.KeywordType: "#ee82ee", + chroma.Background: " bg:#000000", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/solarized-dark.go b/vendor/github.com/alecthomas/chroma/styles/solarized-dark.go new file mode 100644 index 00000000..2724df24 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/solarized-dark.go @@ -0,0 +1,46 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// SolarizedDark style. 
+var SolarizedDark = Register(chroma.MustNewStyle("solarized-dark", chroma.StyleEntries{ + chroma.Keyword: "#719e07", + chroma.KeywordConstant: "#CB4B16", + chroma.KeywordDeclaration: "#268BD2", + chroma.KeywordReserved: "#268BD2", + chroma.KeywordType: "#DC322F", + chroma.NameAttribute: "#93A1A1", + chroma.NameBuiltin: "#B58900", + chroma.NameBuiltinPseudo: "#268BD2", + chroma.NameClass: "#268BD2", + chroma.NameConstant: "#CB4B16", + chroma.NameDecorator: "#268BD2", + chroma.NameEntity: "#CB4B16", + chroma.NameException: "#CB4B16", + chroma.NameFunction: "#268BD2", + chroma.NameTag: "#268BD2", + chroma.NameVariable: "#268BD2", + chroma.LiteralString: "#2AA198", + chroma.LiteralStringBacktick: "#586E75", + chroma.LiteralStringChar: "#2AA198", + chroma.LiteralStringDoc: "#93A1A1", + chroma.LiteralStringEscape: "#CB4B16", + chroma.LiteralStringHeredoc: "#93A1A1", + chroma.LiteralStringRegex: "#DC322F", + chroma.LiteralNumber: "#2AA198", + chroma.Operator: "#719e07", + chroma.Comment: "#586E75", + chroma.CommentPreproc: "#719e07", + chroma.CommentSpecial: "#719e07", + chroma.GenericDeleted: "#DC322F", + chroma.GenericEmph: "italic", + chroma.GenericError: "#DC322F bold", + chroma.GenericHeading: "#CB4B16", + chroma.GenericInserted: "#719e07", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "#268BD2", + chroma.Background: "#93A1A1 bg:#002B36", + chroma.Other: "#CB4B16", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/solarized-dark256.go b/vendor/github.com/alecthomas/chroma/styles/solarized-dark256.go new file mode 100644 index 00000000..a24ddc15 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/solarized-dark256.go @@ -0,0 +1,48 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// SolarizedDark256 style. 
+var SolarizedDark256 = Register(chroma.MustNewStyle("solarized-dark256", chroma.StyleEntries{ + chroma.Keyword: "#5f8700", + chroma.KeywordConstant: "#d75f00", + chroma.KeywordDeclaration: "#0087ff", + chroma.KeywordNamespace: "#d75f00", + chroma.KeywordReserved: "#0087ff", + chroma.KeywordType: "#af0000", + chroma.NameAttribute: "#8a8a8a", + chroma.NameBuiltin: "#0087ff", + chroma.NameBuiltinPseudo: "#0087ff", + chroma.NameClass: "#0087ff", + chroma.NameConstant: "#d75f00", + chroma.NameDecorator: "#0087ff", + chroma.NameEntity: "#d75f00", + chroma.NameException: "#af8700", + chroma.NameFunction: "#0087ff", + chroma.NameTag: "#0087ff", + chroma.NameVariable: "#0087ff", + chroma.LiteralString: "#00afaf", + chroma.LiteralStringBacktick: "#4e4e4e", + chroma.LiteralStringChar: "#00afaf", + chroma.LiteralStringDoc: "#00afaf", + chroma.LiteralStringEscape: "#af0000", + chroma.LiteralStringHeredoc: "#00afaf", + chroma.LiteralStringRegex: "#af0000", + chroma.LiteralNumber: "#00afaf", + chroma.Operator: "#8a8a8a", + chroma.OperatorWord: "#5f8700", + chroma.Comment: "#4e4e4e", + chroma.CommentPreproc: "#5f8700", + chroma.CommentSpecial: "#5f8700", + chroma.GenericDeleted: "#af0000", + chroma.GenericEmph: "italic", + chroma.GenericError: "#af0000 bold", + chroma.GenericHeading: "#d75f00", + chroma.GenericInserted: "#5f8700", + chroma.GenericStrong: "bold", + chroma.GenericSubheading: "#0087ff", + chroma.Background: "#8a8a8a bg:#1c1c1c", + chroma.Other: "#d75f00", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/solarized-light.go b/vendor/github.com/alecthomas/chroma/styles/solarized-light.go new file mode 100644 index 00000000..b6d5234a --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/solarized-light.go @@ -0,0 +1,24 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// SolarizedLight style. +var SolarizedLight = Register(chroma.MustNewStyle("solarized-light", chroma.StyleEntries{ + chroma.Text: "bg: #eee8d5 #586e75", + chroma.Keyword: "#859900", + chroma.KeywordConstant: "bold", + chroma.KeywordNamespace: "#dc322f bold", + chroma.KeywordType: "bold", + chroma.Name: "#268bd2", + chroma.NameBuiltin: "#cb4b16", + chroma.NameClass: "#cb4b16", + chroma.NameTag: "bold", + chroma.Literal: "#2aa198", + chroma.LiteralNumber: "bold", + chroma.OperatorWord: "#859900", + chroma.Comment: "#93a1a1 italic", + chroma.Generic: "#d33682", + chroma.Background: " bg:#eee8d5", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/swapoff.go b/vendor/github.com/alecthomas/chroma/styles/swapoff.go new file mode 100644 index 00000000..09713783 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/swapoff.go @@ -0,0 +1,25 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// SwapOff theme. 
+var SwapOff = Register(chroma.MustNewStyle("swapoff", map[chroma.TokenType]string{ + chroma.Background: "#lightgray bg:#black", + chroma.Number: "bold #ansiyellow", + chroma.Comment: "#ansiteal", + chroma.CommentPreproc: "bold #ansigreen", + chroma.String: "bold #ansiturquoise", + chroma.Keyword: "bold #ansiwhite", + chroma.NameKeyword: "bold #ansiwhite", + chroma.NameBuiltin: "bold #ansiwhite", + chroma.GenericHeading: "bold", + chroma.GenericSubheading: "bold", + chroma.GenericStrong: "bold", + chroma.GenericUnderline: "underline", + chroma.NameTag: "bold", + chroma.NameAttribute: "#ansiteal", + chroma.Error: "#ansired", + chroma.LiteralDate: "bold #ansiyellow", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/tango.go b/vendor/github.com/alecthomas/chroma/styles/tango.go new file mode 100644 index 00000000..ae1afe06 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/tango.go @@ -0,0 +1,79 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Tango style. +var Tango = Register(chroma.MustNewStyle("tango", chroma.StyleEntries{ + chroma.TextWhitespace: "underline #f8f8f8", + chroma.Error: "#a40000 border:#ef2929", + chroma.Other: "#000000", + chroma.Comment: "italic #8f5902", + chroma.CommentMultiline: "italic #8f5902", + chroma.CommentPreproc: "italic #8f5902", + chroma.CommentSingle: "italic #8f5902", + chroma.CommentSpecial: "italic #8f5902", + chroma.Keyword: "bold #204a87", + chroma.KeywordConstant: "bold #204a87", + chroma.KeywordDeclaration: "bold #204a87", + chroma.KeywordNamespace: "bold #204a87", + chroma.KeywordPseudo: "bold #204a87", + chroma.KeywordReserved: "bold #204a87", + chroma.KeywordType: "bold #204a87", + chroma.Operator: "bold #ce5c00", + chroma.OperatorWord: "bold #204a87", + chroma.Punctuation: "bold #000000", + chroma.Name: "#000000", + chroma.NameAttribute: "#c4a000", + chroma.NameBuiltin: "#204a87", + chroma.NameBuiltinPseudo: "#3465a4", + chroma.NameClass: "#000000", + chroma.NameConstant: "#000000", + chroma.NameDecorator: "bold #5c35cc", + chroma.NameEntity: "#ce5c00", + chroma.NameException: "bold #cc0000", + chroma.NameFunction: "#000000", + chroma.NameProperty: "#000000", + chroma.NameLabel: "#f57900", + chroma.NameNamespace: "#000000", + chroma.NameOther: "#000000", + chroma.NameTag: "bold #204a87", + chroma.NameVariable: "#000000", + chroma.NameVariableClass: "#000000", + chroma.NameVariableGlobal: "#000000", + chroma.NameVariableInstance: "#000000", + chroma.LiteralNumber: "bold #0000cf", + chroma.LiteralNumberFloat: "bold #0000cf", + chroma.LiteralNumberHex: "bold #0000cf", + chroma.LiteralNumberInteger: "bold #0000cf", + chroma.LiteralNumberIntegerLong: "bold #0000cf", + chroma.LiteralNumberOct: "bold #0000cf", + chroma.Literal: "#000000", + chroma.LiteralDate: "#000000", + chroma.LiteralString: "#4e9a06", + chroma.LiteralStringBacktick: "#4e9a06", + chroma.LiteralStringChar: "#4e9a06", + chroma.LiteralStringDoc: "italic #8f5902", + chroma.LiteralStringDouble: "#4e9a06", + chroma.LiteralStringEscape: "#4e9a06", + chroma.LiteralStringHeredoc: "#4e9a06", + chroma.LiteralStringInterpol: "#4e9a06", + chroma.LiteralStringOther: "#4e9a06", + chroma.LiteralStringRegex: "#4e9a06", + chroma.LiteralStringSingle: "#4e9a06", + chroma.LiteralStringSymbol: "#4e9a06", + chroma.Generic: "#000000", + chroma.GenericDeleted: "#a40000", + chroma.GenericEmph: "italic #000000", + chroma.GenericError: "#ef2929", + chroma.GenericHeading: "bold #000080", + chroma.GenericInserted: "#00A000", + chroma.GenericOutput: "italic #000000", + 
chroma.GenericPrompt: "#8f5902", + chroma.GenericStrong: "bold #000000", + chroma.GenericSubheading: "bold #800080", + chroma.GenericTraceback: "bold #a40000", + chroma.GenericUnderline: "underline", + chroma.Background: " bg:#f8f8f8", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/trac.go b/vendor/github.com/alecthomas/chroma/styles/trac.go new file mode 100644 index 00000000..3b09c44e --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/trac.go @@ -0,0 +1,42 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Trac style. +var Trac = Register(chroma.MustNewStyle("trac", chroma.StyleEntries{ + chroma.TextWhitespace: "#bbbbbb", + chroma.Comment: "italic #999988", + chroma.CommentPreproc: "bold noitalic #999999", + chroma.CommentSpecial: "bold #999999", + chroma.Operator: "bold", + chroma.LiteralString: "#bb8844", + chroma.LiteralStringRegex: "#808000", + chroma.LiteralNumber: "#009999", + chroma.Keyword: "bold", + chroma.KeywordType: "#445588", + chroma.NameBuiltin: "#999999", + chroma.NameFunction: "bold #990000", + chroma.NameClass: "bold #445588", + chroma.NameException: "bold #990000", + chroma.NameNamespace: "#555555", + chroma.NameVariable: "#008080", + chroma.NameConstant: "#008080", + chroma.NameTag: "#000080", + chroma.NameAttribute: "#008080", + chroma.NameEntity: "#800080", + chroma.GenericHeading: "#999999", + chroma.GenericSubheading: "#aaaaaa", + chroma.GenericDeleted: "bg:#ffdddd #000000", + chroma.GenericInserted: "bg:#ddffdd #000000", + chroma.GenericError: "#aa0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "#555555", + chroma.GenericOutput: "#888888", + chroma.GenericTraceback: "#aa0000", + chroma.GenericUnderline: "underline", + chroma.Error: "bg:#e3d2d2 #a61717", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/vim.go b/vendor/github.com/alecthomas/chroma/styles/vim.go new file mode 100644 index 00000000..6296042c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/vim.go @@ -0,0 +1,36 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Vim style. +var Vim = Register(chroma.MustNewStyle("vim", chroma.StyleEntries{ + chroma.Background: "#cccccc bg:#000000", + chroma.Comment: "#000080", + chroma.CommentSpecial: "bold #cd0000", + chroma.Keyword: "#cdcd00", + chroma.KeywordDeclaration: "#00cd00", + chroma.KeywordNamespace: "#cd00cd", + chroma.KeywordType: "#00cd00", + chroma.Operator: "#3399cc", + chroma.OperatorWord: "#cdcd00", + chroma.NameClass: "#00cdcd", + chroma.NameBuiltin: "#cd00cd", + chroma.NameException: "bold #666699", + chroma.NameVariable: "#00cdcd", + chroma.LiteralString: "#cd0000", + chroma.LiteralNumber: "#cd00cd", + chroma.GenericHeading: "bold #000080", + chroma.GenericSubheading: "bold #800080", + chroma.GenericDeleted: "#cd0000", + chroma.GenericInserted: "#00cd00", + chroma.GenericError: "#FF0000", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold #000080", + chroma.GenericOutput: "#888", + chroma.GenericTraceback: "#04D", + chroma.GenericUnderline: "underline", + chroma.Error: "border:#FF0000", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/vs.go b/vendor/github.com/alecthomas/chroma/styles/vs.go new file mode 100644 index 00000000..acbcb915 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/vs.go @@ -0,0 +1,23 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// VisualStudio style. 
+var VisualStudio = Register(chroma.MustNewStyle("vs", chroma.StyleEntries{ + chroma.Comment: "#008000", + chroma.CommentPreproc: "#0000ff", + chroma.Keyword: "#0000ff", + chroma.OperatorWord: "#0000ff", + chroma.KeywordType: "#2b91af", + chroma.NameClass: "#2b91af", + chroma.LiteralString: "#a31515", + chroma.GenericHeading: "bold", + chroma.GenericSubheading: "bold", + chroma.GenericEmph: "italic", + chroma.GenericStrong: "bold", + chroma.GenericPrompt: "bold", + chroma.Error: "border:#FF0000", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/styles/xcode.go b/vendor/github.com/alecthomas/chroma/styles/xcode.go new file mode 100644 index 00000000..115cf71c --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/styles/xcode.go @@ -0,0 +1,29 @@ +package styles + +import ( + "github.com/alecthomas/chroma" +) + +// Xcode style. +var Xcode = Register(chroma.MustNewStyle("xcode", chroma.StyleEntries{ + chroma.Comment: "#177500", + chroma.CommentPreproc: "#633820", + chroma.LiteralString: "#C41A16", + chroma.LiteralStringChar: "#2300CE", + chroma.Operator: "#000000", + chroma.Keyword: "#A90D91", + chroma.Name: "#000000", + chroma.NameAttribute: "#836C28", + chroma.NameClass: "#3F6E75", + chroma.NameFunction: "#000000", + chroma.NameBuiltin: "#A90D91", + chroma.NameBuiltinPseudo: "#5B269A", + chroma.NameVariable: "#000000", + chroma.NameTag: "#000000", + chroma.NameDecorator: "#000000", + chroma.NameLabel: "#000000", + chroma.Literal: "#1C01CE", + chroma.LiteralNumber: "#1C01CE", + chroma.Error: "#000000", + chroma.Background: " bg:#ffffff", +})) diff --git a/vendor/github.com/alecthomas/chroma/tokentype_string.go b/vendor/github.com/alecthomas/chroma/tokentype_string.go new file mode 100644 index 00000000..44135dc9 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/tokentype_string.go @@ -0,0 +1,111 @@ +// Code generated by "stringer -type TokenType"; DO NOT EDIT. 
+ +package chroma + +import "fmt" + +const _TokenType_name = "NoneOtherErrorLineTableTDLineTableLineHighlightLineNumbersTableLineNumbersBackgroundKeywordKeywordConstantKeywordDeclarationKeywordNamespaceKeywordPseudoKeywordReservedKeywordTypeNameNameAttributeNameBuiltinNameBuiltinPseudoNameClassNameConstantNameDecoratorNameEntityNameExceptionNameFunctionNameFunctionMagicNameKeywordNameLabelNameNamespaceNameOperatorNameOtherNamePseudoNamePropertyNameTagNameVariableNameVariableAnonymousNameVariableClassNameVariableGlobalNameVariableInstanceNameVariableMagicLiteralLiteralDateLiteralOtherLiteralStringLiteralStringAffixLiteralStringAtomLiteralStringBacktickLiteralStringBooleanLiteralStringCharLiteralStringDelimiterLiteralStringDocLiteralStringDoubleLiteralStringEscapeLiteralStringHeredocLiteralStringInterpolLiteralStringNameLiteralStringOtherLiteralStringRegexLiteralStringSingleLiteralStringSymbolLiteralNumberLiteralNumberBinLiteralNumberFloatLiteralNumberHexLiteralNumberIntegerLiteralNumberIntegerLongLiteralNumberOctOperatorOperatorWordPunctuationCommentCommentHashbangCommentMultilineCommentSingleCommentSpecialCommentPreprocCommentPreprocFileGenericGenericDeletedGenericEmphGenericErrorGenericHeadingGenericInsertedGenericOutputGenericPromptGenericStrongGenericSubheadingGenericTracebackGenericUnderlineTextTextWhitespaceTextSymbolTextPunctuation" + +var _TokenType_map = map[TokenType]string{ + -9: _TokenType_name[0:4], + -8: _TokenType_name[4:9], + -7: _TokenType_name[9:14], + -6: _TokenType_name[14:25], + -5: _TokenType_name[25:34], + -4: _TokenType_name[34:47], + -3: _TokenType_name[47:63], + -2: _TokenType_name[63:74], + -1: _TokenType_name[74:84], + 1000: _TokenType_name[84:91], + 1001: _TokenType_name[91:106], + 1002: _TokenType_name[106:124], + 1003: _TokenType_name[124:140], + 1004: _TokenType_name[140:153], + 1005: _TokenType_name[153:168], + 1006: _TokenType_name[168:179], + 2000: _TokenType_name[179:183], + 2001: _TokenType_name[183:196], + 2002: _TokenType_name[196:207], + 2003: _TokenType_name[207:224], + 2004: _TokenType_name[224:233], + 2005: _TokenType_name[233:245], + 2006: _TokenType_name[245:258], + 2007: _TokenType_name[258:268], + 2008: _TokenType_name[268:281], + 2009: _TokenType_name[281:293], + 2010: _TokenType_name[293:310], + 2011: _TokenType_name[310:321], + 2012: _TokenType_name[321:330], + 2013: _TokenType_name[330:343], + 2014: _TokenType_name[343:355], + 2015: _TokenType_name[355:364], + 2016: _TokenType_name[364:374], + 2017: _TokenType_name[374:386], + 2018: _TokenType_name[386:393], + 2019: _TokenType_name[393:405], + 2020: _TokenType_name[405:426], + 2021: _TokenType_name[426:443], + 2022: _TokenType_name[443:461], + 2023: _TokenType_name[461:481], + 2024: _TokenType_name[481:498], + 3000: _TokenType_name[498:505], + 3001: _TokenType_name[505:516], + 3002: _TokenType_name[516:528], + 3100: _TokenType_name[528:541], + 3101: _TokenType_name[541:559], + 3102: _TokenType_name[559:576], + 3103: _TokenType_name[576:597], + 3104: _TokenType_name[597:617], + 3105: _TokenType_name[617:634], + 3106: _TokenType_name[634:656], + 3107: _TokenType_name[656:672], + 3108: _TokenType_name[672:691], + 3109: _TokenType_name[691:710], + 3110: _TokenType_name[710:730], + 3111: _TokenType_name[730:751], + 3112: _TokenType_name[751:768], + 3113: _TokenType_name[768:786], + 3114: _TokenType_name[786:804], + 3115: _TokenType_name[804:823], + 3116: _TokenType_name[823:842], + 3200: _TokenType_name[842:855], + 3201: _TokenType_name[855:871], + 3202: _TokenType_name[871:889], + 3203: 
_TokenType_name[889:905], + 3204: _TokenType_name[905:925], + 3205: _TokenType_name[925:949], + 3206: _TokenType_name[949:965], + 4000: _TokenType_name[965:973], + 4001: _TokenType_name[973:985], + 5000: _TokenType_name[985:996], + 6000: _TokenType_name[996:1003], + 6001: _TokenType_name[1003:1018], + 6002: _TokenType_name[1018:1034], + 6003: _TokenType_name[1034:1047], + 6004: _TokenType_name[1047:1061], + 6100: _TokenType_name[1061:1075], + 6101: _TokenType_name[1075:1093], + 7000: _TokenType_name[1093:1100], + 7001: _TokenType_name[1100:1114], + 7002: _TokenType_name[1114:1125], + 7003: _TokenType_name[1125:1137], + 7004: _TokenType_name[1137:1151], + 7005: _TokenType_name[1151:1166], + 7006: _TokenType_name[1166:1179], + 7007: _TokenType_name[1179:1192], + 7008: _TokenType_name[1192:1205], + 7009: _TokenType_name[1205:1222], + 7010: _TokenType_name[1222:1238], + 7011: _TokenType_name[1238:1254], + 8000: _TokenType_name[1254:1258], + 8001: _TokenType_name[1258:1272], + 8002: _TokenType_name[1272:1282], + 8003: _TokenType_name[1282:1297], +} + +func (i TokenType) String() string { + if str, ok := _TokenType_map[i]; ok { + return str + } + return fmt.Sprintf("TokenType(%d)", i) +} diff --git a/vendor/github.com/alecthomas/chroma/types.go b/vendor/github.com/alecthomas/chroma/types.go new file mode 100644 index 00000000..0e4a7506 --- /dev/null +++ b/vendor/github.com/alecthomas/chroma/types.go @@ -0,0 +1,345 @@ +package chroma + +import ( + "encoding/json" + "fmt" +) + +//go:generate stringer -type TokenType + +// TokenType is the type of token to highlight. +// +// It is also an Emitter, emitting a single token of itself +type TokenType int + +func (t *TokenType) MarshalJSON() ([]byte, error) { return json.Marshal(t.String()) } +func (t *TokenType) UnmarshalJSON(data []byte) error { + key := "" + err := json.Unmarshal(data, &key) + if err != nil { + return err + } + for tt, text := range _TokenType_map { + if text == key { + *t = tt + return nil + } + } + return fmt.Errorf("unknown TokenType %q", data) +} + +// Set of TokenTypes. +// +// Categories of types are grouped in ranges of 1000, while sub-categories are in ranges of 100. For +// example, the literal category is in the range 3000-3999. The sub-category for literal strings is +// in the range 3100-3199. + +// Meta token types. +const ( + // Default background style. + Background TokenType = -1 - iota + // Line numbers in output. + LineNumbers + // Line numbers in output when in table. + LineNumbersTable + // Line higlight style. + LineHighlight + // Line numbers table wrapper style. + LineTable + // Line numbers table TD wrapper style. + LineTableTD + // Input that could not be tokenised. + Error + // Other is used by the Delegate lexer to indicate which tokens should be handled by the delegate. + Other + // No highlighting. + None +) + +// Keywords. +const ( + Keyword TokenType = 1000 + iota + KeywordConstant + KeywordDeclaration + KeywordNamespace + KeywordPseudo + KeywordReserved + KeywordType +) + +// Names. +const ( + Name TokenType = 2000 + iota + NameAttribute + NameBuiltin + NameBuiltinPseudo + NameClass + NameConstant + NameDecorator + NameEntity + NameException + NameFunction + NameFunctionMagic + NameKeyword + NameLabel + NameNamespace + NameOperator + NameOther + NamePseudo + NameProperty + NameTag + NameVariable + NameVariableAnonymous + NameVariableClass + NameVariableGlobal + NameVariableInstance + NameVariableMagic +) + +// Literals. 
+const ( + Literal TokenType = 3000 + iota + LiteralDate + LiteralOther +) + +// Strings. +const ( + LiteralString TokenType = 3100 + iota + LiteralStringAffix + LiteralStringAtom + LiteralStringBacktick + LiteralStringBoolean + LiteralStringChar + LiteralStringDelimiter + LiteralStringDoc + LiteralStringDouble + LiteralStringEscape + LiteralStringHeredoc + LiteralStringInterpol + LiteralStringName + LiteralStringOther + LiteralStringRegex + LiteralStringSingle + LiteralStringSymbol +) + +// Literals. +const ( + LiteralNumber TokenType = 3200 + iota + LiteralNumberBin + LiteralNumberFloat + LiteralNumberHex + LiteralNumberInteger + LiteralNumberIntegerLong + LiteralNumberOct +) + +// Operators. +const ( + Operator TokenType = 4000 + iota + OperatorWord +) + +// Punctuation. +const ( + Punctuation TokenType = 5000 + iota +) + +// Comments. +const ( + Comment TokenType = 6000 + iota + CommentHashbang + CommentMultiline + CommentSingle + CommentSpecial +) + +// Preprocessor "comments". +const ( + CommentPreproc TokenType = 6100 + iota + CommentPreprocFile +) + +// Generic tokens. +const ( + Generic TokenType = 7000 + iota + GenericDeleted + GenericEmph + GenericError + GenericHeading + GenericInserted + GenericOutput + GenericPrompt + GenericStrong + GenericSubheading + GenericTraceback + GenericUnderline +) + +// Text. +const ( + Text TokenType = 8000 + iota + TextWhitespace + TextSymbol + TextPunctuation +) + +// Aliases. +const ( + Whitespace = TextWhitespace + + Date = LiteralDate + + String = LiteralString + StringAffix = LiteralStringAffix + StringBacktick = LiteralStringBacktick + StringChar = LiteralStringChar + StringDelimiter = LiteralStringDelimiter + StringDoc = LiteralStringDoc + StringDouble = LiteralStringDouble + StringEscape = LiteralStringEscape + StringHeredoc = LiteralStringHeredoc + StringInterpol = LiteralStringInterpol + StringOther = LiteralStringOther + StringRegex = LiteralStringRegex + StringSingle = LiteralStringSingle + StringSymbol = LiteralStringSymbol + + Number = LiteralNumber + NumberBin = LiteralNumberBin + NumberFloat = LiteralNumberFloat + NumberHex = LiteralNumberHex + NumberInteger = LiteralNumberInteger + NumberIntegerLong = LiteralNumberIntegerLong + NumberOct = LiteralNumberOct +) + +var ( + StandardTypes = map[TokenType]string{ + Background: "chroma", + LineNumbers: "ln", + LineNumbersTable: "lnt", + LineHighlight: "hl", + LineTable: "lntable", + LineTableTD: "lntd", + Text: "", + Whitespace: "w", + Error: "err", + Other: "x", + // I have no idea what this is used for... 
+ // Escape: "esc", + + Keyword: "k", + KeywordConstant: "kc", + KeywordDeclaration: "kd", + KeywordNamespace: "kn", + KeywordPseudo: "kp", + KeywordReserved: "kr", + KeywordType: "kt", + + Name: "n", + NameAttribute: "na", + NameBuiltin: "nb", + NameBuiltinPseudo: "bp", + NameClass: "nc", + NameConstant: "no", + NameDecorator: "nd", + NameEntity: "ni", + NameException: "ne", + NameFunction: "nf", + NameFunctionMagic: "fm", + NameProperty: "py", + NameLabel: "nl", + NameNamespace: "nn", + NameOther: "nx", + NameTag: "nt", + NameVariable: "nv", + NameVariableClass: "vc", + NameVariableGlobal: "vg", + NameVariableInstance: "vi", + NameVariableMagic: "vm", + + Literal: "l", + LiteralDate: "ld", + + String: "s", + StringAffix: "sa", + StringBacktick: "sb", + StringChar: "sc", + StringDelimiter: "dl", + StringDoc: "sd", + StringDouble: "s2", + StringEscape: "se", + StringHeredoc: "sh", + StringInterpol: "si", + StringOther: "sx", + StringRegex: "sr", + StringSingle: "s1", + StringSymbol: "ss", + + Number: "m", + NumberBin: "mb", + NumberFloat: "mf", + NumberHex: "mh", + NumberInteger: "mi", + NumberIntegerLong: "il", + NumberOct: "mo", + + Operator: "o", + OperatorWord: "ow", + + Punctuation: "p", + + Comment: "c", + CommentHashbang: "ch", + CommentMultiline: "cm", + CommentPreproc: "cp", + CommentPreprocFile: "cpf", + CommentSingle: "c1", + CommentSpecial: "cs", + + Generic: "g", + GenericDeleted: "gd", + GenericEmph: "ge", + GenericError: "gr", + GenericHeading: "gh", + GenericInserted: "gi", + GenericOutput: "go", + GenericPrompt: "gp", + GenericStrong: "gs", + GenericSubheading: "gu", + GenericTraceback: "gt", + GenericUnderline: "gl", + } +) + +func (t TokenType) Parent() TokenType { + if t%100 != 0 { + return t / 100 * 100 + } + if t%1000 != 0 { + return t / 1000 * 1000 + } + return 0 +} + +func (t TokenType) Category() TokenType { + return t / 1000 * 1000 +} + +func (t TokenType) SubCategory() TokenType { + return t / 100 * 100 +} + +func (t TokenType) InCategory(other TokenType) bool { + return t/1000 == other/1000 +} + +func (t TokenType) InSubCategory(other TokenType) bool { + return t/100 == other/100 +} + +func (t TokenType) Emit(groups []string, lexer Lexer) Iterator { + return Literator(&Token{Type: t, Value: groups[0]}) +} diff --git a/vendor/github.com/andygrunwald/go-gerrit/changes.go b/vendor/github.com/andygrunwald/go-gerrit/changes.go index 301819f5..2a13b8f4 100644 --- a/vendor/github.com/andygrunwald/go-gerrit/changes.go +++ b/vendor/github.com/andygrunwald/go-gerrit/changes.go @@ -207,15 +207,16 @@ type ReviewerInput struct { // ReviewInput entity contains information for adding a review to a revision. 
type ReviewInput struct { - Message string `json:"message,omitempty"` - Tag string `json:"tag,omitempty"` - Labels map[string]string `json:"labels,omitempty"` - Comments map[string][]CommentInput `json:"comments,omitempty"` - StrictLabels bool `json:"strict_labels,omitempty"` - Drafts string `json:"drafts,omitempty"` - Notify string `json:"notify,omitempty"` - OmitDuplicateComments bool `json:"omit_duplicate_comments,omitempty"` - OnBehalfOf string `json:"on_behalf_of,omitempty"` + Message string `json:"message,omitempty"` + Tag string `json:"tag,omitempty"` + Labels map[string]string `json:"labels,omitempty"` + Comments map[string][]CommentInput `json:"comments,omitempty"` + RobotComments map[string][]RobotCommentInput `json:"robot_comments,omitempty"` + StrictLabels bool `json:"strict_labels,omitempty"` + Drafts string `json:"drafts,omitempty"` + Notify string `json:"notify,omitempty"` + OmitDuplicateComments bool `json:"omit_duplicate_comments,omitempty"` + OnBehalfOf string `json:"on_behalf_of,omitempty"` } // RelatedChangeAndCommitInfo entity contains information about a related change and commit. @@ -251,6 +252,74 @@ type CommentInput struct { Message string `json:"message,omitempty"` } +// RobotCommentInput entity contains information for creating an inline robot comment. +// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#robot-comment-input +type RobotCommentInput struct { + CommentInput + + // The ID of the robot that generated this comment. + RobotId string `json:"robot_id"` + // An ID of the run of the robot. + RobotRunId string `json:"robot_run_id"` + // URL to more information. + Url string `json:"url,omitempty"` + // Robot specific properties as map that maps arbitrary keys to values. + Properties *map[string]*string `json:"properties,omitempty"` + // Suggested fixes for this robot comment as a list of FixSuggestionInfo + // entities. + FixSuggestions *FixSuggestionInfo `json:"fix_suggestions,omitempty"` +} + +// RobotCommentInfo entity contains information about a robot inline comment +// RobotCommentInfo has the same fields as CommentInfo. In addition RobotCommentInfo has the following fields: +// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#robot-comment-info +type RobotCommentInfo struct { + CommentInfo + + // The ID of the robot that generated this comment. + RobotId string `json:"robot_id"` + // An ID of the run of the robot. + RobotRunId string `json:"robot_run_id"` + // URL to more information. + Url string `json:"url,omitempty"` + // Robot specific properties as map that maps arbitrary keys to values. + Properties map[string]string `json:"properties,omitempty"` + // Suggested fixes for this robot comment as a list of FixSuggestionInfo + // entities. + FixSuggestions *FixSuggestionInfo `json:"fix_suggestions,omitempty"` +} + +// FixSuggestionInfo entity represents a suggested fix. +// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#fix-suggestion-info +type FixSuggestionInfo struct { + // The UUID of the suggested fix. It will be generated automatically and + // hence will be ignored if it’s set for input objects. + FixId string `json:"fix_id"` + // A description of the suggested fix. + Description string `json:"description"` + // A list of FixReplacementInfo entities indicating how the content of one or + // several files should be modified. Within a file, they should refer to + // non-overlapping regions. 
+ Replacements FixReplacementInfo `json:"replacements"` +} + +// FixReplacementInfo entity describes how the content of a file should be replaced by another content. +// https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#fix-replacement-info +type FixReplacementInfo struct { + // The path of the file which should be modified. Any file in the repository may be modified. + Path string `json:"path"` + + // A CommentRange indicating which content of the file should be replaced. + // Lines in the file are assumed to be separated by the line feed character, + // the carriage return character, the carriage return followed by the line + // feed character, or one of the other Unicode linebreak sequences supported + // by Java. + Range CommentRange `json:"range"` + + // The content which should be used instead of the current one. + Replacement string `json:"replacement,omitempty"` +} + // DiffIntralineInfo entity contains information about intraline edits in a file. // // The information consists of a list of pairs, @@ -332,16 +401,16 @@ type RevisionInfo struct { // CommentInfo entity contains information about an inline comment. type CommentInfo struct { - PatchSet int `json:"patch_set,omitempty"` - ID string `json:"id"` - Path string `json:"path,omitempty"` - Side string `json:"side,omitempty"` - Line int `json:"line,omitempty"` - Range CommentRange `json:"range,omitempty"` - InReplyTo string `json:"in_reply_to,omitempty"` - Message string `json:"message,omitempty"` - Updated Timestamp `json:"updated"` - Author AccountInfo `json:"author,omitempty"` + PatchSet int `json:"patch_set,omitempty"` + ID string `json:"id"` + Path string `json:"path,omitempty"` + Side string `json:"side,omitempty"` + Line int `json:"line,omitempty"` + Range *CommentRange `json:"range,omitempty"` + InReplyTo string `json:"in_reply_to,omitempty"` + Message string `json:"message,omitempty"` + Updated *Timestamp `json:"updated"` + Author AccountInfo `json:"author,omitempty"` } // QueryOptions specifies global parameters to query changes / reviewers. 
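Editor's note: the go-gerrit hunk above threads robot comments through the review API — ReviewInput gains a RobotComments map, and the new RobotCommentInput, FixSuggestionInfo and FixReplacementInfo entities mirror Gerrit's REST documentation. A minimal sketch of how a caller might build such a payload with the vendored types; the message, labels, file path and robot IDs are hypothetical, and actually submitting the review against a Gerrit instance is left out:

```go
package main

import (
	"fmt"

	"github.com/andygrunwald/go-gerrit"
)

func main() {
	// One inline robot comment on a (hypothetical) file of the revision.
	robot := gerrit.RobotCommentInput{
		CommentInput: gerrit.CommentInput{Message: "gofmt this file"},
		RobotId:      "format-bot",
		RobotRunId:   "run-42",
	}

	// ReviewInput now carries robot comments alongside labels and
	// ordinary comments, keyed by file path.
	review := gerrit.ReviewInput{
		Message: "Automated findings",
		Labels:  map[string]string{"Code-Review": "-1"},
		RobotComments: map[string][]gerrit.RobotCommentInput{
			"widget/gerrit/widget.go": {robot},
		},
	}
	fmt.Printf("%+v\n", review)
}
```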
diff --git a/vendor/github.com/andygrunwald/go-gerrit/gometalinter.json b/vendor/github.com/andygrunwald/go-gerrit/gometalinter.json index 0f1c951a..e4db6045 100644 --- a/vendor/github.com/andygrunwald/go-gerrit/gometalinter.json +++ b/vendor/github.com/andygrunwald/go-gerrit/gometalinter.json @@ -12,7 +12,7 @@ "goconst", "errcheck", "interfacer", - "gas", + "gosec", "unconvert", "testify", "unparam", diff --git a/vendor/github.com/briandowns/openweathermap/.gitignore b/vendor/github.com/briandowns/openweathermap/.gitignore index 850006e4..0b1d88a8 100644 --- a/vendor/github.com/briandowns/openweathermap/.gitignore +++ b/vendor/github.com/briandowns/openweathermap/.gitignore @@ -25,6 +25,7 @@ _testmain.go *.test *.prof +.vscode .idea *.iml diff --git a/vendor/github.com/danwakefield/fnmatch/.gitignore b/vendor/github.com/danwakefield/fnmatch/.gitignore new file mode 100644 index 00000000..daf913b1 --- /dev/null +++ b/vendor/github.com/danwakefield/fnmatch/.gitignore @@ -0,0 +1,24 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof diff --git a/vendor/github.com/danwakefield/fnmatch/LICENSE b/vendor/github.com/danwakefield/fnmatch/LICENSE new file mode 100644 index 00000000..0dc9851a --- /dev/null +++ b/vendor/github.com/danwakefield/fnmatch/LICENSE @@ -0,0 +1,23 @@ +Copyright (c) 2016, Daniel Wakefield +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
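Editor's note: the danwakefield/fnmatch package vendored in the next few files exposes a single Match(pattern, s, flags) entry point plus the FNM_* flag constants ported from BSD fnmatch(3). A small usage sketch under that API (the patterns and paths are illustrative only):

```go
package main

import (
	"fmt"

	"github.com/danwakefield/fnmatch"
)

func main() {
	// With no flags, '*' matches across path separators.
	fmt.Println(fnmatch.Match("*.go", "cmd/main.go", 0)) // true

	// FNM_PATHNAME makes '*' stop at '/', as in shell pathname expansion.
	fmt.Println(fnmatch.Match("*.go", "cmd/main.go", fnmatch.FNM_PATHNAME)) // false

	// FNM_CASEFOLD requests case-insensitive matching.
	fmt.Println(fnmatch.Match("readme.*", "README.md", fnmatch.FNM_CASEFOLD)) // true
}
```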
diff --git a/vendor/github.com/danwakefield/fnmatch/README.md b/vendor/github.com/danwakefield/fnmatch/README.md new file mode 100644 index 00000000..b8d71566 --- /dev/null +++ b/vendor/github.com/danwakefield/fnmatch/README.md @@ -0,0 +1,4 @@ +# fnmatch +Updated clone of kballards golang fnmatch gist (https://gist.github.com/kballard/272720) + + diff --git a/vendor/github.com/danwakefield/fnmatch/fnmatch.go b/vendor/github.com/danwakefield/fnmatch/fnmatch.go new file mode 100644 index 00000000..07ac7b37 --- /dev/null +++ b/vendor/github.com/danwakefield/fnmatch/fnmatch.go @@ -0,0 +1,219 @@ +// Provide string-matching based on fnmatch.3 +package fnmatch + +// There are a few issues that I believe to be bugs, but this implementation is +// based as closely as possible on BSD fnmatch. These bugs are present in the +// source of BSD fnmatch, and so are replicated here. The issues are as follows: +// +// * FNM_PERIOD is no longer observed after the first * in a pattern +// This only applies to matches done with FNM_PATHNAME as well +// * FNM_PERIOD doesn't apply to ranges. According to the documentation, +// a period must be matched explicitly, but a range will match it too + +import ( + "unicode" + "unicode/utf8" +) + +const ( + FNM_NOESCAPE = (1 << iota) + FNM_PATHNAME + FNM_PERIOD + + FNM_LEADING_DIR + FNM_CASEFOLD + + FNM_IGNORECASE = FNM_CASEFOLD + FNM_FILE_NAME = FNM_PATHNAME +) + +func unpackRune(str *string) rune { + rune, size := utf8.DecodeRuneInString(*str) + *str = (*str)[size:] + return rune +} + +// Matches the pattern against the string, with the given flags, +// and returns true if the match is successful. +// This function should match fnmatch.3 as closely as possible. +func Match(pattern, s string, flags int) bool { + // The implementation for this function was patterned after the BSD fnmatch.c + // source found at http://src.gnu-darwin.org/src/contrib/csup/fnmatch.c.html + noescape := (flags&FNM_NOESCAPE != 0) + pathname := (flags&FNM_PATHNAME != 0) + period := (flags&FNM_PERIOD != 0) + leadingdir := (flags&FNM_LEADING_DIR != 0) + casefold := (flags&FNM_CASEFOLD != 0) + // the following is some bookkeeping that the original fnmatch.c implementation did not do + // We are forced to do this because we're not keeping indexes into C strings but rather + // processing utf8-encoded strings. Use a custom unpacker to maintain our state for us + sAtStart := true + sLastAtStart := true + sLastSlash := false + sLastUnpacked := rune(0) + unpackS := func() rune { + sLastSlash = (sLastUnpacked == '/') + sLastUnpacked = unpackRune(&s) + sLastAtStart = sAtStart + sAtStart = false + return sLastUnpacked + } + for len(pattern) > 0 { + c := unpackRune(&pattern) + switch c { + case '?': + if len(s) == 0 { + return false + } + sc := unpackS() + if pathname && sc == '/' { + return false + } + if period && sc == '.' && (sLastAtStart || (pathname && sLastSlash)) { + return false + } + case '*': + // collapse multiple *'s + // don't use unpackRune here, the only char we care to detect is ASCII + for len(pattern) > 0 && pattern[0] == '*' { + pattern = pattern[1:] + } + if period && s[0] == '.' 
&& (sAtStart || (pathname && sLastUnpacked == '/')) { + return false + } + // optimize for patterns with * at end or before / + if len(pattern) == 0 { + if pathname { + return leadingdir || (strchr(s, '/') == -1) + } else { + return true + } + return !(pathname && strchr(s, '/') >= 0) + } else if pathname && pattern[0] == '/' { + offset := strchr(s, '/') + if offset == -1 { + return false + } else { + // we already know our pattern and string have a /, skip past it + s = s[offset:] // use unpackS here to maintain our bookkeeping state + unpackS() + pattern = pattern[1:] // we know / is one byte long + break + } + } + // general case, recurse + for test := s; len(test) > 0; unpackRune(&test) { + // I believe the (flags &^ FNM_PERIOD) is a bug when FNM_PATHNAME is specified + // but this follows exactly from how fnmatch.c implements it + if Match(pattern, test, (flags &^ FNM_PERIOD)) { + return true + } else if pathname && test[0] == '/' { + break + } + } + return false + case '[': + if len(s) == 0 { + return false + } + if pathname && s[0] == '/' { + return false + } + sc := unpackS() + if !rangematch(&pattern, sc, flags) { + return false + } + case '\\': + if !noescape { + if len(pattern) > 0 { + c = unpackRune(&pattern) + } + } + fallthrough + default: + if len(s) == 0 { + return false + } + sc := unpackS() + switch { + case sc == c: + case casefold && unicode.ToLower(sc) == unicode.ToLower(c): + default: + return false + } + } + } + return len(s) == 0 || (leadingdir && s[0] == '/') +} + +func rangematch(pattern *string, test rune, flags int) bool { + if len(*pattern) == 0 { + return false + } + casefold := (flags&FNM_CASEFOLD != 0) + noescape := (flags&FNM_NOESCAPE != 0) + if casefold { + test = unicode.ToLower(test) + } + var negate, matched bool + if (*pattern)[0] == '^' || (*pattern)[0] == '!' 
{ + negate = true + (*pattern) = (*pattern)[1:] + } + for !matched && len(*pattern) > 1 && (*pattern)[0] != ']' { + c := unpackRune(pattern) + if !noescape && c == '\\' { + if len(*pattern) > 1 { + c = unpackRune(pattern) + } else { + return false + } + } + if casefold { + c = unicode.ToLower(c) + } + if (*pattern)[0] == '-' && len(*pattern) > 1 && (*pattern)[1] != ']' { + unpackRune(pattern) // skip the - + c2 := unpackRune(pattern) + if !noescape && c2 == '\\' { + if len(*pattern) > 0 { + c2 = unpackRune(pattern) + } else { + return false + } + } + if casefold { + c2 = unicode.ToLower(c2) + } + // this really should be more intelligent, but it looks like + // fnmatch.c does simple int comparisons, therefore we will as well + if c <= test && test <= c2 { + matched = true + } + } else if c == test { + matched = true + } + } + // skip past the rest of the pattern + ok := false + for !ok && len(*pattern) > 0 { + c := unpackRune(pattern) + if c == '\\' && len(*pattern) > 0 { + unpackRune(pattern) + } else if c == ']' { + ok = true + } + } + return ok && matched != negate +} + +// define strchr because strings.Index() seems a bit overkill +// returns the index of c in s, or -1 if there is no match +func strchr(s string, c rune) int { + for i, sc := range s { + if sc == c { + return i + } + } + return -1 +} diff --git a/vendor/github.com/davecgh/go-spew/LICENSE b/vendor/github.com/davecgh/go-spew/LICENSE index c8364161..bc52e96f 100644 --- a/vendor/github.com/davecgh/go-spew/LICENSE +++ b/vendor/github.com/davecgh/go-spew/LICENSE @@ -2,7 +2,7 @@ ISC License Copyright (c) 2012-2016 Dave Collins -Permission to use, copy, modify, and distribute this software for any +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. diff --git a/vendor/github.com/davecgh/go-spew/spew/bypass.go b/vendor/github.com/davecgh/go-spew/spew/bypass.go index 8a4a6589..79299478 100644 --- a/vendor/github.com/davecgh/go-spew/spew/bypass.go +++ b/vendor/github.com/davecgh/go-spew/spew/bypass.go @@ -16,7 +16,9 @@ // when the code is not running on Google App Engine, compiled by GopherJS, and // "-tags safe" is not added to the go build command line. The "disableunsafe" // tag is deprecated and thus should not be used. -// +build !js,!appengine,!safe,!disableunsafe +// Go versions prior to 1.4 are disabled because they use a different layout +// for interfaces which make the implementation of unsafeReflectValue more complex. +// +build !js,!appengine,!safe,!disableunsafe,go1.4 package spew @@ -34,80 +36,49 @@ const ( ptrSize = unsafe.Sizeof((*byte)(nil)) ) -var ( - // offsetPtr, offsetScalar, and offsetFlag are the offsets for the - // internal reflect.Value fields. These values are valid before golang - // commit ecccf07e7f9d which changed the format. The are also valid - // after commit 82f48826c6c7 which changed the format again to mirror - // the original format. Code in the init function updates these offsets - // as necessary. - offsetPtr = uintptr(ptrSize) - offsetScalar = uintptr(0) - offsetFlag = uintptr(ptrSize * 2) +type flag uintptr - // flagKindWidth and flagKindShift indicate various bits that the - // reflect package uses internally to track kind information. - // - // flagRO indicates whether or not the value field of a reflect.Value is - // read-only. 
- // - // flagIndir indicates whether the value field of a reflect.Value is - // the actual data or a pointer to the data. - // - // These values are valid before golang commit 90a7c3c86944 which - // changed their positions. Code in the init function updates these - // flags as necessary. - flagKindWidth = uintptr(5) - flagKindShift = uintptr(flagKindWidth - 1) - flagRO = uintptr(1 << 0) - flagIndir = uintptr(1 << 1) +var ( + // flagRO indicates whether the value field of a reflect.Value + // is read-only. + flagRO flag + + // flagAddr indicates whether the address of the reflect.Value's + // value may be taken. + flagAddr flag ) -func init() { - // Older versions of reflect.Value stored small integers directly in the - // ptr field (which is named val in the older versions). Versions - // between commits ecccf07e7f9d and 82f48826c6c7 added a new field named - // scalar for this purpose which unfortunately came before the flag - // field, so the offset of the flag field is different for those - // versions. - // - // This code constructs a new reflect.Value from a known small integer - // and checks if the size of the reflect.Value struct indicates it has - // the scalar field. When it does, the offsets are updated accordingly. - vv := reflect.ValueOf(0xf00) - if unsafe.Sizeof(vv) == (ptrSize * 4) { - offsetScalar = ptrSize * 2 - offsetFlag = ptrSize * 3 - } +// flagKindMask holds the bits that make up the kind +// part of the flags field. In all the supported versions, +// it is in the lower 5 bits. +const flagKindMask = flag(0x1f) - // Commit 90a7c3c86944 changed the flag positions such that the low - // order bits are the kind. This code extracts the kind from the flags - // field and ensures it's the correct type. When it's not, the flag - // order has been changed to the newer format, so the flags are updated - // accordingly. - upf := unsafe.Pointer(uintptr(unsafe.Pointer(&vv)) + offsetFlag) - upfv := *(*uintptr)(upf) - flagKindMask := uintptr((1<<flagKindWidth - 1) << flagKindShift) - if upfv&flagKindMask>>flagKindShift != uintptr(reflect.Int) { - flagKindShift = 0 - flagRO = 1 << 5 - flagIndir = 1 << 6 +// Different versions of Go have used different +// bit layouts for the flags type. This table +// records the known combinations. +var okFlags = []struct { + ro, addr flag +}{{ + // From Go 1.4 to 1.5 + ro: 1 << 5, + addr: 1 << 7, +}, { + // Up to Go tip. + ro: 1<<5 | 1<<6, + addr: 1 << 8, +}} - // Commit adf9b30e5594 modified the flags to separate the - // flagRO flag into two bits which specifies whether or not the - // field is embedded. This causes flagIndir to move over a bit - // and means that flagRO is the combination of either of the - // original flagRO bit and the new bit. - // - // This code detects the change by extracting what used to be - // the indirect bit to ensure it's set. When it's not, the flag - // order has been changed to the newer format, so the flags are - // updated accordingly. - if upfv&flagIndir == 0 { - flagRO = 3 << 5 - flagIndir = 1 << 7 - } +var flagValOffset = func() uintptr { + field, ok := reflect.TypeOf(reflect.Value{}).FieldByName("flag") + if !ok { + panic("reflect.Value has no flag field") } + return field.Offset +}() + +// flagField returns a pointer to the flag field of a reflect.Value.
+func flagField(v *reflect.Value) *flag { + return (*flag)(unsafe.Pointer(uintptr(unsafe.Pointer(v)) + flagValOffset)) } // unsafeReflectValue converts the passed reflect.Value into a one that bypasses @@ -119,34 +90,56 @@ func init() { // This allows us to check for implementations of the Stringer and error // interfaces to be used for pretty printing ordinarily unaddressable and // inaccessible values such as unexported struct fields. -func unsafeReflectValue(v reflect.Value) (rv reflect.Value) { - indirects := 1 - vt := v.Type() - upv := unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetPtr) - rvf := *(*uintptr)(unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetFlag)) - if rvf&flagIndir != 0 { - vt = reflect.PtrTo(v.Type()) - indirects++ - } else if offsetScalar != 0 { - // The value is in the scalar field when it's not one of the - // reference types. - switch vt.Kind() { - case reflect.Uintptr: - case reflect.Chan: - case reflect.Func: - case reflect.Map: - case reflect.Ptr: - case reflect.UnsafePointer: - default: - upv = unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + - offsetScalar) +func unsafeReflectValue(v reflect.Value) reflect.Value { + if !v.IsValid() || (v.CanInterface() && v.CanAddr()) { + return v + } + flagFieldPtr := flagField(&v) + *flagFieldPtr &^= flagRO + *flagFieldPtr |= flagAddr + return v +} + +// Sanity checks against future reflect package changes +// to the type or semantics of the Value.flag field. +func init() { + field, ok := reflect.TypeOf(reflect.Value{}).FieldByName("flag") + if !ok { + panic("reflect.Value has no flag field") + } + if field.Type.Kind() != reflect.TypeOf(flag(0)).Kind() { + panic("reflect.Value flag field has changed kind") + } + type t0 int + var t struct { + A t0 + // t0 will have flagEmbedRO set. + t0 + // a will have flagStickyRO set + a t0 + } + vA := reflect.ValueOf(t).FieldByName("A") + va := reflect.ValueOf(t).FieldByName("a") + vt0 := reflect.ValueOf(t).FieldByName("t0") + + // Infer flagRO from the difference between the flags + // for the (otherwise identical) fields in t. + flagPublic := *flagField(&vA) + flagWithRO := *flagField(&va) | *flagField(&vt0) + flagRO = flagPublic ^ flagWithRO + + // Infer flagAddr from the difference between a value + // taken from a pointer and not. + vPtrA := reflect.ValueOf(&t).Elem().FieldByName("A") + flagNoPtr := *flagField(&vA) + flagPtr := *flagField(&vPtrA) + flagAddr = flagNoPtr ^ flagPtr + + // Check that the inferred flags tally with one of the known versions. + for _, f := range okFlags { + if flagRO == f.ro && flagAddr == f.addr { + return } } - - pv := reflect.NewAt(vt, upv) - rv = pv - for i := 0; i < indirects; i++ { - rv = rv.Elem() - } - return rv + panic("reflect.Value read-only flag has changed semantics") } diff --git a/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go b/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go index 1fe3cf3d..205c28d6 100644 --- a/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go +++ b/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go @@ -16,7 +16,7 @@ // when the code is running on Google App Engine, compiled by GopherJS, or // "-tags safe" is added to the go build command line. The "disableunsafe" // tag is deprecated and thus should not be used. 
-// +build js appengine safe disableunsafe +// +build js appengine safe disableunsafe !go1.4 package spew diff --git a/vendor/github.com/davecgh/go-spew/spew/common.go b/vendor/github.com/davecgh/go-spew/spew/common.go index 7c519ff4..1be8ce94 100644 --- a/vendor/github.com/davecgh/go-spew/spew/common.go +++ b/vendor/github.com/davecgh/go-spew/spew/common.go @@ -180,7 +180,7 @@ func printComplex(w io.Writer, c complex128, floatPrecision int) { w.Write(closeParenBytes) } -// printHexPtr outputs a uintptr formatted as hexidecimal with a leading '0x' +// printHexPtr outputs a uintptr formatted as hexadecimal with a leading '0x' // prefix to Writer w. func printHexPtr(w io.Writer, p uintptr) { // Null pointer. diff --git a/vendor/github.com/davecgh/go-spew/spew/dump.go b/vendor/github.com/davecgh/go-spew/spew/dump.go index df1d582a..f78d89fc 100644 --- a/vendor/github.com/davecgh/go-spew/spew/dump.go +++ b/vendor/github.com/davecgh/go-spew/spew/dump.go @@ -35,16 +35,16 @@ var ( // cCharRE is a regular expression that matches a cgo char. // It is used to detect character arrays to hexdump them. - cCharRE = regexp.MustCompile("^.*\\._Ctype_char$") + cCharRE = regexp.MustCompile(`^.*\._Ctype_char$`) // cUnsignedCharRE is a regular expression that matches a cgo unsigned // char. It is used to detect unsigned character arrays to hexdump // them. - cUnsignedCharRE = regexp.MustCompile("^.*\\._Ctype_unsignedchar$") + cUnsignedCharRE = regexp.MustCompile(`^.*\._Ctype_unsignedchar$`) // cUint8tCharRE is a regular expression that matches a cgo uint8_t. // It is used to detect uint8_t arrays to hexdump them. - cUint8tCharRE = regexp.MustCompile("^.*\\._Ctype_uint8_t$") + cUint8tCharRE = regexp.MustCompile(`^.*\._Ctype_uint8_t$`) ) // dumpState contains information about the state of a dump operation. @@ -143,10 +143,10 @@ func (d *dumpState) dumpPtr(v reflect.Value) { // Display dereferenced value. d.w.Write(openParenBytes) switch { - case nilFound == true: + case nilFound: d.w.Write(nilAngleBytes) - case cycleFound == true: + case cycleFound: d.w.Write(circularBytes) default: diff --git a/vendor/github.com/davecgh/go-spew/spew/format.go b/vendor/github.com/davecgh/go-spew/spew/format.go index c49875ba..b04edb7d 100644 --- a/vendor/github.com/davecgh/go-spew/spew/format.go +++ b/vendor/github.com/davecgh/go-spew/spew/format.go @@ -182,10 +182,10 @@ func (f *formatState) formatPtr(v reflect.Value) { // Display dereferenced value. 
switch { - case nilFound == true: + case nilFound: f.fs.Write(nilAngleBytes) - case cycleFound == true: + case cycleFound: f.fs.Write(circularShortBytes) default: diff --git a/vendor/github.com/dlclark/regexp2/.gitignore b/vendor/github.com/dlclark/regexp2/.gitignore new file mode 100644 index 00000000..38a7addc --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/.gitignore @@ -0,0 +1,25 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof +*.out \ No newline at end of file diff --git a/vendor/github.com/dlclark/regexp2/.travis.yml b/vendor/github.com/dlclark/regexp2/.travis.yml new file mode 100644 index 00000000..a24aeded --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/.travis.yml @@ -0,0 +1,5 @@ +language: go + +go: + - 1.5 + - tip \ No newline at end of file diff --git a/vendor/github.com/dlclark/regexp2/ATTRIB b/vendor/github.com/dlclark/regexp2/ATTRIB new file mode 100644 index 00000000..cdf4560b --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/ATTRIB @@ -0,0 +1,133 @@ +============ +These pieces of code were ported from dotnet/corefx: + +syntax/charclass.go (from RegexCharClass.cs): ported to use the built-in Go unicode classes. Canonicalize is + a direct port, but most of the other code required large changes because the C# implementation + used a string to represent the CharSet data structure and I cleaned that up in my implementation. + +syntax/code.go (from RegexCode.cs): ported literally with various cleanups and layout to make it more Go-ish. + +syntax/escape.go (from RegexParser.cs): ported Escape method and added some optimizations. Unescape is inspired by + the C# implementation but couldn't be directly ported because of the lack of do-while syntax in Go. + +syntax/parser.go (from RegexpParser.cs and RegexOptions.cs): ported parser struct and associated methods as + literally as possible. Several language differences required changes. E.g. lack pre/post-fix increments as + expressions, lack of do-while loops, lack of overloads, etc. + +syntax/prefix.go (from RegexFCD.cs and RegexBoyerMoore.cs): ported as literally as possible and added support + for unicode chars that are longer than the 16-bit char in C# for the 32-bit rune in Go. + +syntax/replacerdata.go (from RegexReplacement.cs): conceptually ported and re-organized to handle differences + in charclass implementation, and fix odd code layout between RegexParser.cs, Regex.cs, and RegexReplacement.cs. + +syntax/tree.go (from RegexTree.cs and RegexNode.cs): ported literally as possible. + +syntax/writer.go (from RegexWriter.cs): ported literally with minor changes to make it more Go-ish. + +match.go (from RegexMatch.cs): ported, simplified, and changed to handle Go's lack of inheritence. + +regexp.go (from Regex.cs and RegexOptions.cs): conceptually serves the same "starting point", but is simplified + and changed to handle differences in C# strings and Go strings/runes. + +replace.go (from RegexReplacement.cs): ported closely and then cleaned up to combine the MatchEvaluator and + simple string replace implementations. + +runner.go (from RegexRunner.cs): ported literally as possible. + +regexp_test.go (from CaptureTests.cs and GroupNamesAndNumbers.cs): conceptually ported, but the code was + manually structured like Go tests. 
+ +replace_test.go (from RegexReplaceStringTest0.cs): conceptually ported + +rtl_test.go (from RightToLeft.cs): conceptually ported +--- +dotnet/corefx was released under this license: + +The MIT License (MIT) + +Copyright (c) Microsoft Corporation + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +============ +These pieces of code are copied from the Go framework: + +- The overall directory structure of regexp2 was inspired by the Go runtime regexp package. +- The optimization in the escape method of syntax/escape.go is from the Go runtime QuoteMeta() func in regexp/regexp.go +- The method signatures in regexp.go are designed to match the Go framework regexp methods closely +- func regexp2.MustCompile and func quote are almost identifical to the regexp package versions +- BenchmarkMatch* and TestProgramTooLong* funcs in regexp_performance_test.go were copied from the framework + regexp/exec_test.go +--- +The Go framework was released under this license: + +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +============ +Some test data were gathered from the Mono project. 
+ +regexp_mono_test.go: ported from https://github.com/mono/mono/blob/master/mcs/class/System/Test/System.Text.RegularExpressions/PerlTrials.cs +--- +Mono tests released under this license: + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/vendor/github.com/dlclark/regexp2/LICENSE b/vendor/github.com/dlclark/regexp2/LICENSE new file mode 100644 index 00000000..fe83dfdc --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Doug Clark + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/dlclark/regexp2/README.md b/vendor/github.com/dlclark/regexp2/README.md new file mode 100644 index 00000000..f4dc33da --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/README.md @@ -0,0 +1,67 @@ +# regexp2 - full featured regular expressions for Go +Regexp2 is a feature-rich RegExp engine for Go. It doesn't have constant time guarantees like the built-in `regexp` package, but it allows backtracking and is compatible with Perl5 and .NET. You'll likely be better off with the RE2 engine from the `regexp` package and should only use this if you need to write very complex patterns or require compatibility with .NET. + +## Basis of the engine +The engine is ported from the .NET framework's System.Text.RegularExpressions.Regex engine. That engine was open sourced in 2015 under the MIT license. There are some fundamental differences between .NET strings and Go strings that required a bit of borrowing from the Go framework regex engine as well. 
I cleaned up a couple of the dirtier bits during the port (regexcharclass.cs was terrible), but the parse tree, code emmitted, and therefore patterns matched should be identical. + +## Installing +This is a go-gettable library, so install is easy: + + go get github.com/dlclark/regexp2/... + +## Usage +Usage is similar to the Go `regexp` package. Just like in `regexp`, you start by converting a regex into a state machine via the `Compile` or `MustCompile` methods. They ultimately do the same thing, but `MustCompile` will panic if the regex is invalid. You can then use the provided `Regexp` struct to find matches repeatedly. A `Regexp` struct is safe to use across goroutines. + +```go +re := regexp2.MustCompile(`Your pattern`, 0) +if isMatch, _ := re.MatchString(`Something to match`); isMatch { + //do something +} +``` + +The only error that the `*Match*` methods *should* return is a Timeout if you set the `re.MatchTimeout` field. Any other error is a bug in the `regexp2` package. If you need more details about capture groups in a match then use the `FindStringMatch` method, like so: + +```go +if m, _ := re.FindStringMatch(`Something to match`); m != nil { + // the whole match is always group 0 + fmt.Printf("Group 0: %v\n", m.String()) + + // you can get all the groups too + gps := m.Groups() + + // a group can be captured multiple times, so each cap is separately addressable + fmt.Printf("Group 1, first capture", gps[1].Captures[0].String()) + fmt.Printf("Group 1, second capture", gps[1].Captures[1].String()) +} +``` + +Group 0 is embedded in the Match. Group 0 is an automatically-assigned group that encompasses the whole pattern. This means that `m.String()` is the same as `m.Group.String()` and `m.Groups()[0].String()` + +The __last__ capture is embedded in each group, so `g.String()` will return the same thing as `g.Capture.String()` and `g.Captures[len(g.Captures)-1].String()`. + +## Compare `regexp` and `regexp2` +| Category | regexp | regexp2 | +| --- | --- | --- | +| Catastrophic backtracking possible | no, constant execution time guarantees | yes, if your pattern is at risk you can use the `re.MatchTimeout` field | +| Python-style capture groups `(Pre)` | yes | no | +| .NET-style capture groups `(re)` or `('name're)` | no | yes | +| comments `(?#comment)` | no | yes | +| branch numbering reset `(?\|a\|b)` | no | no | +| possessive match `(?>re)` | no | yes | +| positive lookahead `(?=re)` | no | yes | +| negative lookahead `(?!re)` | no | yes | +| positive lookbehind `(?<=re)` | no | yes | +| negative lookbehind `(? 
0 && m.matches[cap][m.matchcount[cap]*2-1] != (-3+1) +} + +// matchIndex returns the index of the last specified matched group by capnum +func (m *Match) matchIndex(cap int) int { + i := m.matches[cap][m.matchcount[cap]*2-2] + if i >= 0 { + return i + } + + return m.matches[cap][-3-i] +} + +// matchLength returns the length of the last specified matched group by capnum +func (m *Match) matchLength(cap int) int { + i := m.matches[cap][m.matchcount[cap]*2-1] + if i >= 0 { + return i + } + + return m.matches[cap][-3-i] +} + +// Nonpublic builder: add a capture to the group specified by "c" +func (m *Match) addMatch(c, start, l int) { + + if m.matches[c] == nil { + m.matches[c] = make([]int, 2) + } + + capcount := m.matchcount[c] + + if capcount*2+2 > len(m.matches[c]) { + oldmatches := m.matches[c] + newmatches := make([]int, capcount*8) + copy(newmatches, oldmatches[:capcount*2]) + m.matches[c] = newmatches + } + + m.matches[c][capcount*2] = start + m.matches[c][capcount*2+1] = l + m.matchcount[c] = capcount + 1 + //log.Printf("addMatch: c=%v, i=%v, l=%v ... matches: %v", c, start, l, m.matches) +} + +// Nonpublic builder: Add a capture to balance the specified group. This is used by the +// balanced match construct. (?...) +// +// If there were no such thing as backtracking, this would be as simple as calling RemoveMatch(c). +// However, since we have backtracking, we need to keep track of everything. +func (m *Match) balanceMatch(c int) { + m.balancing = true + + // we'll look at the last capture first + capcount := m.matchcount[c] + target := capcount*2 - 2 + + // first see if it is negative, and therefore is a reference to the next available + // capture group for balancing. If it is, we'll reset target to point to that capture. + if m.matches[c][target] < 0 { + target = -3 - m.matches[c][target] + } + + // move back to the previous capture + target -= 2 + + // if the previous capture is a reference, just copy that reference to the end. Otherwise, point to it. 
+ if target >= 0 && m.matches[c][target] < 0 { + m.addMatch(c, m.matches[c][target], m.matches[c][target+1]) + } else { + m.addMatch(c, -3-target, -4-target /* == -3 - (target + 1) */) + } +} + +// Nonpublic builder: removes a group match by capnum +func (m *Match) removeMatch(c int) { + m.matchcount[c]-- +} + +// GroupCount returns the number of groups this match has matched +func (m *Match) GroupCount() int { + return len(m.matchcount) +} + +// GroupByName returns a group based on the name of the group, or nil if the group name does not exist +func (m *Match) GroupByName(name string) *Group { + num := m.regex.GroupNumberFromName(name) + if num < 0 { + return nil + } + return m.GroupByNumber(num) +} + +// GroupByNumber returns a group based on the number of the group, or nil if the group number does not exist +func (m *Match) GroupByNumber(num int) *Group { + // check our sparse map + if m.sparseCaps != nil { + if newNum, ok := m.sparseCaps[num]; ok { + num = newNum + } + } + if num >= len(m.matchcount) || num < 0 { + return nil + } + + if num == 0 { + return &m.Group + } + + m.populateOtherGroups() + + return &m.otherGroups[num-1] +} + +// Groups returns all the capture groups, starting with group 0 (the full match) +func (m *Match) Groups() []Group { + m.populateOtherGroups() + g := make([]Group, len(m.otherGroups)+1) + g[0] = m.Group + copy(g[1:], m.otherGroups) + return g +} + +func (m *Match) populateOtherGroups() { + // Construct all the Group objects first time called + if m.otherGroups == nil { + m.otherGroups = make([]Group, len(m.matchcount)-1) + for i := 0; i < len(m.otherGroups); i++ { + m.otherGroups[i] = newGroup(m.regex.GroupNameFromNumber(i+1), m.text, m.matches[i+1], m.matchcount[i+1]) + } + } +} + +func (m *Match) groupValueAppendToBuf(groupnum int, buf *bytes.Buffer) { + c := m.matchcount[groupnum] + if c == 0 { + return + } + + matches := m.matches[groupnum] + + index := matches[(c-1)*2] + last := index + matches[(c*2)-1] + + for ; index < last; index++ { + buf.WriteRune(m.text[index]) + } +} + +func newGroup(name string, text []rune, caps []int, capcount int) Group { + g := Group{} + g.text = text + if capcount > 0 { + g.Index = caps[(capcount-1)*2] + g.Length = caps[(capcount*2)-1] + } + g.Name = name + g.Captures = make([]Capture, capcount) + for i := 0; i < capcount; i++ { + g.Captures[i] = Capture{ + text: text, + Index: caps[i*2], + Length: caps[i*2+1], + } + } + //log.Printf("newGroup! 
capcount %v, %+v", capcount, g) + + return g +} + +func (m *Match) dump() string { + buf := &bytes.Buffer{} + buf.WriteRune('\n') + if len(m.sparseCaps) > 0 { + for k, v := range m.sparseCaps { + fmt.Fprintf(buf, "Slot %v -> %v\n", k, v) + } + } + + for i, g := range m.Groups() { + fmt.Fprintf(buf, "Group %v (%v), %v caps:\n", i, g.Name, len(g.Captures)) + + for _, c := range g.Captures { + fmt.Fprintf(buf, " (%v, %v) %v\n", c.Index, c.Length, c.String()) + } + } + /* + for i := 0; i < len(m.matchcount); i++ { + fmt.Fprintf(buf, "\nGroup %v (%v):\n", i, m.regex.GroupNameFromNumber(i)) + + for j := 0; j < m.matchcount[i]; j++ { + text := "" + + if m.matches[i][j*2] >= 0 { + start := m.matches[i][j*2] + text = m.text[start : start+m.matches[i][j*2+1]] + } + + fmt.Fprintf(buf, " (%v, %v) %v\n", m.matches[i][j*2], m.matches[i][j*2+1], text) + } + } + */ + return buf.String() +} diff --git a/vendor/github.com/dlclark/regexp2/regexp.go b/vendor/github.com/dlclark/regexp2/regexp.go new file mode 100644 index 00000000..b25fe690 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/regexp.go @@ -0,0 +1,357 @@ +/* +Package regexp2 is a regexp package that has an interface similar to Go's framework regexp engine but uses a +more feature full regex engine behind the scenes. + +It doesn't have constant time guarantees, but it allows backtracking and is compatible with Perl5 and .NET. +You'll likely be better off with the RE2 engine from the regexp package and should only use this if you +need to write very complex patterns or require compatibility with .NET. +*/ +package regexp2 + +import ( + "errors" + "math" + "strconv" + "sync" + "time" + + "github.com/dlclark/regexp2/syntax" +) + +// Default timeout used when running regexp matches -- "forever" +var DefaultMatchTimeout = time.Duration(math.MaxInt64) + +// Regexp is the representation of a compiled regular expression. +// A Regexp is safe for concurrent use by multiple goroutines. +type Regexp struct { + //timeout when trying to find matches + MatchTimeout time.Duration + + // read-only after Compile + pattern string // as passed to Compile + options RegexOptions // options + + caps map[int]int // capnum->index + capnames map[string]int //capture group name -> index + capslist []string //sorted list of capture group names + capsize int // size of the capture array + + code *syntax.Code // compiled program + + // cache of machines for running regexp + muRun sync.Mutex + runner []*runner +} + +// Compile parses a regular expression and returns, if successful, +// a Regexp object that can be used to match against text. +func Compile(expr string, opt RegexOptions) (*Regexp, error) { + // parse it + tree, err := syntax.Parse(expr, syntax.RegexOptions(opt)) + if err != nil { + return nil, err + } + + // translate it to code + code, err := syntax.Write(tree) + if err != nil { + return nil, err + } + + // return it + return &Regexp{ + pattern: expr, + options: opt, + caps: code.Caps, + capnames: tree.Capnames, + capslist: tree.Caplist, + capsize: code.Capsize, + code: code, + MatchTimeout: DefaultMatchTimeout, + }, nil +} + +// MustCompile is like Compile but panics if the expression cannot be parsed. +// It simplifies safe initialization of global variables holding compiled regular +// expressions. 
+func MustCompile(str string, opt RegexOptions) *Regexp { + regexp, error := Compile(str, opt) + if error != nil { + panic(`regexp2: Compile(` + quote(str) + `): ` + error.Error()) + } + return regexp +} + +// Escape adds backslashes to any special characters in the input string +func Escape(input string) string { + return syntax.Escape(input) +} + +// Unescape removes any backslashes from previously-escaped special characters in the input string +func Unescape(input string) (string, error) { + return syntax.Unescape(input) +} + +// String returns the source text used to compile the regular expression. +func (re *Regexp) String() string { + return re.pattern +} + +func quote(s string) string { + if strconv.CanBackquote(s) { + return "`" + s + "`" + } + return strconv.Quote(s) +} + +// RegexOptions impact the runtime and parsing behavior +// for each specific regex. They are setable in code as well +// as in the regex pattern itself. +type RegexOptions int32 + +const ( + None RegexOptions = 0x0 + IgnoreCase = 0x0001 // "i" + Multiline = 0x0002 // "m" + ExplicitCapture = 0x0004 // "n" + Compiled = 0x0008 // "c" + Singleline = 0x0010 // "s" + IgnorePatternWhitespace = 0x0020 // "x" + RightToLeft = 0x0040 // "r" + Debug = 0x0080 // "d" + ECMAScript = 0x0100 // "e" +) + +func (re *Regexp) RightToLeft() bool { + return re.options&RightToLeft != 0 +} + +func (re *Regexp) Debug() bool { + return re.options&Debug != 0 +} + +// Replace searches the input string and replaces each match found with the replacement text. +// Count will limit the number of matches attempted and startAt will allow +// us to skip past possible matches at the start of the input (left or right depending on RightToLeft option). +// Set startAt and count to -1 to go through the whole string +func (re *Regexp) Replace(input, replacement string, startAt, count int) (string, error) { + data, err := syntax.NewReplacerData(replacement, re.caps, re.capsize, re.capnames, syntax.RegexOptions(re.options)) + if err != nil { + return "", err + } + //TODO: cache ReplacerData + + return replace(re, data, nil, input, startAt, count) +} + +// ReplaceFunc searches the input string and replaces each match found using the string from the evaluator +// Count will limit the number of matches attempted and startAt will allow +// us to skip past possible matches at the start of the input (left or right depending on RightToLeft option). +// Set startAt and count to -1 to go through the whole string. 
+func (re *Regexp) ReplaceFunc(input string, evaluator MatchEvaluator, startAt, count int) (string, error) { + return replace(re, nil, evaluator, input, startAt, count) +} + +// FindStringMatch searches the input string for a Regexp match +func (re *Regexp) FindStringMatch(s string) (*Match, error) { + // convert string to runes + return re.run(false, -1, getRunes(s)) +} + +// FindRunesMatch searches the input rune slice for a Regexp match +func (re *Regexp) FindRunesMatch(r []rune) (*Match, error) { + return re.run(false, -1, r) +} + +// FindStringMatchStartingAt searches the input string for a Regexp match starting at the startAt index +func (re *Regexp) FindStringMatchStartingAt(s string, startAt int) (*Match, error) { + if startAt > len(s) { + return nil, errors.New("startAt must be less than the length of the input string") + } + r, startAt := re.getRunesAndStart(s, startAt) + if startAt == -1 { + // we didn't find our start index in the string -- that's a problem + return nil, errors.New("startAt must align to the start of a valid rune in the input string") + } + + return re.run(false, startAt, r) +} + +// FindRunesMatchStartingAt searches the input rune slice for a Regexp match starting at the startAt index +func (re *Regexp) FindRunesMatchStartingAt(r []rune, startAt int) (*Match, error) { + return re.run(false, startAt, r) +} + +// FindNextMatch returns the next match in the same input string as the match parameter. +// Will return nil if there is no next match or if given a nil match. +func (re *Regexp) FindNextMatch(m *Match) (*Match, error) { + if m == nil { + return nil, nil + } + + // If previous match was empty, advance by one before matching to prevent + // infinite loop + startAt := m.textpos + if m.Length == 0 { + if m.textpos == len(m.text) { + return nil, nil + } + + if re.RightToLeft() { + startAt-- + } else { + startAt++ + } + } + return re.run(false, startAt, m.text) +} + +// MatchString return true if the string matches the regex +// error will be set if a timeout occurs +func (re *Regexp) MatchString(s string) (bool, error) { + m, err := re.run(true, -1, getRunes(s)) + if err != nil { + return false, err + } + return m != nil, nil +} + +func (re *Regexp) getRunesAndStart(s string, startAt int) ([]rune, int) { + if startAt < 0 { + if re.RightToLeft() { + r := getRunes(s) + return r, len(r) + } + return getRunes(s), 0 + } + ret := make([]rune, len(s)) + i := 0 + runeIdx := -1 + for strIdx, r := range s { + if strIdx == startAt { + runeIdx = i + } + ret[i] = r + i++ + } + return ret[:i], runeIdx +} + +func getRunes(s string) []rune { + ret := make([]rune, len(s)) + i := 0 + for _, r := range s { + ret[i] = r + i++ + } + return ret[:i] +} + +// MatchRunes return true if the runes matches the regex +// error will be set if a timeout occurs +func (re *Regexp) MatchRunes(r []rune) (bool, error) { + m, err := re.run(true, -1, r) + if err != nil { + return false, err + } + return m != nil, nil +} + +// GetGroupNames Returns the set of strings used to name capturing groups in the expression. +func (re *Regexp) GetGroupNames() []string { + var result []string + + if re.capslist == nil { + result = make([]string, re.capsize) + + for i := 0; i < len(result); i++ { + result[i] = strconv.Itoa(i) + } + } else { + result = make([]string, len(re.capslist)) + copy(result, re.capslist) + } + + return result +} + +// GetGroupNumbers returns the integer group numbers corresponding to a group name. 
+func (re *Regexp) GetGroupNumbers() []int { + var result []int + + if re.caps == nil { + result = make([]int, re.capsize) + + for i := 0; i < len(result); i++ { + result[i] = i + } + } else { + result = make([]int, len(re.caps)) + + for k, v := range re.caps { + result[v] = k + } + } + + return result +} + +// GroupNameFromNumber retrieves a group name that corresponds to a group number. +// It will return "" for and unknown group number. Unnamed groups automatically +// receive a name that is the decimal string equivalent of its number. +func (re *Regexp) GroupNameFromNumber(i int) string { + if re.capslist == nil { + if i >= 0 && i < re.capsize { + return strconv.Itoa(i) + } + + return "" + } + + if re.caps != nil { + var ok bool + if i, ok = re.caps[i]; !ok { + return "" + } + } + + if i >= 0 && i < len(re.capslist) { + return re.capslist[i] + } + + return "" +} + +// GroupNumberFromName returns a group number that corresponds to a group name. +// Returns -1 if the name is not a recognized group name. Numbered groups +// automatically get a group name that is the decimal string equivalent of its number. +func (re *Regexp) GroupNumberFromName(name string) int { + // look up name if we have a hashtable of names + if re.capnames != nil { + if k, ok := re.capnames[name]; ok { + return k + } + + return -1 + } + + // convert to an int if it looks like a number + result := 0 + for i := 0; i < len(name); i++ { + ch := name[i] + + if ch > '9' || ch < '0' { + return -1 + } + + result *= 10 + result += int(ch - '0') + } + + // return int if it's in range + if result >= 0 && result < re.capsize { + return result + } + + return -1 +} diff --git a/vendor/github.com/dlclark/regexp2/replace.go b/vendor/github.com/dlclark/regexp2/replace.go new file mode 100644 index 00000000..0376bd9d --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/replace.go @@ -0,0 +1,177 @@ +package regexp2 + +import ( + "bytes" + "errors" + + "github.com/dlclark/regexp2/syntax" +) + +const ( + replaceSpecials = 4 + replaceLeftPortion = -1 + replaceRightPortion = -2 + replaceLastGroup = -3 + replaceWholeString = -4 +) + +// MatchEvaluator is a function that takes a match and returns a replacement string to be used +type MatchEvaluator func(Match) string + +// Three very similar algorithms appear below: replace (pattern), +// replace (evaluator), and split. + +// Replace Replaces all occurrences of the regex in the string with the +// replacement pattern. +// +// Note that the special case of no matches is handled on its own: +// with no matches, the input string is returned unchanged. +// The right-to-left case is split out because StringBuilder +// doesn't handle right-to-left string building directly very well. 
+func replace(regex *Regexp, data *syntax.ReplacerData, evaluator MatchEvaluator, input string, startAt, count int) (string, error) { + if count < -1 { + return "", errors.New("Count too small") + } + if count == 0 { + return "", nil + } + + m, err := regex.FindStringMatchStartingAt(input, startAt) + + if err != nil { + return "", err + } + if m == nil { + return input, nil + } + + buf := &bytes.Buffer{} + text := m.text + + if !regex.RightToLeft() { + prevat := 0 + for m != nil { + if m.Index != prevat { + buf.WriteString(string(text[prevat:m.Index])) + } + prevat = m.Index + m.Length + if evaluator == nil { + replacementImpl(data, buf, m) + } else { + buf.WriteString(evaluator(*m)) + } + + count-- + if count == 0 { + break + } + m, err = regex.FindNextMatch(m) + if err != nil { + return "", nil + } + } + + if prevat < len(text) { + buf.WriteString(string(text[prevat:])) + } + } else { + prevat := len(text) + var al []string + + for m != nil { + if m.Index+m.Length != prevat { + al = append(al, string(text[m.Index+m.Length:prevat])) + } + prevat = m.Index + if evaluator == nil { + replacementImplRTL(data, &al, m) + } else { + al = append(al, evaluator(*m)) + } + + count-- + if count == 0 { + break + } + m, err = regex.FindNextMatch(m) + if err != nil { + return "", nil + } + } + + if prevat > 0 { + buf.WriteString(string(text[:prevat])) + } + + for i := len(al) - 1; i >= 0; i-- { + buf.WriteString(al[i]) + } + } + + return buf.String(), nil +} + +// Given a Match, emits into the StringBuilder the evaluated +// substitution pattern. +func replacementImpl(data *syntax.ReplacerData, buf *bytes.Buffer, m *Match) { + for _, r := range data.Rules { + + if r >= 0 { // string lookup + buf.WriteString(data.Strings[r]) + } else if r < -replaceSpecials { // group lookup + m.groupValueAppendToBuf(-replaceSpecials-1-r, buf) + } else { + switch -replaceSpecials - 1 - r { // special insertion patterns + case replaceLeftPortion: + for i := 0; i < m.Index; i++ { + buf.WriteRune(m.text[i]) + } + case replaceRightPortion: + for i := m.Index + m.Length; i < len(m.text); i++ { + buf.WriteRune(m.text[i]) + } + case replaceLastGroup: + m.groupValueAppendToBuf(m.GroupCount()-1, buf) + case replaceWholeString: + for i := 0; i < len(m.text); i++ { + buf.WriteRune(m.text[i]) + } + } + } + } +} + +func replacementImplRTL(data *syntax.ReplacerData, al *[]string, m *Match) { + l := *al + buf := &bytes.Buffer{} + + for _, r := range data.Rules { + buf.Reset() + if r >= 0 { // string lookup + l = append(l, data.Strings[r]) + } else if r < -replaceSpecials { // group lookup + m.groupValueAppendToBuf(-replaceSpecials-1-r, buf) + l = append(l, buf.String()) + } else { + switch -replaceSpecials - 1 - r { // special insertion patterns + case replaceLeftPortion: + for i := 0; i < m.Index; i++ { + buf.WriteRune(m.text[i]) + } + case replaceRightPortion: + for i := m.Index + m.Length; i < len(m.text); i++ { + buf.WriteRune(m.text[i]) + } + case replaceLastGroup: + m.groupValueAppendToBuf(m.GroupCount()-1, buf) + case replaceWholeString: + for i := 0; i < len(m.text); i++ { + buf.WriteRune(m.text[i]) + } + } + l = append(l, buf.String()) + } + } + + *al = l +} diff --git a/vendor/github.com/dlclark/regexp2/runner.go b/vendor/github.com/dlclark/regexp2/runner.go new file mode 100644 index 00000000..2d84a934 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/runner.go @@ -0,0 +1,1621 @@ +package regexp2 + +import ( + "bytes" + "errors" + "fmt" + "math" + "strconv" + "strings" + "time" + "unicode" + + 
"github.com/dlclark/regexp2/syntax" +) + +type runner struct { + re *Regexp + code *syntax.Code + + runtextstart int // starting point for search + + runtext []rune // text to search + runtextpos int // current position in text + runtextend int + + // The backtracking stack. Opcodes use this to store data regarding + // what they have matched and where to backtrack to. Each "frame" on + // the stack takes the form of [CodePosition Data1 Data2...], where + // CodePosition is the position of the current opcode and + // the data values are all optional. The CodePosition can be negative, and + // these values (also called "back2") are used by the BranchMark family of opcodes + // to indicate whether they are backtracking after a successful or failed + // match. + // When we backtrack, we pop the CodePosition off the stack, set the current + // instruction pointer to that code position, and mark the opcode + // with a backtracking flag ("Back"). Each opcode then knows how to + // handle its own data. + runtrack []int + runtrackpos int + + // This stack is used to track text positions across different opcodes. + // For example, in /(a*b)+/, the parentheses result in a SetMark/CaptureMark + // pair. SetMark records the text position before we match a*b. Then + // CaptureMark uses that position to figure out where the capture starts. + // Opcodes which push onto this stack are always paired with other opcodes + // which will pop the value from it later. A successful match should mean + // that this stack is empty. + runstack []int + runstackpos int + + // The crawl stack is used to keep track of captures. Every time a group + // has a capture, we push its group number onto the runcrawl stack. In + // the case of a balanced match, we push BOTH groups onto the stack. + runcrawl []int + runcrawlpos int + + runtrackcount int // count of states that may do backtracking + + runmatch *Match // result object + + ignoreTimeout bool + timeout time.Duration // timeout in milliseconds (needed for actual) + timeoutChecksToSkip int + timeoutAt time.Time + + operator syntax.InstOp + codepos int + rightToLeft bool + caseInsensitive bool +} + +// run searches for matches and can continue from the previous match +// +// quick is usually false, but can be true to not return matches, just put it in caches +// textstart is -1 to start at the "beginning" (depending on Right-To-Left), otherwise an index in input +// input is the string to search for our regex pattern +func (re *Regexp) run(quick bool, textstart int, input []rune) (*Match, error) { + + // get a cached runner + runner := re.getRunner() + defer re.putRunner(runner) + + if textstart < 0 { + if re.RightToLeft() { + textstart = len(input) + } else { + textstart = 0 + } + } + + return runner.scan(input, textstart, quick, re.MatchTimeout) +} + +// Scans the string to find the first match. Uses the Match object +// both to feed text in and as a place to store matches that come out. +// +// All the action is in the Go() method. Our +// responsibility is to load up the class members before +// calling Go. +// +// The optimizer can compute a set of candidate starting characters, +// and we could use a separate method Skip() that will quickly scan past +// any characters that we know can't match. 
+func (r *runner) scan(rt []rune, textstart int, quick bool, timeout time.Duration) (*Match, error) { + r.timeout = timeout + r.ignoreTimeout = (time.Duration(math.MaxInt64) == timeout) + r.runtextstart = textstart + r.runtext = rt + r.runtextend = len(rt) + + stoppos := r.runtextend + bump := 1 + + if r.re.RightToLeft() { + bump = -1 + stoppos = 0 + } + + r.runtextpos = textstart + initted := false + + r.startTimeoutWatch() + for { + if r.re.Debug() { + //fmt.Printf("\nSearch content: %v\n", string(r.runtext)) + fmt.Printf("\nSearch range: from 0 to %v\n", r.runtextend) + fmt.Printf("Firstchar search starting at %v stopping at %v\n", r.runtextpos, stoppos) + } + + if r.findFirstChar() { + if err := r.checkTimeout(); err != nil { + return nil, err + } + + if !initted { + r.initMatch() + initted = true + } + + if r.re.Debug() { + fmt.Printf("Executing engine starting at %v\n\n", r.runtextpos) + } + + if err := r.execute(); err != nil { + return nil, err + } + + if r.runmatch.matchcount[0] > 0 { + // We'll return a match even if it touches a previous empty match + return r.tidyMatch(quick), nil + } + + // reset state for another go + r.runtrackpos = len(r.runtrack) + r.runstackpos = len(r.runstack) + r.runcrawlpos = len(r.runcrawl) + } + + // failure! + + if r.runtextpos == stoppos { + r.tidyMatch(true) + return nil, nil + } + + // Recognize leading []* and various anchors, and bump on failure accordingly + + // r.bump by one and start again + + r.runtextpos += bump + } + // We never get here +} + +func (r *runner) execute() error { + + r.goTo(0) + + for { + + if r.re.Debug() { + r.dumpState() + } + + if err := r.checkTimeout(); err != nil { + return err + } + + switch r.operator { + case syntax.Stop: + return nil + + case syntax.Nothing: + break + + case syntax.Goto: + r.goTo(r.operand(0)) + continue + + case syntax.Testref: + if !r.runmatch.isMatched(r.operand(0)) { + break + } + r.advance(1) + continue + + case syntax.Lazybranch: + r.trackPush1(r.textPos()) + r.advance(1) + continue + + case syntax.Lazybranch | syntax.Back: + r.trackPop() + r.textto(r.trackPeek()) + r.goTo(r.operand(0)) + continue + + case syntax.Setmark: + r.stackPush(r.textPos()) + r.trackPush() + r.advance(0) + continue + + case syntax.Nullmark: + r.stackPush(-1) + r.trackPush() + r.advance(0) + continue + + case syntax.Setmark | syntax.Back, syntax.Nullmark | syntax.Back: + r.stackPop() + break + + case syntax.Getmark: + r.stackPop() + r.trackPush1(r.stackPeek()) + r.textto(r.stackPeek()) + r.advance(0) + continue + + case syntax.Getmark | syntax.Back: + r.trackPop() + r.stackPush(r.trackPeek()) + break + + case syntax.Capturemark: + if r.operand(1) != -1 && !r.runmatch.isMatched(r.operand(1)) { + break + } + r.stackPop() + if r.operand(1) != -1 { + r.transferCapture(r.operand(0), r.operand(1), r.stackPeek(), r.textPos()) + } else { + r.capture(r.operand(0), r.stackPeek(), r.textPos()) + } + r.trackPush1(r.stackPeek()) + + r.advance(2) + + continue + + case syntax.Capturemark | syntax.Back: + r.trackPop() + r.stackPush(r.trackPeek()) + r.uncapture() + if r.operand(0) != -1 && r.operand(1) != -1 { + r.uncapture() + } + + break + + case syntax.Branchmark: + r.stackPop() + + matched := r.textPos() - r.stackPeek() + + if matched != 0 { // Nonempty match -> loop now + r.trackPush2(r.stackPeek(), r.textPos()) // Save old mark, textpos + r.stackPush(r.textPos()) // Make new mark + r.goTo(r.operand(0)) // Loop + } else { // Empty match -> straight now + r.trackPushNeg1(r.stackPeek()) // Save old mark + r.advance(1) // 
Straight + } + continue + + case syntax.Branchmark | syntax.Back: + r.trackPopN(2) + r.stackPop() + r.textto(r.trackPeekN(1)) // Recall position + r.trackPushNeg1(r.trackPeek()) // Save old mark + r.advance(1) // Straight + continue + + case syntax.Branchmark | syntax.Back2: + r.trackPop() + r.stackPush(r.trackPeek()) // Recall old mark + break // Backtrack + + case syntax.Lazybranchmark: + { + // We hit this the first time through a lazy loop and after each + // successful match of the inner expression. It simply continues + // on and doesn't loop. + r.stackPop() + + oldMarkPos := r.stackPeek() + + if r.textPos() != oldMarkPos { // Nonempty match -> try to loop again by going to 'back' state + if oldMarkPos != -1 { + r.trackPush2(oldMarkPos, r.textPos()) // Save old mark, textpos + } else { + r.trackPush2(r.textPos(), r.textPos()) + } + } else { + // The inner expression found an empty match, so we'll go directly to 'back2' if we + // backtrack. In this case, we need to push something on the stack, since back2 pops. + // However, in the case of ()+? or similar, this empty match may be legitimate, so push the text + // position associated with that empty match. + r.stackPush(oldMarkPos) + + r.trackPushNeg1(r.stackPeek()) // Save old mark + } + r.advance(1) + continue + } + + case syntax.Lazybranchmark | syntax.Back: + + // After the first time, Lazybranchmark | syntax.Back occurs + // with each iteration of the loop, and therefore with every attempted + // match of the inner expression. We'll try to match the inner expression, + // then go back to Lazybranchmark if successful. If the inner expression + // fails, we go to Lazybranchmark | syntax.Back2 + + r.trackPopN(2) + pos := r.trackPeekN(1) + r.trackPushNeg1(r.trackPeek()) // Save old mark + r.stackPush(pos) // Make new mark + r.textto(pos) // Recall position + r.goTo(r.operand(0)) // Loop + continue + + case syntax.Lazybranchmark | syntax.Back2: + // The lazy loop has failed. We'll do a true backtrack and + // start over before the lazy loop. 
+ r.stackPop() + r.trackPop() + r.stackPush(r.trackPeek()) // Recall old mark + break + + case syntax.Setcount: + r.stackPush2(r.textPos(), r.operand(0)) + r.trackPush() + r.advance(1) + continue + + case syntax.Nullcount: + r.stackPush2(-1, r.operand(0)) + r.trackPush() + r.advance(1) + continue + + case syntax.Setcount | syntax.Back: + r.stackPopN(2) + break + + case syntax.Nullcount | syntax.Back: + r.stackPopN(2) + break + + case syntax.Branchcount: + // r.stackPush: + // 0: Mark + // 1: Count + + r.stackPopN(2) + mark := r.stackPeek() + count := r.stackPeekN(1) + matched := r.textPos() - mark + + if count >= r.operand(1) || (matched == 0 && count >= 0) { // Max loops or empty match -> straight now + r.trackPushNeg2(mark, count) // Save old mark, count + r.advance(2) // Straight + } else { // Nonempty match -> count+loop now + r.trackPush1(mark) // remember mark + r.stackPush2(r.textPos(), count+1) // Make new mark, incr count + r.goTo(r.operand(0)) // Loop + } + continue + + case syntax.Branchcount | syntax.Back: + // r.trackPush: + // 0: Previous mark + // r.stackPush: + // 0: Mark (= current pos, discarded) + // 1: Count + r.trackPop() + r.stackPopN(2) + if r.stackPeekN(1) > 0 { // Positive -> can go straight + r.textto(r.stackPeek()) // Zap to mark + r.trackPushNeg2(r.trackPeek(), r.stackPeekN(1)-1) // Save old mark, old count + r.advance(2) // Straight + continue + } + r.stackPush2(r.trackPeek(), r.stackPeekN(1)-1) // recall old mark, old count + break + + case syntax.Branchcount | syntax.Back2: + // r.trackPush: + // 0: Previous mark + // 1: Previous count + r.trackPopN(2) + r.stackPush2(r.trackPeek(), r.trackPeekN(1)) // Recall old mark, old count + break // Backtrack + + case syntax.Lazybranchcount: + // r.stackPush: + // 0: Mark + // 1: Count + + r.stackPopN(2) + mark := r.stackPeek() + count := r.stackPeekN(1) + + if count < 0 { // Negative count -> loop now + r.trackPushNeg1(mark) // Save old mark + r.stackPush2(r.textPos(), count+1) // Make new mark, incr count + r.goTo(r.operand(0)) // Loop + } else { // Nonneg count -> straight now + r.trackPush3(mark, count, r.textPos()) // Save mark, count, position + r.advance(2) // Straight + } + continue + + case syntax.Lazybranchcount | syntax.Back: + // r.trackPush: + // 0: Mark + // 1: Count + // 2: r.textPos + + r.trackPopN(3) + mark := r.trackPeek() + textpos := r.trackPeekN(2) + + if r.trackPeekN(1) < r.operand(1) && textpos != mark { // Under limit and not empty match -> loop + r.textto(textpos) // Recall position + r.stackPush2(textpos, r.trackPeekN(1)+1) // Make new mark, incr count + r.trackPushNeg1(mark) // Save old mark + r.goTo(r.operand(0)) // Loop + continue + } else { // Max loops or empty match -> backtrack + r.stackPush2(r.trackPeek(), r.trackPeekN(1)) // Recall old mark, count + break // backtrack + } + + case syntax.Lazybranchcount | syntax.Back2: + // r.trackPush: + // 0: Previous mark + // r.stackPush: + // 0: Mark (== current pos, discarded) + // 1: Count + r.trackPop() + r.stackPopN(2) + r.stackPush2(r.trackPeek(), r.stackPeekN(1)-1) // Recall old mark, count + break // Backtrack + + case syntax.Setjump: + r.stackPush2(r.trackpos(), r.crawlpos()) + r.trackPush() + r.advance(0) + continue + + case syntax.Setjump | syntax.Back: + r.stackPopN(2) + break + + case syntax.Backjump: + // r.stackPush: + // 0: Saved trackpos + // 1: r.crawlpos + r.stackPopN(2) + r.trackto(r.stackPeek()) + + for r.crawlpos() != r.stackPeekN(1) { + r.uncapture() + } + + break + + case syntax.Forejump: + // r.stackPush: + // 0: Saved 
trackpos + // 1: r.crawlpos + r.stackPopN(2) + r.trackto(r.stackPeek()) + r.trackPush1(r.stackPeekN(1)) + r.advance(0) + continue + + case syntax.Forejump | syntax.Back: + // r.trackPush: + // 0: r.crawlpos + r.trackPop() + + for r.crawlpos() != r.trackPeek() { + r.uncapture() + } + + break + + case syntax.Bol: + if r.leftchars() > 0 && r.charAt(r.textPos()-1) != '\n' { + break + } + r.advance(0) + continue + + case syntax.Eol: + if r.rightchars() > 0 && r.charAt(r.textPos()) != '\n' { + break + } + r.advance(0) + continue + + case syntax.Boundary: + if !r.isBoundary(r.textPos(), 0, r.runtextend) { + break + } + r.advance(0) + continue + + case syntax.Nonboundary: + if r.isBoundary(r.textPos(), 0, r.runtextend) { + break + } + r.advance(0) + continue + + case syntax.ECMABoundary: + if !r.isECMABoundary(r.textPos(), 0, r.runtextend) { + break + } + r.advance(0) + continue + + case syntax.NonECMABoundary: + if r.isECMABoundary(r.textPos(), 0, r.runtextend) { + break + } + r.advance(0) + continue + + case syntax.Beginning: + if r.leftchars() > 0 { + break + } + r.advance(0) + continue + + case syntax.Start: + if r.textPos() != r.textstart() { + break + } + r.advance(0) + continue + + case syntax.EndZ: + if r.rightchars() > 1 || r.rightchars() == 1 && r.charAt(r.textPos()) != '\n' { + break + } + r.advance(0) + continue + + case syntax.End: + if r.rightchars() > 0 { + break + } + r.advance(0) + continue + + case syntax.One: + if r.forwardchars() < 1 || r.forwardcharnext() != rune(r.operand(0)) { + break + } + + r.advance(1) + continue + + case syntax.Notone: + if r.forwardchars() < 1 || r.forwardcharnext() == rune(r.operand(0)) { + break + } + + r.advance(1) + continue + + case syntax.Set: + + if r.forwardchars() < 1 || !r.code.Sets[r.operand(0)].CharIn(r.forwardcharnext()) { + break + } + + r.advance(1) + continue + + case syntax.Multi: + if !r.runematch(r.code.Strings[r.operand(0)]) { + break + } + + r.advance(1) + continue + + case syntax.Ref: + + capnum := r.operand(0) + + if r.runmatch.isMatched(capnum) { + if !r.refmatch(r.runmatch.matchIndex(capnum), r.runmatch.matchLength(capnum)) { + break + } + } else { + if (r.re.options & ECMAScript) == 0 { + break + } + } + + r.advance(1) + continue + + case syntax.Onerep: + + c := r.operand(1) + + if r.forwardchars() < c { + break + } + + ch := rune(r.operand(0)) + + for c > 0 { + if r.forwardcharnext() != ch { + goto BreakBackward + } + c-- + } + + r.advance(2) + continue + + case syntax.Notonerep: + + c := r.operand(1) + + if r.forwardchars() < c { + break + } + ch := rune(r.operand(0)) + + for c > 0 { + if r.forwardcharnext() == ch { + goto BreakBackward + } + c-- + } + + r.advance(2) + continue + + case syntax.Setrep: + + c := r.operand(1) + + if r.forwardchars() < c { + break + } + + set := r.code.Sets[r.operand(0)] + + for c > 0 { + if !set.CharIn(r.forwardcharnext()) { + goto BreakBackward + } + c-- + } + + r.advance(2) + continue + + case syntax.Oneloop: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + ch := rune(r.operand(0)) + i := c + + for ; i > 0; i-- { + if r.forwardcharnext() != ch { + r.backwardnext() + break + } + } + + if c > i { + r.trackPush2(c-i-1, r.textPos()-r.bump()) + } + + r.advance(2) + continue + + case syntax.Notoneloop: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + ch := rune(r.operand(0)) + i := c + + for ; i > 0; i-- { + if r.forwardcharnext() == ch { + r.backwardnext() + break + } + } + + if c > i { + r.trackPush2(c-i-1, r.textPos()-r.bump()) + 
} + + r.advance(2) + continue + + case syntax.Setloop: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + set := r.code.Sets[r.operand(0)] + i := c + + for ; i > 0; i-- { + if !set.CharIn(r.forwardcharnext()) { + r.backwardnext() + break + } + } + + if c > i { + r.trackPush2(c-i-1, r.textPos()-r.bump()) + } + + r.advance(2) + continue + + case syntax.Oneloop | syntax.Back, syntax.Notoneloop | syntax.Back: + + r.trackPopN(2) + i := r.trackPeek() + pos := r.trackPeekN(1) + + r.textto(pos) + + if i > 0 { + r.trackPush2(i-1, pos-r.bump()) + } + + r.advance(2) + continue + + case syntax.Setloop | syntax.Back: + + r.trackPopN(2) + i := r.trackPeek() + pos := r.trackPeekN(1) + + r.textto(pos) + + if i > 0 { + r.trackPush2(i-1, pos-r.bump()) + } + + r.advance(2) + continue + + case syntax.Onelazy, syntax.Notonelazy: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + if c > 0 { + r.trackPush2(c-1, r.textPos()) + } + + r.advance(2) + continue + + case syntax.Setlazy: + + c := r.operand(1) + + if c > r.forwardchars() { + c = r.forwardchars() + } + + if c > 0 { + r.trackPush2(c-1, r.textPos()) + } + + r.advance(2) + continue + + case syntax.Onelazy | syntax.Back: + + r.trackPopN(2) + pos := r.trackPeekN(1) + r.textto(pos) + + if r.forwardcharnext() != rune(r.operand(0)) { + break + } + + i := r.trackPeek() + + if i > 0 { + r.trackPush2(i-1, pos+r.bump()) + } + + r.advance(2) + continue + + case syntax.Notonelazy | syntax.Back: + + r.trackPopN(2) + pos := r.trackPeekN(1) + r.textto(pos) + + if r.forwardcharnext() == rune(r.operand(0)) { + break + } + + i := r.trackPeek() + + if i > 0 { + r.trackPush2(i-1, pos+r.bump()) + } + + r.advance(2) + continue + + case syntax.Setlazy | syntax.Back: + + r.trackPopN(2) + pos := r.trackPeekN(1) + r.textto(pos) + + if !r.code.Sets[r.operand(0)].CharIn(r.forwardcharnext()) { + break + } + + i := r.trackPeek() + + if i > 0 { + r.trackPush2(i-1, pos+r.bump()) + } + + r.advance(2) + continue + + default: + return errors.New("unknown state in regex runner") + } + + BreakBackward: + ; + + // "break Backward" comes here: + r.backtrack() + } +} + +// increase the size of stack and track storage +func (r *runner) ensureStorage() { + if r.runstackpos < r.runtrackcount*4 { + doubleIntSlice(&r.runstack, &r.runstackpos) + } + if r.runtrackpos < r.runtrackcount*4 { + doubleIntSlice(&r.runtrack, &r.runtrackpos) + } +} + +func doubleIntSlice(s *[]int, pos *int) { + oldLen := len(*s) + newS := make([]int, oldLen*2) + + copy(newS[oldLen:], *s) + *pos += oldLen + *s = newS +} + +// Save a number on the longjump unrolling stack +func (r *runner) crawl(i int) { + if r.runcrawlpos == 0 { + doubleIntSlice(&r.runcrawl, &r.runcrawlpos) + } + r.runcrawlpos-- + r.runcrawl[r.runcrawlpos] = i +} + +// Remove a number from the longjump unrolling stack +func (r *runner) popcrawl() int { + val := r.runcrawl[r.runcrawlpos] + r.runcrawlpos++ + return val +} + +// Get the height of the stack +func (r *runner) crawlpos() int { + return len(r.runcrawl) - r.runcrawlpos +} + +func (r *runner) advance(i int) { + r.codepos += (i + 1) + r.setOperator(r.code.Codes[r.codepos]) +} + +func (r *runner) goTo(newpos int) { + // when branching backward, ensure storage + if newpos < r.codepos { + r.ensureStorage() + } + + r.setOperator(r.code.Codes[newpos]) + r.codepos = newpos +} + +func (r *runner) textto(newpos int) { + r.runtextpos = newpos +} + +func (r *runner) trackto(newpos int) { + r.runtrackpos = len(r.runtrack) - newpos +} + +func (r *runner) 
textstart() int { + return r.runtextstart +} + +func (r *runner) textPos() int { + return r.runtextpos +} + +// push onto the backtracking stack +func (r *runner) trackpos() int { + return len(r.runtrack) - r.runtrackpos +} + +func (r *runner) trackPush() { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = r.codepos +} + +func (r *runner) trackPush1(I1 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = r.codepos +} + +func (r *runner) trackPush2(I1, I2 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I2 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = r.codepos +} + +func (r *runner) trackPush3(I1, I2, I3 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I2 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I3 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = r.codepos +} + +func (r *runner) trackPushNeg1(I1 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = -r.codepos +} + +func (r *runner) trackPushNeg2(I1, I2 int) { + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I1 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = I2 + r.runtrackpos-- + r.runtrack[r.runtrackpos] = -r.codepos +} + +func (r *runner) backtrack() { + newpos := r.runtrack[r.runtrackpos] + r.runtrackpos++ + + if r.re.Debug() { + if newpos < 0 { + fmt.Printf(" Backtracking (back2) to code position %v\n", -newpos) + } else { + fmt.Printf(" Backtracking to code position %v\n", newpos) + } + } + + if newpos < 0 { + newpos = -newpos + r.setOperator(r.code.Codes[newpos] | syntax.Back2) + } else { + r.setOperator(r.code.Codes[newpos] | syntax.Back) + } + + // When branching backward, ensure storage + if newpos < r.codepos { + r.ensureStorage() + } + + r.codepos = newpos +} + +func (r *runner) setOperator(op int) { + r.caseInsensitive = (0 != (op & syntax.Ci)) + r.rightToLeft = (0 != (op & syntax.Rtl)) + r.operator = syntax.InstOp(op & ^(syntax.Rtl | syntax.Ci)) +} + +func (r *runner) trackPop() { + r.runtrackpos++ +} + +// pop framesize items from the backtracking stack +func (r *runner) trackPopN(framesize int) { + r.runtrackpos += framesize +} + +// Technically we are actually peeking at items already popped. So if you want to +// get and pop the top item from the stack, you do +// r.trackPop(); +// r.trackPeek(); +func (r *runner) trackPeek() int { + return r.runtrack[r.runtrackpos-1] +} + +// get the ith element down on the backtracking stack +func (r *runner) trackPeekN(i int) int { + return r.runtrack[r.runtrackpos-i-1] +} + +// Push onto the grouping stack +func (r *runner) stackPush(I1 int) { + r.runstackpos-- + r.runstack[r.runstackpos] = I1 +} + +func (r *runner) stackPush2(I1, I2 int) { + r.runstackpos-- + r.runstack[r.runstackpos] = I1 + r.runstackpos-- + r.runstack[r.runstackpos] = I2 +} + +func (r *runner) stackPop() { + r.runstackpos++ +} + +// pop framesize items from the grouping stack +func (r *runner) stackPopN(framesize int) { + r.runstackpos += framesize +} + +// Technically we are actually peeking at items already popped. 
So if you want to +// get and pop the top item from the stack, you do +// r.stackPop(); +// r.stackPeek(); +func (r *runner) stackPeek() int { + return r.runstack[r.runstackpos-1] +} + +// get the ith element down on the grouping stack +func (r *runner) stackPeekN(i int) int { + return r.runstack[r.runstackpos-i-1] +} + +func (r *runner) operand(i int) int { + return r.code.Codes[r.codepos+i+1] +} + +func (r *runner) leftchars() int { + return r.runtextpos +} + +func (r *runner) rightchars() int { + return r.runtextend - r.runtextpos +} + +func (r *runner) bump() int { + if r.rightToLeft { + return -1 + } + return 1 +} + +func (r *runner) forwardchars() int { + if r.rightToLeft { + return r.runtextpos + } + return r.runtextend - r.runtextpos +} + +func (r *runner) forwardcharnext() rune { + var ch rune + if r.rightToLeft { + r.runtextpos-- + ch = r.runtext[r.runtextpos] + } else { + ch = r.runtext[r.runtextpos] + r.runtextpos++ + } + + if r.caseInsensitive { + return unicode.ToLower(ch) + } + return ch +} + +func (r *runner) runematch(str []rune) bool { + var pos int + + c := len(str) + if !r.rightToLeft { + if r.runtextend-r.runtextpos < c { + return false + } + + pos = r.runtextpos + c + } else { + if r.runtextpos-0 < c { + return false + } + + pos = r.runtextpos + } + + if !r.caseInsensitive { + for c != 0 { + c-- + pos-- + if str[c] != r.runtext[pos] { + return false + } + } + } else { + for c != 0 { + c-- + pos-- + if str[c] != unicode.ToLower(r.runtext[pos]) { + return false + } + } + } + + if !r.rightToLeft { + pos += len(str) + } + + r.runtextpos = pos + + return true +} + +func (r *runner) refmatch(index, len int) bool { + var c, pos, cmpos int + + if !r.rightToLeft { + if r.runtextend-r.runtextpos < len { + return false + } + + pos = r.runtextpos + len + } else { + if r.runtextpos-0 < len { + return false + } + + pos = r.runtextpos + } + cmpos = index + len + + c = len + + if !r.caseInsensitive { + for c != 0 { + c-- + cmpos-- + pos-- + if r.runtext[cmpos] != r.runtext[pos] { + return false + } + + } + } else { + for c != 0 { + c-- + cmpos-- + pos-- + + if unicode.ToLower(r.runtext[cmpos]) != unicode.ToLower(r.runtext[pos]) { + return false + } + } + } + + if !r.rightToLeft { + pos += len + } + + r.runtextpos = pos + + return true +} + +func (r *runner) backwardnext() { + if r.rightToLeft { + r.runtextpos++ + } else { + r.runtextpos-- + } +} + +func (r *runner) charAt(j int) rune { + return r.runtext[j] +} + +func (r *runner) findFirstChar() bool { + + if 0 != (r.code.Anchors & (syntax.AnchorBeginning | syntax.AnchorStart | syntax.AnchorEndZ | syntax.AnchorEnd)) { + if !r.code.RightToLeft { + if (0 != (r.code.Anchors&syntax.AnchorBeginning) && r.runtextpos > 0) || + (0 != (r.code.Anchors&syntax.AnchorStart) && r.runtextpos > r.runtextstart) { + r.runtextpos = r.runtextend + return false + } + if 0 != (r.code.Anchors&syntax.AnchorEndZ) && r.runtextpos < r.runtextend-1 { + r.runtextpos = r.runtextend - 1 + } else if 0 != (r.code.Anchors&syntax.AnchorEnd) && r.runtextpos < r.runtextend { + r.runtextpos = r.runtextend + } + } else { + if (0 != (r.code.Anchors&syntax.AnchorEnd) && r.runtextpos < r.runtextend) || + (0 != (r.code.Anchors&syntax.AnchorEndZ) && (r.runtextpos < r.runtextend-1 || + (r.runtextpos == r.runtextend-1 && r.charAt(r.runtextpos) != '\n'))) || + (0 != (r.code.Anchors&syntax.AnchorStart) && r.runtextpos < r.runtextstart) { + r.runtextpos = 0 + return false + } + if 0 != (r.code.Anchors&syntax.AnchorBeginning) && r.runtextpos > 0 { + r.runtextpos = 0 + } + } + + if 
r.code.BmPrefix != nil { + return r.code.BmPrefix.IsMatch(r.runtext, r.runtextpos, 0, r.runtextend) + } + + return true // found a valid start or end anchor + } else if r.code.BmPrefix != nil { + r.runtextpos = r.code.BmPrefix.Scan(r.runtext, r.runtextpos, 0, r.runtextend) + + if r.runtextpos == -1 { + if r.code.RightToLeft { + r.runtextpos = 0 + } else { + r.runtextpos = r.runtextend + } + return false + } + + return true + } else if r.code.FcPrefix == nil { + return true + } + + r.rightToLeft = r.code.RightToLeft + r.caseInsensitive = r.code.FcPrefix.CaseInsensitive + + set := r.code.FcPrefix.PrefixSet + if set.IsSingleton() { + ch := set.SingletonChar() + for i := r.forwardchars(); i > 0; i-- { + if ch == r.forwardcharnext() { + r.backwardnext() + return true + } + } + } else { + for i := r.forwardchars(); i > 0; i-- { + n := r.forwardcharnext() + //fmt.Printf("%v in %v: %v\n", string(n), set.String(), set.CharIn(n)) + if set.CharIn(n) { + r.backwardnext() + return true + } + } + } + + return false +} + +func (r *runner) initMatch() { + // Use a hashtable'ed Match object if the capture numbers are sparse + + if r.runmatch == nil { + if r.re.caps != nil { + r.runmatch = newMatchSparse(r.re, r.re.caps, r.re.capsize, r.runtext, r.runtextstart) + } else { + r.runmatch = newMatch(r.re, r.re.capsize, r.runtext, r.runtextstart) + } + } else { + r.runmatch.reset(r.runtext, r.runtextstart) + } + + // note we test runcrawl, because it is the last one to be allocated + // If there is an alloc failure in the middle of the three allocations, + // we may still return to reuse this instance, and we want to behave + // as if the allocations didn't occur. (we used to test _trackcount != 0) + + if r.runcrawl != nil { + r.runtrackpos = len(r.runtrack) + r.runstackpos = len(r.runstack) + r.runcrawlpos = len(r.runcrawl) + return + } + + r.initTrackCount() + + tracksize := r.runtrackcount * 8 + stacksize := r.runtrackcount * 8 + + if tracksize < 32 { + tracksize = 32 + } + if stacksize < 16 { + stacksize = 16 + } + + r.runtrack = make([]int, tracksize) + r.runtrackpos = tracksize + + r.runstack = make([]int, stacksize) + r.runstackpos = stacksize + + r.runcrawl = make([]int, 32) + r.runcrawlpos = 32 +} + +func (r *runner) tidyMatch(quick bool) *Match { + if !quick { + match := r.runmatch + + r.runmatch = nil + + match.tidy(r.runtextpos) + return match + } else { + // send back our match -- it's not leaving the package, so it's safe to not clean it up + // this reduces allocs for frequent calls to the "IsMatch" bool-only functions + return r.runmatch + } +} + +// capture captures a subexpression. Note that the +// capnum used here has already been mapped to a non-sparse +// index (by the code generator RegexWriter). +func (r *runner) capture(capnum, start, end int) { + if end < start { + T := end + end = start + start = T + } + + r.crawl(capnum) + r.runmatch.addMatch(capnum, start, end-start) +} + +// transferCapture captures a subexpression. Note that the +// capnum used here has already been mapped to a non-sparse +// index (by the code generator RegexWriter). 
+func (r *runner) transferCapture(capnum, uncapnum, start, end int) { + var start2, end2 int + + // these are the two intervals that are cancelling each other + + if end < start { + T := end + end = start + start = T + } + + start2 = r.runmatch.matchIndex(uncapnum) + end2 = start2 + r.runmatch.matchLength(uncapnum) + + // The new capture gets the innermost defined interval + + if start >= end2 { + end = start + start = end2 + } else if end <= start2 { + start = start2 + } else { + if end > end2 { + end = end2 + } + if start2 > start { + start = start2 + } + } + + r.crawl(uncapnum) + r.runmatch.balanceMatch(uncapnum) + + if capnum != -1 { + r.crawl(capnum) + r.runmatch.addMatch(capnum, start, end-start) + } +} + +// revert the last capture +func (r *runner) uncapture() { + capnum := r.popcrawl() + r.runmatch.removeMatch(capnum) +} + +//debug + +func (r *runner) dumpState() { + back := "" + if r.operator&syntax.Back != 0 { + back = " Back" + } + if r.operator&syntax.Back2 != 0 { + back += " Back2" + } + fmt.Printf("Text: %v\nTrack: %v\nStack: %v\n %s%s\n\n", + r.textposDescription(), + r.stackDescription(r.runtrack, r.runtrackpos), + r.stackDescription(r.runstack, r.runstackpos), + r.code.OpcodeDescription(r.codepos), + back) +} + +func (r *runner) stackDescription(a []int, index int) string { + buf := &bytes.Buffer{} + + fmt.Fprintf(buf, "%v/%v", len(a)-index, len(a)) + if buf.Len() < 8 { + buf.WriteString(strings.Repeat(" ", 8-buf.Len())) + } + + buf.WriteRune('(') + for i := index; i < len(a); i++ { + if i > index { + buf.WriteRune(' ') + } + + buf.WriteString(strconv.Itoa(a[i])) + } + + buf.WriteRune(')') + + return buf.String() +} + +func (r *runner) textposDescription() string { + buf := &bytes.Buffer{} + + buf.WriteString(strconv.Itoa(r.runtextpos)) + + if buf.Len() < 8 { + buf.WriteString(strings.Repeat(" ", 8-buf.Len())) + } + + if r.runtextpos > 0 { + buf.WriteString(syntax.CharDescription(r.runtext[r.runtextpos-1])) + } else { + buf.WriteRune('^') + } + + buf.WriteRune('>') + + for i := r.runtextpos; i < r.runtextend; i++ { + buf.WriteString(syntax.CharDescription(r.runtext[i])) + } + if buf.Len() >= 64 { + buf.Truncate(61) + buf.WriteString("...") + } else { + buf.WriteRune('$') + } + + return buf.String() +} + +// decide whether the pos +// at the specified index is a boundary or not. It's just not worth +// emitting inline code for this logic. +func (r *runner) isBoundary(index, startpos, endpos int) bool { + return (index > startpos && syntax.IsWordChar(r.runtext[index-1])) != + (index < endpos && syntax.IsWordChar(r.runtext[index])) +} + +func (r *runner) isECMABoundary(index, startpos, endpos int) bool { + return (index > startpos && syntax.IsECMAWordChar(r.runtext[index-1])) != + (index < endpos && syntax.IsECMAWordChar(r.runtext[index])) +} + +// this seems like a comment to justify randomly picking 1000 :-P +// We have determined this value in a series of experiments where x86 retail +// builds (ono-lab-optimized) were run on different pattern/input pairs. Larger values +// of TimeoutCheckFrequency did not tend to increase performance; smaller values +// of TimeoutCheckFrequency tended to slow down the execution. 
+const timeoutCheckFrequency int = 1000 + +func (r *runner) startTimeoutWatch() { + if r.ignoreTimeout { + return + } + + r.timeoutChecksToSkip = timeoutCheckFrequency + r.timeoutAt = time.Now().Add(r.timeout) +} + +func (r *runner) checkTimeout() error { + if r.ignoreTimeout { + return nil + } + r.timeoutChecksToSkip-- + if r.timeoutChecksToSkip != 0 { + return nil + } + + r.timeoutChecksToSkip = timeoutCheckFrequency + return r.doCheckTimeout() +} + +func (r *runner) doCheckTimeout() error { + current := time.Now() + + if current.Before(r.timeoutAt) { + return nil + } + + if r.re.Debug() { + //Debug.WriteLine("") + //Debug.WriteLine("RegEx match timeout occurred!") + //Debug.WriteLine("Specified timeout: " + TimeSpan.FromMilliseconds(_timeout).ToString()) + //Debug.WriteLine("Timeout check frequency: " + TimeoutCheckFrequency) + //Debug.WriteLine("Search pattern: " + _runregex._pattern) + //Debug.WriteLine("Input: " + r.runtext) + //Debug.WriteLine("About to throw RegexMatchTimeoutException.") + } + + return fmt.Errorf("match timeout after %v on input `%v`", r.timeout, string(r.runtext)) +} + +func (r *runner) initTrackCount() { + r.runtrackcount = r.code.TrackCount +} + +// getRunner returns a run to use for matching re. +// It uses the re's runner cache if possible, to avoid +// unnecessary allocation. +func (re *Regexp) getRunner() *runner { + re.muRun.Lock() + if n := len(re.runner); n > 0 { + z := re.runner[n-1] + re.runner = re.runner[:n-1] + re.muRun.Unlock() + return z + } + re.muRun.Unlock() + z := &runner{ + re: re, + code: re.code, + } + return z +} + +// putRunner returns a runner to the re's cache. +// There is no attempt to limit the size of the cache, so it will +// grow to the maximum number of simultaneous matches +// run using re. (The cache empties when re gets garbage collected.) 
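Taken together, getRunner and putRunner form a small mutex-guarded free list. A minimal sketch of the call discipline an in-package caller would follow (editor's illustration only; withRunner is a hypothetical name and not part of this vendored file):

    // Hypothetical in-package helper, sketching the pool discipline:
    // take a runner, use it, and always hand it back.
    func (re *Regexp) withRunner(fn func(*runner)) {
        r := re.getRunner()   // reuse a cached runner when one is available
        defer re.putRunner(r) // return it so later matches can reuse the allocation
        fn(r)
    }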
+func (re *Regexp) putRunner(r *runner) { + re.muRun.Lock() + re.runner = append(re.runner, r) + re.muRun.Unlock() +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/charclass.go b/vendor/github.com/dlclark/regexp2/syntax/charclass.go new file mode 100644 index 00000000..f312ea7a --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/charclass.go @@ -0,0 +1,784 @@ +package syntax + +import ( + "bytes" + "encoding/binary" + "fmt" + "sort" + "unicode" + "unicode/utf8" +) + +// CharSet combines start-end rune ranges and unicode categories representing a set of characters +type CharSet struct { + ranges []singleRange + categories []category + sub *CharSet //optional subtractor + negate bool + anything bool +} + +type category struct { + negate bool + cat string +} + +type singleRange struct { + first rune + last rune +} + +const ( + spaceCategoryText = " " + wordCategoryText = "W" +) + +var ( + ecmaSpace = []rune{0x0009, 0x000e, 0x0020, 0x0021, 0x00a0, 0x00a1, 0x1680, 0x1681, 0x2000, 0x200b, 0x2028, 0x202a, 0x202f, 0x2030, 0x205f, 0x2060, 0x3000, 0x3001, 0xfeff, 0xff00} + ecmaWord = []rune{0x0030, 0x003a, 0x0041, 0x005b, 0x005f, 0x0060, 0x0061, 0x007b} + ecmaDigit = []rune{0x0030, 0x003a} +) + +var ( + AnyClass = getCharSetFromOldString([]rune{0}, false) + ECMAAnyClass = getCharSetFromOldString([]rune{0, 0x000a, 0x000b, 0x000d, 0x000e}, false) + NoneClass = getCharSetFromOldString(nil, false) + ECMAWordClass = getCharSetFromOldString(ecmaWord, false) + NotECMAWordClass = getCharSetFromOldString(ecmaWord, true) + ECMASpaceClass = getCharSetFromOldString(ecmaSpace, false) + NotECMASpaceClass = getCharSetFromOldString(ecmaSpace, true) + ECMADigitClass = getCharSetFromOldString(ecmaDigit, false) + NotECMADigitClass = getCharSetFromOldString(ecmaDigit, true) + + WordClass = getCharSetFromCategoryString(false, false, wordCategoryText) + NotWordClass = getCharSetFromCategoryString(true, false, wordCategoryText) + SpaceClass = getCharSetFromCategoryString(false, false, spaceCategoryText) + NotSpaceClass = getCharSetFromCategoryString(true, false, spaceCategoryText) + DigitClass = getCharSetFromCategoryString(false, false, "Nd") + NotDigitClass = getCharSetFromCategoryString(false, true, "Nd") +) + +var unicodeCategories = func() map[string]*unicode.RangeTable { + retVal := make(map[string]*unicode.RangeTable) + for k, v := range unicode.Scripts { + retVal[k] = v + } + for k, v := range unicode.Categories { + retVal[k] = v + } + for k, v := range unicode.Properties { + retVal[k] = v + } + return retVal +}() + +func getCharSetFromCategoryString(negateSet bool, negateCat bool, cats ...string) func() *CharSet { + if negateCat && negateSet { + panic("BUG! 
You should only negate the set OR the category in a constant setup, but not both") + } + + c := CharSet{negate: negateSet} + + c.categories = make([]category, len(cats)) + for i, cat := range cats { + c.categories[i] = category{cat: cat, negate: negateCat} + } + return func() *CharSet { + //make a copy each time + local := c + //return that address + return &local + } +} + +func getCharSetFromOldString(setText []rune, negate bool) func() *CharSet { + c := CharSet{} + if len(setText) > 0 { + fillFirst := false + l := len(setText) + if negate { + if setText[0] == 0 { + setText = setText[1:] + } else { + l++ + fillFirst = true + } + } + + if l%2 == 0 { + c.ranges = make([]singleRange, l/2) + } else { + c.ranges = make([]singleRange, l/2+1) + } + + first := true + if fillFirst { + c.ranges[0] = singleRange{first: 0} + first = false + } + + i := 0 + for _, r := range setText { + if first { + // lower bound in a new range + c.ranges[i] = singleRange{first: r} + first = false + } else { + c.ranges[i].last = r - 1 + i++ + first = true + } + } + if !first { + c.ranges[i].last = utf8.MaxRune + } + } + + return func() *CharSet { + local := c + return &local + } +} + +// Copy makes a deep copy to prevent accidental mutation of a set +func (c CharSet) Copy() CharSet { + ret := CharSet{ + anything: c.anything, + negate: c.negate, + } + + ret.ranges = append(ret.ranges, c.ranges...) + ret.categories = append(ret.categories, c.categories...) + + if c.sub != nil { + sub := c.sub.Copy() + ret.sub = &sub + } + + return ret +} + +// gets a human-readable description for a set string +func (c CharSet) String() string { + buf := &bytes.Buffer{} + buf.WriteRune('[') + + if c.IsNegated() { + buf.WriteRune('^') + } + + for _, r := range c.ranges { + + buf.WriteString(CharDescription(r.first)) + if r.first != r.last { + if r.last-r.first != 1 { + //groups that are 1 char apart skip the dash + buf.WriteRune('-') + } + buf.WriteString(CharDescription(r.last)) + } + } + + for _, c := range c.categories { + buf.WriteString(c.String()) + } + + if c.sub != nil { + buf.WriteRune('-') + buf.WriteString(c.sub.String()) + } + + buf.WriteRune(']') + + return buf.String() +} + +// mapHashFill converts a charset into a buffer for use in maps +func (c CharSet) mapHashFill(buf *bytes.Buffer) { + if c.negate { + buf.WriteByte(0) + } else { + buf.WriteByte(1) + } + + binary.Write(buf, binary.LittleEndian, len(c.ranges)) + binary.Write(buf, binary.LittleEndian, len(c.categories)) + for _, r := range c.ranges { + buf.WriteRune(r.first) + buf.WriteRune(r.last) + } + for _, ct := range c.categories { + buf.WriteString(ct.cat) + if ct.negate { + buf.WriteByte(1) + } else { + buf.WriteByte(0) + } + } + + if c.sub != nil { + c.sub.mapHashFill(buf) + } +} + +// CharIn returns true if the rune is in our character set (either ranges or categories). +// It handles negations and subtracted sub-charsets. 
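As a quick illustration of that contract, a self-contained sketch (editor's example, not part of the vendored file; it assumes only the exported factory variables shown in this file and the package's canonical import path):

    package main

    import (
        "fmt"

        "github.com/dlclark/regexp2/syntax"
    )

    func main() {
        wc := syntax.WordClass()                       // fresh copy of the \w set
        fmt.Println(wc.CharIn('a'))                    // true: letters are word characters
        fmt.Println(wc.CharIn(' '))                    // false: space is not
        fmt.Println(syntax.NotWordClass().CharIn(' ')) // true: the negated set flips the answer
    }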
+func (c CharSet) CharIn(ch rune) bool { + val := false + // in s && !s.subtracted + + //check ranges + for _, r := range c.ranges { + if ch < r.first { + continue + } + if ch <= r.last { + val = true + break + } + } + + //check categories if we haven't already found a range + if !val && len(c.categories) > 0 { + for _, ct := range c.categories { + // special categories...then unicode + if ct.cat == spaceCategoryText { + if unicode.IsSpace(ch) { + // we found a space so we're done + // negate means this is a "bad" thing + val = !ct.negate + break + } else if ct.negate { + val = true + break + } + } else if ct.cat == wordCategoryText { + if IsWordChar(ch) { + val = !ct.negate + break + } else if ct.negate { + val = true + break + } + } else if unicode.Is(unicodeCategories[ct.cat], ch) { + // if we're in this unicode category then we're done + // if negate=true on this category then we "failed" our test + // otherwise we're good that we found it + val = !ct.negate + break + } else if ct.negate { + val = true + break + } + } + } + + // negate the whole char set + if c.negate { + val = !val + } + + // get subtracted recurse + if val && c.sub != nil { + val = !c.sub.CharIn(ch) + } + + //log.Printf("Char '%v' in %v == %v", string(ch), c.String(), val) + return val +} + +func (c category) String() string { + switch c.cat { + case spaceCategoryText: + if c.negate { + return "\\S" + } + return "\\s" + case wordCategoryText: + if c.negate { + return "\\W" + } + return "\\w" + } + if _, ok := unicodeCategories[c.cat]; ok { + + if c.negate { + return "\\P{" + c.cat + "}" + } + return "\\p{" + c.cat + "}" + } + return "Unknown category: " + c.cat +} + +// CharDescription Produces a human-readable description for a single character. +func CharDescription(ch rune) string { + /*if ch == '\\' { + return "\\\\" + } + + if ch > ' ' && ch <= '~' { + return string(ch) + } else if ch == '\n' { + return "\\n" + } else if ch == ' ' { + return "\\ " + }*/ + + b := &bytes.Buffer{} + escape(b, ch, false) //fmt.Sprintf("%U", ch) + return b.String() +} + +// According to UTS#18 Unicode Regular Expressions (http://www.unicode.org/reports/tr18/) +// RL 1.4 Simple Word Boundaries The class of includes all Alphabetic +// values from the Unicode character database, from UnicodeData.txt [UData], plus the U+200C +// ZERO WIDTH NON-JOINER and U+200D ZERO WIDTH JOINER. +func IsWordChar(r rune) bool { + //"L", "Mn", "Nd", "Pc" + return unicode.In(r, + unicode.Categories["L"], unicode.Categories["Mn"], + unicode.Categories["Nd"], unicode.Categories["Pc"]) || r == '\u200D' || r == '\u200C' + //return 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' || '0' <= r && r <= '9' || r == '_' +} + +func IsECMAWordChar(r rune) bool { + return unicode.In(r, + unicode.Categories["L"], unicode.Categories["Mn"], + unicode.Categories["Nd"], unicode.Categories["Pc"]) + + //return 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' || '0' <= r && r <= '9' || r == '_' +} + +// SingletonChar will return the char from the first range without validation. 
+// It assumes you have checked for IsSingleton or IsSingletonInverse and will panic given bad input +func (c CharSet) SingletonChar() rune { + return c.ranges[0].first +} + +func (c CharSet) IsSingleton() bool { + return !c.negate && //negated is multiple chars + len(c.categories) == 0 && len(c.ranges) == 1 && // multiple ranges and unicode classes represent multiple chars + c.sub == nil && // subtraction means we've got multiple chars + c.ranges[0].first == c.ranges[0].last // first and last equal means we're just 1 char +} + +func (c CharSet) IsSingletonInverse() bool { + return c.negate && //same as above, but requires negated + len(c.categories) == 0 && len(c.ranges) == 1 && // multiple ranges and unicode classes represent multiple chars + c.sub == nil && // subtraction means we've got multiple chars + c.ranges[0].first == c.ranges[0].last // first and last equal means we're just 1 char +} + +func (c CharSet) IsMergeable() bool { + return !c.IsNegated() && !c.HasSubtraction() +} + +func (c CharSet) IsNegated() bool { + return c.negate +} + +func (c CharSet) HasSubtraction() bool { + return c.sub != nil +} + +func (c CharSet) IsEmpty() bool { + return len(c.ranges) == 0 && len(c.categories) == 0 && c.sub == nil +} + +func (c *CharSet) addDigit(ecma, negate bool, pattern string) { + if ecma { + if negate { + c.addRanges(NotECMADigitClass().ranges) + } else { + c.addRanges(ECMADigitClass().ranges) + } + } else { + c.addCategories(category{cat: "Nd", negate: negate}) + } +} + +func (c *CharSet) addChar(ch rune) { + c.addRange(ch, ch) +} + +func (c *CharSet) addSpace(ecma, negate bool) { + if ecma { + if negate { + c.addRanges(NotECMASpaceClass().ranges) + } else { + c.addRanges(ECMASpaceClass().ranges) + } + } else { + c.addCategories(category{cat: spaceCategoryText, negate: negate}) + } +} + +func (c *CharSet) addWord(ecma, negate bool) { + if ecma { + if negate { + c.addRanges(NotECMAWordClass().ranges) + } else { + c.addRanges(ECMAWordClass().ranges) + } + } else { + c.addCategories(category{cat: wordCategoryText, negate: negate}) + } +} + +// Add set ranges and categories into ours -- no deduping or anything +func (c *CharSet) addSet(set CharSet) { + if c.anything { + return + } + if set.anything { + c.makeAnything() + return + } + // just append here to prevent double-canon + c.ranges = append(c.ranges, set.ranges...) + c.addCategories(set.categories...) + c.canonicalize() +} + +func (c *CharSet) makeAnything() { + c.anything = true + c.categories = []category{} + c.ranges = AnyClass().ranges +} + +func (c *CharSet) addCategories(cats ...category) { + // don't add dupes and remove positive+negative + if c.anything { + // if we've had a previous positive+negative group then + // just return, we're as broad as we can get + return + } + + for _, ct := range cats { + found := false + for _, ct2 := range c.categories { + if ct.cat == ct2.cat { + if ct.negate != ct2.negate { + // oposite negations...this mean we just + // take us as anything and move on + c.makeAnything() + return + } + found = true + break + } + } + + if !found { + c.categories = append(c.categories, ct) + } + } +} + +// Merges new ranges to our own +func (c *CharSet) addRanges(ranges []singleRange) { + if c.anything { + return + } + c.ranges = append(c.ranges, ranges...) 
+ c.canonicalize() +} + +func isValidUnicodeCat(catName string) bool { + _, ok := unicodeCategories[catName] + return ok +} + +func (c *CharSet) addCategory(categoryName string, negate, caseInsensitive bool, pattern string) { + if !isValidUnicodeCat(categoryName) { + // unknown unicode category, script, or property "blah" + panic(fmt.Errorf("Unknown unicode category, script, or property '%v'", categoryName)) + + } + + if caseInsensitive && (categoryName == "Ll" || categoryName == "Lu" || categoryName == "Lt") { + // when RegexOptions.IgnoreCase is specified then {Ll} {Lu} and {Lt} cases should all match + c.addCategories( + category{cat: "Ll", negate: negate}, + category{cat: "Lu", negate: negate}, + category{cat: "Lt", negate: negate}) + } + c.addCategories(category{cat: categoryName, negate: negate}) +} + +func (c *CharSet) addSubtraction(sub *CharSet) { + c.sub = sub +} + +func (c *CharSet) addRange(chMin, chMax rune) { + c.ranges = append(c.ranges, singleRange{first: chMin, last: chMax}) + c.canonicalize() +} + +type singleRangeSorter []singleRange + +func (p singleRangeSorter) Len() int { return len(p) } +func (p singleRangeSorter) Less(i, j int) bool { return p[i].first < p[j].first } +func (p singleRangeSorter) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +// Logic to reduce a character class to a unique, sorted form. +func (c *CharSet) canonicalize() { + var i, j int + var last rune + + // + // Find and eliminate overlapping or abutting ranges + // + + if len(c.ranges) > 1 { + sort.Sort(singleRangeSorter(c.ranges)) + + done := false + + for i, j = 1, 0; ; i++ { + for last = c.ranges[j].last; ; i++ { + if i == len(c.ranges) || last == utf8.MaxRune { + done = true + break + } + + CurrentRange := c.ranges[i] + if CurrentRange.first > last+1 { + break + } + + if last < CurrentRange.last { + last = CurrentRange.last + } + } + + c.ranges[j] = singleRange{first: c.ranges[j].first, last: last} + + j++ + + if done { + break + } + + if j < i { + c.ranges[j] = c.ranges[i] + } + } + + c.ranges = append(c.ranges[:j], c.ranges[len(c.ranges):]...) + } +} + +// Adds to the class any lowercase versions of characters already +// in the class. Used for case-insensitivity. +func (c *CharSet) addLowercase() { + if c.anything { + return + } + toAdd := []singleRange{} + for i := 0; i < len(c.ranges); i++ { + r := c.ranges[i] + if r.first == r.last { + lower := unicode.ToLower(r.first) + c.ranges[i] = singleRange{first: lower, last: lower} + } else { + toAdd = append(toAdd, r) + } + } + + for _, r := range toAdd { + c.addLowercaseRange(r.first, r.last) + } + c.canonicalize() +} + +/************************************************************************** + Let U be the set of Unicode character values and let L be the lowercase + function, mapping from U to U. To perform case insensitive matching of + character sets, we need to be able to map an interval I in U, say + + I = [chMin, chMax] = { ch : chMin <= ch <= chMax } + + to a set A such that A contains L(I) and A is contained in the union of + I and L(I). + + The table below partitions U into intervals on which L is non-decreasing. + Thus, for any interval J = [a, b] contained in one of these intervals, + L(J) is contained in [L(a), L(b)]. + + It is also true that for any such J, [L(a), L(b)] is contained in the + union of J and L(J). This does not follow from L being non-decreasing on + these intervals. It follows from the nature of the L on each interval. 
+ On each interval, L has one of the following forms: + + (1) L(ch) = constant (LowercaseSet) + (2) L(ch) = ch + offset (LowercaseAdd) + (3) L(ch) = ch | 1 (LowercaseBor) + (4) L(ch) = ch + (ch & 1) (LowercaseBad) + + It is easy to verify that for any of these forms [L(a), L(b)] is + contained in the union of [a, b] and L([a, b]). +***************************************************************************/ + +const ( + LowercaseSet = 0 // Set to arg. + LowercaseAdd = 1 // Add arg. + LowercaseBor = 2 // Bitwise or with 1. + LowercaseBad = 3 // Bitwise and with 1 and add original. +) + +type lcMap struct { + chMin, chMax rune + op, data int32 +} + +var lcTable = []lcMap{ + lcMap{'\u0041', '\u005A', LowercaseAdd, 32}, + lcMap{'\u00C0', '\u00DE', LowercaseAdd, 32}, + lcMap{'\u0100', '\u012E', LowercaseBor, 0}, + lcMap{'\u0130', '\u0130', LowercaseSet, 0x0069}, + lcMap{'\u0132', '\u0136', LowercaseBor, 0}, + lcMap{'\u0139', '\u0147', LowercaseBad, 0}, + lcMap{'\u014A', '\u0176', LowercaseBor, 0}, + lcMap{'\u0178', '\u0178', LowercaseSet, 0x00FF}, + lcMap{'\u0179', '\u017D', LowercaseBad, 0}, + lcMap{'\u0181', '\u0181', LowercaseSet, 0x0253}, + lcMap{'\u0182', '\u0184', LowercaseBor, 0}, + lcMap{'\u0186', '\u0186', LowercaseSet, 0x0254}, + lcMap{'\u0187', '\u0187', LowercaseSet, 0x0188}, + lcMap{'\u0189', '\u018A', LowercaseAdd, 205}, + lcMap{'\u018B', '\u018B', LowercaseSet, 0x018C}, + lcMap{'\u018E', '\u018E', LowercaseSet, 0x01DD}, + lcMap{'\u018F', '\u018F', LowercaseSet, 0x0259}, + lcMap{'\u0190', '\u0190', LowercaseSet, 0x025B}, + lcMap{'\u0191', '\u0191', LowercaseSet, 0x0192}, + lcMap{'\u0193', '\u0193', LowercaseSet, 0x0260}, + lcMap{'\u0194', '\u0194', LowercaseSet, 0x0263}, + lcMap{'\u0196', '\u0196', LowercaseSet, 0x0269}, + lcMap{'\u0197', '\u0197', LowercaseSet, 0x0268}, + lcMap{'\u0198', '\u0198', LowercaseSet, 0x0199}, + lcMap{'\u019C', '\u019C', LowercaseSet, 0x026F}, + lcMap{'\u019D', '\u019D', LowercaseSet, 0x0272}, + lcMap{'\u019F', '\u019F', LowercaseSet, 0x0275}, + lcMap{'\u01A0', '\u01A4', LowercaseBor, 0}, + lcMap{'\u01A7', '\u01A7', LowercaseSet, 0x01A8}, + lcMap{'\u01A9', '\u01A9', LowercaseSet, 0x0283}, + lcMap{'\u01AC', '\u01AC', LowercaseSet, 0x01AD}, + lcMap{'\u01AE', '\u01AE', LowercaseSet, 0x0288}, + lcMap{'\u01AF', '\u01AF', LowercaseSet, 0x01B0}, + lcMap{'\u01B1', '\u01B2', LowercaseAdd, 217}, + lcMap{'\u01B3', '\u01B5', LowercaseBad, 0}, + lcMap{'\u01B7', '\u01B7', LowercaseSet, 0x0292}, + lcMap{'\u01B8', '\u01B8', LowercaseSet, 0x01B9}, + lcMap{'\u01BC', '\u01BC', LowercaseSet, 0x01BD}, + lcMap{'\u01C4', '\u01C5', LowercaseSet, 0x01C6}, + lcMap{'\u01C7', '\u01C8', LowercaseSet, 0x01C9}, + lcMap{'\u01CA', '\u01CB', LowercaseSet, 0x01CC}, + lcMap{'\u01CD', '\u01DB', LowercaseBad, 0}, + lcMap{'\u01DE', '\u01EE', LowercaseBor, 0}, + lcMap{'\u01F1', '\u01F2', LowercaseSet, 0x01F3}, + lcMap{'\u01F4', '\u01F4', LowercaseSet, 0x01F5}, + lcMap{'\u01FA', '\u0216', LowercaseBor, 0}, + lcMap{'\u0386', '\u0386', LowercaseSet, 0x03AC}, + lcMap{'\u0388', '\u038A', LowercaseAdd, 37}, + lcMap{'\u038C', '\u038C', LowercaseSet, 0x03CC}, + lcMap{'\u038E', '\u038F', LowercaseAdd, 63}, + lcMap{'\u0391', '\u03AB', LowercaseAdd, 32}, + lcMap{'\u03E2', '\u03EE', LowercaseBor, 0}, + lcMap{'\u0401', '\u040F', LowercaseAdd, 80}, + lcMap{'\u0410', '\u042F', LowercaseAdd, 32}, + lcMap{'\u0460', '\u0480', LowercaseBor, 0}, + lcMap{'\u0490', '\u04BE', LowercaseBor, 0}, + lcMap{'\u04C1', '\u04C3', LowercaseBad, 0}, + lcMap{'\u04C7', '\u04C7', LowercaseSet, 0x04C8}, + lcMap{'\u04CB', 
'\u04CB', LowercaseSet, 0x04CC}, + lcMap{'\u04D0', '\u04EA', LowercaseBor, 0}, + lcMap{'\u04EE', '\u04F4', LowercaseBor, 0}, + lcMap{'\u04F8', '\u04F8', LowercaseSet, 0x04F9}, + lcMap{'\u0531', '\u0556', LowercaseAdd, 48}, + lcMap{'\u10A0', '\u10C5', LowercaseAdd, 48}, + lcMap{'\u1E00', '\u1EF8', LowercaseBor, 0}, + lcMap{'\u1F08', '\u1F0F', LowercaseAdd, -8}, + lcMap{'\u1F18', '\u1F1F', LowercaseAdd, -8}, + lcMap{'\u1F28', '\u1F2F', LowercaseAdd, -8}, + lcMap{'\u1F38', '\u1F3F', LowercaseAdd, -8}, + lcMap{'\u1F48', '\u1F4D', LowercaseAdd, -8}, + lcMap{'\u1F59', '\u1F59', LowercaseSet, 0x1F51}, + lcMap{'\u1F5B', '\u1F5B', LowercaseSet, 0x1F53}, + lcMap{'\u1F5D', '\u1F5D', LowercaseSet, 0x1F55}, + lcMap{'\u1F5F', '\u1F5F', LowercaseSet, 0x1F57}, + lcMap{'\u1F68', '\u1F6F', LowercaseAdd, -8}, + lcMap{'\u1F88', '\u1F8F', LowercaseAdd, -8}, + lcMap{'\u1F98', '\u1F9F', LowercaseAdd, -8}, + lcMap{'\u1FA8', '\u1FAF', LowercaseAdd, -8}, + lcMap{'\u1FB8', '\u1FB9', LowercaseAdd, -8}, + lcMap{'\u1FBA', '\u1FBB', LowercaseAdd, -74}, + lcMap{'\u1FBC', '\u1FBC', LowercaseSet, 0x1FB3}, + lcMap{'\u1FC8', '\u1FCB', LowercaseAdd, -86}, + lcMap{'\u1FCC', '\u1FCC', LowercaseSet, 0x1FC3}, + lcMap{'\u1FD8', '\u1FD9', LowercaseAdd, -8}, + lcMap{'\u1FDA', '\u1FDB', LowercaseAdd, -100}, + lcMap{'\u1FE8', '\u1FE9', LowercaseAdd, -8}, + lcMap{'\u1FEA', '\u1FEB', LowercaseAdd, -112}, + lcMap{'\u1FEC', '\u1FEC', LowercaseSet, 0x1FE5}, + lcMap{'\u1FF8', '\u1FF9', LowercaseAdd, -128}, + lcMap{'\u1FFA', '\u1FFB', LowercaseAdd, -126}, + lcMap{'\u1FFC', '\u1FFC', LowercaseSet, 0x1FF3}, + lcMap{'\u2160', '\u216F', LowercaseAdd, 16}, + lcMap{'\u24B6', '\u24D0', LowercaseAdd, 26}, + lcMap{'\uFF21', '\uFF3A', LowercaseAdd, 32}, +} + +func (c *CharSet) addLowercaseRange(chMin, chMax rune) { + var i, iMax, iMid int + var chMinT, chMaxT rune + var lc lcMap + + for i, iMax = 0, len(lcTable); i < iMax; { + iMid = (i + iMax) / 2 + if lcTable[iMid].chMax < chMin { + i = iMid + 1 + } else { + iMax = iMid + } + } + + for ; i < len(lcTable); i++ { + lc = lcTable[i] + if lc.chMin > chMax { + return + } + chMinT = lc.chMin + if chMinT < chMin { + chMinT = chMin + } + + chMaxT = lc.chMax + if chMaxT > chMax { + chMaxT = chMax + } + + switch lc.op { + case LowercaseSet: + chMinT = rune(lc.data) + chMaxT = rune(lc.data) + break + case LowercaseAdd: + chMinT += lc.data + chMaxT += lc.data + break + case LowercaseBor: + chMinT |= 1 + chMaxT |= 1 + break + case LowercaseBad: + chMinT += (chMinT & 1) + chMaxT += (chMaxT & 1) + break + } + + if chMinT < chMin || chMaxT > chMax { + c.addRange(chMinT, chMaxT) + } + } +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/code.go b/vendor/github.com/dlclark/regexp2/syntax/code.go new file mode 100644 index 00000000..686e822a --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/code.go @@ -0,0 +1,274 @@ +package syntax + +import ( + "bytes" + "fmt" + "math" +) + +// similar to prog.go in the go regex package...also with comment 'may not belong in this package' + +// File provides operator constants for use by the Builder and the Machine. + +// Implementation notes: +// +// Regexps are built into RegexCodes, which contain an operation array, +// a string table, and some constants. +// +// Each operation is one of the codes below, followed by the integer +// operands specified for each op. +// +// Strings and sets are indices into a string table. 
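For orientation, a minimal sketch (editor's illustration, not part of the vendored file) of how that flat layout is walked; it mirrors the loop in the Dump method further down, where each opcode at index i is followed by its integer operands and the next opcode begins opcodeSize ints later:

    // walkCodes is a hypothetical in-package helper: print every instruction
    // in a compiled program, stepping by the size of each opcode.
    func walkCodes(c *Code) {
        for i := 0; i < len(c.Codes); i += opcodeSize(InstOp(c.Codes[i])) {
            // c.Codes[i] is the opcode; its operands occupy i+1 .. i+opcodeSize-1
            fmt.Println(c.OpcodeDescription(i))
        }
    }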
+ +type InstOp int + +const ( + // lef/back operands description + + Onerep InstOp = 0 // lef,back char,min,max a {n} + Notonerep = 1 // lef,back char,min,max .{n} + Setrep = 2 // lef,back set,min,max [\d]{n} + + Oneloop = 3 // lef,back char,min,max a {,n} + Notoneloop = 4 // lef,back char,min,max .{,n} + Setloop = 5 // lef,back set,min,max [\d]{,n} + + Onelazy = 6 // lef,back char,min,max a {,n}? + Notonelazy = 7 // lef,back char,min,max .{,n}? + Setlazy = 8 // lef,back set,min,max [\d]{,n}? + + One = 9 // lef char a + Notone = 10 // lef char [^a] + Set = 11 // lef set [a-z\s] \w \s \d + + Multi = 12 // lef string abcd + Ref = 13 // lef group \# + + Bol = 14 // ^ + Eol = 15 // $ + Boundary = 16 // \b + Nonboundary = 17 // \B + Beginning = 18 // \A + Start = 19 // \G + EndZ = 20 // \Z + End = 21 // \Z + + Nothing = 22 // Reject! + + // Primitive control structures + + Lazybranch = 23 // back jump straight first + Branchmark = 24 // back jump branch first for loop + Lazybranchmark = 25 // back jump straight first for loop + Nullcount = 26 // back val set counter, null mark + Setcount = 27 // back val set counter, make mark + Branchcount = 28 // back jump,limit branch++ if zero<=c impl group slots + Capsize int // number of impl group slots + FcPrefix *Prefix // the set of candidate first characters (may be null) + BmPrefix *BmPrefix // the fixed prefix string as a Boyer-Moore machine (may be null) + Anchors AnchorLoc // the set of zero-length start anchors (RegexFCD.Bol, etc) + RightToLeft bool // true if right to left +} + +func opcodeBacktracks(op InstOp) bool { + op &= Mask + + switch op { + case Oneloop, Notoneloop, Setloop, Onelazy, Notonelazy, Setlazy, Lazybranch, Branchmark, Lazybranchmark, + Nullcount, Setcount, Branchcount, Lazybranchcount, Setmark, Capturemark, Getmark, Setjump, Backjump, + Forejump, Goto: + return true + + default: + return false + } +} + +func opcodeSize(op InstOp) int { + op &= Mask + + switch op { + case Nothing, Bol, Eol, Boundary, Nonboundary, ECMABoundary, NonECMABoundary, Beginning, Start, EndZ, + End, Nullmark, Setmark, Getmark, Setjump, Backjump, Forejump, Stop: + return 1 + + case One, Notone, Multi, Ref, Testref, Goto, Nullcount, Setcount, Lazybranch, Branchmark, Lazybranchmark, + Prune, Set: + return 2 + + case Capturemark, Branchcount, Lazybranchcount, Onerep, Notonerep, Oneloop, Notoneloop, Onelazy, Notonelazy, + Setlazy, Setrep, Setloop: + return 3 + + default: + panic(fmt.Errorf("Unexpected op code: %v", op)) + } +} + +var codeStr = []string{ + "Onerep", "Notonerep", "Setrep", + "Oneloop", "Notoneloop", "Setloop", + "Onelazy", "Notonelazy", "Setlazy", + "One", "Notone", "Set", + "Multi", "Ref", + "Bol", "Eol", "Boundary", "Nonboundary", "Beginning", "Start", "EndZ", "End", + "Nothing", + "Lazybranch", "Branchmark", "Lazybranchmark", + "Nullcount", "Setcount", "Branchcount", "Lazybranchcount", + "Nullmark", "Setmark", "Capturemark", "Getmark", + "Setjump", "Backjump", "Forejump", "Testref", "Goto", + "Prune", "Stop", + "ECMABoundary", "NonECMABoundary", +} + +func operatorDescription(op InstOp) string { + desc := codeStr[op&Mask] + if (op & Ci) != 0 { + desc += "-Ci" + } + if (op & Rtl) != 0 { + desc += "-Rtl" + } + if (op & Back) != 0 { + desc += "-Back" + } + if (op & Back2) != 0 { + desc += "-Back2" + } + + return desc +} + +// OpcodeDescription is a humman readable string of the specific offset +func (c *Code) OpcodeDescription(offset int) string { + buf := &bytes.Buffer{} + + op := InstOp(c.Codes[offset]) + fmt.Fprintf(buf, "%06d ", offset) + 
+ if opcodeBacktracks(op & Mask) { + buf.WriteString("*") + } else { + buf.WriteString(" ") + } + buf.WriteString(operatorDescription(op)) + buf.WriteString("(") + op &= Mask + + switch op { + case One, Notone, Onerep, Notonerep, Oneloop, Notoneloop, Onelazy, Notonelazy: + buf.WriteString("Ch = ") + buf.WriteString(CharDescription(rune(c.Codes[offset+1]))) + + case Set, Setrep, Setloop, Setlazy: + buf.WriteString("Set = ") + buf.WriteString(c.Sets[c.Codes[offset+1]].String()) + + case Multi: + fmt.Fprintf(buf, "String = %s", string(c.Strings[c.Codes[offset+1]])) + + case Ref, Testref: + fmt.Fprintf(buf, "Index = %d", c.Codes[offset+1]) + + case Capturemark: + fmt.Fprintf(buf, "Index = %d", c.Codes[offset+1]) + if c.Codes[offset+2] != -1 { + fmt.Fprintf(buf, ", Unindex = %d", c.Codes[offset+2]) + } + + case Nullcount, Setcount: + fmt.Fprintf(buf, "Value = %d", c.Codes[offset+1]) + + case Goto, Lazybranch, Branchmark, Lazybranchmark, Branchcount, Lazybranchcount: + fmt.Fprintf(buf, "Addr = %d", c.Codes[offset+1]) + } + + switch op { + case Onerep, Notonerep, Oneloop, Notoneloop, Onelazy, Notonelazy, Setrep, Setloop, Setlazy: + buf.WriteString(", Rep = ") + if c.Codes[offset+2] == math.MaxInt32 { + buf.WriteString("inf") + } else { + fmt.Fprintf(buf, "%d", c.Codes[offset+2]) + } + + case Branchcount, Lazybranchcount: + buf.WriteString(", Limit = ") + if c.Codes[offset+2] == math.MaxInt32 { + buf.WriteString("inf") + } else { + fmt.Fprintf(buf, "%d", c.Codes[offset+2]) + } + + } + + buf.WriteString(")") + + return buf.String() +} + +func (c *Code) Dump() string { + buf := &bytes.Buffer{} + + if c.RightToLeft { + fmt.Fprintln(buf, "Direction: right-to-left") + } else { + fmt.Fprintln(buf, "Direction: left-to-right") + } + if c.FcPrefix == nil { + fmt.Fprintln(buf, "Firstchars: n/a") + } else { + fmt.Fprintf(buf, "Firstchars: %v\n", c.FcPrefix.PrefixSet.String()) + } + + if c.BmPrefix == nil { + fmt.Fprintln(buf, "Prefix: n/a") + } else { + fmt.Fprintf(buf, "Prefix: %v\n", Escape(c.BmPrefix.String())) + } + + fmt.Fprintf(buf, "Anchors: %v\n", c.Anchors) + fmt.Fprintln(buf) + + if c.BmPrefix != nil { + fmt.Fprintln(buf, "BoyerMoore:") + fmt.Fprintln(buf, c.BmPrefix.Dump(" ")) + } + for i := 0; i < len(c.Codes); i += opcodeSize(InstOp(c.Codes[i])) { + fmt.Fprintln(buf, c.OpcodeDescription(i)) + } + + return buf.String() +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/escape.go b/vendor/github.com/dlclark/regexp2/syntax/escape.go new file mode 100644 index 00000000..609df107 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/escape.go @@ -0,0 +1,94 @@ +package syntax + +import ( + "bytes" + "strconv" + "strings" + "unicode" +) + +func Escape(input string) string { + b := &bytes.Buffer{} + for _, r := range input { + escape(b, r, false) + } + return b.String() +} + +const meta = `\.+*?()|[]{}^$# ` + +func escape(b *bytes.Buffer, r rune, force bool) { + if unicode.IsPrint(r) { + if strings.IndexRune(meta, r) >= 0 || force { + b.WriteRune('\\') + } + b.WriteRune(r) + return + } + + switch r { + case '\a': + b.WriteString(`\a`) + case '\f': + b.WriteString(`\f`) + case '\n': + b.WriteString(`\n`) + case '\r': + b.WriteString(`\r`) + case '\t': + b.WriteString(`\t`) + case '\v': + b.WriteString(`\v`) + default: + if r < 0x100 { + b.WriteString(`\x`) + s := strconv.FormatInt(int64(r), 16) + if len(s) == 1 { + b.WriteRune('0') + } + b.WriteString(s) + break + } + b.WriteString(`\u`) + b.WriteString(strconv.FormatInt(int64(r), 16)) + } +} + +func Unescape(input string) (string, error) { 
+ idx := strings.IndexRune(input, '\\') + // no slashes means no unescape needed + if idx == -1 { + return input, nil + } + + buf := bytes.NewBufferString(input[:idx]) + // get the runes for the rest of the string -- we're going full parser scan on this + + p := parser{} + p.setPattern(input[idx+1:]) + for { + if p.rightMost() { + return "", p.getErr(ErrIllegalEndEscape) + } + r, err := p.scanCharEscape() + if err != nil { + return "", err + } + buf.WriteRune(r) + // are we done? + if p.rightMost() { + return buf.String(), nil + } + + r = p.moveRightGetChar() + for r != '\\' { + buf.WriteRune(r) + if p.rightMost() { + // we're done, no more slashes + return buf.String(), nil + } + // keep scanning until we get another slash + r = p.moveRightGetChar() + } + } +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/fuzz.go b/vendor/github.com/dlclark/regexp2/syntax/fuzz.go new file mode 100644 index 00000000..ee863866 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/fuzz.go @@ -0,0 +1,20 @@ +// +build gofuzz + +package syntax + +// Fuzz is the input point for go-fuzz +func Fuzz(data []byte) int { + sdata := string(data) + tree, err := Parse(sdata, RegexOptions(0)) + if err != nil { + return 0 + } + + // translate it to code + _, err = Write(tree) + if err != nil { + panic(err) + } + + return 1 +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/parser.go b/vendor/github.com/dlclark/regexp2/syntax/parser.go new file mode 100644 index 00000000..6e8ea834 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/parser.go @@ -0,0 +1,2125 @@ +package syntax + +import ( + "fmt" + "math" + "os" + "sort" + "strconv" + "unicode" +) + +type RegexOptions int32 + +const ( + IgnoreCase RegexOptions = 0x0001 // "i" + Multiline = 0x0002 // "m" + ExplicitCapture = 0x0004 // "n" + Compiled = 0x0008 // "c" + Singleline = 0x0010 // "s" + IgnorePatternWhitespace = 0x0020 // "x" + RightToLeft = 0x0040 // "r" + Debug = 0x0080 // "d" + ECMAScript = 0x0100 // "e" +) + +func optionFromCode(ch rune) RegexOptions { + // case-insensitive + switch ch { + case 'i', 'I': + return IgnoreCase + case 'r', 'R': + return RightToLeft + case 'm', 'M': + return Multiline + case 'n', 'N': + return ExplicitCapture + case 's', 'S': + return Singleline + case 'x', 'X': + return IgnorePatternWhitespace + case 'd', 'D': + return Debug + case 'e', 'E': + return ECMAScript + default: + return 0 + } +} + +// An Error describes a failure to parse a regular expression +// and gives the offending expression. +type Error struct { + Code ErrorCode + Expr string + Args []interface{} +} + +func (e *Error) Error() string { + if len(e.Args) == 0 { + return "error parsing regexp: " + e.Code.String() + " in `" + e.Expr + "`" + } + return "error parsing regexp: " + fmt.Sprintf(e.Code.String(), e.Args...) + " in `" + e.Expr + "`" +} + +// An ErrorCode describes a failure to parse a regular expression. 
+type ErrorCode string + +const ( + // internal issue + ErrInternalError ErrorCode = "regexp/syntax: internal error" + // Parser errors + ErrUnterminatedComment = "unterminated comment" + ErrInvalidCharRange = "invalid character class range" + ErrInvalidRepeatSize = "invalid repeat count" + ErrInvalidUTF8 = "invalid UTF-8" + ErrCaptureGroupOutOfRange = "capture group number out of range" + ErrUnexpectedParen = "unexpected )" + ErrMissingParen = "missing closing )" + ErrMissingBrace = "missing closing }" + ErrInvalidRepeatOp = "invalid nested repetition operator" + ErrMissingRepeatArgument = "missing argument to repetition operator" + ErrConditionalExpression = "illegal conditional (?(...)) expression" + ErrTooManyAlternates = "too many | in (?()|)" + ErrUnrecognizedGrouping = "unrecognized grouping construct: (%v" + ErrInvalidGroupName = "invalid group name: group names must begin with a word character and have a matching terminator" + ErrCapNumNotZero = "capture number cannot be zero" + ErrUndefinedBackRef = "reference to undefined group number %v" + ErrUndefinedNameRef = "reference to undefined group name %v" + ErrAlternationCantCapture = "alternation conditions do not capture and cannot be named" + ErrAlternationCantHaveComment = "alternation conditions cannot be comments" + ErrMalformedReference = "(?(%v) ) malformed" + ErrUndefinedReference = "(?(%v) ) reference to undefined group" + ErrIllegalEndEscape = "illegal \\ at end of pattern" + ErrMalformedSlashP = "malformed \\p{X} character escape" + ErrIncompleteSlashP = "incomplete \\p{X} character escape" + ErrUnknownSlashP = "unknown unicode category, script, or property '%v'" + ErrUnrecognizedEscape = "unrecognized escape sequence \\%v" + ErrMissingControl = "missing control character" + ErrUnrecognizedControl = "unrecognized control character" + ErrTooFewHex = "insufficient hexadecimal digits" + ErrInvalidHex = "hex values may not be larger than 0x10FFFF" + ErrMalformedNameRef = "malformed \\k<...> named back reference" + ErrBadClassInCharRange = "cannot include class \\%v in character range" + ErrUnterminatedBracket = "unterminated [] set" + ErrSubtractionMustBeLast = "a subtraction must be the last element in a character class" + ErrReversedCharRange = "[x-y] range in reverse order" +) + +func (e ErrorCode) String() string { + return string(e) +} + +type parser struct { + stack *regexNode + group *regexNode + alternation *regexNode + concatenation *regexNode + unit *regexNode + + patternRaw string + pattern []rune + + currentPos int + specialCase *unicode.SpecialCase + + autocap int + capcount int + captop int + capsize int + + caps map[int]int + capnames map[string]int + + capnumlist []int + capnamelist []string + + options RegexOptions + optionsStack []RegexOptions + ignoreNextParen bool +} + +const ( + maxValueDiv10 int = math.MaxInt32 / 10 + maxValueMod10 = math.MaxInt32 % 10 +) + +// Parse converts a regex string into a parse tree +func Parse(re string, op RegexOptions) (*RegexTree, error) { + p := parser{ + options: op, + caps: make(map[int]int), + } + p.setPattern(re) + + if err := p.countCaptures(); err != nil { + return nil, err + } + + p.reset(op) + root, err := p.scanRegex() + + if err != nil { + return nil, err + } + tree := &RegexTree{ + root: root, + caps: p.caps, + capnumlist: p.capnumlist, + captop: p.captop, + Capnames: p.capnames, + Caplist: p.capnamelist, + options: op, + } + + if tree.options&Debug > 0 { + os.Stdout.WriteString(tree.Dump()) + } + + return tree, nil +} + +func (p *parser) setPattern(pattern 
string) { + p.patternRaw = pattern + p.pattern = make([]rune, 0, len(pattern)) + + //populate our rune array to handle utf8 encoding + for _, r := range pattern { + p.pattern = append(p.pattern, r) + } +} +func (p *parser) getErr(code ErrorCode, args ...interface{}) error { + return &Error{Code: code, Expr: p.patternRaw, Args: args} +} + +func (p *parser) noteCaptureSlot(i, pos int) { + if _, ok := p.caps[i]; !ok { + // the rhs of the hashtable isn't used in the parser + p.caps[i] = pos + p.capcount++ + + if p.captop <= i { + if i == math.MaxInt32 { + p.captop = i + } else { + p.captop = i + 1 + } + } + } +} + +func (p *parser) noteCaptureName(name string, pos int) { + if p.capnames == nil { + p.capnames = make(map[string]int) + } + + if _, ok := p.capnames[name]; !ok { + p.capnames[name] = pos + p.capnamelist = append(p.capnamelist, name) + } +} + +func (p *parser) assignNameSlots() { + if p.capnames != nil { + for _, name := range p.capnamelist { + for p.isCaptureSlot(p.autocap) { + p.autocap++ + } + pos := p.capnames[name] + p.capnames[name] = p.autocap + p.noteCaptureSlot(p.autocap, pos) + + p.autocap++ + } + } + + // if the caps array has at least one gap, construct the list of used slots + if p.capcount < p.captop { + p.capnumlist = make([]int, p.capcount) + i := 0 + + for k := range p.caps { + p.capnumlist[i] = k + i++ + } + + sort.Ints(p.capnumlist) + } + + // merge capsnumlist into capnamelist + if p.capnames != nil || p.capnumlist != nil { + var oldcapnamelist []string + var next int + var k int + + if p.capnames == nil { + oldcapnamelist = nil + p.capnames = make(map[string]int) + p.capnamelist = []string{} + next = -1 + } else { + oldcapnamelist = p.capnamelist + p.capnamelist = []string{} + next = p.capnames[oldcapnamelist[0]] + } + + for i := 0; i < p.capcount; i++ { + j := i + if p.capnumlist != nil { + j = p.capnumlist[i] + } + + if next == j { + p.capnamelist = append(p.capnamelist, oldcapnamelist[k]) + k++ + + if k == len(oldcapnamelist) { + next = -1 + } else { + next = p.capnames[oldcapnamelist[k]] + } + + } else { + //feature: culture? + str := strconv.Itoa(j) + p.capnamelist = append(p.capnamelist, str) + p.capnames[str] = j + } + } + } +} + +func (p *parser) consumeAutocap() int { + r := p.autocap + p.autocap++ + return r +} + +// CountCaptures is a prescanner for deducing the slots used for +// captures by doing a partial tokenization of the pattern. +func (p *parser) countCaptures() error { + var ch rune + + p.noteCaptureSlot(0, 0) + + p.autocap = 1 + + for p.charsRight() > 0 { + pos := p.textpos() + ch = p.moveRightGetChar() + switch ch { + case '\\': + if p.charsRight() > 0 { + p.moveRight(1) + } + + case '#': + if p.useOptionX() { + p.moveLeft() + p.scanBlank() + } + + case '[': + p.scanCharSet(false, true) + + case ')': + if !p.emptyOptionsStack() { + p.popOptions() + } + + case '(': + if p.charsRight() >= 2 && p.rightChar(1) == '#' && p.rightChar(0) == '?' { + p.moveLeft() + p.scanBlank() + } else { + p.pushOptions() + if p.charsRight() > 0 && p.rightChar(0) == '?' { + // we have (?... + p.moveRight(1) + + if p.charsRight() > 1 && (p.rightChar(0) == '<' || p.rightChar(0) == '\'') { + // named group: (?<... or (?'... + + p.moveRight(1) + ch = p.rightChar(0) + + if ch != '0' && IsWordChar(ch) { + if ch >= '1' && ch <= '9' { + dec, err := p.scanDecimal() + if err != nil { + return err + } + p.noteCaptureSlot(dec, pos) + } else { + p.noteCaptureName(p.scanCapname(), pos) + } + } + } else { + // (?... 
+ + // get the options if it's an option construct (?cimsx-cimsx...) + p.scanOptions() + + if p.charsRight() > 0 { + if p.rightChar(0) == ')' { + // (?cimsx-cimsx) + p.moveRight(1) + p.popKeepOptions() + } else if p.rightChar(0) == '(' { + // alternation construct: (?(foo)yes|no) + // ignore the next paren so we don't capture the condition + p.ignoreNextParen = true + + // break from here so we don't reset ignoreNextParen + continue + } + } + } + } else { + if !p.useOptionN() && !p.ignoreNextParen { + p.noteCaptureSlot(p.consumeAutocap(), pos) + } + } + } + + p.ignoreNextParen = false + + } + } + + p.assignNameSlots() + return nil +} + +func (p *parser) reset(topopts RegexOptions) { + p.currentPos = 0 + p.autocap = 1 + p.ignoreNextParen = false + + if len(p.optionsStack) > 0 { + p.optionsStack = p.optionsStack[:0] + } + + p.options = topopts + p.stack = nil +} + +func (p *parser) scanRegex() (*regexNode, error) { + ch := '@' // nonspecial ch, means at beginning + isQuant := false + + p.startGroup(newRegexNodeMN(ntCapture, p.options, 0, -1)) + + for p.charsRight() > 0 { + wasPrevQuantifier := isQuant + isQuant = false + + if err := p.scanBlank(); err != nil { + return nil, err + } + + startpos := p.textpos() + + // move past all of the normal characters. We'll stop when we hit some kind of control character, + // or if IgnorePatternWhiteSpace is on, we'll stop when we see some whitespace. + if p.useOptionX() { + for p.charsRight() > 0 { + ch = p.rightChar(0) + //UGLY: clean up, this is ugly + if !(!isStopperX(ch) || (ch == '{' && !p.isTrueQuantifier())) { + break + } + p.moveRight(1) + } + } else { + for p.charsRight() > 0 { + ch = p.rightChar(0) + if !(!isSpecial(ch) || ch == '{' && !p.isTrueQuantifier()) { + break + } + p.moveRight(1) + } + } + + endpos := p.textpos() + + p.scanBlank() + + if p.charsRight() == 0 { + ch = '!' 
// nonspecial, means at end + } else if ch = p.rightChar(0); isSpecial(ch) { + isQuant = isQuantifier(ch) + p.moveRight(1) + } else { + ch = ' ' // nonspecial, means at ordinary char + } + + if startpos < endpos { + cchUnquantified := endpos - startpos + if isQuant { + cchUnquantified-- + } + wasPrevQuantifier = false + + if cchUnquantified > 0 { + p.addToConcatenate(startpos, cchUnquantified, false) + } + + if isQuant { + p.addUnitOne(p.charAt(endpos - 1)) + } + } + + switch ch { + case '!': + goto BreakOuterScan + + case ' ': + goto ContinueOuterScan + + case '[': + cc, err := p.scanCharSet(p.useOptionI(), false) + if err != nil { + return nil, err + } + p.addUnitSet(cc) + + case '(': + p.pushOptions() + + if grouper, err := p.scanGroupOpen(); err != nil { + return nil, err + } else if grouper == nil { + p.popKeepOptions() + } else { + p.pushGroup() + p.startGroup(grouper) + } + + continue + + case '|': + p.addAlternate() + goto ContinueOuterScan + + case ')': + if p.emptyStack() { + return nil, p.getErr(ErrUnexpectedParen) + } + + if err := p.addGroup(); err != nil { + return nil, err + } + if err := p.popGroup(); err != nil { + return nil, err + } + p.popOptions() + + if p.unit == nil { + goto ContinueOuterScan + } + + case '\\': + n, err := p.scanBackslash() + if err != nil { + return nil, err + } + p.addUnitNode(n) + + case '^': + if p.useOptionM() { + p.addUnitType(ntBol) + } else { + p.addUnitType(ntBeginning) + } + + case '$': + if p.useOptionM() { + p.addUnitType(ntEol) + } else { + p.addUnitType(ntEndZ) + } + + case '.': + if p.useOptionE() { + p.addUnitSet(ECMAAnyClass()) + } else if p.useOptionS() { + p.addUnitSet(AnyClass()) + } else { + p.addUnitNotone('\n') + } + + case '{', '*', '+', '?': + if p.unit == nil { + if wasPrevQuantifier { + return nil, p.getErr(ErrInvalidRepeatOp) + } else { + return nil, p.getErr(ErrMissingRepeatArgument) + } + } + p.moveLeft() + + default: + return nil, p.getErr(ErrInternalError) + } + + if err := p.scanBlank(); err != nil { + return nil, err + } + + if p.charsRight() > 0 { + isQuant = p.isTrueQuantifier() + } + if p.charsRight() == 0 || !isQuant { + //maintain odd C# assignment order -- not sure if required, could clean up? + p.addConcatenate() + goto ContinueOuterScan + } + + ch = p.moveRightGetChar() + + // Handle quantifiers + for p.unit != nil { + var min, max int + var lazy bool + + switch ch { + case '*': + min = 0 + max = math.MaxInt32 + + case '?': + min = 0 + max = 1 + + case '+': + min = 1 + max = math.MaxInt32 + + case '{': + { + var err error + startpos = p.textpos() + if min, err = p.scanDecimal(); err != nil { + return nil, err + } + max = min + if startpos < p.textpos() { + if p.charsRight() > 0 && p.rightChar(0) == ',' { + p.moveRight(1) + if p.charsRight() == 0 || p.rightChar(0) == '}' { + max = math.MaxInt32 + } else { + if max, err = p.scanDecimal(); err != nil { + return nil, err + } + } + } + } + + if startpos == p.textpos() || p.charsRight() == 0 || p.moveRightGetChar() != '}' { + p.addConcatenate() + p.textto(startpos - 1) + goto ContinueOuterScan + } + } + + default: + return nil, p.getErr(ErrInternalError) + } + + if err := p.scanBlank(); err != nil { + return nil, err + } + + if p.charsRight() == 0 || p.rightChar(0) != '?' 
{ + lazy = false + } else { + p.moveRight(1) + lazy = true + } + + if min > max { + return nil, p.getErr(ErrInvalidRepeatSize) + } + + p.addConcatenate3(lazy, min, max) + } + + ContinueOuterScan: + } + +BreakOuterScan: + ; + + if !p.emptyStack() { + return nil, p.getErr(ErrMissingParen) + } + + if err := p.addGroup(); err != nil { + return nil, err + } + + return p.unit, nil + +} + +/* + * Simple parsing for replacement patterns + */ +func (p *parser) scanReplacement() (*regexNode, error) { + var c, startpos int + + p.concatenation = newRegexNode(ntConcatenate, p.options) + + for { + c = p.charsRight() + if c == 0 { + break + } + + startpos = p.textpos() + + for c > 0 && p.rightChar(0) != '$' { + p.moveRight(1) + c-- + } + + p.addToConcatenate(startpos, p.textpos()-startpos, true) + + if c > 0 { + if p.moveRightGetChar() == '$' { + n, err := p.scanDollar() + if err != nil { + return nil, err + } + p.addUnitNode(n) + } + p.addConcatenate() + } + } + + return p.concatenation, nil +} + +/* + * Scans $ patterns recognized within replacement patterns + */ +func (p *parser) scanDollar() (*regexNode, error) { + if p.charsRight() == 0 { + return newRegexNodeCh(ntOne, p.options, '$'), nil + } + + ch := p.rightChar(0) + angled := false + backpos := p.textpos() + lastEndPos := backpos + + // Note angle + + if ch == '{' && p.charsRight() > 1 { + angled = true + p.moveRight(1) + ch = p.rightChar(0) + } + + // Try to parse backreference: \1 or \{1} or \{cap} + + if ch >= '0' && ch <= '9' { + if !angled && p.useOptionE() { + capnum := -1 + newcapnum := int(ch - '0') + p.moveRight(1) + if p.isCaptureSlot(newcapnum) { + capnum = newcapnum + lastEndPos = p.textpos() + } + + for p.charsRight() > 0 { + ch = p.rightChar(0) + if ch < '0' || ch > '9' { + break + } + digit := int(ch - '0') + if newcapnum > maxValueDiv10 || (newcapnum == maxValueDiv10 && digit > maxValueMod10) { + return nil, p.getErr(ErrCaptureGroupOutOfRange) + } + + newcapnum = newcapnum*10 + digit + + p.moveRight(1) + if p.isCaptureSlot(newcapnum) { + capnum = newcapnum + lastEndPos = p.textpos() + } + } + p.textto(lastEndPos) + if capnum >= 0 { + return newRegexNodeM(ntRef, p.options, capnum), nil + } + } else { + capnum, err := p.scanDecimal() + if err != nil { + return nil, err + } + if !angled || p.charsRight() > 0 && p.moveRightGetChar() == '}' { + if p.isCaptureSlot(capnum) { + return newRegexNodeM(ntRef, p.options, capnum), nil + } + } + } + } else if angled && IsWordChar(ch) { + capname := p.scanCapname() + + if p.charsRight() > 0 && p.moveRightGetChar() == '}' { + if p.isCaptureName(capname) { + return newRegexNodeM(ntRef, p.options, p.captureSlotFromName(capname)), nil + } + } + } else if !angled { + capnum := 1 + + switch ch { + case '$': + p.moveRight(1) + return newRegexNodeCh(ntOne, p.options, '$'), nil + case '&': + capnum = 0 + case '`': + capnum = replaceLeftPortion + case '\'': + capnum = replaceRightPortion + case '+': + capnum = replaceLastGroup + case '_': + capnum = replaceWholeString + } + + if capnum != 1 { + p.moveRight(1) + return newRegexNodeM(ntRef, p.options, capnum), nil + } + } + + // unrecognized $: literalize + + p.textto(backpos) + return newRegexNodeCh(ntOne, p.options, '$'), nil +} + +// scanGroupOpen scans chars following a '(' (not counting the '('), and returns +// a RegexNode for the type of group scanned, or nil if the group +// simply changed options (?cimsx-cimsx) or was a comment (#...). 
+func (p *parser) scanGroupOpen() (*regexNode, error) { + var ch rune + var nt nodeType + var err error + close := '>' + start := p.textpos() + + // just return a RegexNode if we have: + // 1. "(" followed by nothing + // 2. "(x" where x != ? + // 3. "(?)" + if p.charsRight() == 0 || p.rightChar(0) != '?' || (p.rightChar(0) == '?' && (p.charsRight() > 1 && p.rightChar(1) == ')')) { + if p.useOptionN() || p.ignoreNextParen { + p.ignoreNextParen = false + return newRegexNode(ntGroup, p.options), nil + } + return newRegexNodeMN(ntCapture, p.options, p.consumeAutocap(), -1), nil + } + + p.moveRight(1) + + for { + if p.charsRight() == 0 { + break + } + + switch ch = p.moveRightGetChar(); ch { + case ':': + nt = ntGroup + + case '=': + p.options &= ^RightToLeft + nt = ntRequire + + case '!': + p.options &= ^RightToLeft + nt = ntPrevent + + case '>': + nt = ntGreedy + + case '\'': + close = '\'' + fallthrough + + case '<': + if p.charsRight() == 0 { + goto BreakRecognize + } + + switch ch = p.moveRightGetChar(); ch { + case '=': + if close == '\'' { + goto BreakRecognize + } + + p.options |= RightToLeft + nt = ntRequire + + case '!': + if close == '\'' { + goto BreakRecognize + } + + p.options |= RightToLeft + nt = ntPrevent + + default: + p.moveLeft() + capnum := -1 + uncapnum := -1 + proceed := false + + // grab part before - + + if ch >= '0' && ch <= '9' { + if capnum, err = p.scanDecimal(); err != nil { + return nil, err + } + + if !p.isCaptureSlot(capnum) { + capnum = -1 + } + + // check if we have bogus characters after the number + if p.charsRight() > 0 && !(p.rightChar(0) == close || p.rightChar(0) == '-') { + return nil, p.getErr(ErrInvalidGroupName) + } + if capnum == 0 { + return nil, p.getErr(ErrCapNumNotZero) + } + } else if IsWordChar(ch) { + capname := p.scanCapname() + + if p.isCaptureName(capname) { + capnum = p.captureSlotFromName(capname) + } + + // check if we have bogus character after the name + if p.charsRight() > 0 && !(p.rightChar(0) == close || p.rightChar(0) == '-') { + return nil, p.getErr(ErrInvalidGroupName) + } + } else if ch == '-' { + proceed = true + } else { + // bad group name - starts with something other than a word character and isn't a number + return nil, p.getErr(ErrInvalidGroupName) + } + + // grab part after - if any + + if (capnum != -1 || proceed == true) && p.charsRight() > 0 && p.rightChar(0) == '-' { + p.moveRight(1) + + //no more chars left, no closing char, etc + if p.charsRight() == 0 { + return nil, p.getErr(ErrInvalidGroupName) + } + + ch = p.rightChar(0) + if ch >= '0' && ch <= '9' { + if uncapnum, err = p.scanDecimal(); err != nil { + return nil, err + } + + if !p.isCaptureSlot(uncapnum) { + return nil, p.getErr(ErrUndefinedBackRef, uncapnum) + } + + // check if we have bogus characters after the number + if p.charsRight() > 0 && p.rightChar(0) != close { + return nil, p.getErr(ErrInvalidGroupName) + } + } else if IsWordChar(ch) { + uncapname := p.scanCapname() + + if !p.isCaptureName(uncapname) { + return nil, p.getErr(ErrUndefinedNameRef, uncapname) + } + uncapnum = p.captureSlotFromName(uncapname) + + // check if we have bogus character after the name + if p.charsRight() > 0 && p.rightChar(0) != close { + return nil, p.getErr(ErrInvalidGroupName) + } + } else { + // bad group name - starts with something other than a word character and isn't a number + return nil, p.getErr(ErrInvalidGroupName) + } + } + + // actually make the node + + if (capnum != -1 || uncapnum != -1) && p.charsRight() > 0 && p.moveRightGetChar() == close { + return 
newRegexNodeMN(ntCapture, p.options, capnum, uncapnum), nil + } + goto BreakRecognize + } + + case '(': + // alternation construct (?(...) | ) + + parenPos := p.textpos() + if p.charsRight() > 0 { + ch = p.rightChar(0) + + // check if the alternation condition is a backref + if ch >= '0' && ch <= '9' { + var capnum int + if capnum, err = p.scanDecimal(); err != nil { + return nil, err + } + if p.charsRight() > 0 && p.moveRightGetChar() == ')' { + if p.isCaptureSlot(capnum) { + return newRegexNodeM(ntTestref, p.options, capnum), nil + } + return nil, p.getErr(ErrUndefinedReference, capnum) + } + + return nil, p.getErr(ErrMalformedReference, capnum) + + } else if IsWordChar(ch) { + capname := p.scanCapname() + + if p.isCaptureName(capname) && p.charsRight() > 0 && p.moveRightGetChar() == ')' { + return newRegexNodeM(ntTestref, p.options, p.captureSlotFromName(capname)), nil + } + } + } + // not a backref + nt = ntTestgroup + p.textto(parenPos - 1) // jump to the start of the parentheses + p.ignoreNextParen = true // but make sure we don't try to capture the insides + + charsRight := p.charsRight() + if charsRight >= 3 && p.rightChar(1) == '?' { + rightchar2 := p.rightChar(2) + // disallow comments in the condition + if rightchar2 == '#' { + return nil, p.getErr(ErrAlternationCantHaveComment) + } + + // disallow named capture group (?<..>..) in the condition + if rightchar2 == '\'' { + return nil, p.getErr(ErrAlternationCantCapture) + } + + if charsRight >= 4 && (rightchar2 == '<' && p.rightChar(3) != '!' && p.rightChar(3) != '=') { + return nil, p.getErr(ErrAlternationCantCapture) + } + } + + default: + p.moveLeft() + + nt = ntGroup + // disallow options in the children of a testgroup node + if p.group.t != ntTestgroup { + p.scanOptions() + } + if p.charsRight() == 0 { + goto BreakRecognize + } + + if ch = p.moveRightGetChar(); ch == ')' { + return nil, nil + } + + if ch != ':' { + goto BreakRecognize + } + + } + + return newRegexNode(nt, p.options), nil + } + +BreakRecognize: + + // break Recognize comes here + + return nil, p.getErr(ErrUnrecognizedGrouping, string(p.pattern[start:p.textpos()])) +} + +// scans backslash specials and basics +func (p *parser) scanBackslash() (*regexNode, error) { + + if p.charsRight() == 0 { + return nil, p.getErr(ErrIllegalEndEscape) + } + + switch ch := p.rightChar(0); ch { + case 'b', 'B', 'A', 'G', 'Z', 'z': + p.moveRight(1) + return newRegexNode(p.typeFromCode(ch), p.options), nil + + case 'w': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, ECMAWordClass()), nil + } + return newRegexNodeSet(ntSet, p.options, WordClass()), nil + + case 'W': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, NotECMAWordClass()), nil + } + return newRegexNodeSet(ntSet, p.options, NotWordClass()), nil + + case 's': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, ECMASpaceClass()), nil + } + return newRegexNodeSet(ntSet, p.options, SpaceClass()), nil + + case 'S': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, NotECMASpaceClass()), nil + } + return newRegexNodeSet(ntSet, p.options, NotSpaceClass()), nil + + case 'd': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, ECMADigitClass()), nil + } + return newRegexNodeSet(ntSet, p.options, DigitClass()), nil + + case 'D': + p.moveRight(1) + if p.useOptionE() { + return newRegexNodeSet(ntSet, p.options, NotECMADigitClass()), nil + } + return newRegexNodeSet(ntSet, 
p.options, NotDigitClass()), nil + + case 'p', 'P': + p.moveRight(1) + prop, err := p.parseProperty() + if err != nil { + return nil, err + } + cc := &CharSet{} + cc.addCategory(prop, (ch != 'p'), p.useOptionI(), p.patternRaw) + if p.useOptionI() { + cc.addLowercase() + } + + return newRegexNodeSet(ntSet, p.options, cc), nil + + default: + return p.scanBasicBackslash() + } +} + +// Scans \-style backreferences and character escapes +func (p *parser) scanBasicBackslash() (*regexNode, error) { + if p.charsRight() == 0 { + return nil, p.getErr(ErrIllegalEndEscape) + } + angled := false + close := '\x00' + + backpos := p.textpos() + ch := p.rightChar(0) + + // allow \k instead of \, which is now deprecated + + if ch == 'k' { + if p.charsRight() >= 2 { + p.moveRight(1) + ch = p.moveRightGetChar() + + if ch == '<' || ch == '\'' { + angled = true + if ch == '\'' { + close = '\'' + } else { + close = '>' + } + } + } + + if !angled || p.charsRight() <= 0 { + return nil, p.getErr(ErrMalformedNameRef) + } + + ch = p.rightChar(0) + + } else if (ch == '<' || ch == '\'') && p.charsRight() > 1 { // Note angle without \g + angled = true + if ch == '\'' { + close = '\'' + } else { + close = '>' + } + + p.moveRight(1) + ch = p.rightChar(0) + } + + // Try to parse backreference: \<1> or \ + + if angled && ch >= '0' && ch <= '9' { + capnum, err := p.scanDecimal() + if err != nil { + return nil, err + } + + if p.charsRight() > 0 && p.moveRightGetChar() == close { + if p.isCaptureSlot(capnum) { + return newRegexNodeM(ntRef, p.options, capnum), nil + } else { + return nil, p.getErr(ErrUndefinedBackRef, capnum) + } + } + } else if !angled && ch >= '1' && ch <= '9' { // Try to parse backreference or octal: \1 + capnum, err := p.scanDecimal() + if err != nil { + return nil, err + } + if p.useOptionE() || p.isCaptureSlot(capnum) { + return newRegexNodeM(ntRef, p.options, capnum), nil + } + if capnum <= 9 { + return nil, p.getErr(ErrUndefinedBackRef, capnum) + } + + } else if angled && IsWordChar(ch) { + capname := p.scanCapname() + + if p.charsRight() > 0 && p.moveRightGetChar() == close { + if p.isCaptureName(capname) { + return newRegexNodeM(ntRef, p.options, p.captureSlotFromName(capname)), nil + } + return nil, p.getErr(ErrUndefinedNameRef, capname) + } + } + + // Not backreference: must be char code + + p.textto(backpos) + ch, err := p.scanCharEscape() + if err != nil { + return nil, err + } + + if p.useOptionI() { + ch = unicode.ToLower(ch) + } + + return newRegexNodeCh(ntOne, p.options, ch), nil +} + +// Scans X for \p{X} or \P{X} +func (p *parser) parseProperty() (string, error) { + if p.charsRight() < 3 { + return "", p.getErr(ErrIncompleteSlashP) + } + ch := p.moveRightGetChar() + if ch != '{' { + return "", p.getErr(ErrMalformedSlashP) + } + + startpos := p.textpos() + for p.charsRight() > 0 { + ch = p.moveRightGetChar() + if !(IsWordChar(ch) || ch == '-') { + p.moveLeft() + break + } + } + capname := string(p.pattern[startpos:p.textpos()]) + + if p.charsRight() == 0 || p.moveRightGetChar() != '}' { + return "", p.getErr(ErrIncompleteSlashP) + } + + if !isValidUnicodeCat(capname) { + return "", p.getErr(ErrUnknownSlashP, capname) + } + + return capname, nil +} + +// Returns ReNode type for zero-length assertions with a \ code. 
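+// As a quick reference (illustrative note, derived from the switch below):
+//   \b -> ntBoundary (ntECMABoundary under the E option)
+//   \B -> ntNonboundary (ntNonECMABoundary under the E option)
+//   \A -> ntBeginning   \G -> ntStart   \Z -> ntEndZ   \z -> ntEnd
+// Any other code falls through to ntNothing.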
+func (p *parser) typeFromCode(ch rune) nodeType { + switch ch { + case 'b': + if p.useOptionE() { + return ntECMABoundary + } + return ntBoundary + case 'B': + if p.useOptionE() { + return ntNonECMABoundary + } + return ntNonboundary + case 'A': + return ntBeginning + case 'G': + return ntStart + case 'Z': + return ntEndZ + case 'z': + return ntEnd + default: + return ntNothing + } +} + +// Scans whitespace or x-mode comments. +func (p *parser) scanBlank() error { + if p.useOptionX() { + for { + for p.charsRight() > 0 && isSpace(p.rightChar(0)) { + p.moveRight(1) + } + + if p.charsRight() == 0 { + break + } + + if p.rightChar(0) == '#' { + for p.charsRight() > 0 && p.rightChar(0) != '\n' { + p.moveRight(1) + } + } else if p.charsRight() >= 3 && p.rightChar(2) == '#' && + p.rightChar(1) == '?' && p.rightChar(0) == '(' { + for p.charsRight() > 0 && p.rightChar(0) != ')' { + p.moveRight(1) + } + if p.charsRight() == 0 { + return p.getErr(ErrUnterminatedComment) + } + p.moveRight(1) + } else { + break + } + } + } else { + for { + if p.charsRight() < 3 || p.rightChar(2) != '#' || + p.rightChar(1) != '?' || p.rightChar(0) != '(' { + return nil + } + + for p.charsRight() > 0 && p.rightChar(0) != ')' { + p.moveRight(1) + } + if p.charsRight() == 0 { + return p.getErr(ErrUnterminatedComment) + } + p.moveRight(1) + } + } + return nil +} + +func (p *parser) scanCapname() string { + startpos := p.textpos() + + for p.charsRight() > 0 { + if !IsWordChar(p.moveRightGetChar()) { + p.moveLeft() + break + } + } + + return string(p.pattern[startpos:p.textpos()]) +} + +//Scans contents of [] (not including []'s), and converts to a set. +func (p *parser) scanCharSet(caseInsensitive, scanOnly bool) (*CharSet, error) { + ch := '\x00' + chPrev := '\x00' + inRange := false + firstChar := true + closed := false + + var cc *CharSet + if !scanOnly { + cc = &CharSet{} + } + + if p.charsRight() > 0 && p.rightChar(0) == '^' { + p.moveRight(1) + if !scanOnly { + cc.negate = true + } + } + + for ; p.charsRight() > 0; firstChar = false { + fTranslatedChar := false + ch = p.moveRightGetChar() + if ch == ']' { + if !firstChar { + closed = true + break + } else if p.useOptionE() { + if !scanOnly { + cc.addRanges(NoneClass().ranges) + } + closed = true + break + } + + } else if ch == '\\' && p.charsRight() > 0 { + switch ch = p.moveRightGetChar(); ch { + case 'D', 'd': + if !scanOnly { + if inRange { + return nil, p.getErr(ErrBadClassInCharRange, ch) + } + cc.addDigit(p.useOptionE(), ch == 'D', p.patternRaw) + } + continue + + case 'S', 's': + if !scanOnly { + if inRange { + return nil, p.getErr(ErrBadClassInCharRange, ch) + } + cc.addSpace(p.useOptionE(), ch == 'S') + } + continue + + case 'W', 'w': + if !scanOnly { + if inRange { + return nil, p.getErr(ErrBadClassInCharRange, ch) + } + + cc.addWord(p.useOptionE(), ch == 'W') + } + continue + + case 'p', 'P': + if !scanOnly { + if inRange { + return nil, p.getErr(ErrBadClassInCharRange, ch) + } + prop, err := p.parseProperty() + if err != nil { + return nil, err + } + cc.addCategory(prop, (ch != 'p'), caseInsensitive, p.patternRaw) + } else { + p.parseProperty() + } + + continue + + case '-': + if !scanOnly { + cc.addRange(ch, ch) + } + continue + + default: + p.moveLeft() + var err error + ch, err = p.scanCharEscape() // non-literal character + if err != nil { + return nil, err + } + fTranslatedChar = true + break // this break will only break out of the switch + } + } else if ch == '[' { + // This is code for Posix style properties - [:Ll:] or [:IsTibetan:]. 
+ // It currently doesn't do anything other than skip the whole thing! + if p.charsRight() > 0 && p.rightChar(0) == ':' && !inRange { + savePos := p.textpos() + + p.moveRight(1) + p.scanCapname() // throwaway the name + if p.charsRight() < 2 || p.moveRightGetChar() != ':' || p.moveRightGetChar() != ']' { + p.textto(savePos) + } + // else lookup name (nyi) + } + } + + if inRange { + inRange = false + if !scanOnly { + if ch == '[' && !fTranslatedChar && !firstChar { + // We thought we were in a range, but we're actually starting a subtraction. + // In that case, we'll add chPrev to our char class, skip the opening [, and + // scan the new character class recursively. + cc.addChar(chPrev) + sub, err := p.scanCharSet(caseInsensitive, false) + if err != nil { + return nil, err + } + cc.addSubtraction(sub) + + if p.charsRight() > 0 && p.rightChar(0) != ']' { + return nil, p.getErr(ErrSubtractionMustBeLast) + } + } else { + // a regular range, like a-z + if chPrev > ch { + return nil, p.getErr(ErrReversedCharRange) + } + cc.addRange(chPrev, ch) + } + } + } else if p.charsRight() >= 2 && p.rightChar(0) == '-' && p.rightChar(1) != ']' { + // this could be the start of a range + chPrev = ch + inRange = true + p.moveRight(1) + } else if p.charsRight() >= 1 && ch == '-' && !fTranslatedChar && p.rightChar(0) == '[' && !firstChar { + // we aren't in a range, and now there is a subtraction. Usually this happens + // only when a subtraction follows a range, like [a-z-[b]] + if !scanOnly { + p.moveRight(1) + sub, err := p.scanCharSet(caseInsensitive, false) + if err != nil { + return nil, err + } + cc.addSubtraction(sub) + + if p.charsRight() > 0 && p.rightChar(0) != ']' { + return nil, p.getErr(ErrSubtractionMustBeLast) + } + } else { + p.moveRight(1) + p.scanCharSet(caseInsensitive, true) + } + } else { + if !scanOnly { + cc.addRange(ch, ch) + } + } + } + + if !closed { + return nil, p.getErr(ErrUnterminatedBracket) + } + + if !scanOnly && caseInsensitive { + cc.addLowercase() + } + + return cc, nil +} + +// Scans any number of decimal digits (pegs value at 2^31-1 if too large) +func (p *parser) scanDecimal() (int, error) { + i := 0 + var d int + + for p.charsRight() > 0 { + d = int(p.rightChar(0) - '0') + if d < 0 || d > 9 { + break + } + p.moveRight(1) + + if i > maxValueDiv10 || (i == maxValueDiv10 && d > maxValueMod10) { + return 0, p.getErr(ErrCaptureGroupOutOfRange) + } + + i *= 10 + i += d + } + + return int(i), nil +} + +// Returns true for options allowed only at the top level +func isOnlyTopOption(option RegexOptions) bool { + return option == RightToLeft || option == ECMAScript +} + +// Scans cimsx-cimsx option string, stops at the first unrecognized char. +func (p *parser) scanOptions() { + + for off := false; p.charsRight() > 0; p.moveRight(1) { + ch := p.rightChar(0) + + if ch == '-' { + off = true + } else if ch == '+' { + off = false + } else { + option := optionFromCode(ch) + if option == 0 || isOnlyTopOption(option) { + return + } + + if off { + p.options &= ^option + } else { + p.options |= option + } + } + } +} + +// Scans \ code for escape codes that map to single unicode chars. 
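+// Illustrative examples (editor's sketch, default options assumed):
+//   \n        -> U+000A            \t   -> U+0009
+//   \x41      -> 'A' (exactly two hex digits)
+//   \x{1F600} -> U+1F600 (Perl/PCRE-style braced hex, see scanHexUntilBrace)
+//   \u0041    -> 'A' (exactly four hex digits)
+//   \cA       -> U+0001 (control escape, see scanControl)
+//   \101      -> 'A' (up to three octal digits, see scanOctal)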
+func (p *parser) scanCharEscape() (rune, error) { + + ch := p.moveRightGetChar() + + if ch >= '0' && ch <= '7' { + p.moveLeft() + return p.scanOctal(), nil + } + + switch ch { + case 'x': + // support for \x{HEX} syntax from Perl and PCRE + if p.charsRight() > 0 && p.rightChar(0) == '{' { + p.moveRight(1) + return p.scanHexUntilBrace() + } + return p.scanHex(2) + case 'u': + return p.scanHex(4) + case 'a': + return '\u0007', nil + case 'b': + return '\b', nil + case 'e': + return '\u001B', nil + case 'f': + return '\f', nil + case 'n': + return '\n', nil + case 'r': + return '\r', nil + case 't': + return '\t', nil + case 'v': + return '\u000B', nil + case 'c': + return p.scanControl() + default: + if !p.useOptionE() && IsWordChar(ch) { + return 0, p.getErr(ErrUnrecognizedEscape, string(ch)) + } + return ch, nil + } +} + +// Grabs and converts an ascii control character +func (p *parser) scanControl() (rune, error) { + if p.charsRight() <= 0 { + return 0, p.getErr(ErrMissingControl) + } + + ch := p.moveRightGetChar() + + // \ca interpreted as \cA + + if ch >= 'a' && ch <= 'z' { + ch = (ch - ('a' - 'A')) + } + ch = (ch - '@') + if ch >= 0 && ch < ' ' { + return ch, nil + } + + return 0, p.getErr(ErrUnrecognizedControl) + +} + +// Scan hex digits until we hit a closing brace. +// Non-hex digits, hex value too large for UTF-8, or running out of chars are errors +func (p *parser) scanHexUntilBrace() (rune, error) { + // PCRE spec reads like unlimited hex digits are allowed, but unicode has a limit + // so we can enforce that + i := 0 + hasContent := false + + for p.charsRight() > 0 { + ch := p.moveRightGetChar() + if ch == '}' { + // hit our close brace, we're done here + // prevent \x{} + if !hasContent { + return 0, p.getErr(ErrTooFewHex) + } + return rune(i), nil + } + hasContent = true + // no brace needs to be hex digit + d := hexDigit(ch) + if d < 0 { + return 0, p.getErr(ErrMissingBrace) + } + + i *= 0x10 + i += d + + if i > unicode.MaxRune { + return 0, p.getErr(ErrInvalidHex) + } + } + + // we only make it here if we run out of digits without finding the brace + return 0, p.getErr(ErrMissingBrace) +} + +// Scans exactly c hex digits (c=2 for \xFF, c=4 for \uFFFF) +func (p *parser) scanHex(c int) (rune, error) { + + i := 0 + + if p.charsRight() >= c { + for c > 0 { + d := hexDigit(p.moveRightGetChar()) + if d < 0 { + break + } + i *= 0x10 + i += d + c-- + } + } + + if c > 0 { + return 0, p.getErr(ErrTooFewHex) + } + + return rune(i), nil +} + +// Returns n <= 0xF for a hex digit. +func hexDigit(ch rune) int { + + if d := uint(ch - '0'); d <= 9 { + return int(d) + } + + if d := uint(ch - 'a'); d <= 5 { + return int(d + 0xa) + } + + if d := uint(ch - 'A'); d <= 5 { + return int(d + 0xa) + } + + return -1 +} + +// Scans up to three octal digits (stops before exceeding 0377). +func (p *parser) scanOctal() rune { + // Consume octal chars only up to 3 digits and value 0377 + + c := 3 + + if c > p.charsRight() { + c = p.charsRight() + } + + //we know the first char is good because the caller had to check + i := 0 + d := int(p.rightChar(0) - '0') + for c > 0 && d <= 7 { + i *= 8 + i += d + if p.useOptionE() && i >= 0x20 { + break + } + c-- + + p.moveRight(1) + if !p.rightMost() { + d = int(p.rightChar(0) - '0') + } + } + + // Octal codes only go up to 255. Any larger and the behavior that Perl follows + // is simply to truncate the high bits. + i &= 0xFF + + return rune(i) +} + +// Returns the current parsing position. 
+func (p *parser) textpos() int { + return p.currentPos +} + +// Zaps to a specific parsing position. +func (p *parser) textto(pos int) { + p.currentPos = pos +} + +// Returns the char at the right of the current parsing position and advances to the right. +func (p *parser) moveRightGetChar() rune { + ch := p.pattern[p.currentPos] + p.currentPos++ + return ch +} + +// Moves the current position to the right. +func (p *parser) moveRight(i int) { + // default would be 1 + p.currentPos += i +} + +// Moves the current parsing position one to the left. +func (p *parser) moveLeft() { + p.currentPos-- +} + +// Returns the char left of the current parsing position. +func (p *parser) charAt(i int) rune { + return p.pattern[i] +} + +// Returns the char i chars right of the current parsing position. +func (p *parser) rightChar(i int) rune { + // default would be 0 + return p.pattern[p.currentPos+i] +} + +// Number of characters to the right of the current parsing position. +func (p *parser) charsRight() int { + return len(p.pattern) - p.currentPos +} + +func (p *parser) rightMost() bool { + return p.currentPos == len(p.pattern) +} + +// Looks up the slot number for a given name +func (p *parser) captureSlotFromName(capname string) int { + return p.capnames[capname] +} + +// True if the capture slot was noted +func (p *parser) isCaptureSlot(i int) bool { + if p.caps != nil { + _, ok := p.caps[i] + return ok + } + + return (i >= 0 && i < p.capsize) +} + +// Looks up the slot number for a given name +func (p *parser) isCaptureName(capname string) bool { + if p.capnames == nil { + return false + } + + _, ok := p.capnames[capname] + return ok +} + +// option shortcuts + +// True if N option disabling '(' autocapture is on. +func (p *parser) useOptionN() bool { + return (p.options & ExplicitCapture) != 0 +} + +// True if I option enabling case-insensitivity is on. +func (p *parser) useOptionI() bool { + return (p.options & IgnoreCase) != 0 +} + +// True if M option altering meaning of $ and ^ is on. +func (p *parser) useOptionM() bool { + return (p.options & Multiline) != 0 +} + +// True if S option altering meaning of . is on. +func (p *parser) useOptionS() bool { + return (p.options & Singleline) != 0 +} + +// True if X option enabling whitespace/comment mode is on. +func (p *parser) useOptionX() bool { + return (p.options & IgnorePatternWhitespace) != 0 +} + +// True if E option enabling ECMAScript behavior on. +func (p *parser) useOptionE() bool { + return (p.options & ECMAScript) != 0 +} + +// True if options stack is empty. 
+func (p *parser) emptyOptionsStack() bool { + return len(p.optionsStack) == 0 +} + +// Finish the current quantifiable (when a quantifier is not found or is not possible) +func (p *parser) addConcatenate() { + // The first (| inside a Testgroup group goes directly to the group + p.concatenation.addChild(p.unit) + p.unit = nil +} + +// Finish the current quantifiable (when a quantifier is found) +func (p *parser) addConcatenate3(lazy bool, min, max int) { + p.concatenation.addChild(p.unit.makeQuantifier(lazy, min, max)) + p.unit = nil +} + +// Sets the current unit to a single char node +func (p *parser) addUnitOne(ch rune) { + if p.useOptionI() { + ch = unicode.ToLower(ch) + } + + p.unit = newRegexNodeCh(ntOne, p.options, ch) +} + +// Sets the current unit to a single inverse-char node +func (p *parser) addUnitNotone(ch rune) { + if p.useOptionI() { + ch = unicode.ToLower(ch) + } + + p.unit = newRegexNodeCh(ntNotone, p.options, ch) +} + +// Sets the current unit to a single set node +func (p *parser) addUnitSet(set *CharSet) { + p.unit = newRegexNodeSet(ntSet, p.options, set) +} + +// Sets the current unit to a subtree +func (p *parser) addUnitNode(node *regexNode) { + p.unit = node +} + +// Sets the current unit to an assertion of the specified type +func (p *parser) addUnitType(t nodeType) { + p.unit = newRegexNode(t, p.options) +} + +// Finish the current group (in response to a ')' or end) +func (p *parser) addGroup() error { + if p.group.t == ntTestgroup || p.group.t == ntTestref { + p.group.addChild(p.concatenation.reverseLeft()) + if (p.group.t == ntTestref && len(p.group.children) > 2) || len(p.group.children) > 3 { + return p.getErr(ErrTooManyAlternates) + } + } else { + p.alternation.addChild(p.concatenation.reverseLeft()) + p.group.addChild(p.alternation) + } + + p.unit = p.group + return nil +} + +// Pops the option stack, but keeps the current options unchanged. +func (p *parser) popKeepOptions() { + lastIdx := len(p.optionsStack) - 1 + p.optionsStack = p.optionsStack[:lastIdx] +} + +// Recalls options from the stack. +func (p *parser) popOptions() { + lastIdx := len(p.optionsStack) - 1 + // get the last item on the stack and then remove it by reslicing + p.options = p.optionsStack[lastIdx] + p.optionsStack = p.optionsStack[:lastIdx] +} + +// Saves options on a stack. +func (p *parser) pushOptions() { + p.optionsStack = append(p.optionsStack, p.options) +} + +// Add a string to the last concatenate. +func (p *parser) addToConcatenate(pos, cch int, isReplacement bool) { + var node *regexNode + + if cch == 0 { + return + } + + if cch > 1 { + str := p.pattern[pos : pos+cch] + + if p.useOptionI() && !isReplacement { + // We do the ToLower character by character for consistency. With surrogate chars, doing + // a ToLower on the entire string could actually change the surrogate pair. This is more correct + // linguistically, but since Regex doesn't support surrogates, it's more important to be + // consistent. 
+ for i := 0; i < len(str); i++ { + str[i] = unicode.ToLower(str[i]) + } + } + + node = newRegexNodeStr(ntMulti, p.options, str) + } else { + ch := p.charAt(pos) + + if p.useOptionI() && !isReplacement { + ch = unicode.ToLower(ch) + } + + node = newRegexNodeCh(ntOne, p.options, ch) + } + + p.concatenation.addChild(node) +} + +// Push the parser state (in response to an open paren) +func (p *parser) pushGroup() { + p.group.next = p.stack + p.alternation.next = p.group + p.concatenation.next = p.alternation + p.stack = p.concatenation +} + +// Remember the pushed state (in response to a ')') +func (p *parser) popGroup() error { + p.concatenation = p.stack + p.alternation = p.concatenation.next + p.group = p.alternation.next + p.stack = p.group.next + + // The first () inside a Testgroup group goes directly to the group + if p.group.t == ntTestgroup && len(p.group.children) == 0 { + if p.unit == nil { + return p.getErr(ErrConditionalExpression) + } + + p.group.addChild(p.unit) + p.unit = nil + } + return nil +} + +// True if the group stack is empty. +func (p *parser) emptyStack() bool { + return p.stack == nil +} + +// Start a new round for the parser state (in response to an open paren or string start) +func (p *parser) startGroup(openGroup *regexNode) { + p.group = openGroup + p.alternation = newRegexNode(ntAlternate, p.options) + p.concatenation = newRegexNode(ntConcatenate, p.options) +} + +// Finish the current concatenation (in response to a |) +func (p *parser) addAlternate() { + // The | parts inside a Testgroup group go directly to the group + + if p.group.t == ntTestgroup || p.group.t == ntTestref { + p.group.addChild(p.concatenation.reverseLeft()) + } else { + p.alternation.addChild(p.concatenation.reverseLeft()) + } + + p.concatenation = newRegexNode(ntConcatenate, p.options) +} + +// For categorizing ascii characters. + +const ( + Q byte = 5 // quantifier + S = 4 // ordinary stopper + Z = 3 // ScanBlank stopper + X = 2 // whitespace + E = 1 // should be escaped +) + +var _category = []byte{ + //01 2 3 4 5 6 7 8 9 A B C D E F 0 1 2 3 4 5 6 7 8 9 A B C D E F + 0, 0, 0, 0, 0, 0, 0, 0, 0, X, X, X, X, X, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + // ! " # $ % & ' ( ) * + , - . / 0 1 2 3 4 5 6 7 8 9 : ; < = > ? + X, 0, 0, Z, S, 0, 0, 0, S, S, Q, Q, 0, 0, S, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, Q, + //@A B C D E F G H I J K L M N O P Q R S T U V W X Y Z [ \ ] ^ _ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, S, S, 0, S, 0, + //'a b c d e f g h i j k l m n o p q r s t u v w x y z { | } ~ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, Q, S, 0, 0, 0, +} + +func isSpace(ch rune) bool { + return (ch <= ' ' && _category[ch] == X) +} + +// Returns true for those characters that terminate a string of ordinary chars. +func isSpecial(ch rune) bool { + return (ch <= '|' && _category[ch] >= S) +} + +// Returns true for those characters that terminate a string of ordinary chars. +func isStopperX(ch rune) bool { + return (ch <= '|' && _category[ch] >= X) +} + +// Returns true for those characters that begin a quantifier. 
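+// Per the _category table above (illustrative note): only '*', '+', '?' and '{'
+// carry the Q classification, so these are the characters that can begin a
+// quantifier; '{' is validated further by isTrueQuantifier below.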
+func isQuantifier(ch rune) bool { + return (ch <= '{' && _category[ch] >= Q) +} + +func (p *parser) isTrueQuantifier() bool { + nChars := p.charsRight() + if nChars == 0 { + return false + } + + startpos := p.textpos() + ch := p.charAt(startpos) + if ch != '{' { + return ch <= '{' && _category[ch] >= Q + } + + //UGLY: this is ugly -- the original code was ugly too + pos := startpos + for { + nChars-- + if nChars <= 0 { + break + } + pos++ + ch = p.charAt(pos) + if ch < '0' || ch > '9' { + break + } + } + + if nChars == 0 || pos-startpos == 1 { + return false + } + if ch == '}' { + return true + } + if ch != ',' { + return false + } + for { + nChars-- + if nChars <= 0 { + break + } + pos++ + ch = p.charAt(pos) + if ch < '0' || ch > '9' { + break + } + } + + return nChars > 0 && ch == '}' +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/prefix.go b/vendor/github.com/dlclark/regexp2/syntax/prefix.go new file mode 100644 index 00000000..011ef0b4 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/prefix.go @@ -0,0 +1,896 @@ +package syntax + +import ( + "bytes" + "fmt" + "strconv" + "unicode" + "unicode/utf8" +) + +type Prefix struct { + PrefixStr []rune + PrefixSet CharSet + CaseInsensitive bool +} + +// It takes a RegexTree and computes the set of chars that can start it. +func getFirstCharsPrefix(tree *RegexTree) *Prefix { + s := regexFcd{ + fcStack: make([]regexFc, 32), + intStack: make([]int, 32), + } + fc := s.regexFCFromRegexTree(tree) + + if fc == nil || fc.nullable || fc.cc.IsEmpty() { + return nil + } + fcSet := fc.getFirstChars() + return &Prefix{PrefixSet: fcSet, CaseInsensitive: fc.caseInsensitive} +} + +type regexFcd struct { + intStack []int + intDepth int + fcStack []regexFc + fcDepth int + skipAllChildren bool // don't process any more children at the current level + skipchild bool // don't process the current child. + failed bool +} + +/* + * The main FC computation. It does a shortcutted depth-first walk + * through the tree and calls CalculateFC to emits code before + * and after each child of an interior node, and at each leaf. + */ +func (s *regexFcd) regexFCFromRegexTree(tree *RegexTree) *regexFc { + curNode := tree.root + curChild := 0 + + for { + if len(curNode.children) == 0 { + // This is a leaf node + s.calculateFC(curNode.t, curNode, 0) + } else if curChild < len(curNode.children) && !s.skipAllChildren { + // This is an interior node, and we have more children to analyze + s.calculateFC(curNode.t|beforeChild, curNode, curChild) + + if !s.skipchild { + curNode = curNode.children[curChild] + // this stack is how we get a depth first walk of the tree. + s.pushInt(curChild) + curChild = 0 + } else { + curChild++ + s.skipchild = false + } + continue + } + + // This is an interior node where we've finished analyzing all the children, or + // the end of a leaf node. + s.skipAllChildren = false + + if s.intIsEmpty() { + break + } + + curChild = s.popInt() + curNode = curNode.next + + s.calculateFC(curNode.t|afterChild, curNode, curChild) + if s.failed { + return nil + } + + curChild++ + } + + if s.fcIsEmpty() { + return nil + } + + return s.popFC() +} + +// To avoid recursion, we use a simple integer stack. +// This is the push. +func (s *regexFcd) pushInt(I int) { + if s.intDepth >= len(s.intStack) { + expanded := make([]int, s.intDepth*2) + copy(expanded, s.intStack) + s.intStack = expanded + } + + s.intStack[s.intDepth] = I + s.intDepth++ +} + +// True if the stack is empty. 
+func (s *regexFcd) intIsEmpty() bool { + return s.intDepth == 0 +} + +// This is the pop. +func (s *regexFcd) popInt() int { + s.intDepth-- + return s.intStack[s.intDepth] +} + +// We also use a stack of RegexFC objects. +// This is the push. +func (s *regexFcd) pushFC(fc regexFc) { + if s.fcDepth >= len(s.fcStack) { + expanded := make([]regexFc, s.fcDepth*2) + copy(expanded, s.fcStack) + s.fcStack = expanded + } + + s.fcStack[s.fcDepth] = fc + s.fcDepth++ +} + +// True if the stack is empty. +func (s *regexFcd) fcIsEmpty() bool { + return s.fcDepth == 0 +} + +// This is the pop. +func (s *regexFcd) popFC() *regexFc { + s.fcDepth-- + return &s.fcStack[s.fcDepth] +} + +// This is the top. +func (s *regexFcd) topFC() *regexFc { + return &s.fcStack[s.fcDepth-1] +} + +// Called in Beforechild to prevent further processing of the current child +func (s *regexFcd) skipChild() { + s.skipchild = true +} + +// FC computation and shortcut cases for each node type +func (s *regexFcd) calculateFC(nt nodeType, node *regexNode, CurIndex int) { + //fmt.Printf("NodeType: %v, CurIndex: %v, Desc: %v\n", nt, CurIndex, node.description()) + ci := false + rtl := false + + if nt <= ntRef { + if (node.options & IgnoreCase) != 0 { + ci = true + } + if (node.options & RightToLeft) != 0 { + rtl = true + } + } + + switch nt { + case ntConcatenate | beforeChild, ntAlternate | beforeChild, ntTestref | beforeChild, ntLoop | beforeChild, ntLazyloop | beforeChild: + break + + case ntTestgroup | beforeChild: + if CurIndex == 0 { + s.skipChild() + } + break + + case ntEmpty: + s.pushFC(regexFc{nullable: true}) + break + + case ntConcatenate | afterChild: + if CurIndex != 0 { + child := s.popFC() + cumul := s.topFC() + + s.failed = !cumul.addFC(*child, true) + } + + fc := s.topFC() + if !fc.nullable { + s.skipAllChildren = true + } + break + + case ntTestgroup | afterChild: + if CurIndex > 1 { + child := s.popFC() + cumul := s.topFC() + + s.failed = !cumul.addFC(*child, false) + } + break + + case ntAlternate | afterChild, ntTestref | afterChild: + if CurIndex != 0 { + child := s.popFC() + cumul := s.topFC() + + s.failed = !cumul.addFC(*child, false) + } + break + + case ntLoop | afterChild, ntLazyloop | afterChild: + if node.m == 0 { + fc := s.topFC() + fc.nullable = true + } + break + + case ntGroup | beforeChild, ntGroup | afterChild, ntCapture | beforeChild, ntCapture | afterChild, ntGreedy | beforeChild, ntGreedy | afterChild: + break + + case ntRequire | beforeChild, ntPrevent | beforeChild: + s.skipChild() + s.pushFC(regexFc{nullable: true}) + break + + case ntRequire | afterChild, ntPrevent | afterChild: + break + + case ntOne, ntNotone: + s.pushFC(newRegexFc(node.ch, nt == ntNotone, false, ci)) + break + + case ntOneloop, ntOnelazy: + s.pushFC(newRegexFc(node.ch, false, node.m == 0, ci)) + break + + case ntNotoneloop, ntNotonelazy: + s.pushFC(newRegexFc(node.ch, true, node.m == 0, ci)) + break + + case ntMulti: + if len(node.str) == 0 { + s.pushFC(regexFc{nullable: true}) + } else if !rtl { + s.pushFC(newRegexFc(node.str[0], false, false, ci)) + } else { + s.pushFC(newRegexFc(node.str[len(node.str)-1], false, false, ci)) + } + break + + case ntSet: + s.pushFC(regexFc{cc: node.set.Copy(), nullable: false, caseInsensitive: ci}) + break + + case ntSetloop, ntSetlazy: + s.pushFC(regexFc{cc: node.set.Copy(), nullable: node.m == 0, caseInsensitive: ci}) + break + + case ntRef: + s.pushFC(regexFc{cc: *AnyClass(), nullable: true, caseInsensitive: false}) + break + + case ntNothing, ntBol, ntEol, ntBoundary, ntNonboundary, 
ntECMABoundary, ntNonECMABoundary, ntBeginning, ntStart, ntEndZ, ntEnd: + s.pushFC(regexFc{nullable: true}) + break + + default: + panic(fmt.Sprintf("unexpected op code: %v", nt)) + } +} + +type regexFc struct { + cc CharSet + nullable bool + caseInsensitive bool +} + +func newRegexFc(ch rune, not, nullable, caseInsensitive bool) regexFc { + r := regexFc{ + caseInsensitive: caseInsensitive, + nullable: nullable, + } + if not { + if ch > 0 { + r.cc.addRange('\x00', ch-1) + } + if ch < 0xFFFF { + r.cc.addRange(ch+1, utf8.MaxRune) + } + } else { + r.cc.addRange(ch, ch) + } + return r +} + +func (r *regexFc) getFirstChars() CharSet { + if r.caseInsensitive { + r.cc.addLowercase() + } + + return r.cc +} + +func (r *regexFc) addFC(fc regexFc, concatenate bool) bool { + if !r.cc.IsMergeable() || !fc.cc.IsMergeable() { + return false + } + + if concatenate { + if !r.nullable { + return true + } + + if !fc.nullable { + r.nullable = false + } + } else { + if fc.nullable { + r.nullable = true + } + } + + r.caseInsensitive = r.caseInsensitive || fc.caseInsensitive + r.cc.addSet(fc.cc) + + return true +} + +// This is a related computation: it takes a RegexTree and computes the +// leading substring if it sees one. It's quite trivial and gives up easily. +func getPrefix(tree *RegexTree) *Prefix { + var concatNode *regexNode + nextChild := 0 + + curNode := tree.root + + for { + switch curNode.t { + case ntConcatenate: + if len(curNode.children) > 0 { + concatNode = curNode + nextChild = 0 + } + + case ntGreedy, ntCapture: + curNode = curNode.children[0] + concatNode = nil + continue + + case ntOneloop, ntOnelazy: + if curNode.m > 0 { + return &Prefix{ + PrefixStr: repeat(curNode.ch, curNode.m), + CaseInsensitive: (curNode.options & IgnoreCase) != 0, + } + } + return nil + + case ntOne: + return &Prefix{ + PrefixStr: []rune{curNode.ch}, + CaseInsensitive: (curNode.options & IgnoreCase) != 0, + } + + case ntMulti: + return &Prefix{ + PrefixStr: curNode.str, + CaseInsensitive: (curNode.options & IgnoreCase) != 0, + } + + case ntBol, ntEol, ntBoundary, ntECMABoundary, ntBeginning, ntStart, + ntEndZ, ntEnd, ntEmpty, ntRequire, ntPrevent: + + default: + return nil + } + + if concatNode == nil || nextChild >= len(concatNode.children) { + return nil + } + + curNode = concatNode.children[nextChild] + nextChild++ + } +} + +// repeat the rune r, c times... up to the max of MaxPrefixSize +func repeat(r rune, c int) []rune { + if c > MaxPrefixSize { + c = MaxPrefixSize + } + + ret := make([]rune, c) + + // binary growth using copy for speed + ret[0] = r + bp := 1 + for bp < len(ret) { + copy(ret[bp:], ret[:bp]) + bp *= 2 + } + + return ret +} + +// BmPrefix precomputes the Boyer-Moore +// tables for fast string scanning. These tables allow +// you to scan for the first occurrence of a string within +// a large body of text without examining every character. +// The performance of the heuristic depends on the actual +// string and the text being searched, but usually, the longer +// the string that is being searched for, the fewer characters +// need to be examined. 
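+// As a rough illustration (editor's note): when scanning for "example" and the
+// text character aligned with the end of the pattern is one that never occurs
+// in "example" at all, the search position can jump ahead by the full pattern
+// length (7 runes) without inspecting any of the skipped characters.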
+type BmPrefix struct { + positive []int + negativeASCII []int + negativeUnicode [][]int + pattern []rune + lowASCII rune + highASCII rune + rightToLeft bool + caseInsensitive bool +} + +func newBmPrefix(pattern []rune, caseInsensitive, rightToLeft bool) *BmPrefix { + + b := &BmPrefix{ + rightToLeft: rightToLeft, + caseInsensitive: caseInsensitive, + pattern: pattern, + } + + if caseInsensitive { + for i := 0; i < len(b.pattern); i++ { + // We do the ToLower character by character for consistency. With surrogate chars, doing + // a ToLower on the entire string could actually change the surrogate pair. This is more correct + // linguistically, but since Regex doesn't support surrogates, it's more important to be + // consistent. + + b.pattern[i] = unicode.ToLower(b.pattern[i]) + } + } + + var beforefirst, last, bump int + var scan, match int + + if !rightToLeft { + beforefirst = -1 + last = len(b.pattern) - 1 + bump = 1 + } else { + beforefirst = len(b.pattern) + last = 0 + bump = -1 + } + + // PART I - the good-suffix shift table + // + // compute the positive requirement: + // if char "i" is the first one from the right that doesn't match, + // then we know the matcher can advance by _positive[i]. + // + // This algorithm is a simplified variant of the standard + // Boyer-Moore good suffix calculation. + + b.positive = make([]int, len(b.pattern)) + + examine := last + ch := b.pattern[examine] + b.positive[examine] = bump + examine -= bump + +Outerloop: + for { + // find an internal char (examine) that matches the tail + + for { + if examine == beforefirst { + break Outerloop + } + if b.pattern[examine] == ch { + break + } + examine -= bump + } + + match = last + scan = examine + + // find the length of the match + for { + if scan == beforefirst || b.pattern[match] != b.pattern[scan] { + // at the end of the match, note the difference in _positive + // this is not the length of the match, but the distance from the internal match + // to the tail suffix. + if b.positive[match] == 0 { + b.positive[match] = match - scan + } + + // System.Diagnostics.Debug.WriteLine("Set positive[" + match + "] to " + (match - scan)); + + break + } + + scan -= bump + match -= bump + } + + examine -= bump + } + + match = last - bump + + // scan for the chars for which there are no shifts that yield a different candidate + + // The inside of the if statement used to say + // "_positive[match] = last - beforefirst;" + // This is slightly less aggressive in how much we skip, but at worst it + // should mean a little more work rather than skipping a potential match. + for match != beforefirst { + if b.positive[match] == 0 { + b.positive[match] = bump + } + + match -= bump + } + + // PART II - the bad-character shift table + // + // compute the negative requirement: + // if char "ch" is the reject character when testing position "i", + // we can slide up by _negative[ch]; + // (_negative[ch] = str.Length - 1 - str.LastIndexOf(ch)) + // + // the lookup table is divided into ASCII and Unicode portions; + // only those parts of the Unicode 16-bit code set that actually + // appear in the string are in the table. (Maximum size with + // Unicode is 65K; ASCII only case is 512 bytes.) 
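+ // Worked example (illustrative, left-to-right and case-sensitive): for the
+ // pattern "abcab" (last = 4, beforefirst = -1) the loop below produces
+ // negativeASCII['b'] = 0, negativeASCII['a'] = 1 and negativeASCII['c'] = 2,
+ // while every character that never appears keeps the default of
+ // last - beforefirst = 5, i.e. a full pattern-length skip.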
+ + b.negativeASCII = make([]int, 128) + + for i := 0; i < len(b.negativeASCII); i++ { + b.negativeASCII[i] = last - beforefirst + } + + b.lowASCII = 127 + b.highASCII = 0 + + for examine = last; examine != beforefirst; examine -= bump { + ch = b.pattern[examine] + + switch { + case ch < 128: + if b.lowASCII > ch { + b.lowASCII = ch + } + + if b.highASCII < ch { + b.highASCII = ch + } + + if b.negativeASCII[ch] == last-beforefirst { + b.negativeASCII[ch] = last - examine + } + case ch <= 0xffff: + i, j := ch>>8, ch&0xFF + + if b.negativeUnicode == nil { + b.negativeUnicode = make([][]int, 256) + } + + if b.negativeUnicode[i] == nil { + newarray := make([]int, 256) + + for k := 0; k < len(newarray); k++ { + newarray[k] = last - beforefirst + } + + if i == 0 { + copy(newarray, b.negativeASCII) + //TODO: this line needed? + b.negativeASCII = newarray + } + + b.negativeUnicode[i] = newarray + } + + if b.negativeUnicode[i][j] == last-beforefirst { + b.negativeUnicode[i][j] = last - examine + } + default: + // we can't do the filter because this algo doesn't support + // unicode chars >0xffff + return nil + } + } + + return b +} + +func (b *BmPrefix) String() string { + return string(b.pattern) +} + +// Dump returns the contents of the filter as a human readable string +func (b *BmPrefix) Dump(indent string) string { + buf := &bytes.Buffer{} + + fmt.Fprintf(buf, "%sBM Pattern: %s\n%sPositive: ", indent, string(b.pattern), indent) + for i := 0; i < len(b.positive); i++ { + buf.WriteString(strconv.Itoa(b.positive[i])) + buf.WriteRune(' ') + } + buf.WriteRune('\n') + + if b.negativeASCII != nil { + buf.WriteString(indent) + buf.WriteString("Negative table\n") + for i := 0; i < len(b.negativeASCII); i++ { + if b.negativeASCII[i] != len(b.pattern) { + fmt.Fprintf(buf, "%s %s %s\n", indent, Escape(string(rune(i))), strconv.Itoa(b.negativeASCII[i])) + } + } + } + + return buf.String() +} + +// Scan uses the Boyer-Moore algorithm to find the first occurrence +// of the specified string within text, beginning at index, and +// constrained within beglimit and endlimit. +// +// The direction and case-sensitivity of the match is determined +// by the arguments to the RegexBoyerMoore constructor. 
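+// For instance (illustrative, hypothetical values): with a left-to-right,
+// case-sensitive prefix built from "abc", Scan([]rune("xxabcxx"), 0, 0, 7)
+// returns 2, the index where the prefix starts; when the prefix does not occur
+// within the given limits, Scan returns -1.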
+func (b *BmPrefix) Scan(text []rune, index, beglimit, endlimit int) int { + var ( + defadv, test, test2 int + match, startmatch, endmatch int + bump, advance int + chTest rune + unicodeLookup []int + ) + + if !b.rightToLeft { + defadv = len(b.pattern) + startmatch = len(b.pattern) - 1 + endmatch = 0 + test = index + defadv - 1 + bump = 1 + } else { + defadv = -len(b.pattern) + startmatch = 0 + endmatch = -defadv - 1 + test = index + defadv + bump = -1 + } + + chMatch := b.pattern[startmatch] + + for { + if test >= endlimit || test < beglimit { + return -1 + } + + chTest = text[test] + + if b.caseInsensitive { + chTest = unicode.ToLower(chTest) + } + + if chTest != chMatch { + if chTest < 128 { + advance = b.negativeASCII[chTest] + } else if chTest < 0xffff && len(b.negativeUnicode) > 0 { + unicodeLookup = b.negativeUnicode[chTest>>8] + if len(unicodeLookup) > 0 { + advance = unicodeLookup[chTest&0xFF] + } else { + advance = defadv + } + } else { + advance = defadv + } + + test += advance + } else { // if (chTest == chMatch) + test2 = test + match = startmatch + + for { + if match == endmatch { + if b.rightToLeft { + return test2 + 1 + } else { + return test2 + } + } + + match -= bump + test2 -= bump + + chTest = text[test2] + + if b.caseInsensitive { + chTest = unicode.ToLower(chTest) + } + + if chTest != b.pattern[match] { + advance = b.positive[match] + if (chTest & 0xFF80) == 0 { + test2 = (match - startmatch) + b.negativeASCII[chTest] + } else if chTest < 0xffff && len(b.negativeUnicode) > 0 { + unicodeLookup = b.negativeUnicode[chTest>>8] + if len(unicodeLookup) > 0 { + test2 = (match - startmatch) + unicodeLookup[chTest&0xFF] + } else { + test += advance + break + } + } else { + test += advance + break + } + + if b.rightToLeft { + if test2 < advance { + advance = test2 + } + } else if test2 > advance { + advance = test2 + } + + test += advance + break + } + } + } + } +} + +// When a regex is anchored, we can do a quick IsMatch test instead of a Scan +func (b *BmPrefix) IsMatch(text []rune, index, beglimit, endlimit int) bool { + if !b.rightToLeft { + if index < beglimit || endlimit-index < len(b.pattern) { + return false + } + + return b.matchPattern(text, index) + } else { + if index > endlimit || index-beglimit < len(b.pattern) { + return false + } + + return b.matchPattern(text, index-len(b.pattern)) + } +} + +func (b *BmPrefix) matchPattern(text []rune, index int) bool { + if len(text)-index < len(b.pattern) { + return false + } + + if b.caseInsensitive { + for i := 0; i < len(b.pattern); i++ { + //Debug.Assert(textinfo.ToLower(_pattern[i]) == _pattern[i], "pattern should be converted to lower case in constructor!"); + if unicode.ToLower(text[index+i]) != b.pattern[i] { + return false + } + } + return true + } else { + for i := 0; i < len(b.pattern); i++ { + if text[index+i] != b.pattern[i] { + return false + } + } + return true + } +} + +type AnchorLoc int16 + +// where the regex can be pegged +const ( + AnchorBeginning AnchorLoc = 0x0001 + AnchorBol = 0x0002 + AnchorStart = 0x0004 + AnchorEol = 0x0008 + AnchorEndZ = 0x0010 + AnchorEnd = 0x0020 + AnchorBoundary = 0x0040 + AnchorECMABoundary = 0x0080 +) + +func getAnchors(tree *RegexTree) AnchorLoc { + + var concatNode *regexNode + nextChild, result := 0, AnchorLoc(0) + + curNode := tree.root + + for { + switch curNode.t { + case ntConcatenate: + if len(curNode.children) > 0 { + concatNode = curNode + nextChild = 0 + } + + case ntGreedy, ntCapture: + curNode = curNode.children[0] + concatNode = nil + continue + + case ntBol, 
ntEol, ntBoundary, ntECMABoundary, ntBeginning, + ntStart, ntEndZ, ntEnd: + return result | anchorFromType(curNode.t) + + case ntEmpty, ntRequire, ntPrevent: + + default: + return result + } + + if concatNode == nil || nextChild >= len(concatNode.children) { + return result + } + + curNode = concatNode.children[nextChild] + nextChild++ + } +} + +func anchorFromType(t nodeType) AnchorLoc { + switch t { + case ntBol: + return AnchorBol + case ntEol: + return AnchorEol + case ntBoundary: + return AnchorBoundary + case ntECMABoundary: + return AnchorECMABoundary + case ntBeginning: + return AnchorBeginning + case ntStart: + return AnchorStart + case ntEndZ: + return AnchorEndZ + case ntEnd: + return AnchorEnd + default: + return 0 + } +} + +// anchorDescription returns a human-readable description of the anchors +func (anchors AnchorLoc) String() string { + buf := &bytes.Buffer{} + + if 0 != (anchors & AnchorBeginning) { + buf.WriteString(", Beginning") + } + if 0 != (anchors & AnchorStart) { + buf.WriteString(", Start") + } + if 0 != (anchors & AnchorBol) { + buf.WriteString(", Bol") + } + if 0 != (anchors & AnchorBoundary) { + buf.WriteString(", Boundary") + } + if 0 != (anchors & AnchorECMABoundary) { + buf.WriteString(", ECMABoundary") + } + if 0 != (anchors & AnchorEol) { + buf.WriteString(", Eol") + } + if 0 != (anchors & AnchorEnd) { + buf.WriteString(", End") + } + if 0 != (anchors & AnchorEndZ) { + buf.WriteString(", EndZ") + } + + // trim off comma + if buf.Len() >= 2 { + return buf.String()[2:] + } + return "None" +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/replacerdata.go b/vendor/github.com/dlclark/regexp2/syntax/replacerdata.go new file mode 100644 index 00000000..bcf4d3f2 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/replacerdata.go @@ -0,0 +1,87 @@ +package syntax + +import ( + "bytes" + "errors" +) + +type ReplacerData struct { + Rep string + Strings []string + Rules []int +} + +const ( + replaceSpecials = 4 + replaceLeftPortion = -1 + replaceRightPortion = -2 + replaceLastGroup = -3 + replaceWholeString = -4 +) + +//ErrReplacementError is a general error during parsing the replacement text +var ErrReplacementError = errors.New("Replacement pattern error.") + +// NewReplacerData will populate a reusable replacer data struct based on the given replacement string +// and the capture group data from a regexp +func NewReplacerData(rep string, caps map[int]int, capsize int, capnames map[string]int, op RegexOptions) (*ReplacerData, error) { + p := parser{ + options: op, + caps: caps, + capsize: capsize, + capnames: capnames, + } + p.setPattern(rep) + concat, err := p.scanReplacement() + if err != nil { + return nil, err + } + + if concat.t != ntConcatenate { + panic(ErrReplacementError) + } + + sb := &bytes.Buffer{} + var ( + strings []string + rules []int + ) + + for _, child := range concat.children { + switch child.t { + case ntMulti: + child.writeStrToBuf(sb) + + case ntOne: + sb.WriteRune(child.ch) + + case ntRef: + if sb.Len() > 0 { + rules = append(rules, len(strings)) + strings = append(strings, sb.String()) + sb.Reset() + } + slot := child.m + + if len(caps) > 0 && slot >= 0 { + slot = caps[slot] + } + + rules = append(rules, -replaceSpecials-1-slot) + + default: + panic(ErrReplacementError) + } + } + + if sb.Len() > 0 { + rules = append(rules, len(strings)) + strings = append(strings, sb.String()) + } + + return &ReplacerData{ + Rep: rep, + Strings: strings, + Rules: rules, + }, nil +} diff --git 
a/vendor/github.com/dlclark/regexp2/syntax/tree.go b/vendor/github.com/dlclark/regexp2/syntax/tree.go new file mode 100644 index 00000000..ea288293 --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/tree.go @@ -0,0 +1,654 @@ +package syntax + +import ( + "bytes" + "fmt" + "math" + "strconv" +) + +type RegexTree struct { + root *regexNode + caps map[int]int + capnumlist []int + captop int + Capnames map[string]int + Caplist []string + options RegexOptions +} + +// It is built into a parsed tree for a regular expression. + +// Implementation notes: +// +// Since the node tree is a temporary data structure only used +// during compilation of the regexp to integer codes, it's +// designed for clarity and convenience rather than +// space efficiency. +// +// RegexNodes are built into a tree, linked by the n.children list. +// Each node also has a n.parent and n.ichild member indicating +// its parent and which child # it is in its parent's list. +// +// RegexNodes come in as many types as there are constructs in +// a regular expression, for example, "concatenate", "alternate", +// "one", "rept", "group". There are also node types for basic +// peephole optimizations, e.g., "onerep", "notsetrep", etc. +// +// Because perl 5 allows "lookback" groups that scan backwards, +// each node also gets a "direction". Normally the value of +// boolean n.backward = false. +// +// During parsing, top-level nodes are also stacked onto a parse +// stack (a stack of trees). For this purpose we have a n.next +// pointer. [Note that to save a few bytes, we could overload the +// n.parent pointer instead.] +// +// On the parse stack, each tree has a "role" - basically, the +// nonterminal in the grammar that the parser has currently +// assigned to the tree. That code is stored in n.role. +// +// Finally, some of the different kinds of nodes have data. +// Two integers (for the looping constructs) are stored in +// n.operands, an an object (either a string or a set) +// is stored in n.data +type regexNode struct { + t nodeType + children []*regexNode + str []rune + set *CharSet + ch rune + m int + n int + options RegexOptions + next *regexNode +} + +type nodeType int32 + +const ( + // The following are leaves, and correspond to primitive operations + + ntOnerep nodeType = 0 // lef,back char,min,max a {n} + ntNotonerep = 1 // lef,back char,min,max .{n} + ntSetrep = 2 // lef,back set,min,max [\d]{n} + ntOneloop = 3 // lef,back char,min,max a {,n} + ntNotoneloop = 4 // lef,back char,min,max .{,n} + ntSetloop = 5 // lef,back set,min,max [\d]{,n} + ntOnelazy = 6 // lef,back char,min,max a {,n}? + ntNotonelazy = 7 // lef,back char,min,max .{,n}? + ntSetlazy = 8 // lef,back set,min,max [\d]{,n}? + ntOne = 9 // lef char a + ntNotone = 10 // lef char [^a] + ntSet = 11 // lef set [a-z\s] \w \s \d + ntMulti = 12 // lef string abcd + ntRef = 13 // lef group \# + ntBol = 14 // ^ + ntEol = 15 // $ + ntBoundary = 16 // \b + ntNonboundary = 17 // \B + ntBeginning = 18 // \A + ntStart = 19 // \G + ntEndZ = 20 // \Z + ntEnd = 21 // \Z + + // Interior nodes do not correspond to primitive operations, but + // control structures compositing other operations + + // Concat and alternate take n children, and can run forward or backwards + + ntNothing = 22 // [] + ntEmpty = 23 // () + ntAlternate = 24 // a|b + ntConcatenate = 25 // ab + ntLoop = 26 // m,x * + ? {,} + ntLazyloop = 27 // m,x *? +? ?? {,}? + ntCapture = 28 // n () + ntGroup = 29 // (?:) + ntRequire = 30 // (?=) (?<=) + ntPrevent = 31 // (?!) (?) 
+ ntGreedy = 32 // (?>) (?<) + ntTestref = 33 // (?(n) | ) + ntTestgroup = 34 // (?(...) | ) + + ntECMABoundary = 41 // \b + ntNonECMABoundary = 42 // \B +) + +func newRegexNode(t nodeType, opt RegexOptions) *regexNode { + return &regexNode{ + t: t, + options: opt, + } +} + +func newRegexNodeCh(t nodeType, opt RegexOptions, ch rune) *regexNode { + return &regexNode{ + t: t, + options: opt, + ch: ch, + } +} + +func newRegexNodeStr(t nodeType, opt RegexOptions, str []rune) *regexNode { + return &regexNode{ + t: t, + options: opt, + str: str, + } +} + +func newRegexNodeSet(t nodeType, opt RegexOptions, set *CharSet) *regexNode { + return &regexNode{ + t: t, + options: opt, + set: set, + } +} + +func newRegexNodeM(t nodeType, opt RegexOptions, m int) *regexNode { + return &regexNode{ + t: t, + options: opt, + m: m, + } +} +func newRegexNodeMN(t nodeType, opt RegexOptions, m, n int) *regexNode { + return &regexNode{ + t: t, + options: opt, + m: m, + n: n, + } +} + +func (n *regexNode) writeStrToBuf(buf *bytes.Buffer) { + for i := 0; i < len(n.str); i++ { + buf.WriteRune(n.str[i]) + } +} + +func (n *regexNode) addChild(child *regexNode) { + reduced := child.reduce() + n.children = append(n.children, reduced) + reduced.next = n +} + +func (n *regexNode) insertChildren(afterIndex int, nodes []*regexNode) { + newChildren := make([]*regexNode, 0, len(n.children)+len(nodes)) + n.children = append(append(append(newChildren, n.children[:afterIndex]...), nodes...), n.children[afterIndex:]...) +} + +// removes children including the start but not the end index +func (n *regexNode) removeChildren(startIndex, endIndex int) { + n.children = append(n.children[:startIndex], n.children[endIndex:]...) +} + +// Pass type as OneLazy or OneLoop +func (n *regexNode) makeRep(t nodeType, min, max int) { + n.t += (t - ntOne) + n.m = min + n.n = max +} + +func (n *regexNode) reduce() *regexNode { + switch n.t { + case ntAlternate: + return n.reduceAlternation() + + case ntConcatenate: + return n.reduceConcatenation() + + case ntLoop, ntLazyloop: + return n.reduceRep() + + case ntGroup: + return n.reduceGroup() + + case ntSet, ntSetloop: + return n.reduceSet() + + default: + return n + } +} + +// Basic optimization. Single-letter alternations can be replaced + // by faster set specifications, and nested alternations with no + // intervening operators can be flattened: + // + // a|b|c|def|g|h -> [a-c]|def|[gh] + // apple|(?:orange|pear)|grape -> apple|orange|pear|grape +func (n *regexNode) reduceAlternation() *regexNode { + if len(n.children) == 0 { + return newRegexNode(ntNothing, n.options) + } + + wasLastSet := false + lastNodeCannotMerge := false + var optionsLast RegexOptions + var i, j int + + for i, j = 0, 0; i < len(n.children); i, j = i+1, j+1 { + at := n.children[i] + + if j < i { + n.children[j] = at + } + + for { + if at.t == ntAlternate { + for k := 0; k < len(at.children); k++ { + at.children[k].next = n + } + n.insertChildren(i+1, at.children) + + j-- + } else if at.t == ntSet || at.t == ntOne { + // Cannot merge sets if L or I options differ, or if either are negated. 
+ optionsAt := at.options & (RightToLeft | IgnoreCase) + + if at.t == ntSet { + if !wasLastSet || optionsLast != optionsAt || lastNodeCannotMerge || !at.set.IsMergeable() { + wasLastSet = true + lastNodeCannotMerge = !at.set.IsMergeable() + optionsLast = optionsAt + break + } + } else if !wasLastSet || optionsLast != optionsAt || lastNodeCannotMerge { + wasLastSet = true + lastNodeCannotMerge = false + optionsLast = optionsAt + break + } + + // The last node was a Set or a One, we're a Set or One and our options are the same. + // Merge the two nodes. + j-- + prev := n.children[j] + + var prevCharClass *CharSet + if prev.t == ntOne { + prevCharClass = &CharSet{} + prevCharClass.addChar(prev.ch) + } else { + prevCharClass = prev.set + } + + if at.t == ntOne { + prevCharClass.addChar(at.ch) + } else { + prevCharClass.addSet(*at.set) + } + + prev.t = ntSet + prev.set = prevCharClass + } else if at.t == ntNothing { + j-- + } else { + wasLastSet = false + lastNodeCannotMerge = false + } + break + } + } + + if j < i { + n.removeChildren(j, i) + } + + return n.stripEnation(ntNothing) +} + +// Basic optimization. Adjacent strings can be concatenated. +// +// (?:abc)(?:def) -> abcdef +func (n *regexNode) reduceConcatenation() *regexNode { + // Eliminate empties and concat adjacent strings/chars + + var optionsLast RegexOptions + var optionsAt RegexOptions + var i, j int + + if len(n.children) == 0 { + return newRegexNode(ntEmpty, n.options) + } + + wasLastString := false + + for i, j = 0, 0; i < len(n.children); i, j = i+1, j+1 { + var at, prev *regexNode + + at = n.children[i] + + if j < i { + n.children[j] = at + } + + if at.t == ntConcatenate && + ((at.options & RightToLeft) == (n.options & RightToLeft)) { + for k := 0; k < len(at.children); k++ { + at.children[k].next = n + } + + //insert at.children at i+1 index in n.children + n.insertChildren(i+1, at.children) + + j-- + } else if at.t == ntMulti || at.t == ntOne { + // Cannot merge strings if L or I options differ + optionsAt = at.options & (RightToLeft | IgnoreCase) + + if !wasLastString || optionsLast != optionsAt { + wasLastString = true + optionsLast = optionsAt + continue + } + + j-- + prev = n.children[j] + + if prev.t == ntOne { + prev.t = ntMulti + prev.str = []rune{prev.ch} + } + + if (optionsAt & RightToLeft) == 0 { + if at.t == ntOne { + prev.str = append(prev.str, at.ch) + } else { + prev.str = append(prev.str, at.str...) 
+ } + } else { + if at.t == ntOne { + // insert at the front by expanding our slice, copying the data over, and then setting the value + prev.str = append(prev.str, 0) + copy(prev.str[1:], prev.str) + prev.str[0] = at.ch + } else { + //insert at the front...this one we'll make a new slice and copy both into it + merge := make([]rune, len(prev.str)+len(at.str)) + copy(merge, at.str) + copy(merge[len(at.str):], prev.str) + prev.str = merge + } + } + } else if at.t == ntEmpty { + j-- + } else { + wasLastString = false + } + } + + if j < i { + // remove indices j through i from the children + n.removeChildren(j, i) + } + + return n.stripEnation(ntEmpty) +} + +// Nested repeaters just get multiplied with each other if they're not +// too lumpy +func (n *regexNode) reduceRep() *regexNode { + + u := n + t := n.t + min := n.m + max := n.n + + for { + if len(u.children) == 0 { + break + } + + child := u.children[0] + + // multiply reps of the same type only + if child.t != t { + childType := child.t + + if !(childType >= ntOneloop && childType <= ntSetloop && t == ntLoop || + childType >= ntOnelazy && childType <= ntSetlazy && t == ntLazyloop) { + break + } + } + + // child can be too lumpy to blur, e.g., (a {100,105}) {3} or (a {2,})? + // [but things like (a {2,})+ are not too lumpy...] + if u.m == 0 && child.m > 1 || child.n < child.m*2 { + break + } + + u = child + if u.m > 0 { + if (math.MaxInt32-1)/u.m < min { + u.m = math.MaxInt32 + } else { + u.m = u.m * min + } + } + if u.n > 0 { + if (math.MaxInt32-1)/u.n < max { + u.n = math.MaxInt32 + } else { + u.n = u.n * max + } + } + } + + if math.MaxInt32 == min { + return newRegexNode(ntNothing, n.options) + } + return u + +} + +// Simple optimization. If a concatenation or alternation has only +// one child strip out the intermediate node. If it has zero children, +// turn it into an empty. +func (n *regexNode) stripEnation(emptyType nodeType) *regexNode { + switch len(n.children) { + case 0: + return newRegexNode(emptyType, n.options) + case 1: + return n.children[0] + default: + return n + } +} + +func (n *regexNode) reduceGroup() *regexNode { + u := n + + for u.t == ntGroup { + u = u.children[0] + } + + return u +} + +// Simple optimization. If a set is a singleton, an inverse singleton, +// or empty, it's transformed accordingly. 
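+// For example (illustrative note): a class holding only 'a' becomes a One node
+// for 'a', a negated singleton like [^a] becomes a Notone node, and a nil set
+// reduces to Nothing; Setloop/Setlazy shift to their One/Notone counterparts in
+// the same way through the type-offset arithmetic below.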
+func (n *regexNode) reduceSet() *regexNode { + // Extract empty-set, one and not-one case as special + + if n.set == nil { + n.t = ntNothing + } else if n.set.IsSingleton() { + n.ch = n.set.SingletonChar() + n.set = nil + n.t += (ntOne - ntSet) + } else if n.set.IsSingletonInverse() { + n.ch = n.set.SingletonChar() + n.set = nil + n.t += (ntNotone - ntSet) + } + + return n +} + +func (n *regexNode) reverseLeft() *regexNode { + if n.options&RightToLeft != 0 && n.t == ntConcatenate && len(n.children) > 0 { + //reverse children order + for left, right := 0, len(n.children)-1; left < right; left, right = left+1, right-1 { + n.children[left], n.children[right] = n.children[right], n.children[left] + } + } + + return n +} + +func (n *regexNode) makeQuantifier(lazy bool, min, max int) *regexNode { + if min == 0 && max == 0 { + return newRegexNode(ntEmpty, n.options) + } + + if min == 1 && max == 1 { + return n + } + + switch n.t { + case ntOne, ntNotone, ntSet: + if lazy { + n.makeRep(Onelazy, min, max) + } else { + n.makeRep(Oneloop, min, max) + } + return n + + default: + var t nodeType + if lazy { + t = ntLazyloop + } else { + t = ntLoop + } + result := newRegexNodeMN(t, n.options, min, max) + result.addChild(n) + return result + } +} + +// debug functions + +var typeStr = []string{ + "Onerep", "Notonerep", "Setrep", + "Oneloop", "Notoneloop", "Setloop", + "Onelazy", "Notonelazy", "Setlazy", + "One", "Notone", "Set", + "Multi", "Ref", + "Bol", "Eol", "Boundary", "Nonboundary", + "Beginning", "Start", "EndZ", "End", + "Nothing", "Empty", + "Alternate", "Concatenate", + "Loop", "Lazyloop", + "Capture", "Group", "Require", "Prevent", "Greedy", + "Testref", "Testgroup", + "Unknown", "Unknown", "Unknown", + "Unknown", "Unknown", "Unknown", + "ECMABoundary", "NonECMABoundary", +} + +func (n *regexNode) description() string { + buf := &bytes.Buffer{} + + buf.WriteString(typeStr[n.t]) + + if (n.options & ExplicitCapture) != 0 { + buf.WriteString("-C") + } + if (n.options & IgnoreCase) != 0 { + buf.WriteString("-I") + } + if (n.options & RightToLeft) != 0 { + buf.WriteString("-L") + } + if (n.options & Multiline) != 0 { + buf.WriteString("-M") + } + if (n.options & Singleline) != 0 { + buf.WriteString("-S") + } + if (n.options & IgnorePatternWhitespace) != 0 { + buf.WriteString("-X") + } + if (n.options & ECMAScript) != 0 { + buf.WriteString("-E") + } + + switch n.t { + case ntOneloop, ntNotoneloop, ntOnelazy, ntNotonelazy, ntOne, ntNotone: + buf.WriteString("(Ch = " + CharDescription(n.ch) + ")") + break + case ntCapture: + buf.WriteString("(index = " + strconv.Itoa(n.m) + ", unindex = " + strconv.Itoa(n.n) + ")") + break + case ntRef, ntTestref: + buf.WriteString("(index = " + strconv.Itoa(n.m) + ")") + break + case ntMulti: + fmt.Fprintf(buf, "(String = %s)", string(n.str)) + break + case ntSet, ntSetloop, ntSetlazy: + buf.WriteString("(Set = " + n.set.String() + ")") + break + } + + switch n.t { + case ntOneloop, ntNotoneloop, ntOnelazy, ntNotonelazy, ntSetloop, ntSetlazy, ntLoop, ntLazyloop: + buf.WriteString("(Min = ") + buf.WriteString(strconv.Itoa(n.m)) + buf.WriteString(", Max = ") + if n.n == math.MaxInt32 { + buf.WriteString("inf") + } else { + buf.WriteString(strconv.Itoa(n.n)) + } + buf.WriteString(")") + + break + } + + return buf.String() +} + +var padSpace = []byte(" ") + +func (t *RegexTree) Dump() string { + return t.root.dump() +} + +func (n *regexNode) dump() string { + var stack []int + CurNode := n + CurChild := 0 + + buf := bytes.NewBufferString(CurNode.description()) + 
buf.WriteRune('\n') + + for { + if CurNode.children != nil && CurChild < len(CurNode.children) { + stack = append(stack, CurChild+1) + CurNode = CurNode.children[CurChild] + CurChild = 0 + + Depth := len(stack) + if Depth > 32 { + Depth = 32 + } + buf.Write(padSpace[:Depth]) + buf.WriteString(CurNode.description()) + buf.WriteRune('\n') + } else { + if len(stack) == 0 { + break + } + + CurChild = stack[len(stack)-1] + stack = stack[:len(stack)-1] + CurNode = CurNode.next + } + } + return buf.String() +} diff --git a/vendor/github.com/dlclark/regexp2/syntax/writer.go b/vendor/github.com/dlclark/regexp2/syntax/writer.go new file mode 100644 index 00000000..a5aa11ca --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/syntax/writer.go @@ -0,0 +1,500 @@ +package syntax + +import ( + "bytes" + "fmt" + "math" + "os" +) + +func Write(tree *RegexTree) (*Code, error) { + w := writer{ + intStack: make([]int, 0, 32), + emitted: make([]int, 2), + stringhash: make(map[string]int), + sethash: make(map[string]int), + } + + code, err := w.codeFromTree(tree) + + if tree.options&Debug > 0 && code != nil { + os.Stdout.WriteString(code.Dump()) + os.Stdout.WriteString("\n") + } + + return code, err +} + +type writer struct { + emitted []int + + intStack []int + curpos int + stringhash map[string]int + stringtable [][]rune + sethash map[string]int + settable []*CharSet + counting bool + count int + trackcount int + caps map[int]int +} + +const ( + beforeChild nodeType = 64 + afterChild = 128 + //MaxPrefixSize is the largest number of runes we'll use for a BoyerMoyer prefix + MaxPrefixSize = 50 +) + +// The top level RegexCode generator. It does a depth-first walk +// through the tree and calls EmitFragment to emits code before +// and after each child of an interior node, and at each leaf. +// +// It runs two passes, first to count the size of the generated +// code, and second to generate the code. +// +// We should time it against the alternative, which is +// to just generate the code and grow the array as we go. 
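+//
+// Rough sketch of those two passes as implemented below: on the first
+// iteration w.counting is true, so emit/emit1/emit2 only advance w.count
+// (and w.trackcount for backtracking opcodes); codeFromTree then flips
+// counting to false, allocates w.emitted with exactly w.count slots, and
+// repeats the identical walk, this time writing opcodes and operands into
+// the slice.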
+func (w *writer) codeFromTree(tree *RegexTree) (*Code, error) { + var ( + curNode *regexNode + curChild int + capsize int + ) + // construct sparse capnum mapping if some numbers are unused + + if tree.capnumlist == nil || tree.captop == len(tree.capnumlist) { + capsize = tree.captop + w.caps = nil + } else { + capsize = len(tree.capnumlist) + w.caps = tree.caps + for i := 0; i < len(tree.capnumlist); i++ { + w.caps[tree.capnumlist[i]] = i + } + } + + w.counting = true + + for { + if !w.counting { + w.emitted = make([]int, w.count) + } + + curNode = tree.root + curChild = 0 + + w.emit1(Lazybranch, 0) + + for { + if len(curNode.children) == 0 { + w.emitFragment(curNode.t, curNode, 0) + } else if curChild < len(curNode.children) { + w.emitFragment(curNode.t|beforeChild, curNode, curChild) + + curNode = curNode.children[curChild] + + w.pushInt(curChild) + curChild = 0 + continue + } + + if w.emptyStack() { + break + } + + curChild = w.popInt() + curNode = curNode.next + + w.emitFragment(curNode.t|afterChild, curNode, curChild) + curChild++ + } + + w.patchJump(0, w.curPos()) + w.emit(Stop) + + if !w.counting { + break + } + + w.counting = false + } + + fcPrefix := getFirstCharsPrefix(tree) + prefix := getPrefix(tree) + rtl := (tree.options & RightToLeft) != 0 + + var bmPrefix *BmPrefix + //TODO: benchmark string prefixes + if prefix != nil && len(prefix.PrefixStr) > 0 && MaxPrefixSize > 0 { + if len(prefix.PrefixStr) > MaxPrefixSize { + // limit prefix changes to 10k + prefix.PrefixStr = prefix.PrefixStr[:MaxPrefixSize] + } + bmPrefix = newBmPrefix(prefix.PrefixStr, prefix.CaseInsensitive, rtl) + } else { + bmPrefix = nil + } + + return &Code{ + Codes: w.emitted, + Strings: w.stringtable, + Sets: w.settable, + TrackCount: w.trackcount, + Caps: w.caps, + Capsize: capsize, + FcPrefix: fcPrefix, + BmPrefix: bmPrefix, + Anchors: getAnchors(tree), + RightToLeft: rtl, + }, nil +} + +// The main RegexCode generator. It does a depth-first walk +// through the tree and calls EmitFragment to emits code before +// and after each child of an interior node, and at each leaf. 
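+//
+// For example, in the alternation cases handled below, every non-final
+// branch of (a|b|c) gets a Lazybranch emitted in its beforeChild fragment
+// and a Goto in its afterChild fragment, both with placeholder targets that
+// patchJump rewrites once the final positions are known.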
+func (w *writer) emitFragment(nodetype nodeType, node *regexNode, curIndex int) error { + bits := InstOp(0) + + if nodetype <= ntRef { + if (node.options & RightToLeft) != 0 { + bits |= Rtl + } + if (node.options & IgnoreCase) != 0 { + bits |= Ci + } + } + ntBits := nodeType(bits) + + switch nodetype { + case ntConcatenate | beforeChild, ntConcatenate | afterChild, ntEmpty: + break + + case ntAlternate | beforeChild: + if curIndex < len(node.children)-1 { + w.pushInt(w.curPos()) + w.emit1(Lazybranch, 0) + } + + case ntAlternate | afterChild: + if curIndex < len(node.children)-1 { + lbPos := w.popInt() + w.pushInt(w.curPos()) + w.emit1(Goto, 0) + w.patchJump(lbPos, w.curPos()) + } else { + for i := 0; i < curIndex; i++ { + w.patchJump(w.popInt(), w.curPos()) + } + } + break + + case ntTestref | beforeChild: + if curIndex == 0 { + w.emit(Setjump) + w.pushInt(w.curPos()) + w.emit1(Lazybranch, 0) + w.emit1(Testref, w.mapCapnum(node.m)) + w.emit(Forejump) + } + + case ntTestref | afterChild: + if curIndex == 0 { + branchpos := w.popInt() + w.pushInt(w.curPos()) + w.emit1(Goto, 0) + w.patchJump(branchpos, w.curPos()) + w.emit(Forejump) + if len(node.children) <= 1 { + w.patchJump(w.popInt(), w.curPos()) + } + } else if curIndex == 1 { + w.patchJump(w.popInt(), w.curPos()) + } + + case ntTestgroup | beforeChild: + if curIndex == 0 { + w.emit(Setjump) + w.emit(Setmark) + w.pushInt(w.curPos()) + w.emit1(Lazybranch, 0) + } + + case ntTestgroup | afterChild: + if curIndex == 0 { + w.emit(Getmark) + w.emit(Forejump) + } else if curIndex == 1 { + Branchpos := w.popInt() + w.pushInt(w.curPos()) + w.emit1(Goto, 0) + w.patchJump(Branchpos, w.curPos()) + w.emit(Getmark) + w.emit(Forejump) + if len(node.children) <= 2 { + w.patchJump(w.popInt(), w.curPos()) + } + } else if curIndex == 2 { + w.patchJump(w.popInt(), w.curPos()) + } + + case ntLoop | beforeChild, ntLazyloop | beforeChild: + + if node.n < math.MaxInt32 || node.m > 1 { + if node.m == 0 { + w.emit1(Nullcount, 0) + } else { + w.emit1(Setcount, 1-node.m) + } + } else if node.m == 0 { + w.emit(Nullmark) + } else { + w.emit(Setmark) + } + + if node.m == 0 { + w.pushInt(w.curPos()) + w.emit1(Goto, 0) + } + w.pushInt(w.curPos()) + + case ntLoop | afterChild, ntLazyloop | afterChild: + + startJumpPos := w.curPos() + lazy := (nodetype - (ntLoop | afterChild)) + + if node.n < math.MaxInt32 || node.m > 1 { + if node.n == math.MaxInt32 { + w.emit2(InstOp(Branchcount+lazy), w.popInt(), math.MaxInt32) + } else { + w.emit2(InstOp(Branchcount+lazy), w.popInt(), node.n-node.m) + } + } else { + w.emit1(InstOp(Branchmark+lazy), w.popInt()) + } + + if node.m == 0 { + w.patchJump(w.popInt(), startJumpPos) + } + + case ntGroup | beforeChild, ntGroup | afterChild: + + case ntCapture | beforeChild: + w.emit(Setmark) + + case ntCapture | afterChild: + w.emit2(Capturemark, w.mapCapnum(node.m), w.mapCapnum(node.n)) + + case ntRequire | beforeChild: + // NOTE: the following line causes lookahead/lookbehind to be + // NON-BACKTRACKING. It can be commented out with (*) + w.emit(Setjump) + + w.emit(Setmark) + + case ntRequire | afterChild: + w.emit(Getmark) + + // NOTE: the following line causes lookahead/lookbehind to be + // NON-BACKTRACKING. 
It can be commented out with (*) + w.emit(Forejump) + + case ntPrevent | beforeChild: + w.emit(Setjump) + w.pushInt(w.curPos()) + w.emit1(Lazybranch, 0) + + case ntPrevent | afterChild: + w.emit(Backjump) + w.patchJump(w.popInt(), w.curPos()) + w.emit(Forejump) + + case ntGreedy | beforeChild: + w.emit(Setjump) + + case ntGreedy | afterChild: + w.emit(Forejump) + + case ntOne, ntNotone: + w.emit1(InstOp(node.t|ntBits), int(node.ch)) + + case ntNotoneloop, ntNotonelazy, ntOneloop, ntOnelazy: + if node.m > 0 { + if node.t == ntOneloop || node.t == ntOnelazy { + w.emit2(Onerep|bits, int(node.ch), node.m) + } else { + w.emit2(Notonerep|bits, int(node.ch), node.m) + } + } + if node.n > node.m { + if node.n == math.MaxInt32 { + w.emit2(InstOp(node.t|ntBits), int(node.ch), math.MaxInt32) + } else { + w.emit2(InstOp(node.t|ntBits), int(node.ch), node.n-node.m) + } + } + + case ntSetloop, ntSetlazy: + if node.m > 0 { + w.emit2(Setrep|bits, w.setCode(node.set), node.m) + } + if node.n > node.m { + if node.n == math.MaxInt32 { + w.emit2(InstOp(node.t|ntBits), w.setCode(node.set), math.MaxInt32) + } else { + w.emit2(InstOp(node.t|ntBits), w.setCode(node.set), node.n-node.m) + } + } + + case ntMulti: + w.emit1(InstOp(node.t|ntBits), w.stringCode(node.str)) + + case ntSet: + w.emit1(InstOp(node.t|ntBits), w.setCode(node.set)) + + case ntRef: + w.emit1(InstOp(node.t|ntBits), w.mapCapnum(node.m)) + + case ntNothing, ntBol, ntEol, ntBoundary, ntNonboundary, ntECMABoundary, ntNonECMABoundary, ntBeginning, ntStart, ntEndZ, ntEnd: + w.emit(InstOp(node.t)) + + default: + return fmt.Errorf("unexpected opcode in regular expression generation: %v", nodetype) + } + + return nil +} + +// To avoid recursion, we use a simple integer stack. +// This is the push. +func (w *writer) pushInt(i int) { + w.intStack = append(w.intStack, i) +} + +// Returns true if the stack is empty. +func (w *writer) emptyStack() bool { + return len(w.intStack) == 0 +} + +// This is the pop. +func (w *writer) popInt() int { + //get our item + idx := len(w.intStack) - 1 + i := w.intStack[idx] + //trim our slice + w.intStack = w.intStack[:idx] + return i +} + +// Returns the current position in the emitted code. +func (w *writer) curPos() int { + return w.curpos +} + +// Fixes up a jump instruction at the specified offset +// so that it jumps to the specified jumpDest. +func (w *writer) patchJump(offset, jumpDest int) { + w.emitted[offset+1] = jumpDest +} + +// Returns an index in the set table for a charset +// uses a map to eliminate duplicates. +func (w *writer) setCode(set *CharSet) int { + if w.counting { + return 0 + } + + buf := &bytes.Buffer{} + + set.mapHashFill(buf) + hash := buf.String() + i, ok := w.sethash[hash] + if !ok { + i = len(w.sethash) + w.sethash[hash] = i + w.settable = append(w.settable, set) + } + return i +} + +// Returns an index in the string table for a string. +// uses a map to eliminate duplicates. +func (w *writer) stringCode(str []rune) int { + if w.counting { + return 0 + } + + hash := string(str) + i, ok := w.stringhash[hash] + if !ok { + i = len(w.stringhash) + w.stringhash[hash] = i + w.stringtable = append(w.stringtable, str) + } + + return i +} + +// When generating code on a regex that uses a sparse set +// of capture slots, we hash them to a dense set of indices +// for an array of capture slots. Instead of doing the hash +// at match time, it's done at compile time, here. 
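+//
+// For instance, if only capture numbers 1 and 3 occur in the pattern,
+// codeFromTree fills w.caps with {1: 0, 3: 1}; mapCapnum(3) then yields 1,
+// so Capturemark and Ref instructions index a dense two-slot capture array
+// rather than a sparse one.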
+func (w *writer) mapCapnum(capnum int) int { + if capnum == -1 { + return -1 + } + + if w.caps != nil { + return w.caps[capnum] + } + + return capnum +} + +// Emits a zero-argument operation. Note that the emit +// functions all run in two modes: they can emit code, or +// they can just count the size of the code. +func (w *writer) emit(op InstOp) { + if w.counting { + w.count++ + if opcodeBacktracks(op) { + w.trackcount++ + } + return + } + w.emitted[w.curpos] = int(op) + w.curpos++ +} + +// Emits a one-argument operation. +func (w *writer) emit1(op InstOp, opd1 int) { + if w.counting { + w.count += 2 + if opcodeBacktracks(op) { + w.trackcount++ + } + return + } + w.emitted[w.curpos] = int(op) + w.curpos++ + w.emitted[w.curpos] = opd1 + w.curpos++ +} + +// Emits a two-argument operation. +func (w *writer) emit2(op InstOp, opd1, opd2 int) { + if w.counting { + w.count += 3 + if opcodeBacktracks(op) { + w.trackcount++ + } + return + } + w.emitted[w.curpos] = int(op) + w.curpos++ + w.emitted[w.curpos] = opd1 + w.curpos++ + w.emitted[w.curpos] = opd2 + w.curpos++ +} diff --git a/vendor/github.com/dlclark/regexp2/testoutput1 b/vendor/github.com/dlclark/regexp2/testoutput1 new file mode 100644 index 00000000..fbf63fdf --- /dev/null +++ b/vendor/github.com/dlclark/regexp2/testoutput1 @@ -0,0 +1,7061 @@ +# This set of tests is for features that are compatible with all versions of +# Perl >= 5.10, in non-UTF mode. It should run clean for the 8-bit, 16-bit, and +# 32-bit PCRE libraries, and also using the perltest.pl script. + +#forbid_utf +#newline_default lf any anycrlf +#perltest + +/the quick brown fox/ + the quick brown fox + 0: the quick brown fox + What do you know about the quick brown fox? + 0: the quick brown fox +\= Expect no match + The quick brown FOX +No match + What do you know about THE QUICK BROWN FOX? +No match + +/The quick brown fox/i + the quick brown fox + 0: the quick brown fox + The quick brown FOX + 0: The quick brown FOX + What do you know about the quick brown fox? + 0: the quick brown fox + What do you know about THE QUICK BROWN FOX? 
+ 0: THE QUICK BROWN FOX + +/abcd\t\n\r\f\a\e\071\x3b\$\\\?caxyz/ + abcd\t\n\r\f\a\e9;\$\\?caxyz + 0: abcd\x09\x0a\x0d\x0c\x07\x1b9;$\?caxyz + +/a*abc?xyz+pqr{3}ab{2,}xy{4,5}pq{0,6}AB{0,}zz/ + abxyzpqrrrabbxyyyypqAzz + 0: abxyzpqrrrabbxyyyypqAzz + abxyzpqrrrabbxyyyypqAzz + 0: abxyzpqrrrabbxyyyypqAzz + aabxyzpqrrrabbxyyyypqAzz + 0: aabxyzpqrrrabbxyyyypqAzz + aaabxyzpqrrrabbxyyyypqAzz + 0: aaabxyzpqrrrabbxyyyypqAzz + aaaabxyzpqrrrabbxyyyypqAzz + 0: aaaabxyzpqrrrabbxyyyypqAzz + abcxyzpqrrrabbxyyyypqAzz + 0: abcxyzpqrrrabbxyyyypqAzz + aabcxyzpqrrrabbxyyyypqAzz + 0: aabcxyzpqrrrabbxyyyypqAzz + aaabcxyzpqrrrabbxyyyypAzz + 0: aaabcxyzpqrrrabbxyyyypAzz + aaabcxyzpqrrrabbxyyyypqAzz + 0: aaabcxyzpqrrrabbxyyyypqAzz + aaabcxyzpqrrrabbxyyyypqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqAzz + aaabcxyzpqrrrabbxyyyypqqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqqAzz + aaabcxyzpqrrrabbxyyyypqqqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqqqAzz + aaabcxyzpqrrrabbxyyyypqqqqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqqqqAzz + aaabcxyzpqrrrabbxyyyypqqqqqqAzz + 0: aaabcxyzpqrrrabbxyyyypqqqqqqAzz + aaaabcxyzpqrrrabbxyyyypqAzz + 0: aaaabcxyzpqrrrabbxyyyypqAzz + abxyzzpqrrrabbxyyyypqAzz + 0: abxyzzpqrrrabbxyyyypqAzz + aabxyzzzpqrrrabbxyyyypqAzz + 0: aabxyzzzpqrrrabbxyyyypqAzz + aaabxyzzzzpqrrrabbxyyyypqAzz + 0: aaabxyzzzzpqrrrabbxyyyypqAzz + aaaabxyzzzzpqrrrabbxyyyypqAzz + 0: aaaabxyzzzzpqrrrabbxyyyypqAzz + abcxyzzpqrrrabbxyyyypqAzz + 0: abcxyzzpqrrrabbxyyyypqAzz + aabcxyzzzpqrrrabbxyyyypqAzz + 0: aabcxyzzzpqrrrabbxyyyypqAzz + aaabcxyzzzzpqrrrabbxyyyypqAzz + 0: aaabcxyzzzzpqrrrabbxyyyypqAzz + aaaabcxyzzzzpqrrrabbxyyyypqAzz + 0: aaaabcxyzzzzpqrrrabbxyyyypqAzz + aaaabcxyzzzzpqrrrabbbxyyyypqAzz + 0: aaaabcxyzzzzpqrrrabbbxyyyypqAzz + aaaabcxyzzzzpqrrrabbbxyyyyypqAzz + 0: aaaabcxyzzzzpqrrrabbbxyyyyypqAzz + aaabcxyzpqrrrabbxyyyypABzz + 0: aaabcxyzpqrrrabbxyyyypABzz + aaabcxyzpqrrrabbxyyyypABBzz + 0: aaabcxyzpqrrrabbxyyyypABBzz + >>>aaabxyzpqrrrabbxyyyypqAzz + 0: aaabxyzpqrrrabbxyyyypqAzz + >aaaabxyzpqrrrabbxyyyypqAzz + 0: aaaabxyzpqrrrabbxyyyypqAzz + >>>>abcxyzpqrrrabbxyyyypqAzz + 0: abcxyzpqrrrabbxyyyypqAzz +\= Expect no match + abxyzpqrrabbxyyyypqAzz +No match + abxyzpqrrrrabbxyyyypqAzz +No match + abxyzpqrrrabxyyyypqAzz +No match + aaaabcxyzzzzpqrrrabbbxyyyyyypqAzz +No match + aaaabcxyzzzzpqrrrabbbxyyypqAzz +No match + aaabcxyzpqrrrabbxyyyypqqqqqqqAzz +No match + +/^(abc){1,2}zz/ + abczz + 0: abczz + 1: abc + abcabczz + 0: abcabczz + 1: abc +\= Expect no match + zz +No match + abcabcabczz +No match + >>abczz +No match + +/^(b+?|a){1,2}?c/ + bc + 0: bc + 1: b + bbc + 0: bbc + 1: b + bbbc + 0: bbbc + 1: bb + bac + 0: bac + 1: a + bbac + 0: bbac + 1: a + aac + 0: aac + 1: a + abbbbbbbbbbbc + 0: abbbbbbbbbbbc + 1: bbbbbbbbbbb + bbbbbbbbbbbac + 0: bbbbbbbbbbbac + 1: a +\= Expect no match + aaac +No match + abbbbbbbbbbbac +No match + +/^(b+|a){1,2}c/ + bc + 0: bc + 1: b + bbc + 0: bbc + 1: bb + bbbc + 0: bbbc + 1: bbb + bac + 0: bac + 1: a + bbac + 0: bbac + 1: a + aac + 0: aac + 1: a + abbbbbbbbbbbc + 0: abbbbbbbbbbbc + 1: bbbbbbbbbbb + bbbbbbbbbbbac + 0: bbbbbbbbbbbac + 1: a +\= Expect no match + aaac +No match + abbbbbbbbbbbac +No match + +/^(b+|a){1,2}?bc/ + bbc + 0: bbc + 1: b + +/^(b*|ba){1,2}?bc/ + babc + 0: babc + 1: ba + bbabc + 0: bbabc + 1: ba + bababc + 0: bababc + 1: ba +\= Expect no match + bababbc +No match + babababc +No match + +/^(ba|b*){1,2}?bc/ + babc + 0: babc + 1: ba + bbabc + 0: bbabc + 1: ba + bababc + 0: bababc + 1: ba +\= Expect no match + bababbc +No match + babababc +No match + +#/^\ca\cA\c[;\c:/ +# \x01\x01\e;z +# 0: \x01\x01\x1b;z + 
+/^[ab\]cde]/ + athing + 0: a + bthing + 0: b + ]thing + 0: ] + cthing + 0: c + dthing + 0: d + ething + 0: e +\= Expect no match + fthing +No match + [thing +No match + \\thing +No match + +/^[]cde]/ + ]thing + 0: ] + cthing + 0: c + dthing + 0: d + ething + 0: e +\= Expect no match + athing +No match + fthing +No match + +/^[^ab\]cde]/ + fthing + 0: f + [thing + 0: [ + \\thing + 0: \ +\= Expect no match + athing +No match + bthing +No match + ]thing +No match + cthing +No match + dthing +No match + ething +No match + +/^[^]cde]/ + athing + 0: a + fthing + 0: f +\= Expect no match + ]thing +No match + cthing +No match + dthing +No match + ething +No match + +# DLC - I don't get this one +#/^\/ +#  +# 0: \x81 + +#updated to handle 16-bits utf8 +/^ÿ/ + ÿ + 0: \xc3\xbf + +/^[0-9]+$/ + 0 + 0: 0 + 1 + 0: 1 + 2 + 0: 2 + 3 + 0: 3 + 4 + 0: 4 + 5 + 0: 5 + 6 + 0: 6 + 7 + 0: 7 + 8 + 0: 8 + 9 + 0: 9 + 10 + 0: 10 + 100 + 0: 100 +\= Expect no match + abc +No match + +/^.*nter/ + enter + 0: enter + inter + 0: inter + uponter + 0: uponter + +/^xxx[0-9]+$/ + xxx0 + 0: xxx0 + xxx1234 + 0: xxx1234 +\= Expect no match + xxx +No match + +/^.+[0-9][0-9][0-9]$/ + x123 + 0: x123 + x1234 + 0: x1234 + xx123 + 0: xx123 + 123456 + 0: 123456 +\= Expect no match + 123 +No match + +/^.+?[0-9][0-9][0-9]$/ + x123 + 0: x123 + x1234 + 0: x1234 + xx123 + 0: xx123 + 123456 + 0: 123456 +\= Expect no match + 123 +No match + +/^([^!]+)!(.+)=apquxz\.ixr\.zzz\.ac\.uk$/ + abc!pqr=apquxz.ixr.zzz.ac.uk + 0: abc!pqr=apquxz.ixr.zzz.ac.uk + 1: abc + 2: pqr +\= Expect no match + !pqr=apquxz.ixr.zzz.ac.uk +No match + abc!=apquxz.ixr.zzz.ac.uk +No match + abc!pqr=apquxz:ixr.zzz.ac.uk +No match + abc!pqr=apquxz.ixr.zzz.ac.ukk +No match + +/:/ + Well, we need a colon: somewhere + 0: : +\= Expect no match + Fail without a colon +No match + +/([\da-f:]+)$/i + 0abc + 0: 0abc + 1: 0abc + abc + 0: abc + 1: abc + fed + 0: fed + 1: fed + E + 0: E + 1: E + :: + 0: :: + 1: :: + 5f03:12C0::932e + 0: 5f03:12C0::932e + 1: 5f03:12C0::932e + fed def + 0: def + 1: def + Any old stuff + 0: ff + 1: ff +\= Expect no match + 0zzz +No match + gzzz +No match + fed\x20 +No match + Any old rubbish +No match + +/^.*\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/ + .1.2.3 + 0: .1.2.3 + 1: 1 + 2: 2 + 3: 3 + A.12.123.0 + 0: A.12.123.0 + 1: 12 + 2: 123 + 3: 0 +\= Expect no match + .1.2.3333 +No match + 1.2.3 +No match + 1234.2.3 +No match + +/^(\d+)\s+IN\s+SOA\s+(\S+)\s+(\S+)\s*\(\s*$/ + 1 IN SOA non-sp1 non-sp2( + 0: 1 IN SOA non-sp1 non-sp2( + 1: 1 + 2: non-sp1 + 3: non-sp2 + 1 IN SOA non-sp1 non-sp2 ( + 0: 1 IN SOA non-sp1 non-sp2 ( + 1: 1 + 2: non-sp1 + 3: non-sp2 +\= Expect no match + 1IN SOA non-sp1 non-sp2( +No match + +/^[a-zA-Z\d][a-zA-Z\d\-]*(\.[a-zA-Z\d][a-zA-z\d\-]*)*\.$/ + a. + 0: a. + Z. + 0: Z. + 2. + 0: 2. + ab-c.pq-r. + 0: ab-c.pq-r. + 1: .pq-r + sxk.zzz.ac.uk. + 0: sxk.zzz.ac.uk. + 1: .uk + x-.y-. + 0: x-.y-. + 1: .y- +\= Expect no match + -abc.peq. 
+No match + +/^\*\.[a-z]([a-z\-\d]*[a-z\d]+)?(\.[a-z]([a-z\-\d]*[a-z\d]+)?)*$/ + *.a + 0: *.a + *.b0-a + 0: *.b0-a + 1: 0-a + *.c3-b.c + 0: *.c3-b.c + 1: 3-b + 2: .c + *.c-a.b-c + 0: *.c-a.b-c + 1: -a + 2: .b-c + 3: -c +\= Expect no match + *.0 +No match + *.a- +No match + *.a-b.c- +No match + *.c-a.0-c +No match + +/^(?=ab(de))(abd)(e)/ + abde + 0: abde + 1: de + 2: abd + 3: e + +/^(?!(ab)de|x)(abd)(f)/ + abdf + 0: abdf + 1: + 2: abd + 3: f + +/^(?=(ab(cd)))(ab)/ + abcd + 0: ab + 1: abcd + 2: cd + 3: ab + +/^[\da-f](\.[\da-f])*$/i + a.b.c.d + 0: a.b.c.d + 1: .d + A.B.C.D + 0: A.B.C.D + 1: .D + a.b.c.1.2.3.C + 0: a.b.c.1.2.3.C + 1: .C + +/^\".*\"\s*(;.*)?$/ + \"1234\" + 0: "1234" + \"abcd\" ; + 0: "abcd" ; + 1: ; + \"\" ; rhubarb + 0: "" ; rhubarb + 1: ; rhubarb +\= Expect no match + \"1234\" : things +No match + +/^$/ + \ + 0: +\= Expect no match + A non-empty line +No match + +/ ^ a (?# begins with a) b\sc (?# then b c) $ (?# then end)/x + ab c + 0: ab c +\= Expect no match + abc +No match + ab cde +No match + +/(?x) ^ a (?# begins with a) b\sc (?# then b c) $ (?# then end)/ + ab c + 0: ab c +\= Expect no match + abc +No match + ab cde +No match + +/^ a\ b[c ]d $/x + a bcd + 0: a bcd + a b d + 0: a b d +\= Expect no match + abcd +No match + ab d +No match + +/^(a(b(c)))(d(e(f)))(h(i(j)))(k(l(m)))$/ + abcdefhijklm + 0: abcdefhijklm + 1: abc + 2: bc + 3: c + 4: def + 5: ef + 6: f + 7: hij + 8: ij + 9: j +10: klm +11: lm +12: m + +/^(?:a(b(c)))(?:d(e(f)))(?:h(i(j)))(?:k(l(m)))$/ + abcdefhijklm + 0: abcdefhijklm + 1: bc + 2: c + 3: ef + 4: f + 5: ij + 6: j + 7: lm + 8: m + +#/^[\w][\W][\s][\S][\d][\D][\b][\n][\c]][\022]/ +# a+ Z0+\x08\n\x1d\x12 +# 0: a+ Z0+\x08\x0a\x1d\x12 + +/^[.^$|()*+?{,}]+/ + .^\$(*+)|{?,?} + 0: .^$(*+)|{?,?} + +/^a*\w/ + z + 0: z + az + 0: az + aaaz + 0: aaaz + a + 0: a + aa + 0: aa + aaaa + 0: aaaa + a+ + 0: a + aa+ + 0: aa + +/^a*?\w/ + z + 0: z + az + 0: a + aaaz + 0: a + a + 0: a + aa + 0: a + aaaa + 0: a + a+ + 0: a + aa+ + 0: a + +/^a+\w/ + az + 0: az + aaaz + 0: aaaz + aa + 0: aa + aaaa + 0: aaaa + aa+ + 0: aa + +/^a+?\w/ + az + 0: az + aaaz + 0: aa + aa + 0: aa + aaaa + 0: aa + aa+ + 0: aa + +/^\d{8}\w{2,}/ + 1234567890 + 0: 1234567890 + 12345678ab + 0: 12345678ab + 12345678__ + 0: 12345678__ +\= Expect no match + 1234567 +No match + +/^[aeiou\d]{4,5}$/ + uoie + 0: uoie + 1234 + 0: 1234 + 12345 + 0: 12345 + aaaaa + 0: aaaaa +\= Expect no match + 123456 +No match + +/^[aeiou\d]{4,5}?/ + uoie + 0: uoie + 1234 + 0: 1234 + 12345 + 0: 1234 + aaaaa + 0: aaaa + 123456 + 0: 1234 + +/\A(abc|def)=(\1){2,3}\Z/ + abc=abcabc + 0: abc=abcabc + 1: abc + 2: abc + def=defdefdef + 0: def=defdefdef + 1: def + 2: def +\= Expect no match + abc=defdef +No match + +/^(a)(b)(c)(d)(e)(f)(g)(h)(i)(j)(k)\11*(\3\4)\1(?#)2$/ + abcdefghijkcda2 + 0: abcdefghijkcda2 + 1: a + 2: b + 3: c + 4: d + 5: e + 6: f + 7: g + 8: h + 9: i +10: j +11: k +12: cd + abcdefghijkkkkcda2 + 0: abcdefghijkkkkcda2 + 1: a + 2: b + 3: c + 4: d + 5: e + 6: f + 7: g + 8: h + 9: i +10: j +11: k +12: cd + +/(cat(a(ract|tonic)|erpillar)) \1()2(3)/ + cataract cataract23 + 0: cataract cataract23 + 1: cataract + 2: aract + 3: ract + 4: + 5: 3 + catatonic catatonic23 + 0: catatonic catatonic23 + 1: catatonic + 2: atonic + 3: tonic + 4: + 5: 3 + caterpillar caterpillar23 + 0: caterpillar caterpillar23 + 1: caterpillar + 2: erpillar + 3: + 4: + 5: 3 + + +/^From +([^ ]+) +[a-zA-Z][a-zA-Z][a-zA-Z] +[a-zA-Z][a-zA-Z][a-zA-Z] +[0-9]?[0-9] +[0-9][0-9]:[0-9][0-9]/ + From abcd Mon Sep 01 12:33:02 1997 + 0: From abcd Mon Sep 01 12:33 + 
1: abcd + +/^From\s+\S+\s+([a-zA-Z]{3}\s+){2}\d{1,2}\s+\d\d:\d\d/ + From abcd Mon Sep 01 12:33:02 1997 + 0: From abcd Mon Sep 01 12:33 + 1: Sep + From abcd Mon Sep 1 12:33:02 1997 + 0: From abcd Mon Sep 1 12:33 + 1: Sep +\= Expect no match + From abcd Sep 01 12:33:02 1997 +No match + +/^12.34/s + 12\n34 + 0: 12\x0a34 + 12\r34 + 0: 12\x0d34 + +/\w+(?=\t)/ + the quick brown\t fox + 0: brown + +/foo(?!bar)(.*)/ + foobar is foolish see? + 0: foolish see? + 1: lish see? + +/(?:(?!foo)...|^.{0,2})bar(.*)/ + foobar crowbar etc + 0: rowbar etc + 1: etc + barrel + 0: barrel + 1: rel + 2barrel + 0: 2barrel + 1: rel + A barrel + 0: A barrel + 1: rel + +/^(\D*)(?=\d)(?!123)/ + abc456 + 0: abc + 1: abc +\= Expect no match + abc123 +No match + +/^1234(?# test newlines + inside)/ + 1234 + 0: 1234 + +/^1234 #comment in extended re + /x + 1234 + 0: 1234 + +/#rhubarb + abcd/x + abcd + 0: abcd + +/^abcd#rhubarb/x + abcd + 0: abcd + +/^(a)\1{2,3}(.)/ + aaab + 0: aaab + 1: a + 2: b + aaaab + 0: aaaab + 1: a + 2: b + aaaaab + 0: aaaaa + 1: a + 2: a + aaaaaab + 0: aaaaa + 1: a + 2: a + +/(?!^)abc/ + the abc + 0: abc +\= Expect no match + abc +No match + +/(?=^)abc/ + abc + 0: abc +\= Expect no match + the abc +No match + +/^[ab]{1,3}(ab*|b)/ + aabbbbb + 0: aabb + 1: b + +/^[ab]{1,3}?(ab*|b)/ + aabbbbb + 0: aabbbbb + 1: abbbbb + +/^[ab]{1,3}?(ab*?|b)/ + aabbbbb + 0: aa + 1: a + +/^[ab]{1,3}(ab*?|b)/ + aabbbbb + 0: aabb + 1: b + +/ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* # optional leading comment +(?: (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) # initial word +(?: (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) )* # further okay, if led by a period +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* @ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # initial subdomain +(?: # +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. # if led by a period... 
+(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # ...further okay +)* +# address +| # or +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) # one word, optionally followed by.... +(?: +[^()<>@,;:".\\\[\]\x80-\xff\000-\010\012-\037] | # atom and space parts, or... +\( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) | # comments, or... + +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +# quoted strings +)* +< (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* # leading < +(?: @ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # initial subdomain +(?: # +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. # if led by a period... +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # ...further okay +)* + +(?: (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* , (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* @ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # initial subdomain +(?: # +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. # if led by a period... +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... 
+(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # ...further okay +)* +)* # further okay, if led by comma +: # closing colon +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* )? # optional route +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) # initial word +(?: (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +" (?: # opening quote... +[^\\\x80-\xff\n\015"] # Anything except backslash and quote +| # or +\\ [^\x80-\xff] # Escaped something (something != CR) +)* " # closing quote +) )* # further okay, if led by a period +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* @ (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # initial subdomain +(?: # +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* \. # if led by a period... +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* (?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| \[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) # ...further okay +)* +# address spec +(?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* > # trailing > +# name and address +) (?: [\040\t] | \( +(?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] | \( (?: [^\\\x80-\xff\n\015()] | \\ [^\x80-\xff] )* \) )* +\) )* # optional trailing comment +/x + Alan Other + 0: Alan Other + + 0: user@dom.ain + user\@dom.ain + 0: user@dom.ain + \"A. Other\" (a comment) + 0: "A. Other" (a comment) + A. Other (a comment) + 0: Other (a comment) + \"/s=user/ou=host/o=place/prmd=uu.yy/admd= /c=gb/\"\@x400-re.lay + 0: "/s=user/ou=host/o=place/prmd=uu.yy/admd= /c=gb/"@x400-re.lay + A missing angle @,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... 
+(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# additional words +)* +@ +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. 
+(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +)* +# address +| # or +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +# leading word +[^()<>@,;:".\\\[\]\x80-\xff\000-\010\012-\037] * # "normal" atoms and or spaces +(?: +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +| +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +) # "special" comment or quoted string +[^()<>@,;:".\\\[\]\x80-\xff\000-\010\012-\037] * # more "normal" +)* +< +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# < +(?: +@ +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. 
+(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +)* +(?: , +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +@ +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +)* +)* # additional domains +: +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +)? # optional route +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +[\040\t]* # Nab whitespace. 
+(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +# Atom +| # or +" # " +[^\\\x80-\xff\n\015"] * # normal +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015"] * )* # ( special normal* )* +" # " +# Quoted string +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# additional words +)* +@ +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +# optional trailing comments +(?: +\. +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. +(?: +[^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]+ # some number of atom characters... +(?![^(\040)<>@,;:".\\\[\]\000-\037\x80-\xff]) # ..not followed by something that could be part of an atom +| +\[ # [ +(?: [^\\\x80-\xff\n\015\[\]] | \\ [^\x80-\xff] )* # stuff +\] # ] +) +[\040\t]* # Nab whitespace. +(?: +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: # ( +(?: \\ [^\x80-\xff] | +\( # ( +[^\\\x80-\xff\n\015()] * # normal* +(?: \\ [^\x80-\xff] [^\\\x80-\xff\n\015()] * )* # (special normal*)* +\) # ) +) # special +[^\\\x80-\xff\n\015()] * # normal* +)* # )* +\) # ) +[\040\t]* )* # If comment found, allow more spaces. 
+# optional trailing comments +)* +# address spec +> # > +# name and address +) +/x + Alan Other + 0: Alan Other + + 0: user@dom.ain + user\@dom.ain + 0: user@dom.ain + \"A. Other\" (a comment) + 0: "A. Other" + A. Other (a comment) + 0: Other + \"/s=user/ou=host/o=place/prmd=uu.yy/admd= /c=gb/\"\@x400-re.lay + 0: "/s=user/ou=host/o=place/prmd=uu.yy/admd= /c=gb/"@x400-re.lay + A missing angle ?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~\x7f + +/P[^*]TAIRE[^*]{1,6}?LL/ + xxxxxxxxxxxPSTAIREISLLxxxxxxxxx + 0: PSTAIREISLL + +/P[^*]TAIRE[^*]{1,}?LL/ + xxxxxxxxxxxPSTAIREISLLxxxxxxxxx + 0: PSTAIREISLL + +/(\.\d\d[1-9]?)\d+/ + 1.230003938 + 0: .230003938 + 1: .23 + 1.875000282 + 0: .875000282 + 1: .875 + 1.235 + 0: .235 + 1: .23 + +/(\.\d\d((?=0)|\d(?=\d)))/ + 1.230003938 + 0: .23 + 1: .23 + 2: + 1.875000282 + 0: .875 + 1: .875 + 2: 5 +\= Expect no match + 1.235 +No match + +/\b(foo)\s+(\w+)/i + Food is on the foo table + 0: foo table + 1: foo + 2: table + +/foo(.*)bar/ + The food is under the bar in the barn. + 0: food is under the bar in the bar + 1: d is under the bar in the + +/foo(.*?)bar/ + The food is under the bar in the barn. + 0: food is under the bar + 1: d is under the + +/(.*)(\d*)/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: 53147 + 2: + +/(.*)(\d+)/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: 5314 + 2: 7 + +/(.*?)(\d*)/ + I have 2 numbers: 53147 + 0: + 1: + 2: + +/(.*?)(\d+)/ + I have 2 numbers: 53147 + 0: I have 2 + 1: I have + 2: 2 + +/(.*)(\d+)$/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: 5314 + 2: 7 + +/(.*?)(\d+)$/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: + 2: 53147 + +/(.*)\b(\d+)$/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: + 2: 53147 + +/(.*\D)(\d+)$/ + I have 2 numbers: 53147 + 0: I have 2 numbers: 53147 + 1: I have 2 numbers: + 2: 53147 + +/^\D*(?!123)/ + ABC123 + 0: AB + +/^(\D*)(?=\d)(?!123)/ + ABC445 + 0: ABC + 1: ABC +\= Expect no match + ABC123 +No match + +/^[W-]46]/ + W46]789 + 0: W46] + -46]789 + 0: -46] +\= Expect no match + Wall +No match + Zebra +No match + 42 +No match + [abcd] +No match + ]abcd[ +No match + +/^[W-\]46]/ + W46]789 + 0: W + Wall + 0: W + Zebra + 0: Z + Xylophone + 0: X + 42 + 0: 4 + [abcd] + 0: [ + ]abcd[ + 0: ] + \\backslash + 0: \ +\= Expect no match + -46]789 +No match + well +No match + +/\d\d\/\d\d\/\d\d\d\d/ + 01/01/2000 + 0: 01/01/2000 + +/word (?:[a-zA-Z0-9]+ ){0,10}otherword/ + word cat dog elephant mussel cow horse canary baboon snake shark otherword + 0: word cat dog elephant mussel cow horse canary baboon snake shark otherword +\= Expect no match + word cat dog elephant mussel cow horse canary baboon snake shark +No match + +/word (?:[a-zA-Z0-9]+ ){0,300}otherword/ +\= Expect no match + word cat dog elephant mussel cow horse canary baboon snake shark the quick brown fox and the lazy dog and several other words getting close to thirty by now I hope +No match + +/^(a){0,0}/ + bcd + 0: + abc + 0: + aab + 0: + +/^(a){0,1}/ + bcd + 0: + abc + 0: a + 1: a + aab + 0: a + 1: a + +/^(a){0,2}/ + bcd + 0: + abc + 0: a + 1: a + aab + 0: aa + 1: a + +/^(a){0,3}/ + bcd + 0: + abc + 0: a + 1: a + aab + 0: aa + 1: a + aaa + 0: aaa + 1: a + +/^(a){0,}/ + bcd + 0: + abc + 0: a + 1: a + aab + 0: aa + 1: a + aaa + 0: aaa + 1: a + aaaaaaaa + 0: aaaaaaaa + 1: a + +/^(a){1,1}/ + abc + 0: a + 1: a + aab + 0: a + 1: a +\= Expect no match + bcd +No match + 
+/^(a){1,2}/ + abc + 0: a + 1: a + aab + 0: aa + 1: a +\= Expect no match + bcd +No match + +/^(a){1,3}/ + abc + 0: a + 1: a + aab + 0: aa + 1: a + aaa + 0: aaa + 1: a +\= Expect no match + bcd +No match + +/^(a){1,}/ + abc + 0: a + 1: a + aab + 0: aa + 1: a + aaa + 0: aaa + 1: a + aaaaaaaa + 0: aaaaaaaa + 1: a +\= Expect no match + bcd +No match + +/.*\.gif/ + borfle\nbib.gif\nno + 0: bib.gif + +/.{0,}\.gif/ + borfle\nbib.gif\nno + 0: bib.gif + +/.*\.gif/m + borfle\nbib.gif\nno + 0: bib.gif + +/.*\.gif/s + borfle\nbib.gif\nno + 0: borfle\x0abib.gif + +/.*\.gif/ms + borfle\nbib.gif\nno + 0: borfle\x0abib.gif + +/.*$/ + borfle\nbib.gif\nno + 0: no + +/.*$/m + borfle\nbib.gif\nno + 0: borfle + +/.*$/s + borfle\nbib.gif\nno + 0: borfle\x0abib.gif\x0ano + +/.*$/ms + borfle\nbib.gif\nno + 0: borfle\x0abib.gif\x0ano + +/.*$/ + borfle\nbib.gif\nno\n + 0: no + +/.*$/m + borfle\nbib.gif\nno\n + 0: borfle + +/.*$/s + borfle\nbib.gif\nno\n + 0: borfle\x0abib.gif\x0ano\x0a + +/.*$/ms + borfle\nbib.gif\nno\n + 0: borfle\x0abib.gif\x0ano\x0a + +/(.*X|^B)/ + abcde\n1234Xyz + 0: 1234X + 1: 1234X + BarFoo + 0: B + 1: B +\= Expect no match + abcde\nBar +No match + +/(.*X|^B)/m + abcde\n1234Xyz + 0: 1234X + 1: 1234X + BarFoo + 0: B + 1: B + abcde\nBar + 0: B + 1: B + +/(.*X|^B)/s + abcde\n1234Xyz + 0: abcde\x0a1234X + 1: abcde\x0a1234X + BarFoo + 0: B + 1: B +\= Expect no match + abcde\nBar +No match + +/(.*X|^B)/ms + abcde\n1234Xyz + 0: abcde\x0a1234X + 1: abcde\x0a1234X + BarFoo + 0: B + 1: B + abcde\nBar + 0: B + 1: B + +/(?s)(.*X|^B)/ + abcde\n1234Xyz + 0: abcde\x0a1234X + 1: abcde\x0a1234X + BarFoo + 0: B + 1: B +\= Expect no match + abcde\nBar +No match + +/(?s:.*X|^B)/ + abcde\n1234Xyz + 0: abcde\x0a1234X + BarFoo + 0: B +\= Expect no match + abcde\nBar +No match + +/^.*B/ +\= Expect no match + abc\nB +No match + +/(?s)^.*B/ + abc\nB + 0: abc\x0aB + +/(?m)^.*B/ + abc\nB + 0: B + +/(?ms)^.*B/ + abc\nB + 0: abc\x0aB + +/(?ms)^B/ + abc\nB + 0: B + +/(?s)B$/ + B\n + 0: B + +/^[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]/ + 123456654321 + 0: 123456654321 + +/^\d\d\d\d\d\d\d\d\d\d\d\d/ + 123456654321 + 0: 123456654321 + +/^[\d][\d][\d][\d][\d][\d][\d][\d][\d][\d][\d][\d]/ + 123456654321 + 0: 123456654321 + +/^[abc]{12}/ + abcabcabcabc + 0: abcabcabcabc + +/^[a-c]{12}/ + abcabcabcabc + 0: abcabcabcabc + +/^(a|b|c){12}/ + abcabcabcabc + 0: abcabcabcabc + 1: c + +/^[abcdefghijklmnopqrstuvwxy0123456789]/ + n + 0: n +\= Expect no match + z +No match + +/abcde{0,0}/ + abcd + 0: abcd +\= Expect no match + abce +No match + +/ab[cd]{0,0}e/ + abe + 0: abe +\= Expect no match + abcde +No match + +/ab(c){0,0}d/ + abd + 0: abd +\= Expect no match + abcd +No match + +/a(b*)/ + a + 0: a + 1: + ab + 0: ab + 1: b + abbbb + 0: abbbb + 1: bbbb +\= Expect no match + bbbbb +No match + +/ab\d{0}e/ + abe + 0: abe +\= Expect no match + ab1e +No match + +/"([^\\"]+|\\.)*"/ + the \"quick\" brown fox + 0: "quick" + 1: quick + \"the \\\"quick\\\" brown fox\" + 0: "the \"quick\" brown fox" + 1: brown fox + +/]{0,})>]{0,})>([\d]{0,}\.)(.*)((
([\w\W\s\d][^<>]{0,})|[\s]{0,}))<\/a><\/TD>]{0,})>([\w\W\s\d][^<>]{0,})<\/TD>]{0,})>([\w\W\s\d][^<>]{0,})<\/TD><\/TR>/is + 43.Word Processor
(N-1286)
Lega lstaff.comCA - Statewide + 0: 43.Word Processor
(N-1286)
Lega lstaff.comCA - Statewide + 1: BGCOLOR='#DBE9E9' + 2: align=left valign=top + 3: 43. + 4: Word Processor
(N-1286) + 5: + 6: + 7: + 8: align=left valign=top + 9: Lega lstaff.com +10: align=left valign=top +11: CA - Statewide + +/a[^a]b/ + acb + 0: acb + a\nb + 0: a\x0ab + +/a.b/ + acb + 0: acb +\= Expect no match + a\nb +No match + +/a[^a]b/s + acb + 0: acb + a\nb + 0: a\x0ab + +/a.b/s + acb + 0: acb + a\nb + 0: a\x0ab + +/^(b+?|a){1,2}?c/ + bac + 0: bac + 1: a + bbac + 0: bbac + 1: a + bbbac + 0: bbbac + 1: a + bbbbac + 0: bbbbac + 1: a + bbbbbac + 0: bbbbbac + 1: a + +/^(b+|a){1,2}?c/ + bac + 0: bac + 1: a + bbac + 0: bbac + 1: a + bbbac + 0: bbbac + 1: a + bbbbac + 0: bbbbac + 1: a + bbbbbac + 0: bbbbbac + 1: a + +/(?!\A)x/m + a\bx\n + 0: x + a\nx\n + 0: x +\= Expect no match + x\nb\n +No match + +/(A|B)*?CD/ + CD + 0: CD + +/(A|B)*CD/ + CD + 0: CD + +/(AB)*?\1/ + ABABAB + 0: ABAB + 1: AB + +/(AB)*\1/ + ABABAB + 0: ABABAB + 1: AB + +/(?.*/)foo" + /this/is/a/very/long/line/in/deed/with/very/many/slashes/in/and/foo + 0: /this/is/a/very/long/line/in/deed/with/very/many/slashes/in/and/foo +\= Expect no match + /this/is/a/very/long/line/in/deed/with/very/many/slashes/in/it/you/see/ +No match + +/(?>(\.\d\d[1-9]?))\d+/ + 1.230003938 + 0: .230003938 + 1: .23 + 1.875000282 + 0: .875000282 + 1: .875 +\= Expect no match + 1.235 +No match + +/^((?>\w+)|(?>\s+))*$/ + now is the time for all good men to come to the aid of the party + 0: now is the time for all good men to come to the aid of the party + 1: party +\= Expect no match + this is not a line with only words and spaces! +No match + +/(\d+)(\w)/ + 12345a + 0: 12345a + 1: 12345 + 2: a + 12345+ + 0: 12345 + 1: 1234 + 2: 5 + +/((?>\d+))(\w)/ + 12345a + 0: 12345a + 1: 12345 + 2: a +\= Expect no match + 12345+ +No match + +/(?>a+)b/ + aaab + 0: aaab + +/((?>a+)b)/ + aaab + 0: aaab + 1: aaab + +/(?>(a+))b/ + aaab + 0: aaab + 1: aaa + +/(?>b)+/ + aaabbbccc + 0: bbb + +/(?>a+|b+|c+)*c/ + aaabbbbccccd + 0: aaabbbbc + +/((?>[^()]+)|\([^()]*\))+/ + ((abc(ade)ufh()()x + 0: abc(ade)ufh()()x + 1: x + +/\(((?>[^()]+)|\([^()]+\))+\)/ + (abc) + 0: (abc) + 1: abc + (abc(def)xyz) + 0: (abc(def)xyz) + 1: xyz +\= Expect no match + ((()aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +No match + +/a(?-i)b/i + ab + 0: ab + Ab + 0: Ab +\= Expect no match + aB +No match + AB +No match + +/(a (?x)b c)d e/ + a bcd e + 0: a bcd e + 1: a bc +\= Expect no match + a b cd e +No match + abcd e +No match + a bcde +No match + +/(a b(?x)c d (?-x)e f)/ + a bcde f + 0: a bcde f + 1: a bcde f +\= Expect no match + abcdef +No match + +/(a(?i)b)c/ + abc + 0: abc + 1: ab + aBc + 0: aBc + 1: aB +\= Expect no match + abC +No match + aBC +No match + Abc +No match + ABc +No match + ABC +No match + AbC +No match + +/a(?i:b)c/ + abc + 0: abc + aBc + 0: aBc +\= Expect no match + ABC +No match + abC +No match + aBC +No match + +/a(?i:b)*c/ + aBc + 0: aBc + aBBc + 0: aBBc +\= Expect no match + aBC +No match + aBBC +No match + +/a(?=b(?i)c)\w\wd/ + abcd + 0: abcd + abCd + 0: abCd +\= Expect no match + aBCd +No match + abcD +No match + +/(?s-i:more.*than).*million/i + more than million + 0: more than million + more than MILLION + 0: more than MILLION + more \n than Million + 0: more \x0a than Million +\= Expect no match + MORE THAN MILLION +No match + more \n than \n million +No match + +/(?:(?s-i)more.*than).*million/i + more than million + 0: more than million + more than MILLION + 0: more than MILLION + more \n than Million + 0: more \x0a than Million +\= Expect no match + MORE THAN MILLION +No match + more \n than \n million +No match + +/(?>a(?i)b+)+c/ + abc + 0: abc + aBbc + 0: aBbc + aBBc + 0: 
aBBc +\= Expect no match + Abc +No match + abAb +No match + abbC +No match + +/(?=a(?i)b)\w\wc/ + abc + 0: abc + aBc + 0: aBc +\= Expect no match + Ab +No match + abC +No match + aBC +No match + +/(?<=a(?i)b)(\w\w)c/ + abxxc + 0: xxc + 1: xx + aBxxc + 0: xxc + 1: xx +\= Expect no match + Abxxc +No match + ABxxc +No match + abxxC +No match + +/(?:(a)|b)(?(1)A|B)/ + aA + 0: aA + 1: a + bB + 0: bB +\= Expect no match + aB +No match + bA +No match + +/^(a)?(?(1)a|b)+$/ + aa + 0: aa + 1: a + b + 0: b + bb + 0: bb +\= Expect no match + ab +No match + +# Perl gets this next one wrong if the pattern ends with $; in that case it +# fails to match "12". + +/^(?(?=abc)\w{3}:|\d\d)/ + abc: + 0: abc: + 12 + 0: 12 + 123 + 0: 12 +\= Expect no match + xyz +No match + +/^(?(?!abc)\d\d|\w{3}:)$/ + abc: + 0: abc: + 12 + 0: 12 +\= Expect no match + 123 +No match + xyz +No match + +/(?(?<=foo)bar|cat)/ + foobar + 0: bar + cat + 0: cat + fcat + 0: cat + focat + 0: cat +\= Expect no match + foocat +No match + +/(?(?a*)*/ + a + 0: a + aa + 0: aa + aaaa + 0: aaaa + +/(abc|)+/ + abc + 0: abc + 1: + abcabc + 0: abcabc + 1: + abcabcabc + 0: abcabcabc + 1: + xyz + 0: + 1: + +/([a]*)*/ + a + 0: a + 1: + aaaaa + 0: aaaaa + 1: + +/([ab]*)*/ + a + 0: a + 1: + b + 0: b + 1: + ababab + 0: ababab + 1: + aaaabcde + 0: aaaab + 1: + bbbb + 0: bbbb + 1: + +/([^a]*)*/ + b + 0: b + 1: + bbbb + 0: bbbb + 1: + aaa + 0: + 1: + +/([^ab]*)*/ + cccc + 0: cccc + 1: + abab + 0: + 1: + +/([a]*?)*/ + a + 0: + 1: + aaaa + 0: + 1: + +/([ab]*?)*/ + a + 0: + 1: + b + 0: + 1: + abab + 0: + 1: + baba + 0: + 1: + +/([^a]*?)*/ + b + 0: + 1: + bbbb + 0: + 1: + aaa + 0: + 1: + +/([^ab]*?)*/ + c + 0: + 1: + cccc + 0: + 1: + baba + 0: + 1: + +/(?>a*)*/ + a + 0: a + aaabcde + 0: aaa + +/((?>a*))*/ + aaaaa + 0: aaaaa + 1: + aabbaa + 0: aa + 1: + +/((?>a*?))*/ + aaaaa + 0: + 1: + aabbaa + 0: + 1: + +/(?(?=[^a-z]+[a-z]) \d{2}-[a-z]{3}-\d{2} | \d{2}-\d{2}-\d{2} ) /x + 12-sep-98 + 0: 12-sep-98 + 12-09-98 + 0: 12-09-98 +\= Expect no match + sep-12-98 +No match + +/(?<=(foo))bar\1/ + foobarfoo + 0: barfoo + 1: foo + foobarfootling + 0: barfoo + 1: foo +\= Expect no match + foobar +No match + barfoo +No match + +/(?i:saturday|sunday)/ + saturday + 0: saturday + sunday + 0: sunday + Saturday + 0: Saturday + Sunday + 0: Sunday + SATURDAY + 0: SATURDAY + SUNDAY + 0: SUNDAY + SunDay + 0: SunDay + +/(a(?i)bc|BB)x/ + abcx + 0: abcx + 1: abc + aBCx + 0: aBCx + 1: aBC + bbx + 0: bbx + 1: bb + BBx + 0: BBx + 1: BB +\= Expect no match + abcX +No match + aBCX +No match + bbX +No match + BBX +No match + +/^([ab](?i)[cd]|[ef])/ + ac + 0: ac + 1: ac + aC + 0: aC + 1: aC + bD + 0: bD + 1: bD + elephant + 0: e + 1: e + Europe + 0: E + 1: E + frog + 0: f + 1: f + France + 0: F + 1: F +\= Expect no match + Africa +No match + +/^(ab|a(?i)[b-c](?m-i)d|x(?i)y|z)/ + ab + 0: ab + 1: ab + aBd + 0: aBd + 1: aBd + xy + 0: xy + 1: xy + xY + 0: xY + 1: xY + zebra + 0: z + 1: z + Zambesi + 0: Z + 1: Z +\= Expect no match + aCD +No match + XY +No match + +/(?<=foo\n)^bar/m + foo\nbar + 0: bar +\= Expect no match + bar +No match + baz\nbar +No match + +/(?<=(?]&/ + <&OUT + 0: <& + +/^(a\1?){4}$/ + aaaaaaaaaa + 0: aaaaaaaaaa + 1: aaaa +\= Expect no match + AB +No match + aaaaaaaaa +No match + aaaaaaaaaaa +No match + +/^(a(?(1)\1)){4}$/ + aaaaaaaaaa + 0: aaaaaaaaaa + 1: aaaa +\= Expect no match + aaaaaaaaa +No match + aaaaaaaaaaa +No match + +/(?:(f)(o)(o)|(b)(a)(r))*/ + foobar + 0: foobar + 1: f + 2: o + 3: o + 4: b + 5: a + 6: r + +/(?<=a)b/ + ab + 0: b +\= Expect no match + cb +No match + b 
+No match + +/(? + 2: abcd + xy:z:::abcd + 0: xy:z:::abcd + 1: xy:z::: + 2: abcd + +/^[^bcd]*(c+)/ + aexycd + 0: aexyc + 1: c + +/(a*)b+/ + caab + 0: aab + 1: aa + +/([\w:]+::)?(\w+)$/ + abcd + 0: abcd + 1: + 2: abcd + xy:z:::abcd + 0: xy:z:::abcd + 1: xy:z::: + 2: abcd +\= Expect no match + abcd: +No match + abcd: +No match + +/^[^bcd]*(c+)/ + aexycd + 0: aexyc + 1: c + +/(>a+)ab/ + +/(?>a+)b/ + aaab + 0: aaab + +/([[:]+)/ + a:[b]: + 0: :[ + 1: :[ + +/([[=]+)/ + a=[b]= + 0: =[ + 1: =[ + +/([[.]+)/ + a.[b]. + 0: .[ + 1: .[ + +/((?>a+)b)/ + aaab + 0: aaab + 1: aaab + +/(?>(a+))b/ + aaab + 0: aaab + 1: aaa + +/((?>[^()]+)|\([^()]*\))+/ + ((abc(ade)ufh()()x + 0: abc(ade)ufh()()x + 1: x + +/a\Z/ +\= Expect no match + aaab +No match + a\nb\n +No match + +/b\Z/ + a\nb\n + 0: b + +/b\z/ + +/b\Z/ + a\nb + 0: b + +/b\z/ + a\nb + 0: b + +/^(?>(?(1)\.|())[^\W_](?>[a-z0-9-]*[^\W_])?)+$/ + a + 0: a + 1: + abc + 0: abc + 1: + a-b + 0: a-b + 1: + 0-9 + 0: 0-9 + 1: + a.b + 0: a.b + 1: + 5.6.7 + 0: 5.6.7 + 1: + the.quick.brown.fox + 0: the.quick.brown.fox + 1: + a100.b200.300c + 0: a100.b200.300c + 1: + 12-ab.1245 + 0: 12-ab.1245 + 1: +\= Expect no match + \ +No match + .a +No match + -a +No match + a- +No match + a. +No match + a_b +No match + a.- +No match + a.. +No match + ab..bc +No match + the.quick.brown.fox- +No match + the.quick.brown.fox. +No match + the.quick.brown.fox_ +No match + the.quick.brown.fox+ +No match + +/(?>.*)(?<=(abcd|wxyz))/ + alphabetabcd + 0: alphabetabcd + 1: abcd + endingwxyz + 0: endingwxyz + 1: wxyz +\= Expect no match + a rather long string that doesn't end with one of them +No match + +/word (?>(?:(?!otherword)[a-zA-Z0-9]+ ){0,30})otherword/ + word cat dog elephant mussel cow horse canary baboon snake shark otherword + 0: word cat dog elephant mussel cow horse canary baboon snake shark otherword +\= Expect no match + word cat dog elephant mussel cow horse canary baboon snake shark +No match + +/word (?>[a-zA-Z0-9]+ ){0,30}otherword/ +\= Expect no match + word cat dog elephant mussel cow horse canary baboon snake shark the quick brown fox and the lazy dog and several other words getting close to thirty by now I hope +No match + +/(?<=\d{3}(?!999))foo/ + 999foo + 0: foo + 123999foo + 0: foo +\= Expect no match + 123abcfoo +No match + +/(?<=(?!...999)\d{3})foo/ + 999foo + 0: foo + 123999foo + 0: foo +\= Expect no match + 123abcfoo +No match + +/(?<=\d{3}(?!999)...)foo/ + 123abcfoo + 0: foo + 123456foo + 0: foo +\= Expect no match + 123999foo +No match + +/(?<=\d{3}...)(? + 2: + 3: abcd +
+ 2: + 3: abcd + \s*)=(?>\s*) # find + 2: + 3: abcd + Z)+|A)*/ + ZABCDEFG + 0: ZA + 1: A + +/((?>)+|A)*/ + ZABCDEFG + 0: + 1: + +/^[\d-a]/ + abcde + 0: a + -things + 0: - + 0digit + 0: 0 +\= Expect no match + bcdef +No match + +/[\s]+/ + > \x09\x0a\x0c\x0d\x0b< + 0: \x09\x0a\x0c\x0d\x0b + +/\s+/ + > \x09\x0a\x0c\x0d\x0b< + 0: \x09\x0a\x0c\x0d\x0b + +/a b/x + ab + 0: ab + +/(?!\A)x/m + a\nxb\n + 0: x + +/(?!^)x/m +\= Expect no match + a\nxb\n +No match + +#/abc\Qabc\Eabc/ +# abcabcabc +# 0: abcabcabc + +#/abc\Q(*+|\Eabc/ +# abc(*+|abc +# 0: abc(*+|abc + +#/ abc\Q abc\Eabc/x +# abc abcabc +# 0: abc abcabc +#\= Expect no match +# abcabcabc +#No match + +#/abc#comment +# \Q#not comment +# literal\E/x +# abc#not comment\n literal +# 0: abc#not comment\x0a literal + +#/abc#comment +# \Q#not comment +# literal/x +# abc#not comment\n literal +# 0: abc#not comment\x0a literal + +#/abc#comment +# \Q#not comment +# literal\E #more comment +# /x +# abc#not comment\n literal +# 0: abc#not comment\x0a literal + +#/abc#comment +# \Q#not comment +# literal\E #more comment/x +# abc#not comment\n literal +# 0: abc#not comment\x0a literal + +#/\Qabc\$xyz\E/ +# abc\\\$xyz +# 0: abc\$xyz + +#/\Qabc\E\$\Qxyz\E/ +# abc\$xyz +# 0: abc$xyz + +/\Gabc/ + abc + 0: abc +\= Expect no match + xyzabc +No match + +/a(?x: b c )d/ + XabcdY + 0: abcd +\= Expect no match + Xa b c d Y +No match + +/((?x)x y z | a b c)/ + XabcY + 0: abc + 1: abc + AxyzB + 0: xyz + 1: xyz + +/(?i)AB(?-i)C/ + XabCY + 0: abC +\= Expect no match + XabcY +No match + +/((?i)AB(?-i)C|D)E/ + abCE + 0: abCE + 1: abC + DE + 0: DE + 1: D +\= Expect no match + abcE +No match + abCe +No match + dE +No match + De +No match + +/(.*)\d+\1/ + abc123abc + 0: abc123abc + 1: abc + abc123bc + 0: bc123bc + 1: bc + +/(.*)\d+\1/s + abc123abc + 0: abc123abc + 1: abc + abc123bc + 0: bc123bc + 1: bc + +/((.*))\d+\1/ + abc123abc + 0: abc123abc + 1: abc + 2: abc + abc123bc + 0: bc123bc + 1: bc + 2: bc + +# This tests for an IPv6 address in the form where it can have up to +# eight components, one and only one of which is empty. This must be +# an internal component. + +/^(?!:) # colon disallowed at start + (?: # start of item + (?: [0-9a-f]{1,4} | # 1-4 hex digits or + (?(1)0 | () ) ) # if null previously matched, fail; else null + : # followed by colon + ){1,7} # end item; 1-7 of them required + [0-9a-f]{1,4} $ # final hex number at end of string + (?(1)|.) 
# check that there was an empty component + /ix + a123::a123 + 0: a123::a123 + 1: + a123:b342::abcd + 0: a123:b342::abcd + 1: + a123:b342::324e:abcd + 0: a123:b342::324e:abcd + 1: + a123:ddde:b342::324e:abcd + 0: a123:ddde:b342::324e:abcd + 1: + a123:ddde:b342::324e:dcba:abcd + 0: a123:ddde:b342::324e:dcba:abcd + 1: + a123:ddde:9999:b342::324e:dcba:abcd + 0: a123:ddde:9999:b342::324e:dcba:abcd + 1: +\= Expect no match + 1:2:3:4:5:6:7:8 +No match + a123:bce:ddde:9999:b342::324e:dcba:abcd +No match + a123::9999:b342::324e:dcba:abcd +No match + abcde:2:3:4:5:6:7:8 +No match + ::1 +No match + abcd:fee0:123:: +No match + :1 +No match + 1: +No match + +#/[z\Qa-d]\E]/ +# z +# 0: z +# a +# 0: a +# - +# 0: - +# d +# 0: d +# ] +# 0: ] +#\= Expect no match +# b +#No match + +#TODO: PCRE has an optimization to make this workable, .NET does not +#/(a+)*b/ +#\= Expect no match +# aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +#No match + +# All these had to be updated because we understand unicode +# and this looks like it's expecting single byte matches + +# .NET generates \xe4...not sure what's up, might just be different code pages +/(?i)reg(?:ul(?:[aä]|ae)r|ex)/ + REGular + 0: REGular + regulaer + 0: regulaer + Regex + 0: Regex + regulär + 0: regul\xc3\xa4r + +#/Åæåä[à-ÿÀ-ß]+/ +# Åæåäà +# 0: \xc5\xe6\xe5\xe4\xe0 +# Åæåäÿ +# 0: \xc5\xe6\xe5\xe4\xff +# ÅæåäÀ +# 0: \xc5\xe6\xe5\xe4\xc0 +# Åæåäß +# 0: \xc5\xe6\xe5\xe4\xdf + +/(?<=Z)X./ + \x84XAZXB + 0: XB + +/ab cd (?x) de fg/ + ab cd defg + 0: ab cd defg + +/ab cd(?x) de fg/ + ab cddefg + 0: ab cddefg +\= Expect no match + abcddefg +No match + +/(? + 2: + D + 0: D + 1: + 2: + +# this is really long with debug -- removing for now +#/(a|)*\d/ +# aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 +# 0: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 +# 1: +#\= Expect no match +# aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +#No match + +/(?>a|)*\d/ + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 + 0: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 +\= Expect no match + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +No match + +/(?:a|)*\d/ + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 + 0: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa4 +\= Expect no match + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +No match + +/^(?s)(?>.*)(? + 2: a + +/(?>(a))b|(a)c/ + ac + 0: ac + 1: + 2: a + +/(?=(a))ab|(a)c/ + ac + 0: ac + 1: + 2: a + +/((?>(a))b|(a)c)/ + ac + 0: ac + 1: ac + 2: + 3: a + +/(?=(?>(a))b|(a)c)(..)/ + ac + 0: ac + 1: + 2: a + 3: ac + +/(?>(?>(a))b|(a)c)/ + ac + 0: ac + 1: + 2: a + +/((?>(a+)b)+(aabab))/ + aaaabaaabaabab + 0: aaaabaaabaabab + 1: aaaabaaabaabab + 2: aaa + 3: aabab + +/(?>a+|ab)+?c/ +\= Expect no match + aabc +No match + +/(?>a+|ab)+c/ +\= Expect no match + aabc +No match + +/(?:a+|ab)+c/ + aabc + 0: aabc + +/^(?:a|ab)+c/ + aaaabc + 0: aaaabc + +/(?=abc){0}xyz/ + xyz + 0: xyz + +/(?=abc){1}xyz/ +\= Expect no match + xyz +No match + +/(?=(a))?./ + ab + 0: a + 1: a + bc + 0: b + +/(?=(a))??./ + ab + 0: a + bc + 0: b + +/^(?!a){0}\w+/ + aaaaa + 0: aaaaa + +/(?<=(abc))?xyz/ + abcxyz + 0: xyz + 1: abc + pqrxyz + 0: xyz + +/^[g]+/ + ggg<<>> + 0: ggg<<>> +\= Expect no match + \\ga +No match + +/^[ga]+/ + gggagagaxyz + 0: gggagaga + +/[:a]xxx[b:]/ + :xxx: + 0: :xxx: + +/(?<=a{2})b/i + xaabc + 0: b +\= Expect no match + xabc +No match + +/(? 
+# 4: +# 5: c +# 6: d +# 7: Y + +#/^X(?7)(a)(?|(b|(?|(r)|(t))(s))|(q))(c)(d)(Y)/ +# XYabcdY +# 0: XYabcdY +# 1: a +# 2: b +# 3: +# 4: +# 5: c +# 6: d +# 7: Y + +/(?'abc'\w+):\k{2}/ + a:aaxyz + 0: a:aa + 1: a + ab:ababxyz + 0: ab:abab + 1: ab +\= Expect no match + a:axyz +No match + ab:abxyz +No match + +/^(?a)? (?(ab)b|c) (?(ab)d|e)/x + abd + 0: abd + 1: a + ce + 0: ce + +# .NET has more consistent grouping numbers with these dupe groups for the two options +/(?:a(? (?')|(?")) |b(? (?')|(?")) ) (?(quote)[a-z]+|[0-9]+)/x,dupnames + a\"aaaaa + 0: a"aaaaa + 1: " + 2: + 3: " + b\"aaaaa + 0: b"aaaaa + 1: " + 2: + 3: " +\= Expect no match + b\"11111 +No match + +#/(?P(?P0)(?P>L1)|(?P>L2))/ +# 0 +# 0: 0 +# 1: 0 +# 00 +# 0: 00 +# 1: 00 +# 2: 0 +# 0000 +# 0: 0000 +# 1: 0000 +# 2: 0 + +#/(?P(?P0)|(?P>L2)(?P>L1))/ +# 0 +# 0: 0 +# 1: 0 +# 2: 0 +# 00 +# 0: 0 +# 1: 0 +# 2: 0 +# 0000 +# 0: 0 +# 1: 0 +# 2: 0 + +# Check the use of names for failure + +# Check opening parens in comment when seeking forward reference. + +#/(?P(?P=abn)xxx|)+/ +# xxx +# 0: +# 1: + +#Posses +/^(a)?(\w)/ + aaaaX + 0: aa + 1: a + 2: a + YZ + 0: Y + 1: + 2: Y + +#Posses +/^(?:a)?(\w)/ + aaaaX + 0: aa + 1: a + YZ + 0: Y + 1: Y + +/\A.*?(a|bc)/ + ba + 0: ba + 1: a + +/\A.*?(?:a|bc|d)/ + ba + 0: ba + +# -------------------------- + +/(another)?(\1?)test/ + hello world test + 0: test + 1: + 2: + +/(another)?(\1+)test/ +\= Expect no match + hello world test +No match + +/((?:a?)*)*c/ + aac + 0: aac + 1: + +/((?>a?)*)*c/ + aac + 0: aac + 1: + +/(?>.*?a)(?<=ba)/ + aba + 0: ba + +/(?:.*?a)(?<=ba)/ + aba + 0: aba + +/(?>.*?a)b/s + aab + 0: ab + +/(?>.*?a)b/ + aab + 0: ab + +/(?>^a)b/s +\= Expect no match + aab +No match + +/(?>.*?)(?<=(abcd)|(wxyz))/ + alphabetabcd + 0: + 1: abcd + endingwxyz + 0: + 1: + 2: wxyz + +/(?>.*)(?<=(abcd)|(wxyz))/ + alphabetabcd + 0: alphabetabcd + 1: abcd + endingwxyz + 0: endingwxyz + 1: + 2: wxyz + +"(?>.*)foo" +\= Expect no match + abcdfooxyz +No match + +"(?>.*?)foo" + abcdfooxyz + 0: foo + +# Tests that try to figure out how Perl works. My hypothesis is that the first +# verb that is backtracked onto is the one that acts. This seems to be the case +# almost all the time, but there is one exception that is perhaps a bug. + +/a(?=bc).|abd/ + abd + 0: abd + abc + 0: ab + +/a(?>bc)d|abd/ + abceabd + 0: abd + +# These tests were formerly in test 2, but changes in PCRE and Perl have +# made them compatible. + +/^(a)?(?(1)a|b)+$/ +\= Expect no match + a +No match + +# ---- + +/^\d*\w{4}/ + 1234 + 0: 1234 +\= Expect no match + 123 +No match + +/^[^b]*\w{4}/ + aaaa + 0: aaaa +\= Expect no match + aaa +No match + +/^[^b]*\w{4}/i + aaaa + 0: aaaa +\= Expect no match + aaa +No match + +/^a*\w{4}/ + aaaa + 0: aaaa +\= Expect no match + aaa +No match + +/^a*\w{4}/i + aaaa + 0: aaaa +\= Expect no match + aaa +No match + +/(?:(?foo)|(?bar))\k/dupnames + foofoo + 0: foofoo + 1: foo + barbar + 0: barbar + 1: bar + +# A notable difference between PCRE and .NET. According to +# the PCRE docs: +# If you make a subroutine call to a non-unique named +# subpattern, the one that corresponds to the first +# occurrence of the name is used. In the absence of +# duplicate numbers (see the previous section) this is +# the one with the lowest number. +# .NET takes the most recently captured number according to MSDN: +# A backreference refers to the most recent definition of +# a group (the definition most immediately to the left, +# when matching left to right). 
When a group makes multiple +# captures, a backreference refers to the most recent capture. + +#/(?A)(?:(?foo)|(?bar))\k/dupnames +# AfooA +# 0: AfooA +# 1: A +# 2: foo +# AbarA +# 0: AbarA +# 1: A +# 2: +# 3: bar +#\= Expect no match +# Afoofoo +#No match +# Abarbar +#No match + +/^(\d+)\s+IN\s+SOA\s+(\S+)\s+(\S+)\s*\(\s*$/ + 1 IN SOA non-sp1 non-sp2( + 0: 1 IN SOA non-sp1 non-sp2( + 1: 1 + 2: non-sp1 + 3: non-sp2 + +# TODO: .NET's group number ordering here in the second example is a bit odd +/^ (?:(?A)|(?'B'B)(?A)) (?(A)x) (?(B)y)$/x,dupnames + Ax + 0: Ax + 1: A + BAxy + 0: BAxy + 1: A + 2: B + +/ ^ a + b $ /x + aaaab + 0: aaaab + +/ ^ a + #comment + b $ /x + aaaab + 0: aaaab + +/ ^ a + #comment + #comment + b $ /x + aaaab + 0: aaaab + +/ ^ (?> a + ) b $ /x + aaaab + 0: aaaab + +/ ^ ( a + ) + \w $ /x + aaaab + 0: aaaab + 1: aaaa + +/(?:x|(?:(xx|yy)+|x|x|x|x|x)|a|a|a)bc/ +\= Expect no match + acb +No match + +#Posses +#/\A(?:[^\"]+|\"(?:[^\"]*|\"\")*\")+/ +# NON QUOTED \"QUOT\"\"ED\" AFTER \"NOT MATCHED +# 0: NON QUOTED "QUOT""ED" AFTER + +#Posses +#/\A(?:[^\"]+|\"(?:[^\"]+|\"\")*\")+/ +# NON QUOTED \"QUOT\"\"ED\" AFTER \"NOT MATCHED +# 0: NON QUOTED "QUOT""ED" AFTER + +#Posses +#/\A(?:[^\"]+|\"(?:[^\"]+|\"\")+\")+/ +# NON QUOTED \"QUOT\"\"ED\" AFTER \"NOT MATCHED +# 0: NON QUOTED "QUOT""ED" AFTER + +#Posses +#/\A([^\"1]+|[\"2]([^\"3]*|[\"4][\"5])*[\"6])+/ +# NON QUOTED \"QUOT\"\"ED\" AFTER \"NOT MATCHED +# 0: NON QUOTED "QUOT""ED" AFTER +# 1: AFTER +# 2: + +/^\w+(?>\s*)(?<=\w)/ + test test + 0: tes + +#/(?Pa)?(?Pb)?(?()c|d)*l/ +# acl +# 0: acl +# 1: a +# bdl +# 0: bdl +# 1: +# 2: b +# adl +# 0: dl +# bcl +# 0: l + +/\sabc/ + \x0babc + 0: \x0babc + +#/[\Qa]\E]+/ +# aa]] +# 0: aa]] + +#/[\Q]a\E]+/ +# aa]] +# 0: aa]] + +/A((((((((a))))))))\8B/ + AaaB + 0: AaaB + 1: a + 2: a + 3: a + 4: a + 5: a + 6: a + 7: a + 8: a + +/A(((((((((a)))))))))\9B/ + AaaB + 0: AaaB + 1: a + 2: a + 3: a + 4: a + 5: a + 6: a + 7: a + 8: a + 9: a + +/(|ab)*?d/ + abd + 0: abd + 1: ab + xyd + 0: d + +/(\2|a)(\1)/ + aaa + 0: aa + 1: a + 2: a + +/(\2)(\1)/ + +"Z*(|d*){216}" + +/((((((((((((x))))))))))))\12/ + xx + 0: xx + 1: x + 2: x + 3: x + 4: x + 5: x + 6: x + 7: x + 8: x + 9: x +10: x +11: x +12: x + +#"(?|(\k'Pm')|(?'Pm'))" +# abcd +# 0: +# 1: + +#/(?|(aaa)|(b))\g{1}/ +# aaaaaa +# 0: aaaaaa +# 1: aaa +# bb +# 0: bb +# 1: b + +#/(?|(aaa)|(b))(?1)/ +# aaaaaa +# 0: aaaaaa +# 1: aaa +# baaa +# 0: baaa +# 1: b +#\= Expect no match +# bb +#No match + +#/(?|(aaa)|(b))/ +# xaaa +# 0: aaa +# 1: aaa +# xbc +# 0: b +# 1: b + +#/(?|(?'a'aaa)|(?'a'b))\k'a'/ +# aaaaaa +# 0: aaaaaa +# 1: aaa +# bb +# 0: bb +# 1: b + +#/(?|(?'a'aaa)|(?'a'b))(?'a'cccc)\k'a'/dupnames +# aaaccccaaa +# 0: aaaccccaaa +# 1: aaa +# 2: cccc +# bccccb +# 0: bccccb +# 1: b +# 2: cccc + +# End of testinput1 diff --git a/vendor/github.com/google/go-github/AUTHORS b/vendor/github.com/google/go-github/AUTHORS index 56622a4c..7c2f1291 100644 --- a/vendor/github.com/google/go-github/AUTHORS +++ b/vendor/github.com/google/go-github/AUTHORS @@ -26,14 +26,17 @@ Andy Lindeman Anshuman Bhartiya Antoine Pelisse Anubha Kushwaha +appilon Aravind Arıl Bozoluk Austin Dizzy +Ben Batha Beshr Kayali Beyang Liu Billy Lynch Björn Häuser Brad Harris +Brad Moylan Bradley Falzon Brian Egizi Bryan Boreham @@ -54,7 +57,10 @@ Daniel Leavitt Dave Du Cros Dave Henderson David Deng +David Jannotta +Davide Zipeto Dennis Webb +Dhi Aurrahman Diego Lapiduz Dmitri Shuralyov dmnlk @@ -63,6 +69,7 @@ Doug Turner Drew Fradette Eli Uriegas Elliott Beach +Emerson Wood erwinvaneyk Fabrice Filippo 
Valsorda @@ -75,7 +82,9 @@ Georgy Buranov Gnahz Google Inc. griffin_stewie +Guillaume Jacquet Guz Alexander +Guðmundur Bjarni Ólafsson Hanno Hecker Hari haran haya14busa @@ -89,6 +98,7 @@ Jan Kosecki Jeremy Morris Jihoon Chung Jimmi Dyson +Joan Saum Joe Tsai John Barton John Engelman @@ -96,6 +106,7 @@ jpbelanger-mtl Juan Basso Julien Rostand Justin Abrahms +Jusung Lee jzhoucliqr Katrina Owen Keita Urashima @@ -113,9 +124,11 @@ Luke Young Maksim Zhylinski Martin-Louis Bright Mat Geist +Matt Matt Brender Matt Landis Maxime Bury +Michael Spiegel Michael Tiller Michał Glapa Nathan VanBenschoten @@ -125,15 +138,19 @@ Nick Spragg Nikhita Raghunath Noah Zoschke ns-cweber +Oleg Kovalov Ondřej Kupka Panagiotis Moustafellos +Parham Alvani Parker Moore +parkhyukjun89 Pavel Shtanko Petr Shevtsov Pierre Carrier Piotr Zurek Quinn Slack Rackspace US, Inc. +Radek Simko Rajendra arora RaviTeja Pothana rc1140 @@ -145,6 +162,7 @@ Ryan Lower Sahil Dua saisi Sam Minnée +Sandeep Sukhani Sander van Harmelen Sanket Payghan Sarasa Kisaragi @@ -152,6 +170,7 @@ Sean Wang Sebastian Mandrean Sebastian Mæland Pedersen Sevki +Shagun Khemka Shawn Catanzarite Shawn Smith sona-tar diff --git a/vendor/github.com/google/go-github/github/apps.go b/vendor/github.com/google/go-github/github/apps.go index dd5e5a7a..32d4f2f4 100644 --- a/vendor/github.com/google/go-github/github/apps.go +++ b/vendor/github.com/google/go-github/github/apps.go @@ -20,6 +20,7 @@ type AppsService service // App represents a GitHub App. type App struct { ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` Owner *User `json:"owner,omitempty"` Name *string `json:"name,omitempty"` Description *string `json:"description,omitempty"` @@ -57,8 +58,8 @@ type Installation struct { RepositorySelection *string `json:"repository_selection,omitempty"` Events []string `json:"events,omitempty"` Permissions *InstallationPermissions `json:"permissions,omitempty"` - CreatedAt *time.Time `json:"created_at,omitempty"` - UpdatedAt *time.Time `json:"updated_at,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` } func (i Installation) String() string { @@ -196,6 +197,13 @@ func (s *AppsService) FindRepositoryInstallation(ctx context.Context, owner, rep return s.getInstallation(ctx, fmt.Sprintf("repos/%v/%v/installation", owner, repo)) } +// FindRepositoryInstallationByID finds the repository's installation information. +// +// Note: FindRepositoryInstallationByID uses the undocumented GitHub API endpoint /repositories/:id/installation. +func (s *AppsService) FindRepositoryInstallationByID(ctx context.Context, id int64) (*Installation, *Response, error) { + return s.getInstallation(ctx, fmt.Sprintf("repositories/%d/installation", id)) +} + // FindUserInstallation finds the user's installation information. // // GitHub API docs: https://developer.github.com/v3/apps/#find-repository-installation diff --git a/vendor/github.com/google/go-github/github/event_types.go b/vendor/github.com/google/go-github/github/event_types.go index cfc9290b..d68bf581 100644 --- a/vendor/github.com/google/go-github/github/event_types.go +++ b/vendor/github.com/google/go-github/github/event_types.go @@ -630,38 +630,39 @@ func (p PushEventCommit) String() string { // PushEventRepository represents the repo object in a PushEvent payload. 
type PushEventRepository struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - FullName *string `json:"full_name,omitempty"` - Owner *PushEventRepoOwner `json:"owner,omitempty"` - Private *bool `json:"private,omitempty"` - Description *string `json:"description,omitempty"` - Fork *bool `json:"fork,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - PushedAt *Timestamp `json:"pushed_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - Homepage *string `json:"homepage,omitempty"` - Size *int `json:"size,omitempty"` - StargazersCount *int `json:"stargazers_count,omitempty"` - WatchersCount *int `json:"watchers_count,omitempty"` - Language *string `json:"language,omitempty"` - HasIssues *bool `json:"has_issues,omitempty"` - HasDownloads *bool `json:"has_downloads,omitempty"` - HasWiki *bool `json:"has_wiki,omitempty"` - HasPages *bool `json:"has_pages,omitempty"` - ForksCount *int `json:"forks_count,omitempty"` - OpenIssuesCount *int `json:"open_issues_count,omitempty"` - DefaultBranch *string `json:"default_branch,omitempty"` - MasterBranch *string `json:"master_branch,omitempty"` - Organization *string `json:"organization,omitempty"` - URL *string `json:"url,omitempty"` - ArchiveURL *string `json:"archive_url,omitempty"` - HTMLURL *string `json:"html_url,omitempty"` - StatusesURL *string `json:"statuses_url,omitempty"` - GitURL *string `json:"git_url,omitempty"` - SSHURL *string `json:"ssh_url,omitempty"` - CloneURL *string `json:"clone_url,omitempty"` - SVNURL *string `json:"svn_url,omitempty"` + ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` + Name *string `json:"name,omitempty"` + FullName *string `json:"full_name,omitempty"` + Owner *User `json:"owner,omitempty"` + Private *bool `json:"private,omitempty"` + Description *string `json:"description,omitempty"` + Fork *bool `json:"fork,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + PushedAt *Timestamp `json:"pushed_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + Homepage *string `json:"homepage,omitempty"` + Size *int `json:"size,omitempty"` + StargazersCount *int `json:"stargazers_count,omitempty"` + WatchersCount *int `json:"watchers_count,omitempty"` + Language *string `json:"language,omitempty"` + HasIssues *bool `json:"has_issues,omitempty"` + HasDownloads *bool `json:"has_downloads,omitempty"` + HasWiki *bool `json:"has_wiki,omitempty"` + HasPages *bool `json:"has_pages,omitempty"` + ForksCount *int `json:"forks_count,omitempty"` + OpenIssuesCount *int `json:"open_issues_count,omitempty"` + DefaultBranch *string `json:"default_branch,omitempty"` + MasterBranch *string `json:"master_branch,omitempty"` + Organization *string `json:"organization,omitempty"` + URL *string `json:"url,omitempty"` + ArchiveURL *string `json:"archive_url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + StatusesURL *string `json:"statuses_url,omitempty"` + GitURL *string `json:"git_url,omitempty"` + SSHURL *string `json:"ssh_url,omitempty"` + CloneURL *string `json:"clone_url,omitempty"` + SVNURL *string `json:"svn_url,omitempty"` } // PushEventRepoOwner is a basic representation of user/org in a PushEvent payload. 
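Illustrative note on the event_types.go hunk above: PushEventRepository.Owner is now a *User (previously *PushEventRepoOwner) and the struct gains a NodeID field, with matching generated accessors further down in github-accessors.go. The sketch below is a minimal, hypothetical consumer of these fields; the helper name printPushRepo, the hand-built payload, and its sample values are assumptions for illustration only and are not part of this change.

package main

import (
	"fmt"

	"github.com/google/go-github/github"
)

// printPushRepo reads the PushEventRepository fields touched by this change
// through the generated nil-safe accessors: GetOwner now returns *github.User,
// and GetNodeID is new in this revision.
func printPushRepo(e *github.PushEvent) {
	repo := e.GetRepo() // *github.PushEventRepository
	fmt.Println("repo:   ", repo.GetFullName())
	fmt.Println("node_id:", repo.GetNodeID())           // new field in this revision
	fmt.Println("owner:  ", repo.GetOwner().GetLogin()) // Owner is now a *github.User
}

func main() {
	// A hand-built payload stands in for a real webhook delivery; values are illustrative.
	printPushRepo(&github.PushEvent{
		Repo: &github.PushEventRepository{
			FullName: github.String("octocat/hello-world"),
			NodeID:   github.String("MDEwOlJlcG9zaXRvcnkx"),
			Owner:    &github.User{Login: github.String("octocat")},
		},
	})
}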
diff --git a/vendor/github.com/google/go-github/github/gists.go b/vendor/github.com/google/go-github/github/gists.go index 9108b642..15e0bc2c 100644 --- a/vendor/github.com/google/go-github/github/gists.go +++ b/vendor/github.com/google/go-github/github/gists.go @@ -114,9 +114,6 @@ func (s *GistsService) List(ctx context.Context, user string, opt *GistListOptio return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - var gists []*Gist resp, err := s.client.Do(ctx, req, &gists) if err != nil { @@ -140,9 +137,6 @@ func (s *GistsService) ListAll(ctx context.Context, opt *GistListOptions) ([]*Gi return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - var gists []*Gist resp, err := s.client.Do(ctx, req, &gists) if err != nil { @@ -166,9 +160,6 @@ func (s *GistsService) ListStarred(ctx context.Context, opt *GistListOptions) ([ return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - var gists []*Gist resp, err := s.client.Do(ctx, req, &gists) if err != nil { @@ -188,9 +179,6 @@ func (s *GistsService) Get(ctx context.Context, id string) (*Gist, *Response, er return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - gist := new(Gist) resp, err := s.client.Do(ctx, req, gist) if err != nil { @@ -210,9 +198,6 @@ func (s *GistsService) GetRevision(ctx context.Context, id, sha string) (*Gist, return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - gist := new(Gist) resp, err := s.client.Do(ctx, req, gist) if err != nil { @@ -232,9 +217,6 @@ func (s *GistsService) Create(ctx context.Context, gist *Gist) (*Gist, *Response return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - g := new(Gist) resp, err := s.client.Do(ctx, req, g) if err != nil { @@ -254,9 +236,6 @@ func (s *GistsService) Edit(ctx context.Context, id string, gist *Gist) (*Gist, return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - g := new(Gist) resp, err := s.client.Do(ctx, req, g) if err != nil { @@ -281,9 +260,6 @@ func (s *GistsService) ListCommits(ctx context.Context, id string, opt *ListOpti return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - var gistCommits []*GistCommit resp, err := s.client.Do(ctx, req, &gistCommits) if err != nil { @@ -353,9 +329,6 @@ func (s *GistsService) Fork(ctx context.Context, id string) (*Gist, *Response, e return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - g := new(Gist) resp, err := s.client.Do(ctx, req, g) if err != nil { @@ -375,9 +348,6 @@ func (s *GistsService) ListForks(ctx context.Context, id string) ([]*GistFork, * return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - var gistForks []*GistFork resp, err := s.client.Do(ctx, req, &gistForks) if err != nil { diff --git a/vendor/github.com/google/go-github/github/git_blobs.go b/vendor/github.com/google/go-github/github/git_blobs.go index 5290c553..70aee14a 100644 --- a/vendor/github.com/google/go-github/github/git_blobs.go +++ b/vendor/github.com/google/go-github/github/git_blobs.go @@ -31,9 +31,6 @@ func (s *GitService) GetBlob(ctx context.Context, owner string, repo string, sha return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - blob := new(Blob) resp, err := s.client.Do(ctx, req, blob) return blob, resp, err @@ -66,9 +63,6 @@ func (s *GitService) CreateBlob(ctx context.Context, owner string, repo string, return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - t := new(Blob) resp, err := s.client.Do(ctx, req, t) return t, resp, err diff --git a/vendor/github.com/google/go-github/github/git_commits.go b/vendor/github.com/google/go-github/github/git_commits.go index 29882569..1eb48a8e 100644 --- a/vendor/github.com/google/go-github/github/git_commits.go +++ b/vendor/github.com/google/go-github/github/git_commits.go @@ -8,7 +8,6 @@ package github import ( "context" "fmt" - "strings" "time" ) @@ -70,8 +69,7 @@ func (s *GitService) GetCommit(ctx context.Context, owner string, repo string, s } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeGitSigningPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeGitSigningPreview) c := new(Commit) resp, err := s.client.Do(ctx, req, c) @@ -126,9 +124,6 @@ func (s *GitService) CreateCommit(ctx context.Context, owner string, repo string return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - c := new(Commit) resp, err := s.client.Do(ctx, req, c) if err != nil { diff --git a/vendor/github.com/google/go-github/github/git_refs.go b/vendor/github.com/google/go-github/github/git_refs.go index 0947d866..3b2ced23 100644 --- a/vendor/github.com/google/go-github/github/git_refs.go +++ b/vendor/github.com/google/go-github/github/git_refs.go @@ -63,9 +63,6 @@ func (s *GitService) GetRef(ctx context.Context, owner string, repo string, ref return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - r := new(Reference) resp, err := s.client.Do(ctx, req, r) if _, ok := err.(*json.UnmarshalTypeError); ok { @@ -97,9 +94,6 @@ func (s *GitService) GetRefs(ctx context.Context, owner string, repo string, ref return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - var rawJSON json.RawMessage resp, err := s.client.Do(ctx, req, &rawJSON) if err != nil { @@ -154,9 +148,6 @@ func (s *GitService) ListRefs(ctx context.Context, owner, repo string, opt *Refe return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - var rs []*Reference resp, err := s.client.Do(ctx, req, &rs) if err != nil { @@ -180,9 +171,6 @@ func (s *GitService) CreateRef(ctx context.Context, owner string, repo string, r return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - r := new(Reference) resp, err := s.client.Do(ctx, req, r) if err != nil { @@ -206,9 +194,6 @@ func (s *GitService) UpdateRef(ctx context.Context, owner string, repo string, r return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - r := new(Reference) resp, err := s.client.Do(ctx, req, r) if err != nil { diff --git a/vendor/github.com/google/go-github/github/git_tags.go b/vendor/github.com/google/go-github/github/git_tags.go index f3822ffa..90398b38 100644 --- a/vendor/github.com/google/go-github/github/git_tags.go +++ b/vendor/github.com/google/go-github/github/git_tags.go @@ -8,7 +8,6 @@ package github import ( "context" "fmt" - "strings" ) // Tag represents a tag object. @@ -45,8 +44,7 @@ func (s *GitService) GetTag(ctx context.Context, owner string, repo string, sha } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeGitSigningPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeGitSigningPreview) tag := new(Tag) resp, err := s.client.Do(ctx, req, tag) @@ -75,9 +73,6 @@ func (s *GitService) CreateTag(ctx context.Context, owner string, repo string, t return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - t := new(Tag) resp, err := s.client.Do(ctx, req, t) return t, resp, err diff --git a/vendor/github.com/google/go-github/github/git_trees.go b/vendor/github.com/google/go-github/github/git_trees.go index 4d6809a8..4bc29135 100644 --- a/vendor/github.com/google/go-github/github/git_trees.go +++ b/vendor/github.com/google/go-github/github/git_trees.go @@ -14,6 +14,12 @@ import ( type Tree struct { SHA *string `json:"sha,omitempty"` Entries []TreeEntry `json:"tree,omitempty"` + + // Truncated is true if the number of items in the tree + // exceeded GitHub's maximum limit and the Entries were truncated + // in the response. Only populated for requests that fetch + // trees like Git.GetTree. + Truncated *bool `json:"truncated,omitempty"` } func (t Tree) String() string { diff --git a/vendor/github.com/google/go-github/github/github-accessors.go b/vendor/github.com/google/go-github/github/github-accessors.go index 4d3e0bc1..8d03cc95 100644 --- a/vendor/github.com/google/go-github/github/github-accessors.go +++ b/vendor/github.com/google/go-github/github/github-accessors.go @@ -164,6 +164,14 @@ func (a *App) GetName() string { return *a.Name } +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (a *App) GetNodeID() string { + if a == nil || a.NodeID == nil { + return "" + } + return *a.NodeID +} + // GetOwner returns the Owner field. func (a *App) GetOwner() *User { if a == nil { @@ -2365,7 +2373,7 @@ func (d *DiscussionComment) GetNodeID() string { } // GetNumber returns the Number field if it's non-nil, zero value otherwise. 
-func (d *DiscussionComment) GetNumber() int64 { +func (d *DiscussionComment) GetNumber() int { if d == nil || d.Number == nil { return 0 } @@ -3357,9 +3365,9 @@ func (i *Installation) GetAppID() int64 { } // GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. -func (i *Installation) GetCreatedAt() time.Time { +func (i *Installation) GetCreatedAt() Timestamp { if i == nil || i.CreatedAt == nil { - return time.Time{} + return Timestamp{} } return *i.CreatedAt } @@ -3429,9 +3437,9 @@ func (i *Installation) GetTargetType() string { } // GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. -func (i *Installation) GetUpdatedAt() time.Time { +func (i *Installation) GetUpdatedAt() Timestamp { if i == nil || i.UpdatedAt == nil { - return time.Time{} + return Timestamp{} } return *i.UpdatedAt } @@ -3604,6 +3612,14 @@ func (i *Invitation) GetTeamCount() int { return *i.TeamCount } +// GetActiveLockReason returns the ActiveLockReason field if it's non-nil, zero value otherwise. +func (i *Issue) GetActiveLockReason() string { + if i == nil || i.ActiveLockReason == nil { + return "" + } + return *i.ActiveLockReason +} + // GetAssignee returns the Assignee field. func (i *Issue) GetAssignee() *User { if i == nil { @@ -4004,6 +4020,14 @@ func (i *IssueEvent) GetLabel() *Label { return i.Label } +// GetLockReason returns the LockReason field if it's non-nil, zero value otherwise. +func (i *IssueEvent) GetLockReason() string { + if i == nil || i.LockReason == nil { + return "" + } + return *i.LockReason +} + // GetMilestone returns the Milestone field. func (i *IssueEvent) GetMilestone() *Milestone { if i == nil { @@ -6100,6 +6124,38 @@ func (p *Plan) GetSpace() int { return *p.Space } +// GetConfigURL returns the ConfigURL field if it's non-nil, zero value otherwise. +func (p *PreReceiveHook) GetConfigURL() string { + if p == nil || p.ConfigURL == nil { + return "" + } + return *p.ConfigURL +} + +// GetEnforcement returns the Enforcement field if it's non-nil, zero value otherwise. +func (p *PreReceiveHook) GetEnforcement() string { + if p == nil || p.Enforcement == nil { + return "" + } + return *p.Enforcement +} + +// GetID returns the ID field if it's non-nil, zero value otherwise. +func (p *PreReceiveHook) GetID() int64 { + if p == nil || p.ID == nil { + return 0 + } + return *p.ID +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *PreReceiveHook) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + // GetBody returns the Body field if it's non-nil, zero value otherwise. func (p *Project) GetBody() string { if p == nil || p.Body == nil { @@ -6108,6 +6164,14 @@ func (p *Project) GetBody() string { return *p.Body } +// GetColumnsURL returns the ColumnsURL field if it's non-nil, zero value otherwise. +func (p *Project) GetColumnsURL() string { + if p == nil || p.ColumnsURL == nil { + return "" + } + return *p.ColumnsURL +} + // GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. func (p *Project) GetCreatedAt() Timestamp { if p == nil || p.CreatedAt == nil { @@ -6124,6 +6188,14 @@ func (p *Project) GetCreator() *User { return p.Creator } +// GetHTMLURL returns the HTMLURL field if it's non-nil, zero value otherwise. +func (p *Project) GetHTMLURL() string { + if p == nil || p.HTMLURL == nil { + return "" + } + return *p.HTMLURL +} + // GetID returns the ID field if it's non-nil, zero value otherwise. 
func (p *Project) GetID() int64 { if p == nil || p.ID == nil { @@ -6164,6 +6236,14 @@ func (p *Project) GetOwnerURL() string { return *p.OwnerURL } +// GetState returns the State field if it's non-nil, zero value otherwise. +func (p *Project) GetState() string { + if p == nil || p.State == nil { + return "" + } + return *p.State +} + // GetUpdatedAt returns the UpdatedAt field if it's non-nil, zero value otherwise. func (p *Project) GetUpdatedAt() Timestamp { if p == nil || p.UpdatedAt == nil { @@ -6348,6 +6428,14 @@ func (p *ProjectCardOptions) GetArchived() bool { return *p.Archived } +// GetCardsURL returns the CardsURL field if it's non-nil, zero value otherwise. +func (p *ProjectColumn) GetCardsURL() string { + if p == nil || p.CardsURL == nil { + return "" + } + return *p.CardsURL +} + // GetCreatedAt returns the CreatedAt field if it's non-nil, zero value otherwise. func (p *ProjectColumn) GetCreatedAt() Timestamp { if p == nil || p.CreatedAt == nil { @@ -6396,6 +6484,14 @@ func (p *ProjectColumn) GetUpdatedAt() Timestamp { return *p.UpdatedAt } +// GetURL returns the URL field if it's non-nil, zero value otherwise. +func (p *ProjectColumn) GetURL() string { + if p == nil || p.URL == nil { + return "" + } + return *p.URL +} + // GetAction returns the Action field if it's non-nil, zero value otherwise. func (p *ProjectColumnEvent) GetAction() string { if p == nil || p.Action == nil { @@ -6516,6 +6612,46 @@ func (p *ProjectEvent) GetSender() *User { return p.Sender } +// GetBody returns the Body field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetBody() string { + if p == nil || p.Body == nil { + return "" + } + return *p.Body +} + +// GetName returns the Name field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetName() string { + if p == nil || p.Name == nil { + return "" + } + return *p.Name +} + +// GetOrganizationPermission returns the OrganizationPermission field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetOrganizationPermission() string { + if p == nil || p.OrganizationPermission == nil { + return "" + } + return *p.OrganizationPermission +} + +// GetPublic returns the Public field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetPublic() bool { + if p == nil || p.Public == nil { + return false + } + return *p.Public +} + +// GetState returns the State field if it's non-nil, zero value otherwise. +func (p *ProjectOptions) GetState() string { + if p == nil || p.State == nil { + return "" + } + return *p.State +} + // GetEnforceAdmins returns the EnforceAdmins field. func (p *Protection) GetEnforceAdmins() *AdminEnforcement { if p == nil { @@ -6596,6 +6732,14 @@ func (p *PublicEvent) GetSender() *User { return p.Sender } +// GetActiveLockReason returns the ActiveLockReason field if it's non-nil, zero value otherwise. +func (p *PullRequest) GetActiveLockReason() string { + if p == nil || p.ActiveLockReason == nil { + return "" + } + return *p.ActiveLockReason +} + // GetAdditions returns the Additions field if it's non-nil, zero value otherwise. func (p *PullRequest) GetAdditions() int { if p == nil || p.Additions == nil { @@ -7868,6 +8012,14 @@ func (p *PushEventRepository) GetName() string { return *p.Name } +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. 
+func (p *PushEventRepository) GetNodeID() string { + if p == nil || p.NodeID == nil { + return "" + } + return *p.NodeID +} + // GetOpenIssuesCount returns the OpenIssuesCount field if it's non-nil, zero value otherwise. func (p *PushEventRepository) GetOpenIssuesCount() int { if p == nil || p.OpenIssuesCount == nil { @@ -7885,7 +8037,7 @@ func (p *PushEventRepository) GetOrganization() string { } // GetOwner returns the Owner field. -func (p *PushEventRepository) GetOwner() *PushEventRepoOwner { +func (p *PushEventRepository) GetOwner() *User { if p == nil { return nil } @@ -8740,6 +8892,14 @@ func (r *Repository) GetNetworkCount() int { return *r.NetworkCount } +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (r *Repository) GetNodeID() string { + if r == nil || r.NodeID == nil { + return "" + } + return *r.NodeID +} + // GetNotificationsURL returns the NotificationsURL field if it's non-nil, zero value otherwise. func (r *Repository) GetNotificationsURL() string { if r == nil || r.NotificationsURL == nil { @@ -9764,6 +9924,14 @@ func (r *RepoStatus) GetURL() string { return *r.URL } +// GetStrict returns the Strict field if it's non-nil, zero value otherwise. +func (r *RequiredStatusChecksRequest) GetStrict() bool { + if r == nil || r.Strict == nil { + return false + } + return *r.Strict +} + // GetName returns the Name field if it's non-nil, zero value otherwise. func (s *ServiceHook) GetName() string { if s == nil || s.Name == nil { @@ -10325,7 +10493,7 @@ func (t *TeamDiscussion) GetBodyVersion() string { } // GetCommentsCount returns the CommentsCount field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetCommentsCount() int64 { +func (t *TeamDiscussion) GetCommentsCount() int { if t == nil || t.CommentsCount == nil { return 0 } @@ -10373,7 +10541,7 @@ func (t *TeamDiscussion) GetNodeID() string { } // GetNumber returns the Number field if it's non-nil, zero value otherwise. -func (t *TeamDiscussion) GetNumber() int64 { +func (t *TeamDiscussion) GetNumber() int { if t == nil || t.Number == nil { return 0 } @@ -10812,6 +10980,14 @@ func (t *Tree) GetSHA() string { return *t.SHA } +// GetTruncated returns the Truncated field if it's non-nil, zero value otherwise. +func (t *Tree) GetTruncated() bool { + if t == nil || t.Truncated == nil { + return false + } + return *t.Truncated +} + // GetContent returns the Content field if it's non-nil, zero value otherwise. func (t *TreeEntry) GetContent() string { if t == nil || t.Content == nil { @@ -11100,6 +11276,14 @@ func (u *User) GetName() string { return *u.Name } +// GetNodeID returns the NodeID field if it's non-nil, zero value otherwise. +func (u *User) GetNodeID() string { + if u == nil || u.NodeID == nil { + return "" + } + return *u.NodeID +} + // GetOrganizationsURL returns the OrganizationsURL field if it's non-nil, zero value otherwise. func (u *User) GetOrganizationsURL() string { if u == nil || u.OrganizationsURL == nil { @@ -11236,6 +11420,22 @@ func (u *User) GetURL() string { return *u.URL } +// GetMessage returns the Message field if it's non-nil, zero value otherwise. +func (u *UserContext) GetMessage() string { + if u == nil || u.Message == nil { + return "" + } + return *u.Message +} + +// GetOcticon returns the Octicon field if it's non-nil, zero value otherwise. +func (u *UserContext) GetOcticon() string { + if u == nil || u.Octicon == nil { + return "" + } + return *u.Octicon +} + // GetEmail returns the Email field if it's non-nil, zero value otherwise. 
func (u *UserEmail) GetEmail() string { if u == nil || u.Email == nil { diff --git a/vendor/github.com/google/go-github/github/github.go b/vendor/github.com/google/go-github/github/github.go index 3fa46c30..0685852d 100644 --- a/vendor/github.com/google/go-github/github/github.go +++ b/vendor/github.com/google/go-github/github/github.go @@ -48,9 +48,6 @@ const ( // https://developer.github.com/changes/2014-12-09-new-attributes-for-stars-api/ mediaTypeStarringPreview = "application/vnd.github.v3.star+json" - // https://developer.github.com/changes/2015-11-11-protected-branches-api/ - mediaTypeProtectedBranchesPreview = "application/vnd.github.loki-preview+json" - // https://help.github.com/enterprise/2.4/admin/guides/migrations/exporting-the-github-com-organization-s-repositories/ mediaTypeMigrationsPreview = "application/vnd.github.wyandotte-preview+json" @@ -102,20 +99,29 @@ const ( // https://developer.github.com/changes/2017-11-09-repository-transfer-api-preview/ mediaTypeRepositoryTransferPreview = "application/vnd.github.nightshade-preview+json" - // https://developer.github.com/changes/2017-12-19-graphql-node-id/ - mediaTypeGraphQLNodeIDPreview = "application/vnd.github.jean-grey-preview+json" - // https://developer.github.com/changes/2018-01-25-organization-invitation-api-preview/ mediaTypeOrganizationInvitationPreview = "application/vnd.github.dazzler-preview+json" + // https://developer.github.com/changes/2018-03-16-protected-branches-required-approving-reviews/ + mediaTypeRequiredApprovingReviewsPreview = "application/vnd.github.luke-cage-preview+json" + // https://developer.github.com/changes/2018-02-22-label-description-search-preview/ mediaTypeLabelDescriptionSearchPreview = "application/vnd.github.symmetra-preview+json" // https://developer.github.com/changes/2018-02-07-team-discussions-api/ mediaTypeTeamDiscussionsPreview = "application/vnd.github.echo-preview+json" + // https://developer.github.com/changes/2018-03-21-hovercard-api-preview/ + mediaTypeHovercardPreview = "application/vnd.github.hagar-preview+json" + + // https://developer.github.com/changes/2018-01-10-lock-reason-api-preview/ + mediaTypeLockReasonPreview = "application/vnd.github.sailor-v-preview+json" + // https://developer.github.com/changes/2018-05-07-new-checks-api-public-beta/ mediaTypeCheckRunsPreview = "application/vnd.github.antiope-preview+json" + + // https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/ + mediaTypePreReceiveHooksPreview = "application/vnd.github.eye-scream-preview" ) // A Client manages communication with the GitHub API. diff --git a/vendor/github.com/google/go-github/github/issues.go b/vendor/github.com/google/go-github/github/issues.go index ded07f0a..47537544 100644 --- a/vendor/github.com/google/go-github/github/issues.go +++ b/vendor/github.com/google/go-github/github/issues.go @@ -56,6 +56,10 @@ type Issue struct { // TextMatches is only populated from search results that request text matches // See: search.go and https://developer.github.com/v3/search/#text-match-metadata TextMatches []TextMatch `json:"text_matches,omitempty"` + + // ActiveLockReason is populated only when LockReason is provided while locking the issue. + // Possible values are: "off-topic", "too heated", "resolved", and "spam". + ActiveLockReason *string `json:"active_lock_reason,omitempty"` } func (i Issue) String() string { @@ -156,7 +160,7 @@ func (s *IssuesService) listIssues(ctx context.Context, u string, opt *IssueList } // TODO: remove custom Accept headers when APIs fully launch. 
- acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} + acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) var issues []*Issue @@ -224,7 +228,7 @@ func (s *IssuesService) ListByRepo(ctx context.Context, owner string, repo strin } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} + acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) var issues []*Issue @@ -247,7 +251,7 @@ func (s *IssuesService) Get(ctx context.Context, owner string, repo string, numb } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} + acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) issue := new(Issue) @@ -270,8 +274,7 @@ func (s *IssuesService) Create(ctx context.Context, owner string, repo string, i } // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) i := new(Issue) resp, err := s.client.Do(ctx, req, i) @@ -293,8 +296,7 @@ func (s *IssuesService) Edit(ctx context.Context, owner string, repo string, num } // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) i := new(Issue) resp, err := s.client.Do(ctx, req, i) @@ -305,16 +307,29 @@ func (s *IssuesService) Edit(ctx context.Context, owner string, repo string, num return i, resp, nil } +// LockIssueOptions specifies the optional parameters to the +// IssuesService.Lock method. +type LockIssueOptions struct { + // LockReason specifies the reason to lock this issue. + // Providing a lock reason can help make it clearer to contributors why an issue + // was locked. Possible values are: "off-topic", "too heated", "resolved", and "spam". + LockReason string `json:"lock_reason,omitempty"` +} + // Lock an issue's conversation. 
// // GitHub API docs: https://developer.github.com/v3/issues/#lock-an-issue -func (s *IssuesService) Lock(ctx context.Context, owner string, repo string, number int) (*Response, error) { +func (s *IssuesService) Lock(ctx context.Context, owner string, repo string, number int, opt *LockIssueOptions) (*Response, error) { u := fmt.Sprintf("repos/%v/%v/issues/%d/lock", owner, repo, number) - req, err := s.client.NewRequest("PUT", u, nil) + req, err := s.client.NewRequest("PUT", u, opt) if err != nil { return nil, err } + if opt != nil { + req.Header.Set("Accept", mediaTypeLockReasonPreview) + } + return s.client.Do(ctx, req, nil) } diff --git a/vendor/github.com/google/go-github/github/issues_events.go b/vendor/github.com/google/go-github/github/issues_events.go index 55e6d431..f71e4636 100644 --- a/vendor/github.com/google/go-github/github/issues_events.go +++ b/vendor/github.com/google/go-github/github/issues_events.go @@ -34,9 +34,13 @@ type IssueEvent struct { // The Actor committed to master a commit mentioning the issue in its commit message. // CommitID holds the SHA1 of the commit. // - // reopened, locked, unlocked + // reopened, unlocked // The Actor did that to the issue. // + // locked + // The Actor locked the issue. + // LockReason holds the reason of locking the issue (if provided while locking). + // // renamed // The Actor changed the issue title from Rename.From to Rename.To. // @@ -64,12 +68,13 @@ type IssueEvent struct { Issue *Issue `json:"issue,omitempty"` // Only present on certain events; see above. - Assignee *User `json:"assignee,omitempty"` - Assigner *User `json:"assigner,omitempty"` - CommitID *string `json:"commit_id,omitempty"` - Milestone *Milestone `json:"milestone,omitempty"` - Label *Label `json:"label,omitempty"` - Rename *Rename `json:"rename,omitempty"` + Assignee *User `json:"assignee,omitempty"` + Assigner *User `json:"assigner,omitempty"` + CommitID *string `json:"commit_id,omitempty"` + Milestone *Milestone `json:"milestone,omitempty"` + Label *Label `json:"label,omitempty"` + Rename *Rename `json:"rename,omitempty"` + LockReason *string `json:"lock_reason,omitempty"` } // ListIssueEvents lists events for the specified issue. @@ -87,6 +92,8 @@ func (s *IssuesService) ListIssueEvents(ctx context.Context, owner, repo string, return nil, nil, err } + req.Header.Set("Accept", mediaTypeLockReasonPreview) + var events []*IssueEvent resp, err := s.client.Do(ctx, req, &events) if err != nil { diff --git a/vendor/github.com/google/go-github/github/issues_labels.go b/vendor/github.com/google/go-github/github/issues_labels.go index 4328997b..adcbe068 100644 --- a/vendor/github.com/google/go-github/github/issues_labels.go +++ b/vendor/github.com/google/go-github/github/issues_labels.go @@ -8,7 +8,6 @@ package github import ( "context" "fmt" - "strings" ) // Label represents a GitHub label on an Issue @@ -42,8 +41,7 @@ func (s *IssuesService) ListLabels(ctx context.Context, owner string, repo strin } // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) var labels []*Label resp, err := s.client.Do(ctx, req, &labels) @@ -65,8 +63,7 @@ func (s *IssuesService) GetLabel(ctx context.Context, owner string, repo string, } // TODO: remove custom Accept header when this API fully launches. 
- acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) label := new(Label) resp, err := s.client.Do(ctx, req, label) @@ -88,8 +85,7 @@ func (s *IssuesService) CreateLabel(ctx context.Context, owner string, repo stri } // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) l := new(Label) resp, err := s.client.Do(ctx, req, l) @@ -111,8 +107,7 @@ func (s *IssuesService) EditLabel(ctx context.Context, owner string, repo string } // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) l := new(Label) resp, err := s.client.Do(ctx, req, l) @@ -151,8 +146,7 @@ func (s *IssuesService) ListLabelsByIssue(ctx context.Context, owner string, rep } // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) var labels []*Label resp, err := s.client.Do(ctx, req, &labels) @@ -174,8 +168,7 @@ func (s *IssuesService) AddLabelsToIssue(ctx context.Context, owner string, repo } // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) var l []*Label resp, err := s.client.Do(ctx, req, &l) @@ -213,8 +206,7 @@ func (s *IssuesService) ReplaceLabelsForIssue(ctx context.Context, owner string, } // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) var l []*Label resp, err := s.client.Do(ctx, req, &l) @@ -257,8 +249,7 @@ func (s *IssuesService) ListLabelsForMilestone(ctx context.Context, owner string } // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) var labels []*Label resp, err := s.client.Do(ctx, req, &labels) diff --git a/vendor/github.com/google/go-github/github/issues_milestones.go b/vendor/github.com/google/go-github/github/issues_milestones.go index 6af1cc03..ffe9aae1 100644 --- a/vendor/github.com/google/go-github/github/issues_milestones.go +++ b/vendor/github.com/google/go-github/github/issues_milestones.go @@ -68,9 +68,6 @@ func (s *IssuesService) ListMilestones(ctx context.Context, owner string, repo s return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - var milestones []*Milestone resp, err := s.client.Do(ctx, req, &milestones) if err != nil { @@ -90,9 +87,6 @@ func (s *IssuesService) GetMilestone(ctx context.Context, owner string, repo str return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - milestone := new(Milestone) resp, err := s.client.Do(ctx, req, milestone) if err != nil { @@ -112,9 +106,6 @@ func (s *IssuesService) CreateMilestone(ctx context.Context, owner string, repo return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - m := new(Milestone) resp, err := s.client.Do(ctx, req, m) if err != nil { @@ -134,9 +125,6 @@ func (s *IssuesService) EditMilestone(ctx context.Context, owner string, repo st return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - m := new(Milestone) resp, err := s.client.Do(ctx, req, m) if err != nil { diff --git a/vendor/github.com/google/go-github/github/orgs.go b/vendor/github.com/google/go-github/github/orgs.go index 78320535..044dff57 100644 --- a/vendor/github.com/google/go-github/github/orgs.go +++ b/vendor/github.com/google/go-github/github/orgs.go @@ -21,6 +21,7 @@ type OrganizationsService service type Organization struct { Login *string `json:"login,omitempty"` ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` AvatarURL *string `json:"avatar_url,omitempty"` HTMLURL *string `json:"html_url,omitempty"` Name *string `json:"name,omitempty"` @@ -43,7 +44,6 @@ type Organization struct { BillingEmail *string `json:"billing_email,omitempty"` Type *string `json:"type,omitempty"` Plan *Plan `json:"plan,omitempty"` - NodeID *string `json:"node_id,omitempty"` // API URLs URL *string `json:"url,omitempty"` @@ -101,9 +101,6 @@ func (s *OrganizationsService) ListAll(ctx context.Context, opt *OrganizationsLi return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - orgs := []*Organization{} resp, err := s.client.Do(ctx, req, &orgs) if err != nil { @@ -133,9 +130,6 @@ func (s *OrganizationsService) List(ctx context.Context, user string, opt *ListO return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - var orgs []*Organization resp, err := s.client.Do(ctx, req, &orgs) if err != nil { @@ -155,9 +149,6 @@ func (s *OrganizationsService) Get(ctx context.Context, org string) (*Organizati return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - organization := new(Organization) resp, err := s.client.Do(ctx, req, organization) if err != nil { @@ -177,9 +168,6 @@ func (s *OrganizationsService) GetByID(ctx context.Context, id int64) (*Organiza return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - organization := new(Organization) resp, err := s.client.Do(ctx, req, organization) if err != nil { @@ -199,9 +187,6 @@ func (s *OrganizationsService) Edit(ctx context.Context, name string, org *Organ return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - o := new(Organization) resp, err := s.client.Do(ctx, req, o) if err != nil { diff --git a/vendor/github.com/google/go-github/github/orgs_members.go b/vendor/github.com/google/go-github/github/orgs_members.go index 98e138e7..d1843599 100644 --- a/vendor/github.com/google/go-github/github/orgs_members.go +++ b/vendor/github.com/google/go-github/github/orgs_members.go @@ -59,7 +59,7 @@ type ListMembersOptions struct { // Possible values are: // all - all members of the organization, regardless of role // admin - organization owners - // member - non-organization members + // member - non-owner organization members // // Default is "all". Role string `url:"role,omitempty"` diff --git a/vendor/github.com/google/go-github/github/orgs_teams.go b/vendor/github.com/google/go-github/github/orgs_teams.go deleted file mode 100644 index b3cc9f07..00000000 --- a/vendor/github.com/google/go-github/github/orgs_teams.go +++ /dev/null @@ -1,514 +0,0 @@ -// Copyright 2013 The go-github AUTHORS. All rights reserved. -// -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package github - -import ( - "context" - "fmt" - "strings" - "time" -) - -// Team represents a team within a GitHub organization. Teams are used to -// manage access to an organization's repositories. -type Team struct { - ID *int64 `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Description *string `json:"description,omitempty"` - URL *string `json:"url,omitempty"` - Slug *string `json:"slug,omitempty"` - - // Permission specifies the default permission for repositories owned by the team. - Permission *string `json:"permission,omitempty"` - - // Privacy identifies the level of privacy this team should have. - // Possible values are: - // secret - only visible to organization owners and members of this team - // closed - visible to all members of this organization - // Default is "secret". - Privacy *string `json:"privacy,omitempty"` - - MembersCount *int `json:"members_count,omitempty"` - ReposCount *int `json:"repos_count,omitempty"` - Organization *Organization `json:"organization,omitempty"` - MembersURL *string `json:"members_url,omitempty"` - RepositoriesURL *string `json:"repositories_url,omitempty"` - Parent *Team `json:"parent,omitempty"` - - // LDAPDN is only available in GitHub Enterprise and when the team - // membership is synchronized with LDAP. - LDAPDN *string `json:"ldap_dn,omitempty"` -} - -func (t Team) String() string { - return Stringify(t) -} - -// Invitation represents a team member's invitation status. -type Invitation struct { - ID *int64 `json:"id,omitempty"` - Login *string `json:"login,omitempty"` - Email *string `json:"email,omitempty"` - // Role can be one of the values - 'direct_member', 'admin', 'billing_manager', 'hiring_manager', or 'reinstate'. 
- Role *string `json:"role,omitempty"` - CreatedAt *time.Time `json:"created_at,omitempty"` - Inviter *User `json:"inviter,omitempty"` - TeamCount *int `json:"team_count,omitempty"` - InvitationTeamURL *string `json:"invitation_team_url,omitempty"` -} - -func (i Invitation) String() string { - return Stringify(i) -} - -// ListTeams lists all of the teams for an organization. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#list-teams -func (s *OrganizationsService) ListTeams(ctx context.Context, org string, opt *ListOptions) ([]*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams", org) - u, err := addOptions(u, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeNestedTeamsPreview) - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// GetTeam fetches a team by ID. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#get-team -func (s *OrganizationsService) GetTeam(ctx context.Context, team int64) (*Team, *Response, error) { - u := fmt.Sprintf("teams/%v", team) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeNestedTeamsPreview) - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// NewTeam represents a team to be created or modified. -type NewTeam struct { - Name string `json:"name"` // Name of the team. (Required.) - Description *string `json:"description,omitempty"` - Maintainers []string `json:"maintainers,omitempty"` - RepoNames []string `json:"repo_names,omitempty"` - ParentTeamID *int64 `json:"parent_team_id,omitempty"` - - // Deprecated: Permission is deprecated when creating or editing a team in an org - // using the new GitHub permission model. It no longer identifies the - // permission a team has on its repos, but only specifies the default - // permission a repo is initially added with. Avoid confusion by - // specifying a permission value when calling AddTeamRepo. - Permission *string `json:"permission,omitempty"` - - // Privacy identifies the level of privacy this team should have. - // Possible values are: - // secret - only visible to organization owners and members of this team - // closed - visible to all members of this organization - // Default is "secret". - Privacy *string `json:"privacy,omitempty"` - - // LDAPDN may be used in GitHub Enterprise when the team membership - // is synchronized with LDAP. - LDAPDN *string `json:"ldap_dn,omitempty"` -} - -func (s NewTeam) String() string { - return Stringify(s) -} - -// CreateTeam creates a new team within an organization. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#create-team -func (s *OrganizationsService) CreateTeam(ctx context.Context, org string, team *NewTeam) (*Team, *Response, error) { - u := fmt.Sprintf("orgs/%v/teams", org) - req, err := s.client.NewRequest("POST", u, team) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeNestedTeamsPreview) - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// EditTeam edits a team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#edit-team -func (s *OrganizationsService) EditTeam(ctx context.Context, id int64, team *NewTeam) (*Team, *Response, error) { - u := fmt.Sprintf("teams/%v", id) - req, err := s.client.NewRequest("PATCH", u, team) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeNestedTeamsPreview) - - t := new(Team) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// DeleteTeam deletes a team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#delete-team -func (s *OrganizationsService) DeleteTeam(ctx context.Context, team int64) (*Response, error) { - u := fmt.Sprintf("teams/%v", team) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - req.Header.Set("Accept", mediaTypeNestedTeamsPreview) - - return s.client.Do(ctx, req, nil) -} - -// OrganizationListTeamMembersOptions specifies the optional parameters to the -// OrganizationsService.ListTeamMembers method. -type OrganizationListTeamMembersOptions struct { - // Role filters members returned by their role in the team. Possible - // values are "all", "member", "maintainer". Default is "all". - Role string `url:"role,omitempty"` - - ListOptions -} - -// ListChildTeams lists child teams for a team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#list-child-teams -func (s *OrganizationsService) ListChildTeams(ctx context.Context, teamID int64, opt *ListOptions) ([]*Team, *Response, error) { - u := fmt.Sprintf("teams/%v/teams", teamID) - u, err := addOptions(u, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeNestedTeamsPreview) - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// ListTeamMembers lists all of the users who are members of the specified -// team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#list-team-members -func (s *OrganizationsService) ListTeamMembers(ctx context.Context, team int64, opt *OrganizationListTeamMembersOptions) ([]*User, *Response, error) { - u := fmt.Sprintf("teams/%v/members", team) - u, err := addOptions(u, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeNestedTeamsPreview) - - var members []*User - resp, err := s.client.Do(ctx, req, &members) - if err != nil { - return nil, resp, err - } - - return members, resp, nil -} - -// IsTeamMember checks if a user is a member of the specified team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#get-team-member -// -// Deprecated: This API has been marked as deprecated in the Github API docs, -// OrganizationsService.GetTeamMembership method should be used instead. 
-func (s *OrganizationsService) IsTeamMember(ctx context.Context, team int64, user string) (bool, *Response, error) { - u := fmt.Sprintf("teams/%v/members/%v", team, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return false, nil, err - } - - resp, err := s.client.Do(ctx, req, nil) - member, err := parseBoolResponse(err) - return member, resp, err -} - -// ListTeamRepos lists the repositories that the specified team has access to. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#list-team-repos -func (s *OrganizationsService) ListTeamRepos(ctx context.Context, team int64, opt *ListOptions) ([]*Repository, *Response, error) { - u := fmt.Sprintf("teams/%v/repos", team) - u, err := addOptions(u, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when topics API fully launches. - headers := []string{mediaTypeTopicsPreview, mediaTypeNestedTeamsPreview} - req.Header.Set("Accept", strings.Join(headers, ", ")) - - var repos []*Repository - resp, err := s.client.Do(ctx, req, &repos) - if err != nil { - return nil, resp, err - } - - return repos, resp, nil -} - -// IsTeamRepo checks if a team manages the specified repository. If the -// repository is managed by team, a Repository is returned which includes the -// permissions team has for that repo. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#check-if-a-team-manages-a-repository -func (s *OrganizationsService) IsTeamRepo(ctx context.Context, team int64, owner string, repo string) (*Repository, *Response, error) { - u := fmt.Sprintf("teams/%v/repos/%v/%v", team, owner, repo) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - headers := []string{mediaTypeOrgPermissionRepo, mediaTypeNestedTeamsPreview} - req.Header.Set("Accept", strings.Join(headers, ", ")) - - repository := new(Repository) - resp, err := s.client.Do(ctx, req, repository) - if err != nil { - return nil, resp, err - } - - return repository, resp, nil -} - -// OrganizationAddTeamRepoOptions specifies the optional parameters to the -// OrganizationsService.AddTeamRepo method. -type OrganizationAddTeamRepoOptions struct { - // Permission specifies the permission to grant the team on this repository. - // Possible values are: - // pull - team members can pull, but not push to or administer this repository - // push - team members can pull and push, but not administer this repository - // admin - team members can pull, push and administer this repository - // - // If not specified, the team's permission attribute will be used. - Permission string `json:"permission,omitempty"` -} - -// AddTeamRepo adds a repository to be managed by the specified team. The -// specified repository must be owned by the organization to which the team -// belongs, or a direct fork of a repository owned by the organization. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#add-team-repo -func (s *OrganizationsService) AddTeamRepo(ctx context.Context, team int64, owner string, repo string, opt *OrganizationAddTeamRepoOptions) (*Response, error) { - u := fmt.Sprintf("teams/%v/repos/%v/%v", team, owner, repo) - req, err := s.client.NewRequest("PUT", u, opt) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// RemoveTeamRepo removes a repository from being managed by the specified -// team. 
Note that this does not delete the repository, it just removes it -// from the team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#remove-team-repo -func (s *OrganizationsService) RemoveTeamRepo(ctx context.Context, team int64, owner string, repo string) (*Response, error) { - u := fmt.Sprintf("teams/%v/repos/%v/%v", team, owner, repo) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListUserTeams lists a user's teams -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#list-user-teams -func (s *OrganizationsService) ListUserTeams(ctx context.Context, opt *ListOptions) ([]*Team, *Response, error) { - u := "user/teams" - u, err := addOptions(u, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeNestedTeamsPreview) - - var teams []*Team - resp, err := s.client.Do(ctx, req, &teams) - if err != nil { - return nil, resp, err - } - - return teams, resp, nil -} - -// GetTeamMembership returns the membership status for a user in a team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#get-team-membership -func (s *OrganizationsService) GetTeamMembership(ctx context.Context, team int64, user string) (*Membership, *Response, error) { - u := fmt.Sprintf("teams/%v/memberships/%v", team, user) - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - req.Header.Set("Accept", mediaTypeNestedTeamsPreview) - - t := new(Membership) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// OrganizationAddTeamMembershipOptions does stuff specifies the optional -// parameters to the OrganizationsService.AddTeamMembership method. -type OrganizationAddTeamMembershipOptions struct { - // Role specifies the role the user should have in the team. Possible - // values are: - // member - a normal member of the team - // maintainer - a team maintainer. Able to add/remove other team - // members, promote other team members to team - // maintainer, and edit the team’s name and description - // - // Default value is "member". - Role string `json:"role,omitempty"` -} - -// AddTeamMembership adds or invites a user to a team. -// -// In order to add a membership between a user and a team, the authenticated -// user must have 'admin' permissions to the team or be an owner of the -// organization that the team is associated with. -// -// If the user is already a part of the team's organization (meaning they're on -// at least one other team in the organization), this endpoint will add the -// user to the team. -// -// If the user is completely unaffiliated with the team's organization (meaning -// they're on none of the organization's teams), this endpoint will send an -// invitation to the user via email. This newly-created membership will be in -// the "pending" state until the user accepts the invitation, at which point -// the membership will transition to the "active" state and the user will be -// added as a member of the team. 
-// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#add-team-membership -func (s *OrganizationsService) AddTeamMembership(ctx context.Context, team int64, user string, opt *OrganizationAddTeamMembershipOptions) (*Membership, *Response, error) { - u := fmt.Sprintf("teams/%v/memberships/%v", team, user) - req, err := s.client.NewRequest("PUT", u, opt) - if err != nil { - return nil, nil, err - } - - t := new(Membership) - resp, err := s.client.Do(ctx, req, t) - if err != nil { - return nil, resp, err - } - - return t, resp, nil -} - -// RemoveTeamMembership removes a user from a team. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#remove-team-membership -func (s *OrganizationsService) RemoveTeamMembership(ctx context.Context, team int64, user string) (*Response, error) { - u := fmt.Sprintf("teams/%v/memberships/%v", team, user) - req, err := s.client.NewRequest("DELETE", u, nil) - if err != nil { - return nil, err - } - - return s.client.Do(ctx, req, nil) -} - -// ListPendingTeamInvitations get pending invitaion list in team. -// Warning: The API may change without advance notice during the preview period. -// Preview features are not supported for production use. -// -// GitHub API docs: https://developer.github.com/v3/orgs/teams/#list-pending-team-invitations -func (s *OrganizationsService) ListPendingTeamInvitations(ctx context.Context, team int64, opt *ListOptions) ([]*Invitation, *Response, error) { - u := fmt.Sprintf("teams/%v/invitations", team) - u, err := addOptions(u, opt) - if err != nil { - return nil, nil, err - } - - req, err := s.client.NewRequest("GET", u, nil) - if err != nil { - return nil, nil, err - } - - var pendingInvitations []*Invitation - resp, err := s.client.Do(ctx, req, &pendingInvitations) - if err != nil { - return nil, resp, err - } - - return pendingInvitations, resp, nil -} diff --git a/vendor/github.com/google/go-github/github/projects.go b/vendor/github.com/google/go-github/github/projects.go index 4df8a49e..76ef1e0a 100644 --- a/vendor/github.com/google/go-github/github/projects.go +++ b/vendor/github.com/google/go-github/github/projects.go @@ -8,7 +8,6 @@ package github import ( "context" "fmt" - "strings" ) // ProjectsService provides access to the projects functions in the @@ -19,15 +18,18 @@ type ProjectsService service // Project represents a GitHub Project. type Project struct { - ID *int64 `json:"id,omitempty"` - URL *string `json:"url,omitempty"` - OwnerURL *string `json:"owner_url,omitempty"` - Name *string `json:"name,omitempty"` - Body *string `json:"body,omitempty"` - Number *int `json:"number,omitempty"` - CreatedAt *Timestamp `json:"created_at,omitempty"` - UpdatedAt *Timestamp `json:"updated_at,omitempty"` - NodeID *string `json:"node_id,omitempty"` + ID *int64 `json:"id,omitempty"` + URL *string `json:"url,omitempty"` + HTMLURL *string `json:"html_url,omitempty"` + ColumnsURL *string `json:"columns_url,omitempty"` + OwnerURL *string `json:"owner_url,omitempty"` + Name *string `json:"name,omitempty"` + Body *string `json:"body,omitempty"` + Number *int `json:"number,omitempty"` + State *string `json:"state,omitempty"` + CreatedAt *Timestamp `json:"created_at,omitempty"` + UpdatedAt *Timestamp `json:"updated_at,omitempty"` + NodeID *string `json:"node_id,omitempty"` // The User object that generated the project. 
Creator *User `json:"creator,omitempty"` @@ -48,8 +50,7 @@ func (s *ProjectsService) GetProject(ctx context.Context, id int64) (*Project, * } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeProjectsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeProjectsPreview) project := &Project{} resp, err := s.client.Do(ctx, req, project) @@ -65,15 +66,24 @@ func (s *ProjectsService) GetProject(ctx context.Context, id int64) (*Project, * // ProjectsService.UpdateProject methods. type ProjectOptions struct { // The name of the project. (Required for creation; optional for update.) - Name string `json:"name,omitempty"` + Name *string `json:"name,omitempty"` // The body of the project. (Optional.) - Body string `json:"body,omitempty"` + Body *string `json:"body,omitempty"` // The following field(s) are only applicable for update. // They should be left with zero values for creation. // State of the project. Either "open" or "closed". (Optional.) - State string `json:"state,omitempty"` + State *string `json:"state,omitempty"` + // The permission level that all members of the project's organization + // will have on this project. + // Setting the organization permission is only available + // for organization projects. (Optional.) + OrganizationPermission *string `json:"organization_permission,omitempty"` + // Sets visibility of the project within the organization. + // Setting visibility is only available + // for organization projects.(Optional.) + Public *bool `json:"public,omitempty"` } // UpdateProject updates a repository project. @@ -87,8 +97,7 @@ func (s *ProjectsService) UpdateProject(ctx context.Context, id int64, opt *Proj } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeProjectsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeProjectsPreview) project := &Project{} resp, err := s.client.Do(ctx, req, project) @@ -121,7 +130,9 @@ func (s *ProjectsService) DeleteProject(ctx context.Context, id int64) (*Respons type ProjectColumn struct { ID *int64 `json:"id,omitempty"` Name *string `json:"name,omitempty"` + URL *string `json:"url,omitempty"` ProjectURL *string `json:"project_url,omitempty"` + CardsURL *string `json:"cards_url,omitempty"` CreatedAt *Timestamp `json:"created_at,omitempty"` UpdatedAt *Timestamp `json:"updated_at,omitempty"` NodeID *string `json:"node_id,omitempty"` @@ -143,8 +154,7 @@ func (s *ProjectsService) ListProjectColumns(ctx context.Context, projectID int6 } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeProjectsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeProjectsPreview) columns := []*ProjectColumn{} resp, err := s.client.Do(ctx, req, &columns) @@ -166,8 +176,7 @@ func (s *ProjectsService) GetProjectColumn(ctx context.Context, id int64) (*Proj } // TODO: remove custom Accept headers when APIs fully launch. 
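// A minimal usage sketch of the ProjectOptions change above: Name, Body, and State
// are now pointer fields, so unset values can be distinguished from zero values.
// Assumes "context", "github.com/google/go-github/github", an authenticated
// *github.Client, and the package's String/Bool pointer helpers.
func closeProject(ctx context.Context, client *github.Client, projectID int64) (*github.Project, error) {
	opt := &github.ProjectOptions{
		State: github.String("closed"),
		// OrganizationPermission and Public apply only to organization projects.
		Public: github.Bool(false),
	}
	project, _, err := client.Projects.UpdateProject(ctx, projectID, opt)
	return project, err
}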
- acceptHeaders := []string{mediaTypeProjectsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeProjectsPreview) column := &ProjectColumn{} resp, err := s.client.Do(ctx, req, column) @@ -197,8 +206,7 @@ func (s *ProjectsService) CreateProjectColumn(ctx context.Context, projectID int } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeProjectsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeProjectsPreview) column := &ProjectColumn{} resp, err := s.client.Do(ctx, req, column) @@ -220,8 +228,7 @@ func (s *ProjectsService) UpdateProjectColumn(ctx context.Context, columnID int6 } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeProjectsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeProjectsPreview) column := &ProjectColumn{} resp, err := s.client.Do(ctx, req, column) @@ -317,8 +324,7 @@ func (s *ProjectsService) ListProjectCards(ctx context.Context, columnID int64, } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeProjectsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeProjectsPreview) cards := []*ProjectCard{} resp, err := s.client.Do(ctx, req, &cards) @@ -340,8 +346,7 @@ func (s *ProjectsService) GetProjectCard(ctx context.Context, columnID int64) (* } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeProjectsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeProjectsPreview) card := &ProjectCard{} resp, err := s.client.Do(ctx, req, card) @@ -379,8 +384,7 @@ func (s *ProjectsService) CreateProjectCard(ctx context.Context, columnID int64, } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeProjectsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeProjectsPreview) card := &ProjectCard{} resp, err := s.client.Do(ctx, req, card) @@ -402,8 +406,7 @@ func (s *ProjectsService) UpdateProjectCard(ctx context.Context, cardID int64, o } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeProjectsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeProjectsPreview) card := &ProjectCard{} resp, err := s.client.Do(ctx, req, card) diff --git a/vendor/github.com/google/go-github/github/pulls.go b/vendor/github.com/google/go-github/github/pulls.go index 1f344690..a123ec56 100644 --- a/vendor/github.com/google/go-github/github/pulls.go +++ b/vendor/github.com/google/go-github/github/pulls.go @@ -62,6 +62,10 @@ type PullRequest struct { Head *PullRequestBranch `json:"head,omitempty"` Base *PullRequestBranch `json:"base,omitempty"` + + // ActiveLockReason is populated only when LockReason is provided while locking the pull request. + // Possible values are: "off-topic", "too heated", "resolved", and "spam". 
+ ActiveLockReason *string `json:"active_lock_reason,omitempty"` } func (p PullRequest) String() string { @@ -119,7 +123,7 @@ func (s *PullRequestsService) List(ctx context.Context, owner string, repo strin } // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} + acceptHeaders := []string{mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) var pulls []*PullRequest @@ -142,7 +146,7 @@ func (s *PullRequestsService) Get(ctx context.Context, owner string, repo string } // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} + acceptHeaders := []string{mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) pull := new(PullRequest) @@ -201,8 +205,7 @@ func (s *PullRequestsService) Create(ctx context.Context, owner string, repo str } // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeLabelDescriptionSearchPreview) p := new(PullRequest) resp, err := s.client.Do(ctx, req, p) @@ -251,7 +254,7 @@ func (s *PullRequestsService) Edit(ctx context.Context, owner string, repo strin } // TODO: remove custom Accept header when this API fully launches. - acceptHeaders := []string{mediaTypeGraphQLNodeIDPreview, mediaTypeLabelDescriptionSearchPreview} + acceptHeaders := []string{mediaTypeLabelDescriptionSearchPreview, mediaTypeLockReasonPreview} req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) p := new(PullRequest) diff --git a/vendor/github.com/google/go-github/github/reactions.go b/vendor/github.com/google/go-github/github/reactions.go index 19b533f3..97b2818c 100644 --- a/vendor/github.com/google/go-github/github/reactions.go +++ b/vendor/github.com/google/go-github/github/reactions.go @@ -8,7 +8,6 @@ package github import ( "context" "fmt" - "strings" ) // ReactionsService provides access to the reactions-related functions in the @@ -61,8 +60,7 @@ func (s *ReactionsService) ListCommentReactions(ctx context.Context, owner, repo } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeReactionsPreview) var m []*Reaction resp, err := s.client.Do(ctx, req, &m) @@ -88,8 +86,7 @@ func (s ReactionsService) CreateCommentReaction(ctx context.Context, owner, repo } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeReactionsPreview) m := &Reaction{} resp, err := s.client.Do(ctx, req, m) @@ -116,8 +113,7 @@ func (s *ReactionsService) ListIssueReactions(ctx context.Context, owner, repo s } // TODO: remove custom Accept headers when APIs fully launch. 
- acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeReactionsPreview) var m []*Reaction resp, err := s.client.Do(ctx, req, &m) @@ -143,8 +139,7 @@ func (s ReactionsService) CreateIssueReaction(ctx context.Context, owner, repo s } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeReactionsPreview) m := &Reaction{} resp, err := s.client.Do(ctx, req, m) @@ -171,8 +166,7 @@ func (s *ReactionsService) ListIssueCommentReactions(ctx context.Context, owner, } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeReactionsPreview) var m []*Reaction resp, err := s.client.Do(ctx, req, &m) @@ -198,8 +192,7 @@ func (s ReactionsService) CreateIssueCommentReaction(ctx context.Context, owner, } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeReactionsPreview) m := &Reaction{} resp, err := s.client.Do(ctx, req, m) @@ -226,8 +219,7 @@ func (s *ReactionsService) ListPullRequestCommentReactions(ctx context.Context, } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeReactionsPreview) var m []*Reaction resp, err := s.client.Do(ctx, req, &m) @@ -253,8 +245,7 @@ func (s ReactionsService) CreatePullRequestCommentReaction(ctx context.Context, } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeReactionsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeReactionsPreview) m := &Reaction{} resp, err := s.client.Do(ctx, req, m) diff --git a/vendor/github.com/google/go-github/github/repos.go b/vendor/github.com/google/go-github/github/repos.go index fce4ad54..fe05272a 100644 --- a/vendor/github.com/google/go-github/github/repos.go +++ b/vendor/github.com/google/go-github/github/repos.go @@ -20,6 +20,7 @@ type RepositoriesService service // Repository represents a GitHub repository. type Repository struct { ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` Owner *User `json:"owner,omitempty"` Name *string `json:"name,omitempty"` FullName *string `json:"full_name,omitempty"` @@ -553,6 +554,12 @@ type RequiredStatusChecks struct { Contexts []string `json:"contexts"` } +// RequiredStatusChecksRequest represents a request to edit a protected branch's status checks. +type RequiredStatusChecksRequest struct { + Strict *bool `json:"strict,omitempty"` + Contexts []string `json:"contexts,omitempty"` +} + // PullRequestReviewsEnforcement represents the pull request reviews enforcement of a protected branch. type PullRequestReviewsEnforcement struct { // Specifies which users and teams can dismiss pull request reviews. 
@@ -561,6 +568,9 @@ type PullRequestReviewsEnforcement struct { DismissStaleReviews bool `json:"dismiss_stale_reviews"` // RequireCodeOwnerReviews specifies if an approved review is required in pull requests including files with a designated code owner. RequireCodeOwnerReviews bool `json:"require_code_owner_reviews"` + // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. + // Valid values are 1-6. + RequiredApprovingReviewCount int `json:"required_approving_review_count"` } // PullRequestReviewsEnforcementRequest represents request to set the pull request review @@ -575,6 +585,9 @@ type PullRequestReviewsEnforcementRequest struct { DismissStaleReviews bool `json:"dismiss_stale_reviews"` // RequireCodeOwnerReviews specifies if an approved review is required in pull requests including files with a designated code owner. RequireCodeOwnerReviews bool `json:"require_code_owner_reviews"` + // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. + // Valid values are 1-6. + RequiredApprovingReviewCount int `json:"required_approving_review_count"` } // PullRequestReviewsEnforcementUpdate represents request to patch the pull request review @@ -587,6 +600,9 @@ type PullRequestReviewsEnforcementUpdate struct { DismissStaleReviews *bool `json:"dismiss_stale_reviews,omitempty"` // RequireCodeOwnerReviews specifies if an approved review is required in pull requests including files with a designated code owner. RequireCodeOwnerReviews bool `json:"require_code_owner_reviews,omitempty"` + // RequiredApprovingReviewCount specifies the number of approvals required before the pull request can be merged. + // Valid values are 1 - 6. + RequiredApprovingReviewCount int `json:"required_approving_review_count"` } // AdminEnforcement represents the configuration to enforce required status checks for repository administrators. 
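// A minimal usage sketch of the new RequiredStatusChecksRequest type and the
// UpdateRequiredStatusChecks method added in the hunks below. Assumes "context",
// "github.com/google/go-github/github", an authenticated *github.Client, and a
// protected branch named "master"; the status-check context string is illustrative.
func tightenStatusChecks(ctx context.Context, client *github.Client, owner, repo string) (*github.RequiredStatusChecks, error) {
	sreq := &github.RequiredStatusChecksRequest{
		Strict:   github.Bool(true),
		Contexts: []string{"continuous-integration/travis-ci"},
	}
	checks, _, err := client.Repositories.UpdateRequiredStatusChecks(ctx, owner, repo, "master", sreq)
	return checks, err
}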
@@ -651,7 +667,7 @@ func (s *RepositoriesService) ListBranches(ctx context.Context, owner string, re } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) var branches []*Branch resp, err := s.client.Do(ctx, req, &branches) @@ -673,7 +689,7 @@ func (s *RepositoriesService) GetBranch(ctx context.Context, owner, repo, branch } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) b := new(Branch) resp, err := s.client.Do(ctx, req, b) @@ -695,7 +711,7 @@ func (s *RepositoriesService) GetBranchProtection(ctx context.Context, owner, re } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) p := new(Protection) resp, err := s.client.Do(ctx, req, p) @@ -717,7 +733,7 @@ func (s *RepositoriesService) GetRequiredStatusChecks(ctx context.Context, owner } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) p := new(RequiredStatusChecks) resp, err := s.client.Do(ctx, req, p) @@ -739,7 +755,7 @@ func (s *RepositoriesService) ListRequiredStatusChecksContexts(ctx context.Conte } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) resp, err = s.client.Do(ctx, req, &contexts) if err != nil { @@ -760,7 +776,7 @@ func (s *RepositoriesService) UpdateBranchProtection(ctx context.Context, owner, } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) p := new(Protection) resp, err := s.client.Do(ctx, req, p) @@ -782,11 +798,30 @@ func (s *RepositoriesService) RemoveBranchProtection(ctx context.Context, owner, } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) return s.client.Do(ctx, req, nil) } +// UpdateRequiredStatusChecks updates the required status checks for a given protected branch. +// +// GitHub API docs: https://developer.github.com/v3/repos/branches/#update-required-status-checks-of-protected-branch +func (s *RepositoriesService) UpdateRequiredStatusChecks(ctx context.Context, owner, repo, branch string, sreq *RequiredStatusChecksRequest) (*RequiredStatusChecks, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/branches/%v/protection/required_status_checks", owner, repo, branch) + req, err := s.client.NewRequest("PATCH", u, sreq) + if err != nil { + return nil, nil, err + } + + sc := new(RequiredStatusChecks) + resp, err := s.client.Do(ctx, req, sc) + if err != nil { + return nil, resp, err + } + + return sc, resp, nil +} + // License gets the contents of a repository's license if one is detected. 
// // GitHub API docs: https://developer.github.com/v3/licenses/#get-the-contents-of-a-repositorys-license @@ -817,7 +852,7 @@ func (s *RepositoriesService) GetPullRequestReviewEnforcement(ctx context.Contex } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) r := new(PullRequestReviewsEnforcement) resp, err := s.client.Do(ctx, req, r) @@ -840,7 +875,7 @@ func (s *RepositoriesService) UpdatePullRequestReviewEnforcement(ctx context.Con } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) r := new(PullRequestReviewsEnforcement) resp, err := s.client.Do(ctx, req, r) @@ -868,7 +903,7 @@ func (s *RepositoriesService) DisableDismissalRestrictions(ctx context.Context, } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) r := new(PullRequestReviewsEnforcement) resp, err := s.client.Do(ctx, req, r) @@ -890,7 +925,7 @@ func (s *RepositoriesService) RemovePullRequestReviewEnforcement(ctx context.Con } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) return s.client.Do(ctx, req, nil) } @@ -906,7 +941,7 @@ func (s *RepositoriesService) GetAdminEnforcement(ctx context.Context, owner, re } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) r := new(AdminEnforcement) resp, err := s.client.Do(ctx, req, r) @@ -929,7 +964,7 @@ func (s *RepositoriesService) AddAdminEnforcement(ctx context.Context, owner, re } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) r := new(AdminEnforcement) resp, err := s.client.Do(ctx, req, r) @@ -951,7 +986,7 @@ func (s *RepositoriesService) RemoveAdminEnforcement(ctx context.Context, owner, } // TODO: remove custom Accept header when this API fully launches - req.Header.Set("Accept", mediaTypeProtectedBranchesPreview) + req.Header.Set("Accept", mediaTypeRequiredApprovingReviewsPreview) return s.client.Do(ctx, req, nil) } diff --git a/vendor/github.com/google/go-github/github/repos_deployments.go b/vendor/github.com/google/go-github/github/repos_deployments.go index 1300f05e..794c3232 100644 --- a/vendor/github.com/google/go-github/github/repos_deployments.go +++ b/vendor/github.com/google/go-github/github/repos_deployments.go @@ -9,7 +9,6 @@ import ( "context" "encoding/json" "fmt" - "strings" ) // Deployment represents a deployment in a repo @@ -76,9 +75,6 @@ func (s *RepositoriesService) ListDeployments(ctx context.Context, owner, repo s return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - var deployments []*Deployment resp, err := s.client.Do(ctx, req, &deployments) if err != nil { @@ -99,9 +95,6 @@ func (s *RepositoriesService) GetDeployment(ctx context.Context, owner, repo str return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - deployment := new(Deployment) resp, err := s.client.Do(ctx, req, deployment) if err != nil { @@ -123,8 +116,7 @@ func (s *RepositoriesService) CreateDeployment(ctx context.Context, owner, repo } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeDeploymentStatusPreview) d := new(Deployment) resp, err := s.client.Do(ctx, req, d) @@ -176,9 +168,6 @@ func (s *RepositoriesService) ListDeploymentStatuses(ctx context.Context, owner, return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - var statuses []*DeploymentStatus resp, err := s.client.Do(ctx, req, &statuses) if err != nil { @@ -200,8 +189,7 @@ func (s *RepositoriesService) GetDeploymentStatus(ctx context.Context, owner, re } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeDeploymentStatusPreview) d := new(DeploymentStatus) resp, err := s.client.Do(ctx, req, d) @@ -224,8 +212,7 @@ func (s *RepositoriesService) CreateDeploymentStatus(ctx context.Context, owner, } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeDeploymentStatusPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeDeploymentStatusPreview) d := new(DeploymentStatus) resp, err := s.client.Do(ctx, req, d) diff --git a/vendor/github.com/google/go-github/github/repos_prereceive_hooks.go b/vendor/github.com/google/go-github/github/repos_prereceive_hooks.go new file mode 100644 index 00000000..cab09f74 --- /dev/null +++ b/vendor/github.com/google/go-github/github/repos_prereceive_hooks.go @@ -0,0 +1,110 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// PreReceiveHook represents a GitHub pre-receive hook for a repository. +type PreReceiveHook struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Enforcement *string `json:"enforcement,omitempty"` + ConfigURL *string `json:"configuration_url,omitempty"` +} + +func (p PreReceiveHook) String() string { + return Stringify(p) +} + +// ListPreReceiveHooks lists all pre-receive hooks for the specified repository. 
+// +// GitHub API docs: https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/#list-pre-receive-hooks +func (s *RepositoriesService) ListPreReceiveHooks(ctx context.Context, owner, repo string, opt *ListOptions) ([]*PreReceiveHook, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks", owner, repo) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) + + var hooks []*PreReceiveHook + resp, err := s.client.Do(ctx, req, &hooks) + if err != nil { + return nil, resp, err + } + + return hooks, resp, nil +} + +// GetPreReceiveHook returns a single specified pre-receive hook. +// +// GitHub API docs: https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/#get-a-single-pre-receive-hook +func (s *RepositoriesService) GetPreReceiveHook(ctx context.Context, owner, repo string, id int64) (*PreReceiveHook, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) + + h := new(PreReceiveHook) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// UpdatePreReceiveHook updates a specified pre-receive hook. +// +// GitHub API docs: https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/#update-pre-receive-hook-enforcement +func (s *RepositoriesService) UpdatePreReceiveHook(ctx context.Context, owner, repo string, id int64, hook *PreReceiveHook) (*PreReceiveHook, *Response, error) { + u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) + req, err := s.client.NewRequest("PATCH", u, hook) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) + + h := new(PreReceiveHook) + resp, err := s.client.Do(ctx, req, h) + if err != nil { + return nil, resp, err + } + + return h, resp, nil +} + +// DeletePreReceiveHook deletes a specified pre-receive hook. +// +// GitHub API docs: https://developer.github.com/enterprise/2.13/v3/repos/pre_receive_hooks/#remove-enforcement-overrides-for-a-pre-receive-hook +func (s *RepositoriesService) DeletePreReceiveHook(ctx context.Context, owner, repo string, id int64) (*Response, error) { + u := fmt.Sprintf("repos/%v/%v/pre-receive-hooks/%d", owner, repo, id) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
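// A minimal usage sketch of the pre-receive hook endpoints added in this new file.
// They exist only on GitHub Enterprise, so the client is assumed to have been
// created against an Enterprise base URL (for example via github.NewEnterpriseClient);
// "context" and "github.com/google/go-github/github" imports are also assumed, and
// "disabled" is one of the documented enforcement values.
func disablePreReceiveHook(ctx context.Context, client *github.Client, owner, repo string, id int64) error {
	hook := &github.PreReceiveHook{Enforcement: github.String("disabled")}
	_, _, err := client.Repositories.UpdatePreReceiveHook(ctx, owner, repo, id, hook)
	return err
}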
+ req.Header.Set("Accept", mediaTypePreReceiveHooksPreview) + + return s.client.Do(ctx, req, nil) +} diff --git a/vendor/github.com/google/go-github/github/repos_projects.go b/vendor/github.com/google/go-github/github/repos_projects.go index 97a045f6..d6486d29 100644 --- a/vendor/github.com/google/go-github/github/repos_projects.go +++ b/vendor/github.com/google/go-github/github/repos_projects.go @@ -8,7 +8,6 @@ package github import ( "context" "fmt" - "strings" ) // ProjectListOptions specifies the optional parameters to the @@ -36,8 +35,7 @@ func (s *RepositoriesService) ListProjects(ctx context.Context, owner, repo stri } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeProjectsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeProjectsPreview) var projects []*Project resp, err := s.client.Do(ctx, req, &projects) @@ -59,8 +57,7 @@ func (s *RepositoriesService) CreateProject(ctx context.Context, owner, repo str } // TODO: remove custom Accept headers when APIs fully launch. - acceptHeaders := []string{mediaTypeProjectsPreview, mediaTypeGraphQLNodeIDPreview} - req.Header.Set("Accept", strings.Join(acceptHeaders, ", ")) + req.Header.Set("Accept", mediaTypeProjectsPreview) project := &Project{} resp, err := s.client.Do(ctx, req, project) diff --git a/vendor/github.com/google/go-github/github/repos_releases.go b/vendor/github.com/google/go-github/github/repos_releases.go index d5dfc702..c23601d1 100644 --- a/vendor/github.com/google/go-github/github/repos_releases.go +++ b/vendor/github.com/google/go-github/github/repos_releases.go @@ -79,9 +79,6 @@ func (s *RepositoriesService) ListReleases(ctx context.Context, owner, repo stri return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - var releases []*RepositoryRelease resp, err := s.client.Do(ctx, req, &releases) if err != nil { @@ -120,9 +117,6 @@ func (s *RepositoriesService) getSingleRelease(ctx context.Context, url string) return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - release := new(RepositoryRelease) resp, err := s.client.Do(ctx, req, release) if err != nil { @@ -142,9 +136,6 @@ func (s *RepositoriesService) CreateRelease(ctx context.Context, owner, repo str return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - r := new(RepositoryRelease) resp, err := s.client.Do(ctx, req, r) if err != nil { @@ -164,9 +155,6 @@ func (s *RepositoriesService) EditRelease(ctx context.Context, owner, repo strin return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - r := new(RepositoryRelease) resp, err := s.client.Do(ctx, req, r) if err != nil { @@ -203,9 +191,6 @@ func (s *RepositoriesService) ListReleaseAssets(ctx context.Context, owner, repo return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. 
- req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - var assets []*ReleaseAsset resp, err := s.client.Do(ctx, req, &assets) if err != nil { @@ -225,9 +210,6 @@ func (s *RepositoriesService) GetReleaseAsset(ctx context.Context, owner, repo s return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - asset := new(ReleaseAsset) resp, err := s.client.Do(ctx, req, asset) if err != nil { @@ -292,9 +274,6 @@ func (s *RepositoriesService) EditReleaseAsset(ctx context.Context, owner, repo return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - asset := new(ReleaseAsset) resp, err := s.client.Do(ctx, req, asset) if err != nil { @@ -341,9 +320,6 @@ func (s *RepositoriesService) UploadReleaseAsset(ctx context.Context, owner, rep return nil, nil, err } - // TODO: remove custom Accept header when this API fully launches. - req.Header.Set("Accept", mediaTypeGraphQLNodeIDPreview) - asset := new(ReleaseAsset) resp, err := s.client.Do(ctx, req, asset) if err != nil { diff --git a/vendor/github.com/google/go-github/github/search.go b/vendor/github.com/google/go-github/github/search.go index 6e0000df..abaf5e1f 100644 --- a/vendor/github.com/google/go-github/github/search.go +++ b/vendor/github.com/google/go-github/github/search.go @@ -8,7 +8,9 @@ package github import ( "context" "fmt" + "net/url" "strconv" + "strings" qs "github.com/google/go-querystring/query" ) @@ -221,11 +223,15 @@ func (s *SearchService) search(ctx context.Context, searchType string, parameter if err != nil { return nil, err } - params.Set("q", parameters.Query) + q := strings.Replace(parameters.Query, " ", "+", -1) if parameters.RepositoryID != nil { params.Set("repository_id", strconv.FormatInt(*parameters.RepositoryID, 10)) } - u := fmt.Sprintf("search/%s?%s", searchType, params.Encode()) + query := "q=" + url.PathEscape(q) + if v := params.Encode(); v != "" { + query = query + "&" + v + } + u := fmt.Sprintf("search/%s?%s", searchType, query) req, err := s.client.NewRequest("GET", u, nil) if err != nil { diff --git a/vendor/github.com/google/go-github/github/teams.go b/vendor/github.com/google/go-github/github/teams.go index 1021d538..c3773e00 100644 --- a/vendor/github.com/google/go-github/github/teams.go +++ b/vendor/github.com/google/go-github/github/teams.go @@ -1,7 +1,357 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + package github +import ( + "context" + "fmt" + "strings" + "time" +) + // TeamsService provides access to the team-related functions // in the GitHub API. // // GitHub API docs: https://developer.github.com/v3/teams/ type TeamsService service + +// Team represents a team within a GitHub organization. Teams are used to +// manage access to an organization's repositories. +type Team struct { + ID *int64 `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Description *string `json:"description,omitempty"` + URL *string `json:"url,omitempty"` + Slug *string `json:"slug,omitempty"` + + // Permission specifies the default permission for repositories owned by the team. + Permission *string `json:"permission,omitempty"` + + // Privacy identifies the level of privacy this team should have. 
+ // Possible values are: + // secret - only visible to organization owners and members of this team + // closed - visible to all members of this organization + // Default is "secret". + Privacy *string `json:"privacy,omitempty"` + + MembersCount *int `json:"members_count,omitempty"` + ReposCount *int `json:"repos_count,omitempty"` + Organization *Organization `json:"organization,omitempty"` + MembersURL *string `json:"members_url,omitempty"` + RepositoriesURL *string `json:"repositories_url,omitempty"` + Parent *Team `json:"parent,omitempty"` + + // LDAPDN is only available in GitHub Enterprise and when the team + // membership is synchronized with LDAP. + LDAPDN *string `json:"ldap_dn,omitempty"` +} + +func (t Team) String() string { + return Stringify(t) +} + +// Invitation represents a team member's invitation status. +type Invitation struct { + ID *int64 `json:"id,omitempty"` + Login *string `json:"login,omitempty"` + Email *string `json:"email,omitempty"` + // Role can be one of the values - 'direct_member', 'admin', 'billing_manager', 'hiring_manager', or 'reinstate'. + Role *string `json:"role,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + Inviter *User `json:"inviter,omitempty"` + TeamCount *int `json:"team_count,omitempty"` + InvitationTeamURL *string `json:"invitation_team_url,omitempty"` +} + +func (i Invitation) String() string { + return Stringify(i) +} + +// ListTeams lists all of the teams for an organization. +// +// GitHub API docs: https://developer.github.com/v3/teams/#list-teams +func (s *TeamsService) ListTeams(ctx context.Context, org string, opt *ListOptions) ([]*Team, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams", org) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + var teams []*Team + resp, err := s.client.Do(ctx, req, &teams) + if err != nil { + return nil, resp, err + } + + return teams, resp, nil +} + +// GetTeam fetches a team by ID. +// +// GitHub API docs: https://developer.github.com/v3/teams/#get-team +func (s *TeamsService) GetTeam(ctx context.Context, team int64) (*Team, *Response, error) { + u := fmt.Sprintf("teams/%v", team) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + t := new(Team) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// NewTeam represents a team to be created or modified. +type NewTeam struct { + Name string `json:"name"` // Name of the team. (Required.) + Description *string `json:"description,omitempty"` + Maintainers []string `json:"maintainers,omitempty"` + RepoNames []string `json:"repo_names,omitempty"` + ParentTeamID *int64 `json:"parent_team_id,omitempty"` + + // Deprecated: Permission is deprecated when creating or editing a team in an org + // using the new GitHub permission model. It no longer identifies the + // permission a team has on its repos, but only specifies the default + // permission a repo is initially added with. Avoid confusion by + // specifying a permission value when calling AddTeamRepo. 
+ Permission *string `json:"permission,omitempty"` + + // Privacy identifies the level of privacy this team should have. + // Possible values are: + // secret - only visible to organization owners and members of this team + // closed - visible to all members of this organization + // Default is "secret". + Privacy *string `json:"privacy,omitempty"` + + // LDAPDN may be used in GitHub Enterprise when the team membership + // is synchronized with LDAP. + LDAPDN *string `json:"ldap_dn,omitempty"` +} + +func (s NewTeam) String() string { + return Stringify(s) +} + +// CreateTeam creates a new team within an organization. +// +// GitHub API docs: https://developer.github.com/v3/teams/#create-team +func (s *TeamsService) CreateTeam(ctx context.Context, org string, team NewTeam) (*Team, *Response, error) { + u := fmt.Sprintf("orgs/%v/teams", org) + req, err := s.client.NewRequest("POST", u, team) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + t := new(Team) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// EditTeam edits a team. +// +// GitHub API docs: https://developer.github.com/v3/teams/#edit-team +func (s *TeamsService) EditTeam(ctx context.Context, id int64, team NewTeam) (*Team, *Response, error) { + u := fmt.Sprintf("teams/%v", id) + req, err := s.client.NewRequest("PATCH", u, team) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + t := new(Team) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// DeleteTeam deletes a team. +// +// GitHub API docs: https://developer.github.com/v3/teams/#delete-team +func (s *TeamsService) DeleteTeam(ctx context.Context, team int64) (*Response, error) { + u := fmt.Sprintf("teams/%v", team) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + return s.client.Do(ctx, req, nil) +} + +// ListChildTeams lists child teams for a team. +// +// GitHub API docs: https://developer.github.com/v3/teams/#list-child-teams +func (s *TeamsService) ListChildTeams(ctx context.Context, teamID int64, opt *ListOptions) ([]*Team, *Response, error) { + u := fmt.Sprintf("teams/%v/teams", teamID) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + var teams []*Team + resp, err := s.client.Do(ctx, req, &teams) + if err != nil { + return nil, resp, err + } + + return teams, resp, nil +} + +// ListTeamRepos lists the repositories that the specified team has access to. +// +// GitHub API docs: https://developer.github.com/v3/teams/#list-team-repos +func (s *TeamsService) ListTeamRepos(ctx context.Context, team int64, opt *ListOptions) ([]*Repository, *Response, error) { + u := fmt.Sprintf("teams/%v/repos", team) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when topics API fully launches. 
+ headers := []string{mediaTypeTopicsPreview, mediaTypeNestedTeamsPreview} + req.Header.Set("Accept", strings.Join(headers, ", ")) + + var repos []*Repository + resp, err := s.client.Do(ctx, req, &repos) + if err != nil { + return nil, resp, err + } + + return repos, resp, nil +} + +// IsTeamRepo checks if a team manages the specified repository. If the +// repository is managed by team, a Repository is returned which includes the +// permissions team has for that repo. +// +// GitHub API docs: https://developer.github.com/v3/teams/#check-if-a-team-manages-a-repository +func (s *TeamsService) IsTeamRepo(ctx context.Context, team int64, owner string, repo string) (*Repository, *Response, error) { + u := fmt.Sprintf("teams/%v/repos/%v/%v", team, owner, repo) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + headers := []string{mediaTypeOrgPermissionRepo, mediaTypeNestedTeamsPreview} + req.Header.Set("Accept", strings.Join(headers, ", ")) + + repository := new(Repository) + resp, err := s.client.Do(ctx, req, repository) + if err != nil { + return nil, resp, err + } + + return repository, resp, nil +} + +// TeamAddTeamRepoOptions specifies the optional parameters to the +// TeamsService.AddTeamRepo method. +type TeamAddTeamRepoOptions struct { + // Permission specifies the permission to grant the team on this repository. + // Possible values are: + // pull - team members can pull, but not push to or administer this repository + // push - team members can pull and push, but not administer this repository + // admin - team members can pull, push and administer this repository + // + // If not specified, the team's permission attribute will be used. + Permission string `json:"permission,omitempty"` +} + +// AddTeamRepo adds a repository to be managed by the specified team. The +// specified repository must be owned by the organization to which the team +// belongs, or a direct fork of a repository owned by the organization. +// +// GitHub API docs: https://developer.github.com/v3/teams/#add-team-repo +func (s *TeamsService) AddTeamRepo(ctx context.Context, team int64, owner string, repo string, opt *TeamAddTeamRepoOptions) (*Response, error) { + u := fmt.Sprintf("teams/%v/repos/%v/%v", team, owner, repo) + req, err := s.client.NewRequest("PUT", u, opt) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// RemoveTeamRepo removes a repository from being managed by the specified +// team. Note that this does not delete the repository, it just removes it +// from the team. +// +// GitHub API docs: https://developer.github.com/v3/teams/#remove-team-repo +func (s *TeamsService) RemoveTeamRepo(ctx context.Context, team int64, owner string, repo string) (*Response, error) { + u := fmt.Sprintf("teams/%v/repos/%v/%v", team, owner, repo) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListUserTeams lists a user's teams +// GitHub API docs: https://developer.github.com/v3/teams/#list-user-teams +func (s *TeamsService) ListUserTeams(ctx context.Context, opt *ListOptions) ([]*Team, *Response, error) { + u := "user/teams" + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. 
+ req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + var teams []*Team + resp, err := s.client.Do(ctx, req, &teams) + if err != nil { + return nil, resp, err + } + + return teams, resp, nil +} diff --git a/vendor/github.com/google/go-github/github/teams_discussion_comments.go b/vendor/github.com/google/go-github/github/teams_discussion_comments.go index 26d0e8c5..383d559e 100644 --- a/vendor/github.com/google/go-github/github/teams_discussion_comments.go +++ b/vendor/github.com/google/go-github/github/teams_discussion_comments.go @@ -21,7 +21,7 @@ type DiscussionComment struct { DiscussionURL *string `json:"discussion_url,omitempty"` HTMLURL *string `json:"html_url,omitempty"` NodeID *string `json:"node_id,omitempty"` - Number *int64 `json:"number,omitempty"` + Number *int `json:"number,omitempty"` UpdatedAt *Timestamp `json:"updated_at,omitempty"` URL *string `json:"url,omitempty"` } diff --git a/vendor/github.com/google/go-github/github/teams_discussions.go b/vendor/github.com/google/go-github/github/teams_discussions.go index fc9b25a5..5db06d10 100644 --- a/vendor/github.com/google/go-github/github/teams_discussions.go +++ b/vendor/github.com/google/go-github/github/teams_discussions.go @@ -16,13 +16,13 @@ type TeamDiscussion struct { Body *string `json:"body,omitempty"` BodyHTML *string `json:"body_html,omitempty"` BodyVersion *string `json:"body_version,omitempty"` - CommentsCount *int64 `json:"comments_count,omitempty"` + CommentsCount *int `json:"comments_count,omitempty"` CommentsURL *string `json:"comments_url,omitempty"` CreatedAt *Timestamp `json:"created_at,omitempty"` LastEditedAt *Timestamp `json:"last_edited_at,omitempty"` HTMLURL *string `json:"html_url,omitempty"` NodeID *string `json:"node_id,omitempty"` - Number *int64 `json:"number,omitempty"` + Number *int `json:"number,omitempty"` Pinned *bool `json:"pinned,omitempty"` Private *bool `json:"private,omitempty"` TeamURL *string `json:"team_url,omitempty"` diff --git a/vendor/github.com/google/go-github/github/teams_members.go b/vendor/github.com/google/go-github/github/teams_members.go new file mode 100644 index 00000000..d5cfa0dc --- /dev/null +++ b/vendor/github.com/google/go-github/github/teams_members.go @@ -0,0 +1,174 @@ +// Copyright 2018 The go-github AUTHORS. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package github + +import ( + "context" + "fmt" +) + +// TeamListTeamMembersOptions specifies the optional parameters to the +// TeamsService.ListTeamMembers method. +type TeamListTeamMembersOptions struct { + // Role filters members returned by their role in the team. Possible + // values are "all", "member", "maintainer". Default is "all". + Role string `url:"role,omitempty"` + + ListOptions +} + +// ListTeamMembers lists all of the users who are members of the specified +// team. 
+// +// GitHub API docs: https://developer.github.com/v3/teams/members/#list-team-members +func (s *TeamsService) ListTeamMembers(ctx context.Context, team int64, opt *TeamListTeamMembersOptions) ([]*User, *Response, error) { + u := fmt.Sprintf("teams/%v/members", team) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + var members []*User + resp, err := s.client.Do(ctx, req, &members) + if err != nil { + return nil, resp, err + } + + return members, resp, nil +} + +// IsTeamMember checks if a user is a member of the specified team. +// +// GitHub API docs: https://developer.github.com/v3/teams/members/#get-team-member +// +// Deprecated: This API has been marked as deprecated in the Github API docs, +// TeamsService.GetTeamMembership method should be used instead. +func (s *TeamsService) IsTeamMember(ctx context.Context, team int64, user string) (bool, *Response, error) { + u := fmt.Sprintf("teams/%v/members/%v", team, user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return false, nil, err + } + + resp, err := s.client.Do(ctx, req, nil) + member, err := parseBoolResponse(err) + return member, resp, err +} + +// GetTeamMembership returns the membership status for a user in a team. +// +// GitHub API docs: https://developer.github.com/v3/teams/members/#get-team-membership +func (s *TeamsService) GetTeamMembership(ctx context.Context, team int64, user string) (*Membership, *Response, error) { + u := fmt.Sprintf("teams/%v/memberships/%v", team, user) + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + req.Header.Set("Accept", mediaTypeNestedTeamsPreview) + + t := new(Membership) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// TeamAddTeamMembershipOptions specifies the optional +// parameters to the TeamsService.AddTeamMembership method. +type TeamAddTeamMembershipOptions struct { + // Role specifies the role the user should have in the team. Possible + // values are: + // member - a normal member of the team + // maintainer - a team maintainer. Able to add/remove other team + // members, promote other team members to team + // maintainer, and edit the team’s name and description + // + // Default value is "member". + Role string `json:"role,omitempty"` +} + +// AddTeamMembership adds or invites a user to a team. +// +// In order to add a membership between a user and a team, the authenticated +// user must have 'admin' permissions to the team or be an owner of the +// organization that the team is associated with. +// +// If the user is already a part of the team's organization (meaning they're on +// at least one other team in the organization), this endpoint will add the +// user to the team. +// +// If the user is completely unaffiliated with the team's organization (meaning +// they're on none of the organization's teams), this endpoint will send an +// invitation to the user via email. This newly-created membership will be in +// the "pending" state until the user accepts the invitation, at which point +// the membership will transition to the "active" state and the user will be +// added as a member of the team. 
+// +// GitHub API docs: https://developer.github.com/v3/teams/members/#add-or-update-team-membership +func (s *TeamsService) AddTeamMembership(ctx context.Context, team int64, user string, opt *TeamAddTeamMembershipOptions) (*Membership, *Response, error) { + u := fmt.Sprintf("teams/%v/memberships/%v", team, user) + req, err := s.client.NewRequest("PUT", u, opt) + if err != nil { + return nil, nil, err + } + + t := new(Membership) + resp, err := s.client.Do(ctx, req, t) + if err != nil { + return nil, resp, err + } + + return t, resp, nil +} + +// RemoveTeamMembership removes a user from a team. +// +// GitHub API docs: https://developer.github.com/v3/teams/members/#remove-team-membership +func (s *TeamsService) RemoveTeamMembership(ctx context.Context, team int64, user string) (*Response, error) { + u := fmt.Sprintf("teams/%v/memberships/%v", team, user) + req, err := s.client.NewRequest("DELETE", u, nil) + if err != nil { + return nil, err + } + + return s.client.Do(ctx, req, nil) +} + +// ListPendingTeamInvitations gets the pending invitation list for a team. +// Warning: The API may change without advance notice during the preview period. +// Preview features are not supported for production use. +// +// GitHub API docs: https://developer.github.com/v3/teams/members/#list-pending-team-invitations +func (s *TeamsService) ListPendingTeamInvitations(ctx context.Context, team int64, opt *ListOptions) ([]*Invitation, *Response, error) { + u := fmt.Sprintf("teams/%v/invitations", team) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + var pendingInvitations []*Invitation + resp, err := s.client.Do(ctx, req, &pendingInvitations) + if err != nil { + return nil, resp, err + } + + return pendingInvitations, resp, nil +} diff --git a/vendor/github.com/google/go-github/github/timestamp.go b/vendor/github.com/google/go-github/github/timestamp.go index a1c1554a..90929d57 100644 --- a/vendor/github.com/google/go-github/github/timestamp.go +++ b/vendor/github.com/google/go-github/github/timestamp.go @@ -28,9 +28,9 @@ func (t *Timestamp) UnmarshalJSON(data []byte) (err error) { str := string(data) i, err := strconv.ParseInt(str, 10, 64) if err == nil { - (*t).Time = time.Unix(i, 0) + t.Time = time.Unix(i, 0) } else { - (*t).Time, err = time.Parse(`"`+time.RFC3339+`"`, str) + t.Time, err = time.Parse(`"`+time.RFC3339+`"`, str) } return } diff --git a/vendor/github.com/google/go-github/github/users.go b/vendor/github.com/google/go-github/github/users.go index 8c4efe1d..f164d559 100644 --- a/vendor/github.com/google/go-github/github/users.go +++ b/vendor/github.com/google/go-github/github/users.go @@ -20,6 +20,7 @@ type UsersService service type User struct { Login *string `json:"login,omitempty"` ID *int64 `json:"id,omitempty"` + NodeID *string `json:"node_id,omitempty"` AvatarURL *string `json:"avatar_url,omitempty"` HTMLURL *string `json:"html_url,omitempty"` GravatarID *string `json:"gravatar_id,omitempty"` @@ -134,6 +135,56 @@ func (s *UsersService) Edit(ctx context.Context, user *User) (*User, *Response, return uResp, resp, nil } +// HovercardOptions specifies optional parameters to the UsersService.GetHovercard +// method. +type HovercardOptions struct { + // SubjectType specifies the additional information to be received about the hovercard. + // Possible values are: organization, repository, issue, pull_request. (Required when using subject_id.)
+ SubjectType string `url:"subject_type"` + + // SubjectID specifies the ID for the SubjectType. (Required when using subject_type.) + SubjectID string `url:"subject_id"` +} + +// Hovercard represents hovercard information about a user. +type Hovercard struct { + Contexts []*UserContext `json:"contexts,omitempty"` +} + +// UserContext represents the contextual information about user. +type UserContext struct { + Message *string `json:"message,omitempty"` + Octicon *string `json:"octicon,omitempty"` +} + +// GetHovercard fetches contextual information about user. It requires authentication +// via Basic Auth or via OAuth with the repo scope. +// +// GitHub API docs: https://developer.github.com/v3/users/#get-contextual-information-about-a-user +func (s *UsersService) GetHovercard(ctx context.Context, user string, opt *HovercardOptions) (*Hovercard, *Response, error) { + u := fmt.Sprintf("users/%v/hovercard", user) + u, err := addOptions(u, opt) + if err != nil { + return nil, nil, err + } + + req, err := s.client.NewRequest("GET", u, nil) + if err != nil { + return nil, nil, err + } + + // TODO: remove custom Accept header when this API fully launches. + req.Header.Set("Accept", mediaTypeHovercardPreview) + + hc := new(Hovercard) + resp, err := s.client.Do(ctx, req, hc) + if err != nil { + return nil, resp, err + } + + return hc, resp, nil +} + // UserListOptions specifies optional parameters to the UsersService.ListAll // method. type UserListOptions struct { diff --git a/vendor/github.com/mattn/go-runewidth/runewidth.go b/vendor/github.com/mattn/go-runewidth/runewidth.go index 2164497a..82568a1b 100644 --- a/vendor/github.com/mattn/go-runewidth/runewidth.go +++ b/vendor/github.com/mattn/go-runewidth/runewidth.go @@ -1,13 +1,24 @@ package runewidth +import "os" + var ( // EastAsianWidth will be set true if the current locale is CJK - EastAsianWidth = IsEastAsian() + EastAsianWidth bool // DefaultCondition is a condition in current locale DefaultCondition = &Condition{EastAsianWidth} ) +func init() { + env := os.Getenv("RUNEWIDTH_EASTASIAN") + if env == "" { + EastAsianWidth = IsEastAsian() + } else { + EastAsianWidth = env == "1" + } +} + type interval struct { first rune last rune @@ -55,6 +66,7 @@ var private = table{ var nonprint = table{ {0x0000, 0x001F}, {0x007F, 0x009F}, {0x00AD, 0x00AD}, {0x070F, 0x070F}, {0x180B, 0x180E}, {0x200B, 0x200F}, + {0x2028, 0x2029}, {0x202A, 0x202E}, {0x206A, 0x206F}, {0xD800, 0xDFFF}, {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFB}, {0xFFFE, 0xFFFF}, } diff --git a/vendor/github.com/rivo/tview/LICENSE.txt b/vendor/github.com/rivo/tview/LICENSE.txt index 8aa26455..9d694307 100644 --- a/vendor/github.com/rivo/tview/LICENSE.txt +++ b/vendor/github.com/rivo/tview/LICENSE.txt @@ -1,6 +1,6 @@ MIT License -Copyright (c) [year] [fullname] +Copyright (c) 2018 Oliver Kuederle Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/github.com/rivo/tview/box.go b/vendor/github.com/rivo/tview/box.go index 654d907c..acbb1492 100644 --- a/vendor/github.com/rivo/tview/box.go +++ b/vendor/github.com/rivo/tview/box.go @@ -32,6 +32,9 @@ type Box struct { // The color of the border. borderColor tcell.Color + // The style attributes of the border. + borderAttributes tcell.AttrMask + // The title. Only visible if there is a border, too. 
title string @@ -193,6 +196,15 @@ func (b *Box) SetBorderColor(color tcell.Color) *Box { return b } +// SetBorderAttributes sets the border's style attributes. You can combine +// different attributes using bitmask operations: +// +// box.SetBorderAttributes(tcell.AttrUnderline | tcell.AttrBold) +func (b *Box) SetBorderAttributes(attr tcell.AttrMask) *Box { + b.borderAttributes = attr + return b +} + // SetTitle sets the box's title. func (b *Box) SetTitle(title string) *Box { b.title = title @@ -233,7 +245,7 @@ func (b *Box) Draw(screen tcell.Screen) { // Draw border. if b.border && b.width >= 2 && b.height >= 2 { - border := background.Foreground(b.borderColor) + border := background.Foreground(b.borderColor) | tcell.Style(b.borderAttributes) var vertical, horizontal, topLeft, topRight, bottomLeft, bottomRight rune if b.focus.HasFocus() { horizontal = Borders.HorizontalFocus diff --git a/vendor/github.com/rivo/tview/form.go b/vendor/github.com/rivo/tview/form.go index 0fcc0479..3593adfd 100644 --- a/vendor/github.com/rivo/tview/form.go +++ b/vendor/github.com/rivo/tview/form.go @@ -26,7 +26,7 @@ type FormItem interface { // required. GetFieldWidth() int - // SetEnteredFunc sets the handler function for when the user finished + // SetFinishedFunc sets the handler function for when the user finished // entering data into the item. The handler may receive events for the // Enter key (we're done), the Escape key (cancel input), the Tab key (move to // next field), and the Backtab key (move to previous field). diff --git a/vendor/github.com/rivo/tview/modal.go b/vendor/github.com/rivo/tview/modal.go index 8ed535e7..9ef7e382 100644 --- a/vendor/github.com/rivo/tview/modal.go +++ b/vendor/github.com/rivo/tview/modal.go @@ -40,6 +40,11 @@ func NewModal() *Modal { SetButtonBackgroundColor(Styles.PrimitiveBackgroundColor). SetButtonTextColor(Styles.PrimaryTextColor) m.form.SetBackgroundColor(Styles.ContrastBackgroundColor).SetBorderPadding(0, 0, 0, 0) + m.form.SetCancelFunc(func() { + if m.done != nil { + m.done(-1, "") + } + }) m.frame = NewFrame(m.form).SetBorders(0, 0, 1, 0, 0, 0) m.frame.SetBorder(true). SetBackgroundColor(Styles.ContrastBackgroundColor). diff --git a/vendor/github.com/xanzy/go-gitlab/README.md b/vendor/github.com/xanzy/go-gitlab/README.md index dc793162..da749949 100644 --- a/vendor/github.com/xanzy/go-gitlab/README.md +++ b/vendor/github.com/xanzy/go-gitlab/README.md @@ -23,8 +23,8 @@ to add new and/or missing endpoints. Currently the following services are suppor - [x] Award Emojis - [x] Branches - [x] Broadcast Messages -- [ ] Project-level Variables -- [ ] Group-level Variables +- [x] Project-level Variables +- [x] Group-level Variables - [x] Commits - [ ] Custom Attributes - [x] Deployments @@ -42,8 +42,9 @@ to add new and/or missing endpoints. Currently the following services are suppor - [x] Group Members - [x] Issues - [x] Issue Boards +- [x] Group Issue Boards - [x] Jobs -- [ ] Keys +- [x] Keys - [x] Labels - [ ] License - [x] Merge Requests diff --git a/vendor/github.com/xanzy/go-gitlab/commits.go b/vendor/github.com/xanzy/go-gitlab/commits.go index 01bfdae8..1bbcc401 100644 --- a/vendor/github.com/xanzy/go-gitlab/commits.go +++ b/vendor/github.com/xanzy/go-gitlab/commits.go @@ -459,6 +459,32 @@ func (s *CommitsService) SetCommitStatus(pid interface{}, sha string, opt *SetCo return cs, resp, err } +// GetMergeRequestsByCommit gets merge request associated with a commit. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/commits.html#list-merge-requests-associated-with-a-commit +func (s *CommitsService) GetMergeRequestsByCommit(pid interface{}, sha string, options ...OptionFunc) ([]*MergeRequest, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/repository/commits/%s/merge_requests", + url.QueryEscape(project), url.QueryEscape(sha)) + + req, err := s.client.NewRequest("GET", u, nil, options) + if err != nil { + return nil, nil, err + } + + var mrs []*MergeRequest + resp, err := s.client.Do(req, &mrs) + if err != nil { + return nil, resp, err + } + + return mrs, resp, err +} + // CherryPickCommitOptions represents the available options for cherry-picking a commit. // // GitLab API docs: https://docs.gitlab.com/ce/api/commits.html#cherry-pick-a-commit diff --git a/vendor/github.com/xanzy/go-gitlab/events.go b/vendor/github.com/xanzy/go-gitlab/events.go index 4740e4ab..ee6fbad3 100644 --- a/vendor/github.com/xanzy/go-gitlab/events.go +++ b/vendor/github.com/xanzy/go-gitlab/events.go @@ -122,15 +122,15 @@ func (s *EventsService) ListCurrentUserContributionEvents(opt *ListContributionE return cs, resp, err } -// ListProjectContributionEvents gets a list currently authenticated user's events +// ListProjectVisibleEvents gets a list of visible events for a particular project // -// GitLab API docs: https://docs.gitlab.com/ce/api/events.html#list-a-project-39-s-visible-events -func (s *EventsService) ListProjectContributionEvents(pid interface{}, opt *ListContributionEventsOptions, options ...OptionFunc) ([]*ContributionEvent, *Response, error) { +// GitLab API docs: https://docs.gitlab.com/ee/api/events.html#list-a-project-s-visible-events +func (s *EventsService) ListProjectVisibleEvents(pid interface{}, opt *ListContributionEventsOptions, options ...OptionFunc) ([]*ContributionEvent, *Response, error) { project, err := parseID(pid) if err != nil { return nil, nil, err } - u := fmt.Sprintf("%s/events", url.QueryEscape(project)) + u := fmt.Sprintf("projects/%s/events", url.QueryEscape(project)) req, err := s.client.NewRequest("GET", u, opt, options) if err != nil { diff --git a/vendor/github.com/xanzy/go-gitlab/gitlab.go b/vendor/github.com/xanzy/go-gitlab/gitlab.go index 65cb4198..e600f158 100644 --- a/vendor/github.com/xanzy/go-gitlab/gitlab.go +++ b/vendor/github.com/xanzy/go-gitlab/gitlab.go @@ -285,11 +285,14 @@ type Client struct { Features *FeaturesService GitIgnoreTemplates *GitIgnoreTemplatesService Groups *GroupsService + GroupIssueBoards *GroupIssueBoardsService GroupMembers *GroupMembersService GroupMilestones *GroupMilestonesService + GroupVariables *GroupVariablesService Issues *IssuesService IssueLinks *IssueLinksService Jobs *JobsService + Keys *KeysService Boards *IssueBoardsService Labels *LabelsService MergeRequests *MergeRequestsService @@ -305,6 +308,7 @@ type Client struct { Projects *ProjectsService ProjectMembers *ProjectMembersService ProjectSnippets *ProjectSnippetsService + ProjectVariables *ProjectVariablesService ProtectedBranches *ProtectedBranchesService Repositories *RepositoriesService RepositoryFiles *RepositoryFilesService @@ -415,11 +419,14 @@ func newClient(httpClient *http.Client) *Client { c.Features = &FeaturesService{client: c} c.GitIgnoreTemplates = &GitIgnoreTemplatesService{client: c} c.Groups = &GroupsService{client: c} + c.GroupIssueBoards = &GroupIssueBoardsService{client: c} c.GroupMembers = &GroupMembersService{client: c} 
c.GroupMilestones = &GroupMilestonesService{client: c} + c.GroupVariables = &GroupVariablesService{client: c} c.Issues = &IssuesService{client: c, timeStats: timeStats} c.IssueLinks = &IssueLinksService{client: c} c.Jobs = &JobsService{client: c} + c.Keys = &KeysService{client: c} c.Boards = &IssueBoardsService{client: c} c.Labels = &LabelsService{client: c} c.MergeRequests = &MergeRequestsService{client: c, timeStats: timeStats} @@ -435,6 +442,7 @@ func newClient(httpClient *http.Client) *Client { c.Projects = &ProjectsService{client: c} c.ProjectMembers = &ProjectMembersService{client: c} c.ProjectSnippets = &ProjectSnippetsService{client: c} + c.ProjectVariables = &ProjectVariablesService{client: c} c.ProtectedBranches = &ProtectedBranchesService{client: c} c.Repositories = &RepositoriesService{client: c} c.RepositoryFiles = &RepositoryFilesService{client: c} diff --git a/vendor/github.com/xanzy/go-gitlab/group_boards.go b/vendor/github.com/xanzy/go-gitlab/group_boards.go new file mode 100644 index 00000000..764df14e --- /dev/null +++ b/vendor/github.com/xanzy/go-gitlab/group_boards.go @@ -0,0 +1,262 @@ +// +// Copyright 2018, Patrick Webster +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package gitlab + +import ( + "fmt" + "net/url" +) + +// GroupIssueBoardsService handles communication with the group issue board +// related methods of the GitLab API. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/group_boards.html +type GroupIssueBoardsService struct { + client *Client +} + +// GroupIssueBoard represents a GitLab group issue board. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/group_boards.html +type GroupIssueBoard struct { + ID int `json:"id"` + Name string `json:"name"` + Group *Group `json:"group"` + Milestone *Milestone `json:"milestone"` + Lists []*BoardList `json:"lists"` +} + +func (b GroupIssueBoard) String() string { + return Stringify(b) +} + +// ListGroupIssueBoardsOptions represents the available +// ListGroupIssueBoards() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/group_boards.html#group-board +type ListGroupIssueBoardsOptions ListOptions + +// ListGroupIssueBoards gets a list of all issue boards in a group. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/group_boards.html#group-board +func (s *GroupIssueBoardsService) ListGroupIssueBoards(gid interface{}, opt *ListGroupIssueBoardsOptions, options ...OptionFunc) ([]*GroupIssueBoard, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/boards", url.QueryEscape(group)) + + req, err := s.client.NewRequest("GET", u, opt, options) + if err != nil { + return nil, nil, err + } + + var gs []*GroupIssueBoard + resp, err := s.client.Do(req, &gs) + if err != nil { + return nil, resp, err + } + + return gs, resp, err +} + +// GetGroupIssueBoard gets a single issue board of a group. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/group_boards.html#single-board +func (s *GroupIssueBoardsService) GetGroupIssueBoard(gid interface{}, board int, options ...OptionFunc) (*GroupIssueBoard, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/boards/%d", url.QueryEscape(group), board) + + req, err := s.client.NewRequest("GET", u, nil, options) + if err != nil { + return nil, nil, err + } + + gib := new(GroupIssueBoard) + resp, err := s.client.Do(req, gib) + if err != nil { + return nil, resp, err + } + + return gib, resp, err +} + +// ListGroupIssueBoardListsOptions represents the available +// ListGroupIssueBoardLists() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/group_boards.html#list-board-lists +type ListGroupIssueBoardListsOptions ListOptions + +// ListGroupIssueBoardLists gets a list of the issue board's lists. Does not include +// backlog and closed lists. +// +// GitLab API docs: https://docs.gitlab.com/ce/api/group_boards.html#list-board-lists +func (s *GroupIssueBoardsService) ListGroupIssueBoardLists(gid interface{}, board int, opt *ListGroupIssueBoardListsOptions, options ...OptionFunc) ([]*BoardList, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/boards/%d/lists", url.QueryEscape(group), board) + + req, err := s.client.NewRequest("GET", u, opt, options) + if err != nil { + return nil, nil, err + } + + var gbl []*BoardList + resp, err := s.client.Do(req, &gbl) + if err != nil { + return nil, resp, err + } + + return gbl, resp, err +} + +// GetGroupIssueBoardList gets a single issue board list. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/group_boards.html#single-board-list +func (s *GroupIssueBoardsService) GetGroupIssueBoardList(gid interface{}, board, list int, options ...OptionFunc) (*BoardList, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/boards/%d/lists/%d", + url.QueryEscape(group), + board, + list, + ) + + req, err := s.client.NewRequest("GET", u, nil, options) + if err != nil { + return nil, nil, err + } + + gbl := new(BoardList) + resp, err := s.client.Do(req, gbl) + if err != nil { + return nil, resp, err + } + + return gbl, resp, err +} + +// CreateGroupIssueBoardListOptions represents the available +// CreateGroupIssueBoardList() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/group_boards.html#new-board-list +type CreateGroupIssueBoardListOptions struct { + LabelID *int `url:"label_id" json:"label_id"` +} + +// CreateGroupIssueBoardList creates a new issue board list. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/group_boards.html#new-board-list +func (s *GroupIssueBoardsService) CreateGroupIssueBoardList(gid interface{}, board int, opt *CreateGroupIssueBoardListOptions, options ...OptionFunc) (*BoardList, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/boards/%d/lists", url.QueryEscape(group), board) + + req, err := s.client.NewRequest("POST", u, opt, options) + if err != nil { + return nil, nil, err + } + + gbl := new(BoardList) + resp, err := s.client.Do(req, gbl) + if err != nil { + return nil, resp, err + } + + return gbl, resp, err +} + +// UpdateGroupIssueBoardListOptions represents the available +// UpdateGroupIssueBoardList() options. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/group_boards.html#edit-board-list +type UpdateGroupIssueBoardListOptions struct { + Position *int `url:"position" json:"position"` +} + +// UpdateIssueBoardList updates the position of an existing +// group issue board list. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/group_boards.html#edit-board-list +func (s *GroupIssueBoardsService) UpdateIssueBoardList(gid interface{}, board, list int, opt *UpdateGroupIssueBoardListOptions, options ...OptionFunc) ([]*BoardList, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/boards/%d/lists/%d", + url.QueryEscape(group), + board, + list, + ) + + req, err := s.client.NewRequest("PUT", u, opt, options) + if err != nil { + return nil, nil, err + } + + var gbl []*BoardList + resp, err := s.client.Do(req, gbl) + if err != nil { + return nil, resp, err + } + + return gbl, resp, err +} + +// DeleteGroupIssueBoardList soft deletes a group issue board list. +// Only for admins and group owners. +// +// GitLab API docs: +// https://docs.gitlab.com/ce/api/group_boards.html#delete-a-board-list +func (s *GroupIssueBoardsService) DeleteGroupIssueBoardList(gid interface{}, board, list int, options ...OptionFunc) (*Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, err + } + u := fmt.Sprintf("groups/%s/boards/%d/lists/%d", + url.QueryEscape(group), + board, + list, + ) + + req, err := s.client.NewRequest("DELETE", u, nil, options) + if err != nil { + return nil, err + } + + return s.client.Do(req, nil) +} diff --git a/vendor/github.com/xanzy/go-gitlab/group_variables.go b/vendor/github.com/xanzy/go-gitlab/group_variables.go new file mode 100644 index 00000000..cc28e4fd --- /dev/null +++ b/vendor/github.com/xanzy/go-gitlab/group_variables.go @@ -0,0 +1,171 @@ +// +// Copyright 2018, Patrick Webster +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package gitlab + +import ( + "fmt" + "net/url" +) + +// GroupVariablesService handles communication with the +// group variables related methods of the GitLab API. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_level_variables.html +type GroupVariablesService struct { + client *Client +} + +// GroupVariable represents a GitLab group Variable. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_level_variables.html +type GroupVariable struct { + Key string `json:"key"` + Value string `json:"value"` + Protected bool `json:"protected"` +} + +func (v GroupVariable) String() string { + return Stringify(v) +} + +// ListVariables gets a list of all variables for a group. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_level_variables.html#list-group-variables +func (s *GroupVariablesService) ListVariables(gid interface{}, options ...OptionFunc) ([]*GroupVariable, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/variables", url.QueryEscape(group)) + + req, err := s.client.NewRequest("GET", u, nil, options) + if err != nil { + return nil, nil, err + } + + var vs []*GroupVariable + resp, err := s.client.Do(req, &vs) + if err != nil { + return nil, resp, err + } + + return vs, resp, err +} + +// GetVariable gets a variable. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_level_variables.html#show-variable-details +func (s *GroupVariablesService) GetVariable(gid interface{}, key string, options ...OptionFunc) (*GroupVariable, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/variables/%s", url.QueryEscape(group), url.QueryEscape(key)) + + req, err := s.client.NewRequest("GET", u, nil, options) + if err != nil { + return nil, nil, err + } + + v := new(GroupVariable) + resp, err := s.client.Do(req, v) + if err != nil { + return nil, resp, err + } + + return v, resp, err +} + +// CreateVariable creates a new group variable. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_level_variables.html#create-variable +func (s *GroupVariablesService) CreateVariable(gid interface{}, opt *CreateVariableOptions, options ...OptionFunc) (*GroupVariable, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/variables", url.QueryEscape(group)) + + req, err := s.client.NewRequest("POST", u, opt, options) + if err != nil { + return nil, nil, err + } + + v := new(GroupVariable) + resp, err := s.client.Do(req, v) + if err != nil { + return nil, resp, err + } + + return v, resp, err +} + +// UpdateVariable updates the value of an existing +// group variable. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_level_variables.html#update-variable +func (s *GroupVariablesService) UpdateVariable(gid interface{}, key string, opt *UpdateVariableOptions, options ...OptionFunc) (*GroupVariable, *Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("groups/%s/variables/%s", + url.QueryEscape(group), + url.QueryEscape(key), + ) + + req, err := s.client.NewRequest("PUT", u, opt, options) + if err != nil { + return nil, nil, err + } + + v := new(GroupVariable) + resp, err := s.client.Do(req, v) + if err != nil { + return nil, resp, err + } + + return v, resp, err +} + +// RemoveVariable removes a group's variable.
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/group_level_variables.html#remove-variable +func (s *GroupVariablesService) RemoveVariable(gid interface{}, key string, options ...OptionFunc) (*Response, error) { + group, err := parseID(gid) + if err != nil { + return nil, err + } + u := fmt.Sprintf("groups/%s/variables/%s", + url.QueryEscape(group), + url.QueryEscape(key), + ) + + req, err := s.client.NewRequest("DELETE", u, nil, options) + if err != nil { + return nil, err + } + + return s.client.Do(req, nil) +} diff --git a/vendor/github.com/xanzy/go-gitlab/keys.go b/vendor/github.com/xanzy/go-gitlab/keys.go new file mode 100644 index 00000000..dab9b25a --- /dev/null +++ b/vendor/github.com/xanzy/go-gitlab/keys.go @@ -0,0 +1,70 @@ +// +// Copyright 2018, Patrick Webster +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +package gitlab + +import ( + "fmt" + "net/url" + "time" +) + +// KeysService handles communication with the +// keys related methods of the GitLab API. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/keys.html +type KeysService struct { + client *Client +} + +// Key represents a GitLab user's SSH key. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/keys.html +type Key struct { + ID int `json:"id"` + Title string `json:"title"` + Key string `json:"key"` + CreatedAt *time.Time `json:"created_at"` + User User `json:"user"` +} + +// GetKeyWithUser gets a single key by id along with the associated +// user information. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/keys.html#get-ssh-key-with-user-by-id-of-an-ssh-key +func (s *KeysService) GetKeyWithUser(kid interface{}, options ...OptionFunc) (*Key, *Response, error) { + key, err := parseID(kid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("keys/%s", url.QueryEscape(key)) + + req, err := s.client.NewRequest("GET", u, nil, options) + if err != nil { + return nil, nil, err + } + + k := new(Key) + resp, err := s.client.Do(req, k) + if err != nil { + return nil, resp, err + } + + return k, resp, err +} diff --git a/vendor/github.com/xanzy/go-gitlab/merge_requests.go b/vendor/github.com/xanzy/go-gitlab/merge_requests.go index daddd623..da1794cb 100644 --- a/vendor/github.com/xanzy/go-gitlab/merge_requests.go +++ b/vendor/github.com/xanzy/go-gitlab/merge_requests.go @@ -403,12 +403,17 @@ func (s *MergeRequestsService) GetIssuesClosedOnMerge(pid interface{}, mergeRequ // GitLab API docs: // https://docs.gitlab.com/ce/api/merge_requests.html#create-mr type CreateMergeRequestOptions struct { - Title *string `url:"title,omitempty" json:"title,omitempty"` - Description *string `url:"description,omitempty" json:"description,omitempty"` - SourceBranch *string `url:"source_branch,omitempty" json:"source_branch,omitempty"` - TargetBranch *string `url:"target_branch,omitempty" json:"target_branch,omitempty"` - AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` - TargetProjectID *int `url:"target_project_id,omitempty" json:"target_project_id,omitempty"` + Title *string `url:"title,omitempty" json:"title,omitempty"` + Description *string `url:"description,omitempty" json:"description,omitempty"` + SourceBranch *string `url:"source_branch,omitempty" json:"source_branch,omitempty"` + TargetBranch *string `url:"target_branch,omitempty" json:"target_branch,omitempty"` + Labels Labels `url:"labels,comma,omitempty" json:"labels,omitempty"` + AssigneeID *int `url:"assignee_id,omitempty" json:"assignee_id,omitempty"` + TargetProjectID *int `url:"target_project_id,omitempty" json:"target_project_id,omitempty"` + MilestoneID *int `url:"milestone_id,omitempty" json:"milestone_id,omitempty"` + RemoveSourceBranch *bool `url:"remove_source_branch,omitempty" json:"remove_source_branch,omitempty"` + Squash *bool `url:"squash,omitempty" json:"squash,omitempty"` + AllowCollaboration *bool `url:"allow_collaboration,omitempty" json:"allow_collaboration,omitempty"` } // CreateMergeRequest creates a new merge request. diff --git a/vendor/github.com/xanzy/go-gitlab/project_variables.go b/vendor/github.com/xanzy/go-gitlab/project_variables.go new file mode 100644 index 00000000..5b9f7630 --- /dev/null +++ b/vendor/github.com/xanzy/go-gitlab/project_variables.go @@ -0,0 +1,196 @@ +// +// Copyright 2018, Patrick Webster +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +package gitlab + +import ( + "fmt" + "net/url" +) + +// ProjectVariablesService handles communication with the +// project variables related methods of the GitLab API. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_level_variables.html +type ProjectVariablesService struct { + client *Client +} + +// ProjectVariable represents a GitLab Project Variable. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_level_variables.html +type ProjectVariable struct { + Key string `json:"key"` + Value string `json:"value"` + Protected bool `json:"protected"` + EnvironmentScope string `json:"environment_scope"` +} + +func (v ProjectVariable) String() string { + return Stringify(v) +} + +// ListVariables gets a list of all variables in a project. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_level_variables.html#list-project-variables +func (s *ProjectVariablesService) ListVariables(pid interface{}, options ...OptionFunc) ([]*ProjectVariable, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/variables", url.QueryEscape(project)) + + req, err := s.client.NewRequest("GET", u, nil, options) + if err != nil { + return nil, nil, err + } + + var vs []*ProjectVariable + resp, err := s.client.Do(req, &vs) + if err != nil { + return nil, resp, err + } + + return vs, resp, err +} + +// GetVariable gets a variable. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_level_variables.html#show-variable-details +func (s *ProjectVariablesService) GetVariable(pid interface{}, key string, options ...OptionFunc) (*ProjectVariable, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/variables/%s", url.QueryEscape(project), url.QueryEscape(key)) + + req, err := s.client.NewRequest("GET", u, nil, options) + if err != nil { + return nil, nil, err + } + + v := new(ProjectVariable) + resp, err := s.client.Do(req, v) + if err != nil { + return nil, resp, err + } + + return v, resp, err +} + +// CreateVariableOptions represents the available +// CreateVariable() options. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_level_variables.html#create-variable +type CreateVariableOptions struct { + Key *string `url:"key,omitempty" json:"key,omitempty"` + Value *string `url:"value,omitempty" json:"value,omitempty"` + Protected *bool `url:"protected,omitempty" json:"protected,omitempty"` + EnvironmentScope *string `url:"environment_scope,omitempty" json:"environment_scope,omitempty"` +} + +// CreateVariable creates a new project variable. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_level_variables.html#create-variable +func (s *ProjectVariablesService) CreateVariable(pid interface{}, opt *CreateVariableOptions, options ...OptionFunc) (*ProjectVariable, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/variables", url.QueryEscape(project)) + + req, err := s.client.NewRequest("POST", u, opt, options) + if err != nil { + return nil, nil, err + } + + v := new(ProjectVariable) + resp, err := s.client.Do(req, v) + if err != nil { + return nil, resp, err + } + + return v, resp, err +} + +// UpdateVariableOptions represents the available +// UpdateVariable() options. 
+// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_level_variables.html#update-variable +type UpdateVariableOptions struct { + Key *string `url:"key,omitempty" json:"key,omitempty"` + Value *string `url:"value,omitempty" json:"value,omitempty"` + Protected *bool `url:"protected,omitempty" json:"protected,omitempty"` + EnvironmentScope *string `url:"environment_scope,omitempty" json:"environment_scope,omitempty"` +} + +// UpdateVariable updates the value of an existing +// project variable. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_level_variables.html#update-variable +func (s *ProjectVariablesService) UpdateVariable(pid interface{}, key string, opt *UpdateVariableOptions, options ...OptionFunc) (*ProjectVariable, *Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, nil, err + } + u := fmt.Sprintf("projects/%s/variables/%s", + url.QueryEscape(project), + url.QueryEscape(key), + ) + + req, err := s.client.NewRequest("PUT", u, opt, options) + if err != nil { + return nil, nil, err + } + + v := new(ProjectVariable) + resp, err := s.client.Do(req, v) + if err != nil { + return nil, resp, err + } + + return v, resp, err +} + +// RemoveVariable removes a project's variable. +// +// GitLab API docs: +// https://docs.gitlab.com/ee/api/project_level_variables.html#remove-variable +func (s *ProjectVariablesService) RemoveVariable(pid interface{}, key string, options ...OptionFunc) (*Response, error) { + project, err := parseID(pid) + if err != nil { + return nil, err + } + u := fmt.Sprintf("projects/%s/variables/%s", + url.QueryEscape(project), + url.QueryEscape(key), + ) + + req, err := s.client.NewRequest("DELETE", u, nil, options) + if err != nil { + return nil, err + } + + return s.client.Do(req, nil) +} diff --git a/vendor/github.com/xanzy/go-gitlab/repositories.go b/vendor/github.com/xanzy/go-gitlab/repositories.go index 7d109974..01d767a8 100644 --- a/vendor/github.com/xanzy/go-gitlab/repositories.go +++ b/vendor/github.com/xanzy/go-gitlab/repositories.go @@ -185,8 +185,9 @@ func (c Compare) String() string { // GitLab API docs: // https://docs.gitlab.com/ce/api/repositories.html#compare-branches-tags-or-commits type CompareOptions struct { - From *string `url:"from,omitempty" json:"from,omitempty"` - To *string `url:"to,omitempty" json:"to,omitempty"` + From *string `url:"from,omitempty" json:"from,omitempty"` + To *string `url:"to,omitempty" json:"to,omitempty"` + Straight *bool `url:"straight,omitempty" json:"straight,omitempty"` } // Compare compares branches, tags or commits.
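The hunks above vendor several new go-gitlab services (group issue boards, group- and project-level variables, and keys) and wire them into the Client in gitlab.go. Purely as an illustrative aside, and not part of the vendored diff itself, the sketch below shows how two of those services might be exercised; the token, project path, and group name are hypothetical placeholders, and it assumes the NewClient constructor and the String/Bool pointer helpers that this version of go-gitlab exposes.

package main

import (
	"fmt"
	"log"

	gitlab "github.com/xanzy/go-gitlab"
)

func main() {
	// Hypothetical personal access token; a nil *http.Client falls back to the default client.
	git := gitlab.NewClient(nil, "YOUR_GITLAB_TOKEN")

	// Create a project-level CI variable via the newly added ProjectVariablesService.
	v, _, err := git.ProjectVariables.CreateVariable("mygroup/myproject", &gitlab.CreateVariableOptions{
		Key:       gitlab.String("DEPLOY_ENV"),
		Value:     gitlab.String("staging"),
		Protected: gitlab.Bool(true),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("created variable %s (protected: %v)\n", v.Key, v.Protected)

	// List group-level variables via the newly added GroupVariablesService.
	vars, _, err := git.GroupVariables.ListVariables("mygroup")
	if err != nil {
		log.Fatal(err)
	}
	for _, gv := range vars {
		fmt.Println(gv.Key, "=", gv.Value)
	}
}

The KeysService and GroupIssueBoardsService added above follow the same pattern: construct the client once, then call the service methods with a key ID or group ID.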
diff --git a/vendor/google.golang.org/api/calendar/v3/calendar-api.json b/vendor/google.golang.org/api/calendar/v3/calendar-api.json index 4a24845e..b1dd97e6 100644 --- a/vendor/google.golang.org/api/calendar/v3/calendar-api.json +++ b/vendor/google.golang.org/api/calendar/v3/calendar-api.json @@ -17,7 +17,7 @@ "description": "Manipulates events and other calendar data.", "discoveryVersion": "v1", "documentationLink": "https://developers.google.com/google-apps/calendar/firstapp", - "etag": "\"J3WqvAcMk4eQjJXvfSI4Yr8VouA/ITK5FY7POTu4MHcI4-Fs0HUENwM\"", + "etag": "\"J3WqvAcMk4eQjJXvfSI4Yr8VouA/UfSqMqLGclhjNS9q0g-j705eXE8\"", "icons": { "x16": "http://www.google.com/images/icons/product/calendar-16.png", "x32": "http://www.google.com/images/icons/product/calendar-32.png" @@ -342,7 +342,7 @@ "calendarList": { "methods": { "delete": { - "description": "Deletes an entry on the user's calendar list.", + "description": "Removes a calendar from the user's calendar list.", "httpMethod": "DELETE", "id": "calendar.calendarList.delete", "parameterOrder": [ @@ -362,7 +362,7 @@ ] }, "get": { - "description": "Returns an entry on the user's calendar list.", + "description": "Returns a calendar from the user's calendar list.", "httpMethod": "GET", "id": "calendar.calendarList.get", "parameterOrder": [ @@ -386,7 +386,7 @@ ] }, "insert": { - "description": "Adds an entry to the user's calendar list.", + "description": "Inserts an existing calendar into the user's calendar list.", "httpMethod": "POST", "id": "calendar.calendarList.insert", "parameters": { @@ -408,7 +408,7 @@ ] }, "list": { - "description": "Returns entries on the user's calendar list.", + "description": "Returns the calendars on the user's calendar list.", "httpMethod": "GET", "id": "calendar.calendarList.list", "parameters": { @@ -468,7 +468,7 @@ "supportsSubscription": true }, "patch": { - "description": "Updates an entry on the user's calendar list. This method supports patch semantics.", + "description": "Updates an existing calendar on the user's calendar list. This method supports patch semantics.", "httpMethod": "PATCH", "id": "calendar.calendarList.patch", "parameterOrder": [ @@ -499,7 +499,7 @@ ] }, "update": { - "description": "Updates an entry on the user's calendar list.", + "description": "Updates an existing calendar on the user's calendar list.", "httpMethod": "PUT", "id": "calendar.calendarList.update", "parameterOrder": [ @@ -1604,6 +1604,7 @@ } } }, + "revision": "20180814", "rootUrl": "https://www.googleapis.com/", "schemas": { "Acl": { @@ -1870,23 +1871,11 @@ "id": "CalendarNotification", "properties": { "method": { - "annotations": { - "required": [ - "calendar.calendarList.insert", - "calendar.calendarList.update" - ] - }, - "description": "The method used to deliver the notification. Possible values are: \n- \"email\" - Reminders are sent via email. \n- \"sms\" - Reminders are sent via SMS. This value is read-only and is ignored on inserts and updates. SMS reminders are only available for G Suite customers.", + "description": "The method used to deliver the notification. Possible values are: \n- \"email\" - Reminders are sent via email. \n- \"sms\" - Reminders are sent via SMS. This value is read-only and is ignored on inserts and updates. SMS reminders are only available for G Suite customers. \nRequired when adding a notification.", "type": "string" }, "type": { - "annotations": { - "required": [ - "calendar.calendarList.insert", - "calendar.calendarList.update" - ] - }, - "description": "The type of notification. 
Possible values are: \n- \"eventCreation\" - Notification sent when a new event is put on the calendar. \n- \"eventChange\" - Notification sent when an event is changed. \n- \"eventCancellation\" - Notification sent when an event is cancelled. \n- \"eventResponse\" - Notification sent when an event is changed. \n- \"agenda\" - An agenda with the events of the day (sent out in the morning).", + "description": "The type of notification. Possible values are: \n- \"eventCreation\" - Notification sent when a new event is put on the calendar. \n- \"eventChange\" - Notification sent when an event is changed. \n- \"eventCancellation\" - Notification sent when an event is cancelled. \n- \"eventResponse\" - Notification sent when an attendee responds to the event invitation. \n- \"agenda\" - An agenda with the events of the day (sent out in the morning). \nRequired when adding a notification.", "type": "string" } }, @@ -2222,7 +2211,7 @@ "type": "string" }, "id": { - "description": "The creator's Profile ID, if available. It corresponds to theid field in the People collection of the Google+ API", + "description": "The creator's Profile ID, if available. It corresponds to the id field in the People collection of the Google+ API", "type": "string" }, "self": { @@ -2385,7 +2374,7 @@ "type": "string" }, "id": { - "description": "The organizer's Profile ID, if available. It corresponds to theid field in the People collection of the Google+ API", + "description": "The organizer's Profile ID, if available. It corresponds to the id field in the People collection of the Google+ API", "type": "string" }, "self": { @@ -2398,7 +2387,7 @@ }, "originalStartTime": { "$ref": "EventDateTime", - "description": "For an instance of a recurring event, this is the time at which this event would start according to the recurrence data in the recurring event identified by recurringEventId. Immutable." + "description": "For an instance of a recurring event, this is the time at which this event would start according to the recurrence data in the recurring event identified by recurringEventId. It uniquely identifies the instance within the recurring event series even if the instance was moved to a different time. Immutable." }, "privateCopy": { "default": "false", @@ -2497,14 +2486,7 @@ "type": "string" }, "fileUrl": { - "annotations": { - "required": [ - "calendar.events.import", - "calendar.events.insert", - "calendar.events.update" - ] - }, - "description": "URL link to the attachment.\nFor adding Google Drive file attachments use the same format as in alternateLink property of the Files resource in the Drive API.", + "description": "URL link to the attachment.\nFor adding Google Drive file attachments use the same format as in alternateLink property of the Files resource in the Drive API.\nRequired when adding an attachment.", "type": "string" }, "iconLink": { @@ -2540,18 +2522,11 @@ "type": "string" }, "email": { - "annotations": { - "required": [ - "calendar.events.import", - "calendar.events.insert", - "calendar.events.update" - ] - }, - "description": "The attendee's email address, if available. This field must be present when adding an attendee. It must be a valid email address as per RFC5322.", + "description": "The attendee's email address, if available. This field must be present when adding an attendee. It must be a valid email address as per RFC5322.\nRequired when adding an attendee.", "type": "string" }, "id": { - "description": "The attendee's Profile ID, if available. 
It corresponds to theid field in the People collection of the Google+ API", + "description": "The attendee's Profile ID, if available. It corresponds to the id field in the People collection of the Google+ API", "type": "string" }, "optional": { @@ -2604,29 +2579,11 @@ "id": "EventReminder", "properties": { "method": { - "annotations": { - "required": [ - "calendar.calendarList.insert", - "calendar.calendarList.update", - "calendar.events.import", - "calendar.events.insert", - "calendar.events.update" - ] - }, - "description": "The method used by this reminder. Possible values are: \n- \"email\" - Reminders are sent via email. \n- \"sms\" - Reminders are sent via SMS. These are only available for G Suite customers. Requests to set SMS reminders for other account types are ignored. \n- \"popup\" - Reminders are sent via a UI popup.", + "description": "The method used by this reminder. Possible values are: \n- \"email\" - Reminders are sent via email. \n- \"sms\" - Reminders are sent via SMS. These are only available for G Suite customers. Requests to set SMS reminders for other account types are ignored. \n- \"popup\" - Reminders are sent via a UI popup. \nRequired when adding a reminder.", "type": "string" }, "minutes": { - "annotations": { - "required": [ - "calendar.calendarList.insert", - "calendar.calendarList.update", - "calendar.events.import", - "calendar.events.insert", - "calendar.events.update" - ] - }, - "description": "Number of minutes before the start of the event when the reminder should trigger. Valid values are between 0 and 40320 (4 weeks in minutes).", + "description": "Number of minutes before the start of the event when the reminder should trigger. Valid values are between 0 and 40320 (4 weeks in minutes).\nRequired when adding a reminder.", "format": "int32", "type": "integer" } @@ -2735,12 +2692,12 @@ "id": "FreeBusyRequest", "properties": { "calendarExpansionMax": { - "description": "Maximal number of calendars for which FreeBusy information is to be provided. Optional.", + "description": "Maximal number of calendars for which FreeBusy information is to be provided. Optional. Maximum value is 50.", "format": "int32", "type": "integer" }, "groupExpansionMax": { - "description": "Maximal number of calendar identifiers to be provided for a single group. Optional. An error will be returned for a group with more members than this value.", + "description": "Maximal number of calendar identifiers to be provided for a single group. Optional. An error is returned for a group with more members than this value. Maximum value is 100.", "format": "int32", "type": "integer" }, @@ -2752,12 +2709,12 @@ "type": "array" }, "timeMax": { - "description": "The end of the interval for the query.", + "description": "The end of the interval for the query formatted as per RFC3339.", "format": "date-time", "type": "string" }, "timeMin": { - "description": "The start of the interval for the query.", + "description": "The start of the interval for the query formatted as per RFC3339.", "format": "date-time", "type": "string" }, diff --git a/vendor/google.golang.org/api/calendar/v3/calendar-gen.go b/vendor/google.golang.org/api/calendar/v3/calendar-gen.go index 8bb77909..8f274d67 100644 --- a/vendor/google.golang.org/api/calendar/v3/calendar-gen.go +++ b/vendor/google.golang.org/api/calendar/v3/calendar-gen.go @@ -572,6 +572,7 @@ type CalendarNotification struct { // - "sms" - Reminders are sent via SMS. This value is read-only and is // ignored on inserts and updates. 
SMS reminders are only available for // G Suite customers. + // Required when adding a notification. Method string `json:"method,omitempty"` // Type: The type of notification. Possible values are: @@ -580,9 +581,11 @@ type CalendarNotification struct { // - "eventChange" - Notification sent when an event is changed. // - "eventCancellation" - Notification sent when an event is cancelled. // - // - "eventResponse" - Notification sent when an event is changed. + // - "eventResponse" - Notification sent when an attendee responds to + // the event invitation. // - "agenda" - An agenda with the events of the day (sent out in the // morning). + // Required when adding a notification. Type string `json:"type,omitempty"` // ForceSendFields is a list of field names (e.g. "Method") to @@ -1335,7 +1338,9 @@ type Event struct { // OriginalStartTime: For an instance of a recurring event, this is the // time at which this event would start according to the recurrence data - // in the recurring event identified by recurringEventId. Immutable. + // in the recurring event identified by recurringEventId. It uniquely + // identifies the instance within the recurring event series even if the + // instance was moved to a different time. Immutable. OriginalStartTime *EventDateTime `json:"originalStartTime,omitempty"` // PrivateCopy: Whether this is a private event copy where changes are @@ -1469,7 +1474,7 @@ type EventCreator struct { // Email: The creator's email address, if available. Email string `json:"email,omitempty"` - // Id: The creator's Profile ID, if available. It corresponds to theid + // Id: The creator's Profile ID, if available. It corresponds to the id // field in the People collection of the Google+ API Id string `json:"id,omitempty"` @@ -1600,8 +1605,8 @@ type EventOrganizer struct { // valid email address as per RFC5322. Email string `json:"email,omitempty"` - // Id: The organizer's Profile ID, if available. It corresponds to theid - // field in the People collection of the Google+ API + // Id: The organizer's Profile ID, if available. It corresponds to the + // id field in the People collection of the Google+ API Id string `json:"id,omitempty"` // Self: Whether the organizer corresponds to the calendar on which this @@ -1711,7 +1716,9 @@ type EventAttachment struct { // FileUrl: URL link to the attachment. // For adding Google Drive file attachments use the same format as in - // alternateLink property of the Files resource in the Drive API. + // alternateLink property of the Files resource in the Drive + // API. + // Required when adding an attachment. FileUrl string `json:"fileUrl,omitempty"` // IconLink: URL link to the attachment's icon. Read-only. @@ -1760,9 +1767,10 @@ type EventAttendee struct { // Email: The attendee's email address, if available. This field must be // present when adding an attendee. It must be a valid email address as // per RFC5322. + // Required when adding an attendee. Email string `json:"email,omitempty"` - // Id: The attendee's Profile ID, if available. It corresponds to theid + // Id: The attendee's Profile ID, if available. It corresponds to the id // field in the People collection of the Google+ API Id string `json:"id,omitempty"` @@ -1864,11 +1872,13 @@ type EventReminder struct { // Suite customers. Requests to set SMS reminders for other account // types are ignored. // - "popup" - Reminders are sent via a UI popup. + // Required when adding a reminder. 
Method string `json:"method,omitempty"` // Minutes: Number of minutes before the start of the event when the // reminder should trigger. Valid values are between 0 and 40320 (4 // weeks in minutes). + // Required when adding a reminder. Minutes int64 `json:"minutes,omitempty"` // ForceSendFields is a list of field names (e.g. "Method") to @@ -2040,21 +2050,23 @@ func (s *FreeBusyGroup) MarshalJSON() ([]byte, error) { type FreeBusyRequest struct { // CalendarExpansionMax: Maximal number of calendars for which FreeBusy - // information is to be provided. Optional. + // information is to be provided. Optional. Maximum value is 50. CalendarExpansionMax int64 `json:"calendarExpansionMax,omitempty"` // GroupExpansionMax: Maximal number of calendar identifiers to be - // provided for a single group. Optional. An error will be returned for - // a group with more members than this value. + // provided for a single group. Optional. An error is returned for a + // group with more members than this value. Maximum value is 100. GroupExpansionMax int64 `json:"groupExpansionMax,omitempty"` // Items: List of calendars and/or groups to query. Items []*FreeBusyRequestItem `json:"items,omitempty"` - // TimeMax: The end of the interval for the query. + // TimeMax: The end of the interval for the query formatted as per + // RFC3339. TimeMax string `json:"timeMax,omitempty"` - // TimeMin: The start of the interval for the query. + // TimeMin: The start of the interval for the query formatted as per + // RFC3339. TimeMin string `json:"timeMin,omitempty"` // TimeZone: Time zone used in the response. Optional. The default is @@ -3418,7 +3430,7 @@ type CalendarListDeleteCall struct { header_ http.Header } -// Delete: Deletes an entry on the user's calendar list. +// Delete: Removes a calendar from the user's calendar list. func (r *CalendarListService) Delete(calendarId string) *CalendarListDeleteCall { c := &CalendarListDeleteCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.calendarId = calendarId @@ -3481,7 +3493,7 @@ func (c *CalendarListDeleteCall) Do(opts ...googleapi.CallOption) error { } return nil // { - // "description": "Deletes an entry on the user's calendar list.", + // "description": "Removes a calendar from the user's calendar list.", // "httpMethod": "DELETE", // "id": "calendar.calendarList.delete", // "parameterOrder": [ @@ -3514,7 +3526,7 @@ type CalendarListGetCall struct { header_ http.Header } -// Get: Returns an entry on the user's calendar list. +// Get: Returns a calendar from the user's calendar list. func (r *CalendarListService) Get(calendarId string) *CalendarListGetCall { c := &CalendarListGetCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.calendarId = calendarId @@ -3615,7 +3627,7 @@ func (c *CalendarListGetCall) Do(opts ...googleapi.CallOption) (*CalendarListEnt } return ret, nil // { - // "description": "Returns an entry on the user's calendar list.", + // "description": "Returns a calendar from the user's calendar list.", // "httpMethod": "GET", // "id": "calendar.calendarList.get", // "parameterOrder": [ @@ -3651,7 +3663,7 @@ type CalendarListInsertCall struct { header_ http.Header } -// Insert: Adds an entry to the user's calendar list. +// Insert: Inserts an existing calendar into the user's calendar list. 
func (r *CalendarListService) Insert(calendarlistentry *CalendarListEntry) *CalendarListInsertCall { c := &CalendarListInsertCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.calendarlistentry = calendarlistentry @@ -3751,7 +3763,7 @@ func (c *CalendarListInsertCall) Do(opts ...googleapi.CallOption) (*CalendarList } return ret, nil // { - // "description": "Adds an entry to the user's calendar list.", + // "description": "Inserts an existing calendar into the user's calendar list.", // "httpMethod": "POST", // "id": "calendar.calendarList.insert", // "parameters": { @@ -3785,7 +3797,7 @@ type CalendarListListCall struct { header_ http.Header } -// List: Returns entries on the user's calendar list. +// List: Returns the calendars on the user's calendar list. func (r *CalendarListService) List() *CalendarListListCall { c := &CalendarListListCall{s: r.s, urlParams_: make(gensupport.URLParams)} return c @@ -3947,7 +3959,7 @@ func (c *CalendarListListCall) Do(opts ...googleapi.CallOption) (*CalendarList, } return ret, nil // { - // "description": "Returns entries on the user's calendar list.", + // "description": "Returns the calendars on the user's calendar list.", // "httpMethod": "GET", // "id": "calendar.calendarList.list", // "parameters": { @@ -4041,8 +4053,8 @@ type CalendarListPatchCall struct { header_ http.Header } -// Patch: Updates an entry on the user's calendar list. This method -// supports patch semantics. +// Patch: Updates an existing calendar on the user's calendar list. This +// method supports patch semantics. func (r *CalendarListService) Patch(calendarId string, calendarlistentry *CalendarListEntry) *CalendarListPatchCall { c := &CalendarListPatchCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.calendarId = calendarId @@ -4146,7 +4158,7 @@ func (c *CalendarListPatchCall) Do(opts ...googleapi.CallOption) (*CalendarListE } return ret, nil // { - // "description": "Updates an entry on the user's calendar list. This method supports patch semantics.", + // "description": "Updates an existing calendar on the user's calendar list. This method supports patch semantics.", // "httpMethod": "PATCH", // "id": "calendar.calendarList.patch", // "parameterOrder": [ @@ -4190,7 +4202,7 @@ type CalendarListUpdateCall struct { header_ http.Header } -// Update: Updates an entry on the user's calendar list. +// Update: Updates an existing calendar on the user's calendar list. 
func (r *CalendarListService) Update(calendarId string, calendarlistentry *CalendarListEntry) *CalendarListUpdateCall { c := &CalendarListUpdateCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.calendarId = calendarId @@ -4294,7 +4306,7 @@ func (c *CalendarListUpdateCall) Do(opts ...googleapi.CallOption) (*CalendarList } return ret, nil // { - // "description": "Updates an entry on the user's calendar list.", + // "description": "Updates an existing calendar on the user's calendar list.", // "httpMethod": "PUT", // "id": "calendar.calendarList.update", // "parameterOrder": [ diff --git a/vendor/google.golang.org/api/sheets/v4/sheets-api.json b/vendor/google.golang.org/api/sheets/v4/sheets-api.json index 963d1730..e1db22a4 100644 --- a/vendor/google.golang.org/api/sheets/v4/sheets-api.json +++ b/vendor/google.golang.org/api/sheets/v4/sheets-api.json @@ -808,7 +808,7 @@ } } }, - "revision": "20180611", + "revision": "20180727", "rootUrl": "https://sheets.googleapis.com/", "schemas": { "AddBandingRequest": { @@ -2399,7 +2399,7 @@ "type": "object" }, "Color": { - "description": "Represents a color in the RGBA color space. This representation is designed\nfor simplicity of conversion to/from color representations in various\nlanguages over compactness; for example, the fields of this representation\ncan be trivially provided to the constructor of \"java.awt.Color\" in Java; it\ncan also be trivially provided to UIColor's \"+colorWithRed:green:blue:alpha\"\nmethod in iOS; and, with just a little work, it can be easily formatted into\na CSS \"rgba()\" string in JavaScript, as well. Here are some examples:\n\nExample (Java):\n\n import com.google.type.Color;\n\n // ...\n public static java.awt.Color fromProto(Color protocolor) {\n float alpha = protocolor.hasAlpha()\n ? 
protocolor.getAlpha().getValue()\n : 1.0;\n\n return new java.awt.Color(\n protocolor.getRed(),\n protocolor.getGreen(),\n protocolor.getBlue(),\n alpha);\n }\n\n public static Color toProto(java.awt.Color color) {\n float red = (float) color.getRed();\n float green = (float) color.getGreen();\n float blue = (float) color.getBlue();\n float denominator = 255.0;\n Color.Builder resultBuilder =\n Color\n .newBuilder()\n .setRed(red / denominator)\n .setGreen(green / denominator)\n .setBlue(blue / denominator);\n int alpha = color.getAlpha();\n if (alpha != 255) {\n result.setAlpha(\n FloatValue\n .newBuilder()\n .setValue(((float) alpha) / denominator)\n .build());\n }\n return resultBuilder.build();\n }\n // ...\n\nExample (iOS / Obj-C):\n\n // ...\n static UIColor* fromProto(Color* protocolor) {\n float red = [protocolor red];\n float green = [protocolor green];\n float blue = [protocolor blue];\n FloatValue* alpha_wrapper = [protocolor alpha];\n float alpha = 1.0;\n if (alpha_wrapper != nil) {\n alpha = [alpha_wrapper value];\n }\n return [UIColor colorWithRed:red green:green blue:blue alpha:alpha];\n }\n\n static Color* toProto(UIColor* color) {\n CGFloat red, green, blue, alpha;\n if (![color getRed:\u0026red green:\u0026green blue:\u0026blue alpha:\u0026alpha]) {\n return nil;\n }\n Color* result = [Color alloc] init];\n [result setRed:red];\n [result setGreen:green];\n [result setBlue:blue];\n if (alpha \u003c= 0.9999) {\n [result setAlpha:floatWrapperWithValue(alpha)];\n }\n [result autorelease];\n return result;\n }\n // ...\n\n Example (JavaScript):\n\n // ...\n\n var protoToCssColor = function(rgb_color) {\n var redFrac = rgb_color.red || 0.0;\n var greenFrac = rgb_color.green || 0.0;\n var blueFrac = rgb_color.blue || 0.0;\n var red = Math.floor(redFrac * 255);\n var green = Math.floor(greenFrac * 255);\n var blue = Math.floor(blueFrac * 255);\n\n if (!('alpha' in rgb_color)) {\n return rgbToCssColor_(red, green, blue);\n }\n\n var alphaFrac = rgb_color.alpha.value || 0.0;\n var rgbParams = [red, green, blue].join(',');\n return ['rgba(', rgbParams, ',', alphaFrac, ')'].join('');\n };\n\n var rgbToCssColor_ = function(red, green, blue) {\n var rgbNumber = new Number((red \u003c\u003c 16) | (green \u003c\u003c 8) | blue);\n var hexString = rgbNumber.toString(16);\n var missingZeros = 6 - hexString.length;\n var resultBuilder = ['#'];\n for (var i = 0; i \u003c missingZeros; i++) {\n resultBuilder.push('0');\n }\n resultBuilder.push(hexString);\n return resultBuilder.join('');\n };\n\n // ...", + "description": "Represents a color in the RGBA color space. This representation is designed\nfor simplicity of conversion to/from color representations in various\nlanguages over compactness; for example, the fields of this representation\ncan be trivially provided to the constructor of \"java.awt.Color\" in Java; it\ncan also be trivially provided to UIColor's \"+colorWithRed:green:blue:alpha\"\nmethod in iOS; and, with just a little work, it can be easily formatted into\na CSS \"rgba()\" string in JavaScript, as well. Here are some examples:\n\nExample (Java):\n\n import com.google.type.Color;\n\n // ...\n public static java.awt.Color fromProto(Color protocolor) {\n float alpha = protocolor.hasAlpha()\n ? 
protocolor.getAlpha().getValue()\n : 1.0;\n\n return new java.awt.Color(\n protocolor.getRed(),\n protocolor.getGreen(),\n protocolor.getBlue(),\n alpha);\n }\n\n public static Color toProto(java.awt.Color color) {\n float red = (float) color.getRed();\n float green = (float) color.getGreen();\n float blue = (float) color.getBlue();\n float denominator = 255.0;\n Color.Builder resultBuilder =\n Color\n .newBuilder()\n .setRed(red / denominator)\n .setGreen(green / denominator)\n .setBlue(blue / denominator);\n int alpha = color.getAlpha();\n if (alpha != 255) {\n result.setAlpha(\n FloatValue\n .newBuilder()\n .setValue(((float) alpha) / denominator)\n .build());\n }\n return resultBuilder.build();\n }\n // ...\n\nExample (iOS / Obj-C):\n\n // ...\n static UIColor* fromProto(Color* protocolor) {\n float red = [protocolor red];\n float green = [protocolor green];\n float blue = [protocolor blue];\n FloatValue* alpha_wrapper = [protocolor alpha];\n float alpha = 1.0;\n if (alpha_wrapper != nil) {\n alpha = [alpha_wrapper value];\n }\n return [UIColor colorWithRed:red green:green blue:blue alpha:alpha];\n }\n\n static Color* toProto(UIColor* color) {\n CGFloat red, green, blue, alpha;\n if (![color getRed:\u0026red green:\u0026green blue:\u0026blue alpha:\u0026alpha]) {\n return nil;\n }\n Color* result = [[Color alloc] init];\n [result setRed:red];\n [result setGreen:green];\n [result setBlue:blue];\n if (alpha \u003c= 0.9999) {\n [result setAlpha:floatWrapperWithValue(alpha)];\n }\n [result autorelease];\n return result;\n }\n // ...\n\n Example (JavaScript):\n\n // ...\n\n var protoToCssColor = function(rgb_color) {\n var redFrac = rgb_color.red || 0.0;\n var greenFrac = rgb_color.green || 0.0;\n var blueFrac = rgb_color.blue || 0.0;\n var red = Math.floor(redFrac * 255);\n var green = Math.floor(greenFrac * 255);\n var blue = Math.floor(blueFrac * 255);\n\n if (!('alpha' in rgb_color)) {\n return rgbToCssColor_(red, green, blue);\n }\n\n var alphaFrac = rgb_color.alpha.value || 0.0;\n var rgbParams = [red, green, blue].join(',');\n return ['rgba(', rgbParams, ',', alphaFrac, ')'].join('');\n };\n\n var rgbToCssColor_ = function(red, green, blue) {\n var rgbNumber = new Number((red \u003c\u003c 16) | (green \u003c\u003c 8) | blue);\n var hexString = rgbNumber.toString(16);\n var missingZeros = 6 - hexString.length;\n var resultBuilder = ['#'];\n for (var i = 0; i \u003c missingZeros; i++) {\n resultBuilder.push('0');\n }\n resultBuilder.push(hexString);\n return resultBuilder.join('');\n };\n\n // ...", "id": "Color", "properties": { "alpha": { diff --git a/vendor/google.golang.org/api/sheets/v4/sheets-gen.go b/vendor/google.golang.org/api/sheets/v4/sheets-gen.go index 9c23d015..61649011 100644 --- a/vendor/google.golang.org/api/sheets/v4/sheets-gen.go +++ b/vendor/google.golang.org/api/sheets/v4/sheets-gen.go @@ -3185,7 +3185,7 @@ func (s *ClearValuesResponse) MarshalJSON() ([]byte, error) { // alpha:&alpha]) { // return nil; // } -// Color* result = [Color alloc] init]; +// Color* result = [[Color alloc] init]; // [result setRed:red]; // [result setGreen:green]; // [result setBlue:blue];
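To illustrate the freebusy documentation changes above (RFC3339-formatted TimeMin/TimeMax and the newly stated expansion limits of 50 calendars and 100 group members), here is a hedged sketch against the vendored calendar/v3 client. The calendar.New constructor, the Freebusy.Query(...).Do() chain, and the FreeBusyRequestItem.Id field are assumed from that package, not from this diff.

// Sketch only: calendar.New, srv.Freebusy.Query(...).Do() and
// FreeBusyRequestItem.Id are assumed from the vendored
// google.golang.org/api/calendar/v3 package; the field semantics
// (RFC3339 bounds, expansion caps) come from the documentation diff above.
package main

import (
	"fmt"
	"log"
	"net/http"
	"time"

	calendar "google.golang.org/api/calendar/v3"
)

func queryFreeBusy(client *http.Client) {
	srv, err := calendar.New(client) // client must already carry OAuth2 credentials
	if err != nil {
		log.Fatal(err)
	}

	now := time.Now()
	req := &calendar.FreeBusyRequest{
		// TimeMin/TimeMax must be RFC3339 timestamps, per the updated docs.
		TimeMin: now.Format(time.RFC3339),
		TimeMax: now.Add(24 * time.Hour).Format(time.RFC3339),
		// Optional caps; the API now documents maxima of 50 and 100.
		CalendarExpansionMax: 10,
		GroupExpansionMax:    20,
		Items: []*calendar.FreeBusyRequestItem{
			{Id: "primary"}, // assumed field name
		},
	}

	resp, err := srv.Freebusy.Query(req).Do()
	if err != nil {
		log.Fatal(err)
	}
	for id, cal := range resp.Calendars {
		fmt.Println(id, "busy slots:", len(cal.Busy))
	}
}

func main() {
	// In real use, build an *http.Client from oauth2 credentials; the
	// default client is only a placeholder so the sketch compiles.
	queryFreeBusy(http.DefaultClient)
}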