import functools
import importlib
import json
import os
import re
import sys
import types

import pystache
from pygments import lexer as pygments_lexer
from pygments.token import _TokenType

# Mustache template for the generated Chroma lexer source file.
TEMPLATE = r'''
package lexers

import (
    . "github.com/alecthomas/chroma" // nolint
    "github.com/alecthomas/chroma/lexers/internal"
)

// {{upper_name}} lexer.
var {{upper_name}} = internal.Register(MustNewLexer(
    &Config{
        Name:      "{{name}}",
        Aliases:   []string{ {{#aliases}}"{{.}}", {{/aliases}} },
        Filenames: []string{ {{#filenames}}"{{.}}", {{/filenames}} },
        MimeTypes: []string{ {{#mimetypes}}"{{.}}", {{/mimetypes}} },
{{#re_not_multiline}}
        NotMultiline: true,
{{/re_not_multiline}}
{{#re_dotall}}
        DotAll: true,
{{/re_dotall}}
{{#re_ignorecase}}
        CaseInsensitive: true,
{{/re_ignorecase}}
    },
    Rules{
{{#tokens}}
        "{{state}}": {
            {{#rules}}
            {{{.}}},
            {{/rules}}
        },
{{/tokens}}
    },
))
'''


def go_regex(s):
    return go_string(s)


def go_string(s):
    # Prefer a Go raw string literal; fall back to a JSON-quoted string
    # when the value itself contains a backtick.
    if '`' not in s:
        return '`' + s + '`'
    return json.dumps(s)


def to_camel_case(snake_str):
    components = snake_str.split('_')
    return ''.join(x.title() for x in components)


def warning(message):
    print('warning: ' + message, file=sys.stderr)


def resolve_emitter(emitter):
    if isinstance(emitter, types.FunctionType):
        if repr(emitter).startswith('