diff --git a/grammars/python3.cson b/grammars/python3.cson new file mode 100644 index 0000000..92ff6ea --- /dev/null +++ b/grammars/python3.cson @@ -0,0 +1,1399 @@ +scopeName: 'source.python.3' +name: 'Python3' +fileTypes: [ + 'cpy' + 'gyp' + 'gypi' + 'kv' + 'py' + 'pyw' + 'rpy' + 'SConscript' + 'SConstruct' + 'Sconstruct' + 'sconstruct' + 'Snakefile' + 'tac' + 'wsgi' +] +firstLineMatch: '^#!/.*\\bpython[\\d\\.]*\\b' +repository: + builtin_functions: + match: ''' + (?x)\\b( + abs|all|any|ascii|bin|callable|chr|classmethod|compile|delattr|dir|divmod + |enumerate|eval|exec|filter|format|getattr|globals|hasattr|hash|help|hex + |id|input|isinstance|issubclass|iter|len|locals|map|max|min|next|oct|open + |ord|pow|print|property|repr|reversed|round|setattr|sorted|staticmethod + |sum|super|vars|zip|__import__ + )\\b + ''' + name: 'support.function.builtin.python.3' + + builtin_types: + match: ''' + (?x)\\b( + bool|bytearray|bytes|complex|dict|float|frozenset|int|list|memoryview + |object|range|set|slice|str|tuple + )\\b + ''' + name: 'support.type.python.3' + + builtin_exceptions: + match: ''' + (?x)\\b( + (Base)?Exception + |((Is|Not)ADirectory|Arithmetic|Assertion|Attribute|BlockingIO|BrokenPipe + |Buffer|ChildProcess|Connection(Aborted|Refused|Reset)?|EOF + |File(Exists|NotFound)|FloatingPoint|Import|Indentation|Index + |Interrupted|Key|(Process)?Lookup|Memory|Name|NotImplemented|OS + |Overflow|Permission|Recursion|Runtime|Syntax|System|Tab|Timeout + |Type|UnboundLocal|Unicode(Encode|Decode|Translate)?|Value + |ZeroDivision)Error + |(User|(Pending)?Deprecation|Syntax|Runtime|Import|Unicode|Bytes + |Resource)Warning + |GeneratorExit|KeyboardInterrupt|StopIteration|StopAsyncIteration + |SystemExit + )\\b + ''' + name: 'support.type.exception.python.3' + + constant_placeholder_printf: + patterns: [ + { + match: '%%' + name: 'constant.other.placeholder-escape.printf.python.3' + } + { + match: '%(\\(\\w+\\))?#?0?\\-?[ ]?\\+?([0-9]*|\\*)(\\.([0-9]*|\\*))?([hlL][a-z]|[a-z%])' + name: 'constant.other.placeholder.printf.python.3' + } + ] + + constant_placeholder_format: + patterns: [ + { + match: '\\{\\{|\\}\\}' + name: 'constant.other.placeholder-escape.format.python.3' + } + { + match: '\\{([!\\[\\].:\\w ]+)?\\}' + name: 'constant.other.placeholder.format.python.3' + } + ] + + docstrings: + patterns: [ + { + begin: '^\\s*(?=([uU]|[rR][bB]?|[bB][rR]?)?""")' + end: '(?<=""")' + name: 'comment.block.python.3' + patterns: [ + { + include: '#string_quoted_double' + } + ] + } + { + begin: '^\\s*(?=([uU]|[rR][bB]?|[bB][rR]?)?\'\'\')' + end: '(?<=\'\'\')' + name: 'comment.block.python.3' + patterns: [ + { + include: '#string_quoted_single' + } + ] + } + ] + + line_comments: + begin: '(^[ \\t]+)?(?=#)' + beginCaptures: + '1': + name: 'punctuation.whitespace.comment.leading.python.3' + end: '(?!\\G)' + patterns: [ + { + begin: '#' + beginCaptures: + '0': + name: 'punctuation.definition.comment.python.3' + end: '\\n' + name: 'comment.line.number-sign.python.3' + } + ] + + dotted_name: + begin: '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[A-Za-z_][A-Za-z0-9_]*)*)' + end: '(?![A-Za-z0-9_\\.])' + patterns: [ + { + begin: '(\\.)(?=[A-Za-z_][A-Za-z0-9_]*)' + end: '(?![A-Za-z0-9_])' + patterns: [ + { + include: '#magic_function_names' + } + { + include: '#magic_variable_names' + } + { + include: '#illegal_names' + } + { + include: '#generic_names' + } + ] + } + { + begin: '(?\\=|\\=\\=|<|>|\\!\\=' + name: 'keyword.operator.comparison.python.3' } + { + match: 
'\\+\\=|-\\=|\\*\\=|/\\=|//\\=|%\\=|@=|&\\=|\\|\\=|\\^\\=|>>\\=|<<\\=|\\*\\*\\=' + name: 'keyword.operator.assignment.augmented.python.3' + } + { + match: '\\+|\\-|\\*|\\*\\*|/|//|%|@|<<|>>|&|\\||\\^|~' + name: 'keyword.operator.arithmetic.python.3' + } + { + match: '\\=' + name: 'keyword.operator.assignment.python.3' + } + { + begin: '^\\s*(class)\\s+(?=[a-zA-Z_][a-zA-Z_0-9]*\\s*\\:)' + beginCaptures: + '1': + name: 'storage.type.class.python.3' + contentName: 'entity.name.type.class.python.3' + end: '\\s*(:)' + endCaptures: + '1': + name: 'punctuation.section.class.begin.python.3' + name: 'meta.class.old-style.python.3' + patterns: [ + { + include: '#entity_name_class' + } + ] + } + { + begin: '^\\s*(class)\\s+(?=[a-zA-Z_][a-zA-Z_0-9]*\\s*\\()' + beginCaptures: + '1': + name: 'storage.type.class.python.3' + end: '(\\))\\s*(?:(\\:)|(.*$\\n?))' + endCaptures: + '1': + name: 'punctuation.definition.inheritance.end.python.3' + '2': + name: 'punctuation.section.class.begin.python.3' + '3': + name: 'invalid.illegal.missing-section-begin.python.3' + name: 'meta.class.python.3' + patterns: [ + { + begin: '(?=[A-Za-z_][A-Za-z0-9_]*)' + contentName: 'entity.name.type.class.python.3' + end: '(?![A-Za-z0-9_])' + patterns: [ + { + include: '#entity_name_class' + } + ] + } + { + begin: '(\\()' + beginCaptures: + '1': + name: 'punctuation.definition.inheritance.begin.python.3' + contentName: 'meta.class.inheritance.python.3' + end: '(?=\\)|:)' + patterns: [ + { + begin: '(?<=\\(|,)\\s*' + contentName: 'entity.other.inherited-class.python.3' + end: '\\s*(?:(,)|(?=\\)))' + endCaptures: + '1': + name: 'punctuation.separator.inheritance.python.3' + patterns: [ + { + include: '$self' + } + ] + } + ] + } + ] + } + { + begin: '^\\s*(class)\\s+(?=[a-zA-Z_][a-zA-Z_0-9])' + beginCaptures: + '1': + name: 'storage.type.class.python.3' + end: '(\\()|(\\s*$\\n?|#.*$\\n?)' + endCaptures: + '1': + name: 'punctuation.definition.inheritance.begin.python.3' + '2': + name: 'invalid.illegal.missing-inheritance.python.3' + name: 'meta.class.python.3' + patterns: [ + { + begin: '(?=[A-Za-z_][A-Za-z0-9_]*)' + contentName: 'entity.name.type.class.python.3' + end: '(?![A-Za-z0-9_])' + patterns: [ + { + include: '#entity_name_function' + } + ] + } + ] + } + { + begin: '(?<=\\)|\\])\\s*(\\()' + beginCaptures: + '1': + name: 'punctuation.definition.arguments.begin.python.3' + contentName: 'meta.function-call.arguments.python.3' + end: '(\\))' + endCaptures: + '1': + name: 'punctuation.definition.arguments.end.python.3' + name: 'meta.function-call.python.3' + patterns: [ + { + include: '#keyword_arguments' + } + { + include: '$self' + } + ] + } + { + begin: '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[a-zA-Z_][a-zA-Z_0-9]*)*\\s*\\()' + end: '(\\))' + endCaptures: + '1': + name: 'punctuation.definition.arguments.end.python.3' + name: 'meta.function-call.python.3' + patterns: [ + { + begin: '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[A-Za-z_][A-Za-z0-9_]*)*\\s*\\()' + end: '(?=\\s*\\()' + patterns: [ + { + include: '#dotted_name' + } + ] + } + { + begin: '(\\()' + beginCaptures: + '1': + name: 'punctuation.definition.arguments.begin.python.3' + contentName: 'meta.function-call.arguments.python.3' + end: '(?=\\))' + patterns: [ + { + include: '#keyword_arguments' + } + { + include: '$self' + } + ] + } + ] + } + { + begin: '(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[a-zA-Z_][a-zA-Z_0-9]*)*\\s*\\[)' + end: '(\\])' + endCaptures: + '1': + name: 'punctuation.definition.arguments.end.python.3' + name: 'meta.item-access.python.3' + patterns: [ + { + begin: 
'(?=[A-Za-z_][A-Za-z0-9_]*(?:\\.[A-Za-z_][A-Za-z0-9_]*)*\\s*\\[)' + end: '(?=\\s*\\[)' + patterns: [ + { + include: '#dotted_name' + } + ] + } + { + begin: '(\\[)' + beginCaptures: + '1': + name: 'punctuation.definition.arguments.begin.python.3' + contentName: 'meta.item-access.arguments.python.3' + end: '(?=\\])' + patterns: [ + { + include: '$self' + } + ] + } + ] + } + { + begin: '(?<=\\)|\\])\\s+(\\[)' + beginCaptures: + '1': + name: 'punctuation.definition.arguments.begin.python.3' + contentName: 'meta.item-access.arguments.python.3' + end: '(\\])' + endCaptures: + '1': + name: 'punctuation.definition.arguments.end.python.3' + name: 'meta.item-access.python.3' + patterns: [ + { + include: '$self' + } + ] + } + { + captures: + '1': + name: 'storage.type.function.python.3' + match: '\\b(def|lambda)\\b' + } + { + captures: + '1': + name: 'storage.type.class.python.3' + match: '\\b(class)\\b' + } + { + include: '#line_continuation' + } + { + match: '\\b(None|True|False|Ellipsis|NotImplemented)\\b' + name: 'constant.language.python.3' + } + { + include: '#string_quoted_single' + } + { + include: '#string_quoted_double' + } + { + include: '#dotted_name' + } + { + include: '#language_variables' + } + { + begin: '(\\()' + end: '(\\))' + patterns: [ + { + include: '$self' + } + ] + } + { + captures: + '1': + name: 'punctuation.definition.list.begin.python.3' + '2': + name: 'meta.empty-list.python.3' + '3': + name: 'punctuation.definition.list.end.python.3' + match: '(\\[)(\\s*(\\]))\\b' + } + { + begin: '(\\[)' + beginCaptures: + '1': + name: 'punctuation.definition.list.begin.python.3' + end: '(\\])' + endCaptures: + '1': + name: 'punctuation.definition.list.end.python.3' + name: 'meta.structure.list.python.3' + patterns: [ + { + begin: '(?<=\\[|\\,)\\s*(?![\\],])' + contentName: 'meta.structure.list.item.python.3' + end: '\\s*(?:(,)|(?=\\]))' + endCaptures: + '1': + name: 'punctuation.separator.list.python.3' + patterns: [ + { + include: '$self' + } + ] + } + ] + } + { + captures: + '1': + name: 'punctuation.definition.tuple.begin.python.3' + '2': + name: 'meta.empty-tuple.python.3' + '3': + name: 'punctuation.definition.tuple.end.python.3' + match: '(\\()(\\s*(\\)))' + name: 'meta.structure.tuple.python.3' + } + { + captures: + '1': + name: 'punctuation.definition.dictionary.begin.python.3' + '2': + name: 'meta.empty-dictionary.python.3' + '3': + name: 'punctuation.definition.dictionary.end.python.3' + match: '(\\{)(\\s*(\\}))' + name: 'meta.structure.dictionary.python.3' + } + { + begin: '(\\{)' + beginCaptures: + '1': + name: 'punctuation.definition.dictionary.begin.python.3' + end: '(\\})' + endCaptures: + '1': + name: 'punctuation.definition.dictionary.end.python.3' + name: 'meta.structure.dictionary.python.3' + patterns: [ + { + begin: '(?<=\\{|\\,|^)\\s*(?![\\},])' + contentName: 'meta.structure.dictionary.key.python.3' + end: '\\s*(?:(?=\\})|(\\:))' + endCaptures: + '1': + name: 'punctuation.separator.valuepair.dictionary.python.3' + patterns: [ + { + include: '$self' + } + ] + } + { + begin: '(?<=\\:|^)\\s*' + contentName: 'meta.structure.dictionary.value.python.3' + end: '\\s*(?:(?=\\})|(,))' + endCaptures: + '1': + name: 'punctuation.separator.dictionary.python.3' + patterns: [ + { + include: '$self' + } + ] + } + ] + } +] diff --git a/grammars/regular expressions (python3).cson b/grammars/regular expressions (python3).cson new file mode 100644 index 0000000..0ba0354 --- /dev/null +++ b/grammars/regular expressions (python3).cson @@ -0,0 +1,166 @@ +comment: 'Matches Python\'s 
regular expression syntax.' +name: 'Regular Expressions (Python)' +scopeName: 'source.regexp.python.3' +fileTypes: [ + 're' +] +foldingStartMarker: '(/\\*|\\{|\\()' +foldingStopMarker: '(\\*/|\\}|\\))' +patterns: [ + { + match: '\\\\[bBAZzG]|\\^|\\$' + name: 'keyword.control.anchor.regexp' + } + { + match: '\\\\[1-9][0-9]?' + name: 'keyword.other.back-reference.regexp' + } + {include: '#character_class'} + {include: '#escaped_char'} + { + match: '[?+*][?+]?|\\{(\\d+,\\d+|\\d+,|,\\d+|\\d+)\\}\\??' + name: 'keyword.operator.quantifier.regexp' + } + { + match: '\\|' + name: 'keyword.operator.or.regexp' + } + { + begin: '\\(\\?\\#' + end: '\\)' + name: 'comment.block.regexp' + } + { + comment: 'We are restrictive in what we allow to go after the comment character to avoid false positives, since the availability of comments depends on regexp flags.' + match: '(?<=^|\\s)#\\s[[a-zA-Z0-9,. \\t?!-:][^\\x{00}-\\x{7F}]]*$' + name: 'comment.line.number-sign.regexp' + } + { + match: '\\(\\?[iLmsux]+\\)' + name: 'keyword.other.option-toggle.regexp' + } + { + match: '(\\()(\\?P=([a-zA-Z_][a-zA-Z_0-9]*\\w*))(\\))' + name: 'keyword.other.back-reference.named.regexp' + } + { + begin: '(\\()((\\?=)|(\\?!)|(\\?<=)|(\\?)|(\\?:))?' + beginCaptures: + '1': + name: 'punctuation.definition.group.regexp' + '3': + name: 'punctuation.definition.group.capture.regexp' + '4': + name: 'entity.name.section.group.regexp' + '5': + name: 'punctuation.definition.group.capture.regexp' + '6': + name: 'punctuation.definition.group.no-capture.regexp' + end: '(\\))' + endCaptures: + '1': + name: 'punctuation.definition.group.regexp' + name: 'meta.group.regexp' + patterns: [ + {include: '$self'} + ] + } +] + +repository: + character_class: + patterns: [ + { + match: '\\\\[wWsSdDhH]|\\.' + name: 'constant.character.character-class.regexp' + } + { + begin: '(\\[)(\\^)?' + beginCaptures: + '1': + name: 'punctuation.definition.character-class.regexp' + '2': + name: 'keyword.operator.negation.regexp' + end: '(\\])' + endCaptures: + '1': + name: 'punctuation.definition.character-class.regexp' + name: 'constant.other.character-class.set.regexp' + patterns: [ + {include: '#character_class'} + {include: '#escaped_char'} + ] + } + ] + + escaped_char: + patterns: [ + { + match: '(\\\\")|(\\\\\')' + name: 'constant.character.escape.quote.regexp' + } + { + match: '\\\\(0[0-7]|[0-7]{3})' + name: 'constant.character.escape.octal.regexp' + } + { + match: '\\\\x[[:xdigit:]]{2}' + name: 'constant.character.escape.hex.regexp' + } + { + match: '\\\\(u[[:xdigit:]]{4}|U[[:xdigit:]]{8})' + name: 'constant.character.escape.unicode.regexp' + } + { + match: '\\\\[.^$*+?|{}()\\[\\]\\\\]' + name: 'constant.character.escape.operator.regexp' + } + { + match: '\\\\[abfnrtv]' + name: 'constant.character.escape.regexp' + } + { + match: '\\\\.' 
+ name: 'invalid.deprecated.backslash-escape.regexp' + } + ] diff --git a/spec/python3-spec.coffee b/spec/python3-spec.coffee new file mode 100644 index 0000000..ca3a8a4 --- /dev/null +++ b/spec/python3-spec.coffee @@ -0,0 +1,336 @@ +describe "Python3 grammar", -> + grammar = null + + beforeEach -> + waitsForPromise -> + atom.packages.activatePackage("language-python") + + runs -> + grammar = atom.grammars.grammarForScopeName("source.python.3") + + it "parses the grammar", -> + expect(grammar).toBeDefined() + expect(grammar.scopeName).toBe "source.python.3" + + it "tokenizes multi-line strings", -> + tokens = grammar.tokenizeLines('"1\\\n2"') + + # Line 0 + expect(tokens[0][0].value).toBe '"' + expect(tokens[0][0].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.unicode.python.3', 'punctuation.definition.string.begin.python.3'] + + expect(tokens[0][1].value).toBe '1' + expect(tokens[0][1].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.unicode.python.3'] + + expect(tokens[0][2].value).toBe '\\' + expect(tokens[0][2].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.unicode.python.3', 'constant.character.escape.newline.python.3'] + + expect(tokens[0][3]).not.toBeDefined() + + # Line 1 + expect(tokens[1][0].value).toBe '2' + expect(tokens[1][0].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.unicode.python.3'] + + expect(tokens[1][1].value).toBe '"' + expect(tokens[1][1].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.unicode.python.3', 'punctuation.definition.string.end.python.3'] + + expect(tokens[1][2]).not.toBeDefined() + + it "terminates a single-quoted raw string containing opening parenthesis at closing quote", -> + tokens = grammar.tokenizeLines("r'%d(' #foo") + + expect(tokens[0][0].value).toBe 'r' + expect(tokens[0][0].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-unicode.python.3', 'storage.type.string.python.3'] + expect(tokens[0][1].value).toBe "'" + expect(tokens[0][1].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-unicode.python.3', 'punctuation.definition.string.begin.python.3'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-unicode.python.3', 'constant.other.placeholder.printf.python.3'] + expect(tokens[0][3].value).toBe '(' + expect(tokens[0][3].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-unicode.python.3', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[0][4].value).toBe "'" + expect(tokens[0][4].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-unicode.python.3', 'punctuation.definition.string.end.python.3'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python.3'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3', 'punctuation.definition.comment.python.3'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3'] + + it "terminates a single-quoted raw string containing opening bracket at closing quote", -> + tokens = grammar.tokenizeLines("r'%d[' #foo") + + expect(tokens[0][0].value).toBe 'r' + expect(tokens[0][0].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-unicode.python.3', 'storage.type.string.python.3'] + 
expect(tokens[0][1].value).toBe "'" + expect(tokens[0][1].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-unicode.python.3', 'punctuation.definition.string.begin.python.3'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-unicode.python.3', 'constant.other.placeholder.printf.python.3'] + expect(tokens[0][3].value).toBe '[' + expect(tokens[0][3].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-unicode.python.3', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][4].value).toBe "'" + expect(tokens[0][4].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-unicode.python.3', 'punctuation.definition.string.end.python.3'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python.3'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3', 'punctuation.definition.comment.python.3'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3'] + + it "terminates a double-quoted raw string containing opening parenthesis at closing quote", -> + tokens = grammar.tokenizeLines('r"%d(" #foo') + + expect(tokens[0][0].value).toBe 'r' + expect(tokens[0][0].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-unicode.python.3', 'storage.type.string.python.3'] + expect(tokens[0][1].value).toBe '"' + expect(tokens[0][1].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-unicode.python.3', 'punctuation.definition.string.begin.python.3'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-unicode.python.3', 'constant.other.placeholder.printf.python.3'] + expect(tokens[0][3].value).toBe '(' + expect(tokens[0][3].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-unicode.python.3', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[0][4].value).toBe '"' + expect(tokens[0][4].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-unicode.python.3', 'punctuation.definition.string.end.python.3'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python.3'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3', 'punctuation.definition.comment.python.3'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3'] + + it "terminates a double-quoted raw string containing opening bracket at closing quote", -> + tokens = grammar.tokenizeLines('r"%d[" #foo') + + expect(tokens[0][0].value).toBe 'r' + expect(tokens[0][0].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-unicode.python.3', 'storage.type.string.python.3'] + expect(tokens[0][1].value).toBe '"' + expect(tokens[0][1].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-unicode.python.3', 'punctuation.definition.string.begin.python.3'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-unicode.python.3', 'constant.other.placeholder.printf.python.3'] + 
expect(tokens[0][3].value).toBe '[' + expect(tokens[0][3].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-unicode.python.3', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][4].value).toBe '"' + expect(tokens[0][4].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-unicode.python.3', 'punctuation.definition.string.end.python.3'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python.3'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3', 'punctuation.definition.comment.python.3'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3'] + + it "terminates a bytes single-quoted raw string containing opening parenthesis at closing quote", -> + tokens = grammar.tokenizeLines("br'%d(' #foo") + + expect(tokens[0][0].value).toBe 'br' + expect(tokens[0][0].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-bytes.python.3', 'storage.type.string.python.3'] + expect(tokens[0][1].value).toBe "'" + expect(tokens[0][1].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-bytes.python.3', 'punctuation.definition.string.begin.python.3'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-bytes.python.3', 'constant.other.placeholder.printf.python.3'] + expect(tokens[0][3].value).toBe '(' + expect(tokens[0][3].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-bytes.python.3', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[0][4].value).toBe "'" + expect(tokens[0][4].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-bytes.python.3', 'punctuation.definition.string.end.python.3'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python.3'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3', 'punctuation.definition.comment.python.3'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3'] + + it "terminates a bytes single-quoted raw string containing opening bracket at closing quote", -> + tokens = grammar.tokenizeLines("br'%d[' #foo") + + expect(tokens[0][0].value).toBe 'br' + expect(tokens[0][0].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-bytes.python.3', 'storage.type.string.python.3'] + expect(tokens[0][1].value).toBe "'" + expect(tokens[0][1].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-bytes.python.3', 'punctuation.definition.string.begin.python.3'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-bytes.python.3', 'constant.other.placeholder.printf.python.3'] + expect(tokens[0][3].value).toBe '[' + expect(tokens[0][3].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-bytes.python.3', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][4].value).toBe "'" + expect(tokens[0][4].scopes).toEqual ['source.python.3', 'string.quoted.single.single-line.raw-bytes.python.3', 
'punctuation.definition.string.end.python.3'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python.3'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3', 'punctuation.definition.comment.python.3'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3'] + + it "terminates a bytes double-quoted raw string containing opening parenthesis at closing quote", -> + tokens = grammar.tokenizeLines('br"%d(" #foo') + + expect(tokens[0][0].value).toBe 'br' + expect(tokens[0][0].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-bytes.python.3', 'storage.type.string.python.3'] + expect(tokens[0][1].value).toBe '"' + expect(tokens[0][1].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-bytes.python.3', 'punctuation.definition.string.begin.python.3'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-bytes.python.3', 'constant.other.placeholder.printf.python.3'] + expect(tokens[0][3].value).toBe '(' + expect(tokens[0][3].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-bytes.python.3', 'meta.group.regexp', 'punctuation.definition.group.regexp'] + expect(tokens[0][4].value).toBe '"' + expect(tokens[0][4].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-bytes.python.3', 'punctuation.definition.string.end.python.3'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python.3'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3', 'punctuation.definition.comment.python.3'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3'] + + it "terminates a bytes double-quoted raw string containing opening bracket at closing quote", -> + tokens = grammar.tokenizeLines('br"%d[" #foo') + + expect(tokens[0][0].value).toBe 'br' + expect(tokens[0][0].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-bytes.python.3', 'storage.type.string.python.3'] + expect(tokens[0][1].value).toBe '"' + expect(tokens[0][1].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-bytes.python.3', 'punctuation.definition.string.begin.python.3'] + expect(tokens[0][2].value).toBe '%d' + expect(tokens[0][2].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-bytes.python.3', 'constant.other.placeholder.printf.python.3'] + expect(tokens[0][3].value).toBe '[' + expect(tokens[0][3].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-bytes.python.3', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.regexp'] + expect(tokens[0][4].value).toBe '"' + expect(tokens[0][4].scopes).toEqual ['source.python.3', 'string.quoted.double.single-line.raw-bytes.python.3', 'punctuation.definition.string.end.python.3'] + expect(tokens[0][5].value).toBe ' ' + expect(tokens[0][5].scopes).toEqual ['source.python.3'] + expect(tokens[0][6].value).toBe '#' + expect(tokens[0][6].scopes).toEqual ['source.python.3', 'comment.line.number-sign.python.3', 'punctuation.definition.comment.python.3'] + expect(tokens[0][7].value).toBe 'foo' + expect(tokens[0][7].scopes).toEqual ['source.python.3', 
'comment.line.number-sign.python.3'] + + it "terminates referencing an item in a list variable after a sequence of a closing and opening bracket", -> + tokens = grammar.tokenizeLines('foo[i[0]][j[0]]') + + expect(tokens[0][0].value).toBe 'foo' + expect(tokens[0][0].scopes).toEqual ['source.python.3', 'meta.item-access.python.3'] + expect(tokens[0][1].value).toBe '[' + expect(tokens[0][1].scopes).toEqual ['source.python.3', 'meta.item-access.python.3', 'punctuation.definition.arguments.begin.python.3'] + expect(tokens[0][2].value).toBe 'i' + expect(tokens[0][2].scopes).toEqual ['source.python.3', 'meta.item-access.python.3', 'meta.item-access.arguments.python.3', 'meta.item-access.python.3'] + expect(tokens[0][3].value).toBe '[' + expect(tokens[0][3].scopes).toEqual ['source.python.3', 'meta.item-access.python.3', 'meta.item-access.arguments.python.3', 'meta.item-access.python.3', 'punctuation.definition.arguments.begin.python.3'] + expect(tokens[0][4].value).toBe '0' + expect(tokens[0][4].scopes).toEqual ['source.python.3', 'meta.item-access.python.3', 'meta.item-access.arguments.python.3', 'meta.item-access.python.3', 'meta.item-access.arguments.python.3', 'constant.numeric.integer.decimal.python.3'] + expect(tokens[0][5].value).toBe ']' + expect(tokens[0][5].scopes).toEqual ['source.python.3', 'meta.item-access.python.3', 'meta.item-access.arguments.python.3', 'meta.item-access.python.3', 'punctuation.definition.arguments.end.python.3'] + expect(tokens[0][6].value).toBe ']' + expect(tokens[0][6].scopes).toEqual ['source.python.3', 'meta.item-access.python.3', 'punctuation.definition.arguments.end.python.3'] + expect(tokens[0][7].value).toBe '[' + expect(tokens[0][7].scopes).toEqual ['source.python.3', 'meta.structure.list.python.3', 'punctuation.definition.list.begin.python.3'] + expect(tokens[0][8].value).toBe 'j' + expect(tokens[0][8].scopes).toEqual ['source.python.3', 'meta.structure.list.python.3', 'meta.structure.list.item.python.3', 'meta.item-access.python.3'] + expect(tokens[0][9].value).toBe '[' + expect(tokens[0][9].scopes).toEqual ['source.python.3', 'meta.structure.list.python.3', 'meta.structure.list.item.python.3', 'meta.item-access.python.3', 'punctuation.definition.arguments.begin.python.3'] + expect(tokens[0][10].value).toBe '0' + expect(tokens[0][10].scopes).toEqual ['source.python.3', 'meta.structure.list.python.3', 'meta.structure.list.item.python.3', 'meta.item-access.python.3', 'meta.item-access.arguments.python.3', 'constant.numeric.integer.decimal.python.3'] + expect(tokens[0][11].value).toBe ']' + expect(tokens[0][11].scopes).toEqual ['source.python.3', 'meta.structure.list.python.3', 'meta.structure.list.item.python.3', 'meta.item-access.python.3', 'punctuation.definition.arguments.end.python.3'] + expect(tokens[0][12].value).toBe ']' + expect(tokens[0][12].scopes).toEqual ['source.python.3', 'meta.structure.list.python.3', 'punctuation.definition.list.end.python.3'] + + it "tokenizes properties of self as variables", -> + tokens = grammar.tokenizeLines('self.foo') + + expect(tokens[0][0].value).toBe 'self' + expect(tokens[0][0].scopes).toEqual ['source.python.3', 'variable.language.python.3'] + expect(tokens[0][1].value).toBe '.' 
+ expect(tokens[0][1].scopes).toEqual ['source.python.3'] + expect(tokens[0][2].value).toBe 'foo' + expect(tokens[0][2].scopes).toEqual ['source.python.3'] + + it "tokenizes properties of a variable as variables", -> + tokens = grammar.tokenizeLines('bar.foo') + + expect(tokens[0][0].value).toBe 'bar' + expect(tokens[0][0].scopes).toEqual ['source.python.3'] + expect(tokens[0][1].value).toBe '.' + expect(tokens[0][1].scopes).toEqual ['source.python.3'] + expect(tokens[0][2].value).toBe 'foo' + expect(tokens[0][2].scopes).toEqual ['source.python.3'] + + it "tokenizes comments inside function parameters", -> + {tokens} = grammar.tokenizeLine('def test(arg, # comment') + + expect(tokens[0]).toEqual value: 'def', scopes: ['source.python.3', 'meta.function.python.3', 'storage.type.function.python.3'] + expect(tokens[2]).toEqual value: 'test', scopes: ['source.python.3', 'meta.function.python.3', 'entity.name.function.python.3'] + expect(tokens[3]).toEqual value: '(', scopes: ['source.python.3', 'meta.function.python.3', 'punctuation.definition.parameters.begin.python.3'] + expect(tokens[4]).toEqual value: 'arg', scopes: ['source.python.3', 'meta.function.python.3', 'meta.function.parameters.python.3', 'variable.parameter.function.python.3'] + expect(tokens[5]).toEqual value: ',', scopes: ['source.python.3', 'meta.function.python.3', 'meta.function.parameters.python.3', 'punctuation.separator.parameters.python.3'] + expect(tokens[7]).toEqual value: '#', scopes: ['source.python.3', 'meta.function.python.3', 'meta.function.parameters.python.3', 'comment.line.number-sign.python.3', 'punctuation.definition.comment.python.3'] + expect(tokens[8]).toEqual value: ' comment', scopes: ['source.python.3', 'meta.function.python.3', 'meta.function.parameters.python.3', 'comment.line.number-sign.python.3'] + + tokens = grammar.tokenizeLines(""" + def __init__( + self, + codec, # comment + config + ): + """) + + expect(tokens[0][0]).toEqual value: 'def', scopes: ['source.python.3', 'meta.function.python.3', 'storage.type.function.python.3'] + expect(tokens[0][2]).toEqual value: '__init__', scopes: ['source.python.3', 'meta.function.python.3', 'entity.name.function.python.3', 'support.function.magic.python.3'] + expect(tokens[0][3]).toEqual value: '(', scopes: ['source.python.3', 'meta.function.python.3', 'punctuation.definition.parameters.begin.python.3'] + expect(tokens[1][1]).toEqual value: 'self', scopes: ['source.python.3', 'meta.function.python.3', 'meta.function.parameters.python.3', 'variable.parameter.function.python.3'] + expect(tokens[1][2]).toEqual value: ',', scopes: ['source.python.3', 'meta.function.python.3', 'meta.function.parameters.python.3', 'punctuation.separator.parameters.python.3'] + expect(tokens[2][1]).toEqual value: 'codec', scopes: ['source.python.3', 'meta.function.python.3', 'meta.function.parameters.python.3', 'variable.parameter.function.python.3'] + expect(tokens[2][2]).toEqual value: ',', scopes: ['source.python.3', 'meta.function.python.3', 'meta.function.parameters.python.3', 'punctuation.separator.parameters.python.3'] + expect(tokens[2][4]).toEqual value: '#', scopes: ['source.python.3', 'meta.function.python.3', 'meta.function.parameters.python.3', 'comment.line.number-sign.python.3', 'punctuation.definition.comment.python.3'] + expect(tokens[2][5]).toEqual value: ' comment', scopes: ['source.python.3', 'meta.function.python.3', 'meta.function.parameters.python.3', 'comment.line.number-sign.python.3'] + expect(tokens[3][1]).toEqual value: 'config', scopes: 
['source.python.3', 'meta.function.python.3', 'meta.function.parameters.python.3', 'variable.parameter.function.python.3'] + expect(tokens[4][0]).toEqual value: ')', scopes: ['source.python.3', 'meta.function.python.3', 'punctuation.definition.parameters.end.python.3'] + expect(tokens[4][1]).toEqual value: ':', scopes: ['source.python.3', 'meta.function.python.3', 'punctuation.section.function.begin.python.3'] + + + # it "tokenizes SQL inline highlighting on blocks", -> + # delimsByScope = + # "string.quoted.double.block.sql.python.3": '"""' + # "string.quoted.single.block.sql.python.3": "'''" + # + # for scope, delim in delimsByScope + # tokens = grammar.tokenizeLines( + # delim + + # 'SELECT bar + # FROM foo' + # + delim + # ) + # + # expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python.3', scope, 'punctuation.definition.string.begin.python.3'] + # expect(tokens[1][0]).toEqual value: 'SELECT bar', scopes: ['source.python.3', scope] + # expect(tokens[2][0]).toEqual value: 'FROM foo', scopes: ['source.python.3', scope] + # expect(tokens[3][0]).toEqual value: delim, scopes: ['source.python.3', scope, 'punctuation.definition.string.end.python.3'] + # + # it "tokenizes SQL inline highlighting on blocks with a CTE", -> + # delimsByScope = + # "string.quoted.double.block.sql.python.3": '"""' + # "string.quoted.single.block.sql.python.3": "'''" + # + # for scope, delim of delimsByScope + # tokens = grammar.tokenizeLines(""" + # #{delim} + # WITH example_cte AS ( + # SELECT bar + # FROM foo + # GROUP BY bar + # ) + # + # SELECT COUNT(*) + # FROM example_cte + # #{delim} + # """) + # + # expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python.3', scope, 'punctuation.definition.string.begin.python.3'] + # expect(tokens[1][0]).toEqual value: 'WITH example_cte AS (', scopes: ['source.python.3', scope] + # expect(tokens[2][0]).toEqual value: 'SELECT bar', scopes: ['source.python.3', scope] + # expect(tokens[3][0]).toEqual value: 'FROM foo', scopes: ['source.python.3', scope] + # expect(tokens[4][0]).toEqual value: 'GROUP BY bar', scopes: ['source.python.3', scope] + # expect(tokens[5][0]).toEqual value: ')', scopes: ['source.python.3', scope] + # expect(tokens[6][0]).toEqual value: '', scopes: ['source.python.3', scope] + # expect(tokens[7][0]).toEqual value: 'SELECT COUNT(*)', scopes: ['source.python.3', scope] + # expect(tokens[8][0]).toEqual value: 'FROM example_cte', scopes: ['source.python.3', scope] + # expect(tokens[9][0]).toEqual value: delim, scopes: ['source.python.3', scope, 'punctuation.definition.string.end.python.3'] + # + # it "tokenizes SQL inline highlighting on single line with a CTE", -> + # + # {tokens} = grammar.tokenizeLine('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'') + # + # expect(tokens[0]).toEqual value: '\'', scopes: ['source.python.3', 'string.quoted.single.single-line.python.3', 'punctuation.definition.string.begin.python.3'] + # expect(tokens[1]).toEqual value: 'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte', scopes: ['source.python.3', 'string.quoted.single.single-line.python.3'] + # expect(tokens[2]).toEqual value: '\'', scopes: ['source.python.3', 'string.quoted.single.single-line.python.3', 'punctuation.definition.string.end.python.3']
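+
+  # Hedged sketch, not part of the original change: a minimal spec for the
+  # builtin_exceptions rule defined in grammars/python3.cson. It assumes that
+  # rule is reachable from the grammar's top-level patterns, so a bare exception
+  # name is scoped as 'support.type.exception.python.3'.
+  it "tokenizes built-in exceptions", ->
+    {tokens} = grammar.tokenizeLine('KeyboardInterrupt')
+
+    expect(tokens[0]).toEqual value: 'KeyboardInterrupt', scopes: ['source.python.3', 'support.type.exception.python.3']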