Python pygments.token.Punctuation() Examples

The following are 8 code examples of pygments.token.Punctuation(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module pygments.token, or try the search function.
Example #1
Source File: test_kotlin.py    From pygments with BSD 2-Clause "Simplified" License — 6 votes
def test_can_cope_with_destructuring(lexer):
    """A destructuring declaration lexes each bound name as Name.Property."""
    code = 'val (a, b) = '
    expected = [
        (Keyword, 'val'),
        (Text, ' '),
        (Punctuation, '('),
        (Name.Property, 'a'),
        (Punctuation, ','),
        (Text, ' '),
        (Name.Property, 'b'),
        (Punctuation, ')'),
        (Text, ' '),
        (Punctuation, '='),
        (Text, ' '),
        (Text, '\n'),
    ]
    assert expected == list(lexer.get_tokens(code))
Example #2
Source File: test_kotlin.py    From pygments with BSD 2-Clause "Simplified" License — 5 votes
def test_can_cope_generics_in_destructuring(lexer):
    """Generic type annotations inside a destructuring declaration lex cleanly."""
    code = 'val (a: List<Something>, b: Set<Wobble>) ='
    expected = [
        (Keyword, 'val'),
        (Text, ' '),
        (Punctuation, '('),
        (Name.Property, 'a'),
        (Punctuation, ':'),
        (Text, ' '),
        (Name.Property, 'List'),
        (Punctuation, '<'),
        (Name, 'Something'),
        (Punctuation, '>'),
        (Punctuation, ','),
        (Text, ' '),
        (Name.Property, 'b'),
        (Punctuation, ':'),
        (Text, ' '),
        (Name.Property, 'Set'),
        (Punctuation, '<'),
        (Name, 'Wobble'),
        (Punctuation, '>'),
        (Punctuation, ')'),
        (Text, ' '),
        (Punctuation, '='),
        (Text, '\n'),
    ]
    assert expected == list(lexer.get_tokens(code))
Example #3
Source File: test_kotlin.py    From pygments with BSD 2-Clause "Simplified" License — 5 votes
def test_can_cope_with_generics(lexer):
    """A generic extension-function signature lexes with correct token types."""
    code = ('inline fun <reified T : ContractState> '
            'VaultService.queryBy(): Vault.Page<T> {')
    expected = [
        (Keyword, 'inline fun'),
        (Text, ' '),
        (Punctuation, '<'),
        (Keyword, 'reified'),
        (Text, ' '),
        (Name, 'T'),
        (Text, ' '),
        (Punctuation, ':'),
        (Text, ' '),
        (Name, 'ContractState'),
        (Punctuation, '>'),
        (Text, ' '),
        (Name.Class, 'VaultService'),
        (Punctuation, '.'),
        (Name.Function, 'queryBy'),
        (Punctuation, '('),
        (Punctuation, ')'),
        (Punctuation, ':'),
        (Text, ' '),
        (Name, 'Vault'),
        (Punctuation, '.'),
        (Name, 'Page'),
        (Punctuation, '<'),
        (Name, 'T'),
        (Punctuation, '>'),
        (Text, ' '),
        (Punctuation, '{'),
        (Text, '\n'),
    ]
    assert expected == list(lexer.get_tokens(code))
Example #4
Source File: test_sql.py    From pygments with BSD 2-Clause "Simplified" License — 5 votes
def test_can_reject_almost_float(lexer):
    """'.e1' is not a float literal: dot punctuation, then the name 'e1'."""
    expected = ((Punctuation, '.'), (Name, 'e1'))
    _assert_tokens_match(lexer, '.e1', expected)
Example #5
Source File: test_basic.py    From pygments with BSD 2-Clause "Simplified" License — 5 votes
def test_can_reject_almost_float(lexer):
    """'.e1' is not a float literal: dot punctuation, then the name 'e1'."""
    expected = ((Punctuation, '.'), (Name, 'e1'))
    assert_tokens_match(lexer, '.e1', expected)
Example #6
Source File: test_r.py    From pygments with BSD 2-Clause "Simplified" License — 5 votes
def test_call(lexer):
    """A function call lexes the callee as Name.Function and args normally.

    Token references are Token.*-qualified throughout, matching the style
    of the sibling tests (test_indexing, test_dot_indexing) — the bare
    names are aliases of the same token objects, so behavior is unchanged.
    """
    fragment = u'f(1, a)\n'
    tokens = [
        (Token.Name.Function, u'f'),
        (Token.Punctuation, u'('),
        (Token.Literal.Number, u'1'),
        (Token.Punctuation, u','),
        (Token.Text, u' '),
        (Token.Name, u'a'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example #7
Source File: test_r.py    From pygments with BSD 2-Clause "Simplified" License — 5 votes
def test_indexing(lexer):
    """Bracket indexing a[1] lexes as name, '[', number, ']'."""
    code = 'a[1]'
    expected = [
        (Token.Name, 'a'),
        (Token.Punctuation, '['),
        (Token.Literal.Number, '1'),
        (Token.Punctuation, ']'),
        (Token.Text, '\n'),
    ]
    assert expected == list(lexer.get_tokens(code))
Example #8
Source File: test_r.py    From pygments with BSD 2-Clause "Simplified" License — 5 votes
def test_dot_indexing(lexer):
    """A bare dot is a valid R name; indexing it lexes like any other name."""
    code = '.[1]'
    expected = [
        (Token.Name, '.'),
        (Token.Punctuation, '['),
        (Token.Literal.Number, '1'),
        (Token.Punctuation, ']'),
        (Token.Text, '\n'),
    ]
    assert expected == list(lexer.get_tokens(code))