Python pygments.token.Number.Integer() Examples
The following are 13 code examples of pygments.token.Number.Integer(), all drawn from the test suite of the pygments project. The source file each example comes from is noted above it. You may also want to check out all available functions/classes of the module pygments.token.Number.
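For orientation, here is a small, self-contained demonstration (not one of the 13 examples below; the choice of PythonLexer and input string is arbitrary) of how a lexer emits Number.Integer tokens:

from pygments.lexers import PythonLexer
from pygments.token import Number

lexer = PythonLexer()

# get_tokens() yields (tokentype, value) pairs; token types support
# subtype membership tests, so both Number.Integer and Number.Hex are
# "in" Number.
for tokentype, value in lexer.get_tokens('x = 42 + 0xFF\n'):
    if tokentype in Number:
        print(tokentype, repr(value))
# Token.Literal.Number.Integer '42'
# Token.Literal.Number.Hex '0xFF'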
Example #1
Source File: test_ezhil.py, from the pygments project, BSD 2-Clause "Simplified" License
def test_gcd_expr(lexer):
    fragment = u'1^3+(5-5)*gcd(a,b)\n'
    tokens = [
        (Token.Number.Integer, u'1'),
        (Token.Operator, u'^'),
        (Token.Literal.Number.Integer, u'3'),
        (Token.Operator, u'+'),
        (Token.Punctuation, u'('),
        (Token.Literal.Number.Integer, u'5'),
        (Token.Operator, u'-'),
        (Token.Literal.Number.Integer, u'5'),
        (Token.Punctuation, u')'),
        (Token.Operator, u'*'),
        (Token.Name, u'gcd'),
        (Token.Punctuation, u'('),
        (Token.Name, u'a'),
        (Token.Operator, u','),
        (Token.Name, u'b'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n')
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
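Note that the expected token list mixes Token.Number.Integer with Token.Literal.Number.Integer. Both spellings name the same token type, because pygments aliases Token.Number to Token.Literal.Number:

from pygments.token import Token

# Token.Number is an alias for Token.Literal.Number, so the two
# spellings used in the token list above are the very same object.
assert Token.Number.Integer is Token.Literal.Number.Integer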
Example #2
Source File: test_sql.py, from the pygments project, BSD 2-Clause "Simplified" License
def test_can_lex_integer(lexer):
    _assert_are_tokens_of_type(lexer, '1 23 456', Number.Integer)
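_assert_are_tokens_of_type is a helper defined elsewhere in test_sql.py (Example #8 below uses a similarly named helper from test_basic.py), and its body is not shown on this page. A minimal sketch of what such a helper plausibly looks like, assuming it lexes each whitespace-separated sample and checks the resulting token type; the actual implementation may differ:

from pygments.token import Text

def _assert_are_tokens_of_type(lexer, examples, expected_token_type):
    # Hypothetical reconstruction, not the code from test_sql.py.
    for number, example in enumerate(examples.split(), 1):
        non_ws = [(ttype, value)
                  for ttype, value in lexer.get_tokens(example)
                  if ttype not in Text]  # drop the trailing newline token
        assert len(non_ws) == 1, 'sample #%d lexed as %r' % (number, non_ws)
        assert non_ws[0][0] is expected_token_type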
Example #3
Source File: test_ezhil.py, from the pygments project, BSD 2-Clause "Simplified" License
def test_sum(lexer):
    fragment = u'1+3\n'
    tokens = [
        (Number.Integer, u'1'),
        (Operator, u'+'),
        (Number.Integer, u'3'),
        (Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example #4
Source File: test_ezhil.py, from the pygments project, BSD 2-Clause "Simplified" License
def test_if_statement(lexer):
    fragment = u"""@( 0 > 3 ) ஆனால்
 பதிப்பி "wont print"
முடி"""
    tokens = [
        (Token.Operator, u'@'),
        (Token.Punctuation, u'('),
        (Token.Text, u' '),
        (Token.Literal.Number.Integer, u'0'),
        (Token.Text, u' '),
        (Token.Operator, u'>'),
        (Token.Text, u' '),
        (Token.Literal.Number.Integer, u'3'),
        (Token.Text, u' '),
        (Token.Punctuation, u')'),
        (Token.Text, u' '),
        (Token.Keyword, u'ஆனால்'),
        (Token.Text, u'\n'),
        (Token.Text, u' '),
        (Token.Keyword, u'பதிப்பி'),
        (Token.Text, u' '),
        (Token.Literal.String, u'"wont print"'),
        (Token.Text, u'\n'),
        (Token.Keyword, u'முடி'),
        (Token.Text, u'\n')
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example #5
Source File: test_ruby.py, from the pygments project, BSD 2-Clause "Simplified" License
def test_range_syntax1(lexer):
    fragment = u'1..3\n'
    tokens = [
        (Number.Integer, u'1'),
        (Operator, u'..'),
        (Number.Integer, u'3'),
        (Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example #6
Source File: test_ruby.py, from the pygments project, BSD 2-Clause "Simplified" License
def test_range_syntax3(lexer):
    fragment = u'1 .. 3\n'
    tokens = [
        (Number.Integer, u'1'),
        (Text, u' '),
        (Operator, u'..'),
        (Text, u' '),
        (Number.Integer, u'3'),
        (Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example #7
Source File: test_ruby.py, from the pygments project, BSD 2-Clause "Simplified" License
def test_operator_methods(lexer):
    fragment = u'x.==4\n'
    tokens = [
        (Token.Name, u'x'),
        (Token.Operator, u'.'),
        (Token.Name.Operator, u'=='),
        (Token.Literal.Number.Integer, u'4'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example #8
Source File: test_basic.py, from the pygments project, BSD 2-Clause "Simplified" License
def test_can_lex_integer(lexer):
    assert_are_tokens_of_type(lexer, '1 23 456', Number.Integer)
Example #9
Source File: test_java.py, from the pygments project, BSD 2-Clause "Simplified" License
def test_numeric_literals(lexer):
    fragment = '0 5L 9__542_72l 0xbEEf 0X9_A 0_35 01 0b0___101_0'
    fragment += ' 0. .7_17F 3e-1_3d 1f 6_01.9e+3 0x.1Fp3 0XEP8D\n'
    tokens = [
        (Number.Integer, '0'),
        (Text, ' '),
        (Number.Integer, '5L'),
        (Text, ' '),
        (Number.Integer, '9__542_72l'),
        (Text, ' '),
        (Number.Hex, '0xbEEf'),
        (Text, ' '),
        (Number.Hex, '0X9_A'),
        (Text, ' '),
        (Number.Oct, '0_35'),
        (Text, ' '),
        (Number.Oct, '01'),
        (Text, ' '),
        (Number.Bin, '0b0___101_0'),
        (Text, ' '),
        (Number.Float, '0.'),
        (Text, ' '),
        (Number.Float, '.7_17F'),
        (Text, ' '),
        (Number.Float, '3e-1_3d'),
        (Text, ' '),
        (Number.Float, '1f'),
        (Text, ' '),
        (Number.Float, '6_01.9e+3'),
        (Text, ' '),
        (Number.Float, '0x.1Fp3'),
        (Text, ' '),
        (Number.Float, '0XEP8D'),
        (Text, '\n')
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
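This test covers every Number subtype the Java lexer distinguishes (Integer, Hex, Oct, Bin, Float). The same behaviour can be reproduced outside the pytest fixture; a small standalone sketch (the choice of literals here is illustrative):

from pygments.lexers import JavaLexer
from pygments.token import Number

lexer = JavaLexer()

# Suffixes (L/l) and embedded underscores do not change the category:
# each of these lexes to a single Number.Integer token (followed only
# by the trailing newline that get_tokens() appends).
for literal in ('0', '5L', '9__542_72l'):
    first_type, first_value = list(lexer.get_tokens(literal))[0]
    assert first_type is Number.Integer, (literal, first_type)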
Example #10
Source File: test_clexer.py, from the pygments project, BSD 2-Clause "Simplified" License
def test_label_followed_by_statement(lexer):
    fragment = u'''\
    int main()
    {
    foo:return 0;
      goto foo;
    }
    '''
    tokens = [
        (Token.Keyword.Type, u'int'),
        (Token.Text, u' '),
        (Token.Name.Function, u'main'),
        (Token.Punctuation, u'('),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'{'),
        (Token.Text, u'\n'),
        (Token.Name.Label, u'foo'),
        (Token.Punctuation, u':'),
        (Token.Keyword, u'return'),
        (Token.Text, u' '),
        (Token.Literal.Number.Integer, u'0'),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
        (Token.Text, u'  '),
        (Token.Keyword, u'goto'),
        (Token.Text, u' '),
        (Token.Name, u'foo'),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'}'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
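The fragment is a triple-quoted string indented to match the surrounding test code, so the test passes it through textwrap.dedent before lexing; dedent strips only the whitespace common to all lines, preserving the extra two spaces in front of goto. A quick illustration:

import textwrap

fragment = u'''\
    foo:return 0;
      goto foo;
    '''
# dedent() removes the indentation common to all lines, leaving the
# relative two-space indent of "goto foo;" intact.
print(textwrap.dedent(fragment))
# foo:return 0;
#   goto foo;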
Example #11
Source File: test_ruby.py, from the pygments project, BSD 2-Clause "Simplified" License
def test_interpolation_nested_curly(lexer):
    fragment = (
        u'"A#{ (3..5).group_by { |x| x/2}.map '
        u'do |k,v| "#{k}" end.join }" + "Z"\n')
    tokens = [
        (Token.Literal.String.Double, u'"'),
        (Token.Literal.String.Double, u'A'),
        (Token.Literal.String.Interpol, u'#{'),
        (Token.Text, u' '),
        (Token.Punctuation, u'('),
        (Token.Literal.Number.Integer, u'3'),
        (Token.Operator, u'..'),
        (Token.Literal.Number.Integer, u'5'),
        (Token.Punctuation, u')'),
        (Token.Operator, u'.'),
        (Token.Name, u'group_by'),
        (Token.Text, u' '),
        (Token.Literal.String.Interpol, u'{'),
        (Token.Text, u' '),
        (Token.Operator, u'|'),
        (Token.Name, u'x'),
        (Token.Operator, u'|'),
        (Token.Text, u' '),
        (Token.Name, u'x'),
        (Token.Operator, u'/'),
        (Token.Literal.Number.Integer, u'2'),
        (Token.Literal.String.Interpol, u'}'),
        (Token.Operator, u'.'),
        (Token.Name, u'map'),
        (Token.Text, u' '),
        (Token.Keyword, u'do'),
        (Token.Text, u' '),
        (Token.Operator, u'|'),
        (Token.Name, u'k'),
        (Token.Punctuation, u','),
        (Token.Name, u'v'),
        (Token.Operator, u'|'),
        (Token.Text, u' '),
        (Token.Literal.String.Double, u'"'),
        (Token.Literal.String.Interpol, u'#{'),
        (Token.Name, u'k'),
        (Token.Literal.String.Interpol, u'}'),
        (Token.Literal.String.Double, u'"'),
        (Token.Text, u' '),
        (Token.Keyword, u'end'),
        (Token.Operator, u'.'),
        (Token.Name, u'join'),
        (Token.Text, u' '),
        (Token.Literal.String.Interpol, u'}'),
        (Token.Literal.String.Double, u'"'),
        (Token.Text, u' '),
        (Token.Operator, u'+'),
        (Token.Text, u' '),
        (Token.Literal.String.Double, u'"'),
        (Token.Literal.String.Double, u'Z'),
        (Token.Literal.String.Double, u'"'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example #12
Source File: test_clexer.py, from the pygments project, BSD 2-Clause "Simplified" License
def test_switch(lexer):
    fragment = u'''\
    int main()
    {
        switch (0)
        {
            case 0:
            default:
                ;
        }
    }
    '''
    tokens = [
        (Token.Keyword.Type, u'int'),
        (Token.Text, u' '),
        (Token.Name.Function, u'main'),
        (Token.Punctuation, u'('),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'{'),
        (Token.Text, u'\n'),
        (Token.Text, u'    '),
        (Token.Keyword, u'switch'),
        (Token.Text, u' '),
        (Token.Punctuation, u'('),
        (Token.Literal.Number.Integer, u'0'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
        (Token.Text, u'    '),
        (Token.Punctuation, u'{'),
        (Token.Text, u'\n'),
        (Token.Text, u'        '),
        (Token.Keyword, u'case'),
        (Token.Text, u' '),
        (Token.Literal.Number.Integer, u'0'),
        (Token.Operator, u':'),
        (Token.Text, u'\n'),
        (Token.Text, u'        '),
        (Token.Keyword, u'default'),
        (Token.Operator, u':'),
        (Token.Text, u'\n'),
        (Token.Text, u'            '),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
        (Token.Text, u'    '),
        (Token.Punctuation, u'}'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'}'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
Example #13
Source File: test_clexer.py, from the pygments project, BSD 2-Clause "Simplified" License
def test_switch_space_before_colon(lexer):
    fragment = u'''\
    int main()
    {
        switch (0)
        {
            case 0 :
            default :
                ;
        }
    }
    '''
    tokens = [
        (Token.Keyword.Type, u'int'),
        (Token.Text, u' '),
        (Token.Name.Function, u'main'),
        (Token.Punctuation, u'('),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'{'),
        (Token.Text, u'\n'),
        (Token.Text, u'    '),
        (Token.Keyword, u'switch'),
        (Token.Text, u' '),
        (Token.Punctuation, u'('),
        (Token.Literal.Number.Integer, u'0'),
        (Token.Punctuation, u')'),
        (Token.Text, u'\n'),
        (Token.Text, u'    '),
        (Token.Punctuation, u'{'),
        (Token.Text, u'\n'),
        (Token.Text, u'        '),
        (Token.Keyword, u'case'),
        (Token.Text, u' '),
        (Token.Literal.Number.Integer, u'0'),
        (Token.Text, u' '),
        (Token.Operator, u':'),
        (Token.Text, u'\n'),
        (Token.Text, u'        '),
        (Token.Keyword, u'default'),
        (Token.Text, u' '),
        (Token.Operator, u':'),
        (Token.Text, u'\n'),
        (Token.Text, u'            '),
        (Token.Punctuation, u';'),
        (Token.Text, u'\n'),
        (Token.Text, u'    '),
        (Token.Punctuation, u'}'),
        (Token.Text, u'\n'),
        (Token.Punctuation, u'}'),
        (Token.Text, u'\n'),
    ]
    assert list(lexer.get_tokens(textwrap.dedent(fragment))) == tokens
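All of these tests receive the lexer under test through a pytest fixture named lexer, which is defined at the top of each test file rather than shown in the examples above. A representative sketch of such a fixture, using CLexer as in the last two examples (the actual fixture definitions live in the respective test modules):

import pytest

from pygments.lexers import CLexer

@pytest.fixture(scope='module')
def lexer():
    # One lexer instance is shared by every test in the module.
    yield CLexer()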