Python pygments.token.String Examples

The following are 7 code examples of pygments.token.String, drawn from the open source projects named above each example. You may also want to check out the other functions and classes available in the pygments.token module.
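Before diving in, note that String is not called like a function: it is a singleton token type, an instance of pygments.token._TokenType. A minimal orientation sketch, independent of the projects below:

from pygments.token import String, Token

# String is an alias for Token.Literal.String; token types are singletons.
assert String is Token.Literal.String

# Subtypes such as String.Double are token types too, and containment
# tests walk the hierarchy.
assert String.Double in String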
Example #1
Source File: execution.py    From http-prompt with MIT License
# `urlparse`, `colformat`, `String` and `Name` are imported at module
# level in the original execution.py.
def visit_ls(self, node, children):
    path = urlparse(self.context_override.url).path
    path = filter(None, path.split('/'))
    nodes = self.context.root.ls(*path)
    if self.output.isatty():
        # Interactive output: colorize directory names with the String
        # token and everything else with the Name token.
        names = []
        for child in nodes:
            token_type = String if child.data.get('type') == 'dir' else Name
            names.append(self._colorize(child.name, token_type))
        lines = list(colformat(list(names)))
    else:
        lines = [n.name for n in nodes]
    if lines:
        self.output.write('\n'.join(lines))
    return node
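Here _colorize renders a single name with the chosen token type. As a rough standalone sketch of the idea (the helper below is illustrative, not http-prompt's actual implementation), a (token, text) pair can be rendered with ANSI colors through a terminal formatter:

import pygments
from pygments.formatters.terminal import TerminalFormatter
from pygments.token import Name, String

def colorize(text, token_type):
    # Render a single (token, text) pair as an ANSI-colored string.
    return pygments.format([(token_type, text)], TerminalFormatter()).rstrip('\n')

print(colorize('docs', String))   # a directory, styled like a string literal
print(colorize('README', Name))   # any other node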
Example #2
Source File: test_kotlin.py    From pygments with BSD 2-Clause "Simplified" License
# In the original test module, `lexer` is a pytest fixture yielding
# pygments.lexers.KotlinLexer(). The whole triple-quoted block is
# expected to arrive as a single String token.
def test_should_cope_with_multiline_comments(lexer):
    fragment = u'"""\nthis\nis\na\ncomment"""'
    tokens = [
        (String, u'"""\nthis\nis\na\ncomment"""'),
        (Text, u'\n')
    ]
    assert list(lexer.get_tokens(fragment)) == tokens
Example #3
Source File: test_token.py    From pygments with BSD 2-Clause "Simplified" License
from pygments import token

def test_tokentype():
    t = token.String
    # split() walks from the root Token down to the type itself.
    assert t.split() == [token.Token, token.Literal, token.String]
    assert t.__class__ is token._TokenType
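A related property worth knowing (a small sketch, not part of the test file): subtypes are created lazily on attribute access, so even a brand-new name yields a proper member of the hierarchy:

from pygments import token

Custom = token.String.Custom  # created on first access
assert Custom.split() == [token.Token, token.Literal, token.String, Custom]
assert Custom in token.String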
Example #4
Source File: test_token.py    From pygments with BSD 2-Clause "Simplified" License
def test_functions():
    # A token type is a subtype of itself and of its ancestors, but an
    # ancestor is not a subtype of its children.
    assert token.is_token_subtype(token.String, token.String)
    assert token.is_token_subtype(token.String, token.Literal)
    assert not token.is_token_subtype(token.Literal, token.String)

    # string_to_tokentype passes token types through unchanged, maps the
    # empty string to the root Token, and resolves names from the root.
    assert token.string_to_tokentype(token.String) is token.String
    assert token.string_to_tokentype('') is token.Token
    assert token.string_to_tokentype('String') is token.String
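string_to_tokentype also resolves dotted names attribute by attribute from the root Token, so deeper subtypes round-trip through their string form (a small follow-on sketch, not part of the test file):

from pygments import token

assert token.string_to_tokentype('String.Double') is token.String.Double
assert token.string_to_tokentype('Token.Literal.String') is token.String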
Example #5
Source File: test_token.py    From pygments with BSD 2-Clause "Simplified" License
import copy

from pygments import token

def test_copying():
    # Token instances are supposed to be singletons, so copying or even
    # deepcopying should return the very same object.
    t = token.String
    assert t is copy.copy(t)
    assert t is copy.deepcopy(t)
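One practical consequence (a sketch, not part of the test file): because token types keep their identity through copying, they keep working as dictionary keys, e.g. in style maps, even after a deep copy:

import copy

from pygments.token import String

styles = {String: 'ansigreen'}
assert copy.deepcopy(styles)[String] == 'ansigreen'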
Example #6
Source File: test_using_api.py    From pygments with BSD 2-Clause "Simplified" License
# `MyLexer` is defined in the original test module; a possible
# reconstruction is sketched after this example.
def test_basic():
    expected = [(Text, 'a'), (String, '"'), (Keyword, 'bcd'),
                (String, '"'), (Text, 'e\n')]
    assert list(MyLexer().get_tokens('a"bcd"e')) == expected
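MyLexer itself is not part of this excerpt. A possible reconstruction that produces exactly the expected tokens, re-lexing the quoted body in a separate state via using(this, ...) (an assumption, not necessarily the original test's definition):

from pygments.lexer import RegexLexer, bygroups, this, using
from pygments.token import Keyword, String, Text

class MyLexer(RegexLexer):
    tokens = {
        'root': [
            # Quotes become String; the quoted body is re-lexed by this
            # same lexer, starting in the 'string' state.
            (r'(")(.+?)(")',
             bygroups(String, using(this, state='string'), String)),
            (r'[^"]+', Text),
        ],
        'string': [
            (r'.+', Keyword),
        ],
    }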
Example #7
Source File: lexer.py    From http-prompt with MIT License
from pygments.lexer import bygroups
from pygments.token import String, Text

def string_rules(state):
    return [
        # Fully quoted double-quoted string: quotes as Text, body as String.
        (r'(")((?:[^\r\n"\\]|(?:\\.))+)(")',
         bygroups(Text, String, Text), state),

        # Double-quoted string without a closing quote yet.
        (r'(")((?:[^\r\n"\\]|(?:\\.))+)', bygroups(Text, String), state),

        # Same two cases for single quotes.
        (r"(')((?:[^\r\n'\\]|(?:\\.))+)(')",
         bygroups(Text, String, Text), state),

        (r"(')((?:[^\r\n'\\]|(?:\\.))+)", bygroups(Text, String), state),

        # Bare (unquoted) word, with backslash escapes allowed.
        (r'([^\s\'\\]|(\\.))+', String, state)
    ]
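These rules drop straight into a RegexLexer state. A minimal usage sketch (the lexer and its grammar below are illustrative, not http-prompt's actual lexer), assuming string_rules from this example is in scope:

from pygments.lexer import RegexLexer
from pygments.token import Operator, Text

class KeyValueLexer(RegexLexer):
    tokens = {
        'root': [
            (r'\s+', Text),
            (r'=', Operator, 'value'),
            (r'[^\s=]+', Text),
        ],
        # '#pop' returns to 'root' once one value has been consumed.
        'value': string_rules('#pop'),
    }

print(list(KeyValueLexer().get_tokens('name="John Doe"')))
# [(Token.Text, 'name'), (Token.Operator, '='), (Token.Text, '"'),
#  (Token.Literal.String, 'John Doe'), (Token.Text, '"'), (Token.Text, '\n')]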