Python token.NEWLINE Examples
The following are 30
code examples of the constant token.NEWLINE.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the
module token, or try the search function.
Example #1
Source File: pylint_checker.py From caniusepython3 with Apache License 2.0 | 6 votes |
def process_tokens(self, tokens):
    """Emit 'native-string' for unprefixed str literals past the docstring.

    Scanning stops early once a ``from __future__ import unicode_literals``
    line is seen, since every literal in the module is then unicode anyway.
    """
    # Still inside the module "prelude" (docstring / shebang area)?  The
    # first STRING there may be a native docstring, which is allowed.
    in_prelude = True
    lineno = 1
    for tok_type, tok_val, _start, _end, src_line in tokens:
        if tok_type in (token.NEWLINE, tokenize.NL):
            lineno += 1
        # Any token numerically between NEWLINE and N_TOKENS (operators,
        # punctuation, ...) means real code has started: the prelude is
        # over and no further __future__ statements are possible.
        if token.NEWLINE < tok_type < token.N_TOKENS:
            in_prelude = False
        elif tok_type == token.STRING:
            lineno += tok_val.count('\n')
            if not in_prelude and not tok_val.startswith(('u', 'b')):
                self.add_message('native-string', line=lineno)
        elif in_prelude and tok_type == token.NAME:
            if len(src_line) >= 39:  # cheap length gate before substring search
                if u'__future__' in src_line and u'unicode_literals' in src_line:
                    return
Example #2
Source File: transformer.py From ironpython2 with Apache License 2.0 | 6 votes |
def decorator(self, nodelist):
    """Transform one decorator node into an expression node.

    Grammar: decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
    """
    assert len(nodelist) in (3, 5, 6)
    assert nodelist[0][0] == token.AT
    assert nodelist[-1][0] == token.NEWLINE
    assert nodelist[1][0] == symbol.dotted_name
    funcname = self.decorator_name(nodelist[1][1:])
    # A bare name (no argument list) decorates with the dotted name itself.
    if len(nodelist) <= 3:
        return funcname
    # Otherwise build a call node: dotted_name ( arglist )
    assert nodelist[2][0] == token.LPAR
    return self.com_call_function(funcname, nodelist[3])
Example #3
Source File: transformer.py From PokemonGo-DesktopMap with MIT License | 6 votes |
def decorator(self, nodelist):
    """Transform one decorator node into an expression node.

    Grammar: decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
    """
    assert len(nodelist) in (3, 5, 6)
    assert nodelist[0][0] == token.AT
    assert nodelist[-1][0] == token.NEWLINE
    assert nodelist[1][0] == symbol.dotted_name
    funcname = self.decorator_name(nodelist[1][1:])
    # A bare name (no argument list) decorates with the dotted name itself.
    if len(nodelist) <= 3:
        return funcname
    # Otherwise build a call node: dotted_name ( arglist )
    assert nodelist[2][0] == token.LPAR
    return self.com_call_function(funcname, nodelist[3])
Example #4
Source File: transformer.py From BinderFilter with MIT License | 6 votes |
def decorator(self, nodelist):
    """Transform one decorator node into an expression node.

    Grammar: decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
    """
    assert len(nodelist) in (3, 5, 6)
    assert nodelist[0][0] == token.AT
    assert nodelist[-1][0] == token.NEWLINE
    assert nodelist[1][0] == symbol.dotted_name
    funcname = self.decorator_name(nodelist[1][1:])
    # A bare name (no argument list) decorates with the dotted name itself.
    if len(nodelist) <= 3:
        return funcname
    # Otherwise build a call node: dotted_name ( arglist )
    assert nodelist[2][0] == token.LPAR
    return self.com_call_function(funcname, nodelist[3])
Example #5
Source File: transformer.py From CTFCrackTools-V2 with GNU General Public License v3.0 | 6 votes |
def decorator(self, nodelist):
    """Transform one decorator node into an expression node.

    Grammar: decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
    """
    assert len(nodelist) in (3, 5, 6)
    assert nodelist[0][0] == token.AT
    assert nodelist[-1][0] == token.NEWLINE
    assert nodelist[1][0] == symbol.dotted_name
    funcname = self.decorator_name(nodelist[1][1:])
    # A bare name (no argument list) decorates with the dotted name itself.
    if len(nodelist) <= 3:
        return funcname
    # Otherwise build a call node: dotted_name ( arglist )
    assert nodelist[2][0] == token.LPAR
    return self.com_call_function(funcname, nodelist[3])
Example #6
Source File: pygettext.py From oss-ftp with MIT License | 6 votes |
def __openseen(self, ttype, tstring, lineno):
    # State handler: we are inside the parentheses of a _(...) call,
    # gathering the translatable string fragments.  (Python 2 code --
    # note the ``print >>`` statement below.)
    # NOTE(review): self.__waiting is presumably the idle handler of the
    # tokenizer state machine -- confirm against the enclosing class.
    if ttype == tokenize.OP and tstring == ')':
        # We've seen the last of the translatable strings. Record the
        # line number of the first line of the strings and update the list
        # of messages seen. Reset state for the next batch. If there
        # were no strings inside _(), then just ignore this entry.
        if self.__data:
            self.__addentry(EMPTYSTRING.join(self.__data))
        self.__state = self.__waiting
    elif ttype == tokenize.STRING:
        # Accumulate this literal's value; adjacent fragments are joined
        # into one entry when the closing ')' arrives.
        self.__data.append(safe_eval(tstring))
    elif ttype not in [tokenize.COMMENT, token.INDENT, token.DEDENT,
                       token.NEWLINE, tokenize.NL]:
        # warn if we see anything else than STRING or whitespace
        print >> sys.stderr, _(
            '*** %(file)s:%(lineno)s: Seen unexpected token "%(token)s"'
            ) % {
            'token': tstring,
            'file': self.__curfile,
            'lineno': self.__lineno
            }
        self.__state = self.__waiting
Example #7
Source File: pygettext.py From odoo13-x64 with GNU General Public License v3.0 | 6 votes |
def __openseen(self, ttype, tstring, lineno):
    """Collect string fragments while inside the parentheses of _(...)."""
    if ttype == tokenize.OP and tstring == ')':
        # Closing paren ends the call: flush any collected fragments as a
        # single entry, then fall back to the waiting state.  An empty
        # _() contributes nothing.
        if self.__data:
            self.__addentry(EMPTYSTRING.join(self.__data))
        self.__state = self.__waiting
        return
    if ttype == tokenize.STRING and is_literal_string(tstring):
        self.__data.append(safe_eval(tstring))
        return
    whitespace_like = (tokenize.COMMENT, token.INDENT, token.DEDENT,
                       token.NEWLINE, tokenize.NL)
    if ttype not in whitespace_like:
        # Anything other than a string literal or whitespace inside _()
        # is suspicious -- warn and give up on this call.
        print(_(
            '*** %(file)s:%(lineno)s: Seen unexpected token "%(token)s"'
            ) % {
            'token': tstring,
            'file': self.__curfile,
            'lineno': self.__lineno
        }, file=sys.stderr)
        self.__state = self.__waiting
Example #8
Source File: transformer.py From medicare-demo with Apache License 2.0 | 6 votes |
def decorator(self, nodelist):
    """Transform one decorator node into an expression node.

    Grammar: decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
    """
    assert len(nodelist) in (3, 5, 6)
    assert nodelist[0][0] == token.AT
    assert nodelist[-1][0] == token.NEWLINE
    assert nodelist[1][0] == symbol.dotted_name
    funcname = self.decorator_name(nodelist[1][1:])
    # A bare name (no argument list) decorates with the dotted name itself.
    if len(nodelist) <= 3:
        return funcname
    # Otherwise build a call node: dotted_name ( arglist )
    assert nodelist[2][0] == token.LPAR
    return self.com_call_function(funcname, nodelist[3])
Example #9
Source File: pygettext.py From HRTunerProxy with GNU General Public License v2.0 | 6 votes |
def __openseen(self, ttype, tstring, lineno):
    # State handler: we are inside the parentheses of a _(...) call,
    # gathering the translatable string fragments.  (Python 2 code --
    # note the ``print >>`` statement below.)
    # NOTE(review): self.__waiting is presumably the idle handler of the
    # tokenizer state machine -- confirm against the enclosing class.
    if ttype == tokenize.OP and tstring == ')':
        # We've seen the last of the translatable strings. Record the
        # line number of the first line of the strings and update the list
        # of messages seen. Reset state for the next batch. If there
        # were no strings inside _(), then just ignore this entry.
        if self.__data:
            self.__addentry(EMPTYSTRING.join(self.__data))
        self.__state = self.__waiting
    elif ttype == tokenize.STRING:
        # Accumulate this literal's value; adjacent fragments are joined
        # into one entry when the closing ')' arrives.
        self.__data.append(safe_eval(tstring))
    elif ttype not in [tokenize.COMMENT, token.INDENT, token.DEDENT,
                       token.NEWLINE, tokenize.NL]:
        # warn if we see anything else than STRING or whitespace
        print >> sys.stderr, _(
            '*** %(file)s:%(lineno)s: Seen unexpected token "%(token)s"'
            ) % {
            'token': tstring,
            'file': self.__curfile,
            'lineno': self.__lineno
            }
        self.__state = self.__waiting
Example #10
Source File: test_asttokens.py From asttokens with Apache License 2.0 | 6 votes |
def test_tokenizing(self):
    """Initialization should produce the full token stream for the source.

    NOTE(review): the startpos/endpos assertions (22/25) require TWO spaces
    before the inline comment in ``source``; this copy of the test had lost
    one space ("import re # comment"), which made the expected offsets
    unsatisfiable.  The two-space form is restored here so the arithmetic
    works out: "import re  # comment\\n\\n" is exactly 22 characters.
    """
    source = "import re  # comment\n\nfoo = 'bar'\n"
    atok = asttokens.ASTTokens(source)
    self.assertEqual(atok.text, source)
    self.assertEqual([str(t) for t in atok.tokens], [
        "NAME:'import'",
        "NAME:'re'",
        "COMMENT:'# comment'",
        "NEWLINE:'\\n'",
        "NL:'\\n'",
        "NAME:'foo'",
        "OP:'='",
        'STRING:"\'bar\'"',
        "NEWLINE:'\\n'",
        "ENDMARKER:''",
    ])
    # Spot-check atok.tokens[5], the NAME token for 'foo'.
    foo_tok = atok.tokens[5]
    self.assertEqual(foo_tok.type, token.NAME)
    self.assertEqual(foo_tok.string, 'foo')
    self.assertEqual(foo_tok.index, 5)
    self.assertEqual(foo_tok.startpos, 22)
    self.assertEqual(foo_tok.endpos, 25)
Example #11
Source File: transformer.py From Splunking-Crime with GNU Affero General Public License v3.0 | 6 votes |
def decorator(self, nodelist):
    """Transform one decorator node into an expression node.

    Grammar: decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
    """
    assert len(nodelist) in (3, 5, 6)
    assert nodelist[0][0] == token.AT
    assert nodelist[-1][0] == token.NEWLINE
    assert nodelist[1][0] == symbol.dotted_name
    funcname = self.decorator_name(nodelist[1][1:])
    # A bare name (no argument list) decorates with the dotted name itself.
    if len(nodelist) <= 3:
        return funcname
    # Otherwise build a call node: dotted_name ( arglist )
    assert nodelist[2][0] == token.LPAR
    return self.com_call_function(funcname, nodelist[3])
Example #12
Source File: pygettext.py From datafari with Apache License 2.0 | 6 votes |
def __openseen(self, ttype, tstring, lineno):
    # State handler: we are inside the parentheses of a _(...) call,
    # gathering the translatable string fragments.  (Python 2 code --
    # note the ``print >>`` statement below.)
    # NOTE(review): self.__waiting is presumably the idle handler of the
    # tokenizer state machine -- confirm against the enclosing class.
    if ttype == tokenize.OP and tstring == ')':
        # We've seen the last of the translatable strings. Record the
        # line number of the first line of the strings and update the list
        # of messages seen. Reset state for the next batch. If there
        # were no strings inside _(), then just ignore this entry.
        if self.__data:
            self.__addentry(EMPTYSTRING.join(self.__data))
        self.__state = self.__waiting
    elif ttype == tokenize.STRING:
        # Accumulate this literal's value; adjacent fragments are joined
        # into one entry when the closing ')' arrives.
        self.__data.append(safe_eval(tstring))
    elif ttype not in [tokenize.COMMENT, token.INDENT, token.DEDENT,
                       token.NEWLINE, tokenize.NL]:
        # warn if we see anything else than STRING or whitespace
        print >> sys.stderr, _(
            '*** %(file)s:%(lineno)s: Seen unexpected token "%(token)s"'
            ) % {
            'token': tstring,
            'file': self.__curfile,
            'lineno': self.__lineno
            }
        self.__state = self.__waiting
Example #13
Source File: transformer.py From CTFCrackTools-V2 with GNU General Public License v3.0 | 5 votes |
def decorators(self, nodelist):
    """Convert a run of decorator nodes into a Decorators AST node.

    Grammar: decorators: decorator ([NEWLINE] decorator)* NEWLINE
    """
    collected = []
    for dec_node in nodelist:
        # Each child must itself be a decorator production.
        assert dec_node[0] == symbol.decorator
        collected.append(self.decorator(dec_node[1:]))
    return Decorators(collected)
Example #14
Source File: transformer.py From CTFCrackTools-V2 with GNU General Public License v3.0 | 5 votes |
def decorators(self, nodelist):
    """Convert a run of decorator nodes into a Decorators AST node.

    Grammar: decorators: decorator ([NEWLINE] decorator)* NEWLINE
    """
    collected = []
    for dec_node in nodelist:
        # Each child must itself be a decorator production.
        assert dec_node[0] == symbol.decorator
        collected.append(self.decorator(dec_node[1:]))
    return Decorators(collected)
Example #15
Source File: transformer.py From medicare-demo with Apache License 2.0 | 5 votes |
def file_input(self, nodelist):
    """Build a Module node for a whole source file."""
    doc = self.get_docstring(nodelist, symbol.file_input)
    # Skip the docstring child (if any) when collecting statements.
    start = 0 if doc is None else 1
    stmts = []
    for node in nodelist[start:]:
        # ENDMARKER / NEWLINE children carry no statements.
        if node[0] not in (token.ENDMARKER, token.NEWLINE):
            self.com_append_stmt(stmts, node)
    return Module(doc, Stmt(stmts))
Example #16
Source File: transformer.py From medicare-demo with Apache License 2.0 | 5 votes |
def single_input(self, node):
    """Transform one interactive (REPL) input.

    Grammar: NEWLINE | simple_stmt | compound_stmt NEWLINE
    """
    first = node[0]
    # A lone NEWLINE (empty input line) becomes a no-op statement.
    if first[0] == token.NEWLINE:
        return Pass()
    return self.com_stmt(first)
Example #17
Source File: transformer.py From medicare-demo with Apache License 2.0 | 5 votes |
def __init__(self):
    """Build the grammar-symbol -> handler-method dispatch tables."""
    # Map each symbol number to the same-named method on this class,
    # when one exists.
    self._dispatch = {}
    for sym_num, sym_name in symbol.sym_name.items():
        handler = getattr(self, sym_name, None)
        if handler is not None:
            self._dispatch[sym_num] = handler
    self._dispatch[token.NEWLINE] = self.com_NEWLINE
    # Dispatch on the leading token of an 'atom' node.
    self._atom_dispatch = {
        token.LPAR: self.atom_lpar,
        token.LSQB: self.atom_lsqb,
        token.LBRACE: self.atom_lbrace,
        token.BACKQUOTE: self.atom_backquote,
        token.NUMBER: self.atom_number,
        token.STRING: self.atom_string,
        token.NAME: self.atom_name,
    }
    self.encoding = None
Example #18
Source File: transformer.py From CTFCrackTools-V2 with GNU General Public License v3.0 | 5 votes |
def suite(self, nodelist):
    """Transform a suite into a Stmt node.

    Grammar: simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
    """
    # The one-child form is a simple_stmt on the same line as the header.
    if len(nodelist) == 1:
        return self.com_stmt(nodelist[0])
    stmts = []
    for child in nodelist:
        # Only stmt productions contribute; NEWLINE/INDENT/DEDENT are noise.
        if child[0] == symbol.stmt:
            self.com_append_stmt(stmts, child)
    return Stmt(stmts)

# --------------------------------------------------------------
#
# EXPRESSION NODES  (invoked by com_node())
#
Example #19
Source File: pygettext.py From HRTunerProxy with GNU General Public License v2.0 | 5 votes |
def __suitedocstring(self, ttype, tstring, lineno):
    """Grab a class/function docstring, skipping intervening noise tokens."""
    if ttype == tokenize.STRING:
        self.__addentry(safe_eval(tstring), lineno, isdocstring=1)
        self.__state = self.__waiting
        return
    if ttype not in (tokenize.NEWLINE, tokenize.INDENT, tokenize.COMMENT):
        # Something substantive appeared first -- there is no docstring.
        self.__state = self.__waiting
Example #20
Source File: transformer.py From PokemonGo-DesktopMap with MIT License | 5 votes |
def __init__(self):
    """Build the grammar-symbol -> handler-method dispatch tables."""
    # Map each symbol number to the same-named method on this class,
    # when one exists.
    self._dispatch = {}
    for sym_num, sym_name in symbol.sym_name.items():
        handler = getattr(self, sym_name, None)
        if handler is not None:
            self._dispatch[sym_num] = handler
    self._dispatch[token.NEWLINE] = self.com_NEWLINE
    # Dispatch on the leading token of an 'atom' node.
    self._atom_dispatch = {
        token.LPAR: self.atom_lpar,
        token.LSQB: self.atom_lsqb,
        token.LBRACE: self.atom_lbrace,
        token.BACKQUOTE: self.atom_backquote,
        token.NUMBER: self.atom_number,
        token.STRING: self.atom_string,
        token.NAME: self.atom_name,
    }
    self.encoding = None
Example #21
Source File: transformer.py From Splunking-Crime with GNU Affero General Public License v3.0 | 5 votes |
def com_NEWLINE(self, *args):
    # A trailing ';' on a line yields a NEWLINE token here; map it to a
    # harmless no-op statement.  (genc discards ('discard', ('const', xxxx))
    # nodes, so Discard(Const(None)) vanishes from the output.)
    harmless = Const(None)
    return Discard(harmless)
Example #22
Source File: transformer.py From Splunking-Crime with GNU Affero General Public License v3.0 | 5 votes |
def suite(self, nodelist):
    """Transform a suite into a Stmt node.

    Grammar: simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
    """
    # The one-child form is a simple_stmt on the same line as the header.
    if len(nodelist) == 1:
        return self.com_stmt(nodelist[0])
    stmts = []
    for child in nodelist:
        # Only stmt productions contribute; NEWLINE/INDENT/DEDENT are noise.
        if child[0] == symbol.stmt:
            self.com_append_stmt(stmts, child)
    return Stmt(stmts)

# --------------------------------------------------------------
#
# EXPRESSION NODES  (invoked by com_node())
#
Example #23
Source File: transformer.py From Splunking-Crime with GNU Affero General Public License v3.0 | 5 votes |
def decorators(self, nodelist):
    """Convert a run of decorator nodes into a Decorators AST node.

    Grammar: decorators: decorator ([NEWLINE] decorator)* NEWLINE
    """
    collected = []
    for dec_node in nodelist:
        # Each child must itself be a decorator production.
        assert dec_node[0] == symbol.decorator
        collected.append(self.decorator(dec_node[1:]))
    return Decorators(collected)
Example #24
Source File: transformer.py From PokemonGo-DesktopMap with MIT License | 5 votes |
def single_input(self, node):
    """Transform one interactive (REPL) input.

    Grammar: NEWLINE | simple_stmt | compound_stmt NEWLINE
    """
    first = node[0]
    # A lone NEWLINE (empty input line) becomes a no-op statement.
    if first[0] == token.NEWLINE:
        return Pass()
    return self.com_stmt(first)
Example #25
Source File: transformer.py From Splunking-Crime with GNU Affero General Public License v3.0 | 5 votes |
def file_input(self, nodelist):
    """Build a Module node for a whole source file."""
    doc = self.get_docstring(nodelist, symbol.file_input)
    # Skip the docstring child (if any) when collecting statements.
    start = 0 if doc is None else 1
    stmts = []
    for node in nodelist[start:]:
        # ENDMARKER / NEWLINE children carry no statements.
        if node[0] not in (token.ENDMARKER, token.NEWLINE):
            self.com_append_stmt(stmts, node)
    return Module(doc, Stmt(stmts))
Example #26
Source File: transformer.py From Splunking-Crime with GNU Affero General Public License v3.0 | 5 votes |
def single_input(self, node):
    """Transform one interactive (REPL) input.

    Grammar: NEWLINE | simple_stmt | compound_stmt NEWLINE
    """
    first = node[0]
    # A lone NEWLINE (empty input line) becomes a no-op statement.
    if first[0] == token.NEWLINE:
        return Pass()
    return self.com_stmt(first)
Example #27
Source File: transformer.py From Splunking-Crime with GNU Affero General Public License v3.0 | 5 votes |
def __init__(self):
    """Build the grammar-symbol -> handler-method dispatch tables."""
    # Map each symbol number to the same-named method on this class,
    # when one exists.
    self._dispatch = {}
    for sym_num, sym_name in symbol.sym_name.items():
        handler = getattr(self, sym_name, None)
        if handler is not None:
            self._dispatch[sym_num] = handler
    self._dispatch[token.NEWLINE] = self.com_NEWLINE
    # Dispatch on the leading token of an 'atom' node.
    self._atom_dispatch = {
        token.LPAR: self.atom_lpar,
        token.LSQB: self.atom_lsqb,
        token.LBRACE: self.atom_lbrace,
        token.BACKQUOTE: self.atom_backquote,
        token.NUMBER: self.atom_number,
        token.STRING: self.atom_string,
        token.NAME: self.atom_name,
    }
    self.encoding = None
Example #28
Source File: transformer.py From PokemonGo-DesktopMap with MIT License | 5 votes |
def file_input(self, nodelist):
    """Build a Module node for a whole source file."""
    doc = self.get_docstring(nodelist, symbol.file_input)
    # Skip the docstring child (if any) when collecting statements.
    start = 0 if doc is None else 1
    stmts = []
    for node in nodelist[start:]:
        # ENDMARKER / NEWLINE children carry no statements.
        if node[0] not in (token.ENDMARKER, token.NEWLINE):
            self.com_append_stmt(stmts, node)
    return Module(doc, Stmt(stmts))
Example #29
Source File: pygettext.py From datafari with Apache License 2.0 | 5 votes |
def __suitedocstring(self, ttype, tstring, lineno):
    """Grab a class/function docstring, skipping intervening noise tokens."""
    if ttype == tokenize.STRING:
        self.__addentry(safe_eval(tstring), lineno, isdocstring=1)
        self.__state = self.__waiting
        return
    if ttype not in (tokenize.NEWLINE, tokenize.INDENT, tokenize.COMMENT):
        # Something substantive appeared first -- there is no docstring.
        self.__state = self.__waiting
Example #30
Source File: transformer.py From CTFCrackTools-V2 with GNU General Public License v3.0 | 5 votes |
def com_NEWLINE(self, *args):
    # A trailing ';' on a line yields a NEWLINE token here; map it to a
    # harmless no-op statement.  (genc discards ('discard', ('const', xxxx))
    # nodes, so Discard(Const(None)) vanishes from the output.)
    harmless = Const(None)
    return Discard(harmless)