Python sys.maxunicode Examples
The following are 30 code examples of sys.maxunicode. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions and attributes of the sys module, or try the search function.
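Note that sys.maxunicode is a plain integer attribute, not a callable: it holds the largest code point the interpreter can store in a string. On Python 3.3 and later it is always 1114111 (0x10FFFF); on Python 2 it is 0x10FFFF in a "wide" (UCS-4) build and 0xFFFF in a "narrow" (UCS-2) build, which is why many of the examples below branch on its value. Here is a minimal illustrative sketch (not taken from any of the projects listed below):

import sys

# sys.maxunicode is an int attribute; it is read, never called.
print(sys.maxunicode)        # 1114111 on Python 3.3+ (and "wide" Python 2 builds)
print(hex(sys.maxunicode))   # 0x10ffff, or 0xffff on a "narrow" Python 2 build

# The build/feature check that recurs throughout the examples below:
if sys.maxunicode > 0xffff:
    print("wide build: code points up to U+10FFFF are stored directly")
else:
    print("narrow build: code points above U+FFFF require surrogate pairs")

# The largest representable character (use unichr() instead of chr() on Python 2):
print(repr(chr(sys.maxunicode)))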
Example #1
Source File: test_builtin.py From oss-ftp with MIT License | 6 votes |
def test_hasattr(self):
    import sys
    self.assertTrue(hasattr(sys, 'stdout'))
    self.assertRaises(TypeError, hasattr, sys, 1)
    self.assertRaises(TypeError, hasattr)
    if have_unicode:
        self.assertRaises(UnicodeError, hasattr, sys, unichr(sys.maxunicode))

    # Check that hasattr allows SystemExit and KeyboardInterrupts by
    class A:
        def __getattr__(self, what):
            raise KeyboardInterrupt
    self.assertRaises(KeyboardInterrupt, hasattr, A(), "b")
    class B:
        def __getattr__(self, what):
            raise SystemExit
    self.assertRaises(SystemExit, hasattr, B(), "b")
Example #2
Source File: test_unicode.py From qgis-cartodb with GNU General Public License v2.0 | 6 votes |
def test_invalid_escape_sequences(self):
    # incomplete escape sequence
    self.assertRaises(json.JSONDecodeError, json.loads, '"\\u')
    self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1')
    self.assertRaises(json.JSONDecodeError, json.loads, '"\\u12')
    self.assertRaises(json.JSONDecodeError, json.loads, '"\\u123')
    self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1234')
    # invalid escape sequence
    self.assertRaises(json.JSONDecodeError, json.loads, '"\\u123x"')
    self.assertRaises(json.JSONDecodeError, json.loads, '"\\u12x4"')
    self.assertRaises(json.JSONDecodeError, json.loads, '"\\u1x34"')
    self.assertRaises(json.JSONDecodeError, json.loads, '"\\ux234"')
    if sys.maxunicode > 65535:
        # invalid escape sequence for low surrogate
        self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u"')
        self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u0"')
        self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u00"')
        self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u000"')
        self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u000x"')
        self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u00x0"')
        self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\u0x00"')
        self.assertRaises(json.JSONDecodeError, json.loads, '"\\ud800\\ux000"')
Example #3
Source File: _pep425.py From oscrypto with MIT License | 6 votes |
def _pep425_get_abi():
    """
    :return:
        A unicode string of the system abi. Will be something like: "cp27m",
        "cp33m", etc.
    """

    try:
        soabi = sysconfig.get_config_var('SOABI')
        if soabi:
            if soabi.startswith('cpython-'):
                return 'cp%s' % soabi.split('-')[1]
            return soabi.replace('.', '_').replace('-', '_')
    except (IOError, NameError):
        pass

    impl = _pep425_implementation()
    suffix = ''
    if impl == 'cp':
        suffix += 'm'
    if sys.maxunicode == 0x10ffff and sys.version_info < (3, 3):
        suffix += 'u'
    return '%s%s%s' % (impl, ''.join(map(str_cls, _pep425_version())), suffix)
Example #4
Source File: test_builtinfunc.py From ironpython2 with Apache License 2.0 | 6 votes |
def test_unichr(self):
    #Added the following to resolve Codeplex WorkItem #3220.
    max_uni = sys.maxunicode
    self.assertTrue(max_uni==0xFFFF or max_uni==0x10FFFF)
    max_uni_plus_one = max_uni + 1
    huger_than_max = 100000
    max_ok_value = u'\uffff'

    #special case for WorkItem #3220
    if max_uni==0x10FFFF:
        huger_than_max = 10000000
        max_ok_value = u'\U0010FFFF'

    self.assertRaises(ValueError, unichr, -1)

    # arg must be in the range [0...65535] or [0...1114111] inclusive
    self.assertRaises(ValueError, unichr, max_uni_plus_one)
    self.assertRaises(ValueError, unichr, huger_than_max)

    self.assertTrue(unichr(0) == '\x00')
    self.assertTrue(unichr(max_uni) == max_ok_value)
Example #5
Source File: test_builtin.py From ironpython2 with Apache License 2.0 | 6 votes |
def test_hasattr(self):
    import sys
    self.assertTrue(hasattr(sys, 'stdout'))
    self.assertRaises(TypeError, hasattr, sys, 1)
    self.assertRaises(TypeError, hasattr)
    if have_unicode:
        self.assertRaises(UnicodeError, hasattr, sys, unichr(sys.maxunicode))

    # Check that hasattr allows SystemExit and KeyboardInterrupts by
    class A:
        def __getattr__(self, what):
            raise KeyboardInterrupt
    self.assertRaises(KeyboardInterrupt, hasattr, A(), "b")
    class B:
        def __getattr__(self, what):
            raise SystemExit
    self.assertRaises(SystemExit, hasattr, B(), "b")
Example #6
Source File: test_codeccallbacks.py From ironpython2 with Apache License 2.0 | 6 votes |
def test_backslashescape(self):
    # Does the same as the "unicode-escape" encoding, but with different
    # base encodings.
    sin = u"a\xac\u1234\u20ac\u8000"
    if sys.maxunicode > 0xffff:
        sin += unichr(sys.maxunicode)
    sout = "a\\xac\\u1234\\u20ac\\u8000"
    if sys.maxunicode > 0xffff:
        sout += "\\U%08x" % sys.maxunicode
    self.assertEqual(sin.encode("ascii", "backslashreplace"), sout)
    sout = "a\xac\\u1234\\u20ac\\u8000"
    if sys.maxunicode > 0xffff:
        sout += "\\U%08x" % sys.maxunicode
    self.assertEqual(sin.encode("latin-1", "backslashreplace"), sout)
    sout = "a\xac\\u1234\xa4\\u8000"
    if sys.maxunicode > 0xffff:
        sout += "\\U%08x" % sys.maxunicode
    self.assertEqual(sin.encode("iso-8859-15", "backslashreplace"), sout)
Example #7
Source File: test_codecs.py From ironpython2 with Apache License 2.0 | 6 votes |
def test_bug1251300(self):
    # Decoding with unicode_internal used to not correctly handle "code
    # points" above 0x10ffff on UCS-4 builds.
    if sys.maxunicode > 0xffff:
        ok = [
            ("\x00\x10\xff\xff", u"\U0010ffff"),
            ("\x00\x00\x01\x01", u"\U00000101"),
            ("", u""),
        ]
        not_ok = [
            "\x7f\xff\xff\xff",
            "\x80\x00\x00\x00",
            "\x81\x00\x00\x00",
            "\x00",
            "\x00\x00\x00\x00\x00",
        ]
        for internal, uni in ok:
            if sys.byteorder == "little":
                internal = "".join(reversed(internal))
            self.assertEqual(uni, internal.decode("unicode_internal"))
        for internal in not_ok:
            if sys.byteorder == "little":
                internal = "".join(reversed(internal))
            self.assertRaises(UnicodeDecodeError, internal.decode, "unicode_internal")
Example #8
Source File: test_builtin.py From BinderFilter with MIT License | 6 votes |
def test_hasattr(self):
    import sys
    self.assertTrue(hasattr(sys, 'stdout'))
    self.assertRaises(TypeError, hasattr, sys, 1)
    self.assertRaises(TypeError, hasattr)
    if have_unicode:
        self.assertRaises(UnicodeError, hasattr, sys, unichr(sys.maxunicode))

    # Check that hasattr allows SystemExit and KeyboardInterrupts by
    class A:
        def __getattr__(self, what):
            raise KeyboardInterrupt
    self.assertRaises(KeyboardInterrupt, hasattr, A(), "b")
    class B:
        def __getattr__(self, what):
            raise SystemExit
    self.assertRaises(SystemExit, hasattr, B(), "b")
Example #9
Source File: test_unicode.py From BinderFilter with MIT License | 6 votes |
def test_utf8_decode_valid_sequences(self):
    sequences = [
        # single byte
        ('\x00', u'\x00'), ('a', u'a'), ('\x7f', u'\x7f'),
        # 2 bytes
        ('\xc2\x80', u'\x80'), ('\xdf\xbf', u'\u07ff'),
        # 3 bytes
        ('\xe0\xa0\x80', u'\u0800'), ('\xed\x9f\xbf', u'\ud7ff'),
        ('\xee\x80\x80', u'\uE000'), ('\xef\xbf\xbf', u'\uffff'),
        # 4 bytes
        ('\xF0\x90\x80\x80', u'\U00010000'),
        ('\xf4\x8f\xbf\xbf', u'\U0010FFFF')
    ]
    for seq, res in sequences:
        self.assertEqual(seq.decode('utf-8'), res)

    for ch in map(unichr, range(0, sys.maxunicode)):
        self.assertEqual(ch, ch.encode('utf-8').decode('utf-8'))
Example #10
Source File: test_codeccallbacks.py From BinderFilter with MIT License | 6 votes |
def test_backslashescape(self):
    # Does the same as the "unicode-escape" encoding, but with different
    # base encodings.
    sin = u"a\xac\u1234\u20ac\u8000"
    if sys.maxunicode > 0xffff:
        sin += unichr(sys.maxunicode)
    sout = "a\\xac\\u1234\\u20ac\\u8000"
    if sys.maxunicode > 0xffff:
        sout += "\\U%08x" % sys.maxunicode
    self.assertEqual(sin.encode("ascii", "backslashreplace"), sout)
    sout = "a\xac\\u1234\\u20ac\\u8000"
    if sys.maxunicode > 0xffff:
        sout += "\\U%08x" % sys.maxunicode
    self.assertEqual(sin.encode("latin-1", "backslashreplace"), sout)
    sout = "a\xac\\u1234\xa4\\u8000"
    if sys.maxunicode > 0xffff:
        sout += "\\U%08x" % sys.maxunicode
    self.assertEqual(sin.encode("iso-8859-15", "backslashreplace"), sout)
Example #11
Source File: test_codecs.py From BinderFilter with MIT License | 6 votes |
def test_bug1251300(self):
    # Decoding with unicode_internal used to not correctly handle "code
    # points" above 0x10ffff on UCS-4 builds.
    if sys.maxunicode > 0xffff:
        ok = [
            ("\x00\x10\xff\xff", u"\U0010ffff"),
            ("\x00\x00\x01\x01", u"\U00000101"),
            ("", u""),
        ]
        not_ok = [
            "\x7f\xff\xff\xff",
            "\x80\x00\x00\x00",
            "\x81\x00\x00\x00",
            "\x00",
            "\x00\x00\x00\x00\x00",
        ]
        for internal, uni in ok:
            if sys.byteorder == "little":
                internal = "".join(reversed(internal))
            self.assertEqual(uni, internal.decode("unicode_internal"))
        for internal in not_ok:
            if sys.byteorder == "little":
                internal = "".join(reversed(internal))
            self.assertRaises(UnicodeDecodeError, internal.decode, "unicode_internal")
Example #12
Source File: test_codecs.py From BinderFilter with MIT License | 5 votes |
def test_decode_error_attributes(self):
    if sys.maxunicode > 0xffff:
        try:
            "\x00\x00\x00\x00\x00\x11\x11\x00".decode("unicode_internal")
        except UnicodeDecodeError, ex:
            self.assertEqual("unicode_internal", ex.encoding)
            self.assertEqual("\x00\x00\x00\x00\x00\x11\x11\x00", ex.object)
            self.assertEqual(4, ex.start)
            self.assertEqual(8, ex.end)
        else:
            self.fail()
Example #13
Source File: test_multibytecodec.py From BinderFilter with MIT License | 5 votes |
def test_bug1572832(self):
    if sys.maxunicode >= 0x10000:
        myunichr = unichr
    else:
        myunichr = lambda x: unichr(0xD7C0+(x>>10)) + unichr(0xDC00+(x&0x3FF))

    for x in xrange(0x10000, 0x110000):
        # Any ISO 2022 codec will cause the segfault
        myunichr(x).encode('iso_2022_jp', 'ignore')
Example #14
Source File: test_normalization.py From BinderFilter with MIT License | 5 votes |
def unistr(data):
    data = [int(x, 16) for x in data.split(" ")]
    for x in data:
        if x > sys.maxunicode:
            raise RangeError
    return u"".join([unichr(x) for x in data])
Example #15
Source File: compute_bleu.py From models with Apache License 2.0 | 5 votes |
def property_chars(self, prefix):
    return "".join(six.unichr(x) for x in range(sys.maxunicode)
                   if unicodedata.category(six.unichr(x)).startswith(prefix))
Example #16
Source File: compute_bleu.py From models with Apache License 2.0 | 5 votes |
def property_chars(self, prefix):
    return "".join(six.unichr(x) for x in range(sys.maxunicode)
                   if unicodedata.category(six.unichr(x)).startswith(prefix))
Example #17
Source File: test_codeccallbacks.py From BinderFilter with MIT License | 5 votes |
def test_translatehelper(self):
    # enhance coverage of:
    # Objects/unicodeobject.c::unicode_encode_call_errorhandler()
    # and callers
    # (Unfortunately the errors argument is not directly accessible
    # from Python, so we can't test that much)
    class D(dict):
        def __getitem__(self, key):
            raise ValueError
    self.assertRaises(ValueError, u"\xff".translate, D())
    self.assertRaises(TypeError, u"\xff".translate, {0xff: sys.maxunicode+1})
    self.assertRaises(TypeError, u"\xff".translate, {0xff: ()})
Example #18
Source File: compute_bleu.py From models with Apache License 2.0 | 5 votes |
def property_chars(self, prefix):
    return "".join(six.unichr(x) for x in range(sys.maxunicode)
                   if unicodedata.category(six.unichr(x)).startswith(prefix))
Example #19
Source File: pep425tags.py From FuYiSpider with Apache License 2.0 | 5 votes |
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi
Example #20
Source File: pep425tags.py From FuYiSpider with Apache License 2.0 | 5 votes |
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi
Example #21
Source File: bleu_hook.py From BERT with Apache License 2.0 | 5 votes |
def property_chars(self, prefix):
    return "".join(six.unichr(x) for x in range(sys.maxunicode)
                   if unicodedata.category(six.unichr(x)).startswith(prefix))
Example #22
Source File: pep425tags.py From vnpy_crypto with MIT License | 5 votes |
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi
Example #23
Source File: punctuation_chars.py From faces with GNU General Public License v2.0 | 5 votes |
def unicode_charlists(categories, cp_min=0, cp_max=None):
    """Return dictionary of Unicode character lists.

    For each of the `categories`, an item contains a list with all Unicode
    characters with `cp_min` <= code-point <= `cp_max` that belong to the
    category. The default values check every code-point supported by Python
    (`sys.maxunicode` is 0x10FFFF in a "wide" build and 0xFFFF in a "narrow"
    build, i.e. ucs4 and ucs2 respectively).
    """
    # Determine highest code point with one of the given categories
    # (may shorten the search time considerably if there are many
    # categories with not too high characters):
    if cp_max is None:
        cp_max = max(x for x in xrange(sys.maxunicode+1)
                     if unicodedata.category(unichr(x)) in categories)
        # print cp_max # => 74867 for unicode_punctuation_categories
    charlists = {}
    for cat in categories:
        charlists[cat] = [unichr(x) for x in xrange(cp_min, cp_max+1)
                          if unicodedata.category(unichr(x)) == cat]
    return charlists

# Character categories in Docutils
# --------------------------------
Example #24
Source File: punctuation_chars.py From faces with GNU General Public License v2.0 | 5 votes |
def unicode_charlists(categories, cp_min=0, cp_max=None):
    """Return dictionary of Unicode character lists.

    For each of the `categories`, an item contains a list with all Unicode
    characters with `cp_min` <= code-point <= `cp_max` that belong to the
    category. The default values check every code-point supported by Python
    (`sys.maxunicode` is 0x10FFFF in a "wide" build and 0xFFFF in a "narrow"
    build, i.e. ucs4 and ucs2 respectively).
    """
    # Determine highest code point with one of the given categories
    # (may shorten the search time considerably if there are many
    # categories with not too high characters):
    if cp_max is None:
        cp_max = max(x for x in range(sys.maxunicode+1)
                     if unicodedata.category(chr(x)) in categories)
        # print cp_max # => 74867 for unicode_punctuation_categories
    charlists = {}
    for cat in categories:
        charlists[cat] = [chr(x) for x in range(cp_min, cp_max+1)
                          if unicodedata.category(chr(x)) == cat]
    return charlists

# Character categories in Docutils
# --------------------------------
Example #25
Source File: pep425tags.py From kobo-predict with BSD 2-Clause "Simplified" License | 5 votes |
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi
Example #26
Source File: pep425tags.py From kobo-predict with BSD 2-Clause "Simplified" License | 5 votes |
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi
Example #27
Source File: pep425tags.py From jbox with MIT License | 5 votes |
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi
Example #28
Source File: utils.py From pdm with MIT License | 5 votes |
def get_abi_tag(python_version):
    # type: (Tuple[int, int]) -> Optional[str]
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy).
    A replacement for pip._internal.models.pep425tags:get_abi_tag()
    """
    from wheel.pep425tags import get_config_var, get_abbr_impl, get_flag

    soabi = get_config_var("SOABI")
    impl = get_abbr_impl()
    abi = None  # type: Optional[str]

    if not soabi and impl in {"cp", "pp"} and hasattr(sys, "maxunicode"):
        d = ""
        m = ""
        u = ""
        is_cpython = impl == "cp"
        if get_flag("Py_DEBUG", lambda: hasattr(sys, "gettotalrefcount"), warn=False):
            d = "d"
        if python_version < (3, 8) and get_flag(
            "WITH_PYMALLOC", lambda: is_cpython, warn=False
        ):
            m = "m"
        if python_version < (3, 3) and get_flag(
            "Py_UNICODE_SIZE",
            lambda: sys.maxunicode == 0x10FFFF,
            expected=4,
            warn=False,
        ):
            u = "u"
        abi = "%s%s%s%s%s" % (impl, "".join(map(str, python_version)), d, m, u)
    elif soabi and soabi.startswith("cpython-"):
        abi = "cp" + soabi.split("-")[1]
    elif soabi:
        abi = soabi.replace(".", "_").replace("-", "_")
    return abi
Example #29
Source File: pep425tags.py From Python24 with MIT License | 5 votes |
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi
Example #30
Source File: bleu_hook.py From tensor2tensor with Apache License 2.0 | 5 votes |
def property_chars(self, prefix):
    return "".join(six.unichr(x) for x in range(sys.maxunicode)
                   if unicodedata.category(six.unichr(x)).startswith(prefix))