Python doctest.NORMALIZE_WHITESPACE Examples
The following are 30
code examples using doctest.NORMALIZE_WHITESPACE (a doctest option flag, not a callable).
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module
doctest
, or try the search function.
Example #1
Source File: test_tutorials.py From sqlalchemy with MIT License | 7 votes |
def _run_doctest_for_content(self, name, content):
    """Parse *content* as a doctest and run it, failing on any error."""
    # Lenient comparison flags plus a project-specific unicode flag so
    # py2/py3 output differences are tolerated.
    flags = (
        doctest.ELLIPSIS
        | doctest.NORMALIZE_WHITESPACE
        | doctest.IGNORE_EXCEPTION_DETAIL
        | _get_allow_unicode_flag()
    )
    runner = doctest.DocTestRunner(
        verbose=None,
        optionflags=flags,
        checker=_get_unicode_checker(),
    )
    test = doctest.DocTestParser().get_doctest(
        content, {"print_function": print_function}, name, name, 0
    )
    runner.run(test)
    runner.summarize()
    assert not runner.failures
Example #2
Source File: documenter.py From schedula with European Union Public License 1.1 | 6 votes |
def _import_docstring(documenter):
    """Execute the doctest code attached to *documenter* and, on success,
    bind the resulting object onto the documenter.

    Returns True when the code ran cleanly; otherwise returns None and
    leaves the documenter untouched.
    """
    code_content = _import_docstring_code_content(documenter)
    if not code_content:
        return
    # noinspection PyBroadException
    try:
        code, content = code_content
        runner = DocTestRunner(
            verbose=0, optionflags=NORMALIZE_WHITESPACE | ELLIPSIS
        )
        glob = {}
        if documenter.modname:
            # Seed the doctest namespace with the documented module.
            exec('from %s import *\n' % documenter.modname, glob)
        tests = DocTestParser().get_doctest(code, glob, '', '', 0)
        runner.run(tests, clear_globs=False)
        documenter.object = tests.globs[documenter.name]
        documenter.code = content
        documenter.is_doctest = True
        return True
    except Exception:
        # Deliberate best-effort: any failure means "not a doctest".
        pass
Example #3
Source File: session.py From LearningApacheSpark with MIT License | 6 votes |
def _test():
    """Run the doctests in pyspark.sql.session against a local Spark.

    Exits the interpreter with status -1 if any doctest fails.
    """
    import os
    # BUG FIX: sys.exit() at the bottom needs ``sys`` in scope; every
    # other dependency is imported function-locally, so import it here
    # rather than relying on a module-level import we cannot see.
    import sys
    import doctest
    from pyspark.context import SparkContext
    from pyspark.sql import Row
    import pyspark.sql.session

    os.chdir(os.environ["SPARK_HOME"])
    globs = pyspark.sql.session.__dict__.copy()
    sc = SparkContext('local[4]', 'PythonTest')
    globs['sc'] = sc
    globs['spark'] = SparkSession(sc)
    globs['rdd'] = rdd = sc.parallelize(
        [Row(field1=1, field2="row1"),
         Row(field1=2, field2="row2"),
         Row(field1=3, field2="row3")])
    globs['df'] = rdd.toDF()
    (failure_count, test_count) = doctest.testmod(
        pyspark.sql.session, globs=globs,
        optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
    globs['sc'].stop()
    if failure_count:
        sys.exit(-1)
Example #4
Source File: column.py From LearningApacheSpark with MIT License | 6 votes |
def _test():
    """Doctest driver for pyspark.sql.column; exits -1 on failure."""
    import doctest
    from pyspark.sql import SparkSession
    import pyspark.sql.column

    globs = pyspark.sql.column.__dict__.copy()
    spark = (
        SparkSession.builder
        .master("local[4]")
        .appName("sql.column tests")
        .getOrCreate()
    )
    sc = spark.sparkContext
    globs['spark'] = spark
    # NOTE(review): StructType/StructField/IntegerType/StringType and
    # ``sys`` are expected to come from module-level imports in
    # pyspark.sql.column — confirm against the full file.
    schema = StructType([StructField('age', IntegerType()),
                         StructField('name', StringType())])
    globs['df'] = sc.parallelize([(2, 'Alice'), (5, 'Bob')]).toDF(schema)
    failure_count, test_count = doctest.testmod(
        pyspark.sql.column, globs=globs,
        optionflags=(doctest.ELLIPSIS
                     | doctest.NORMALIZE_WHITESPACE
                     | doctest.REPORT_NDIFF))
    spark.stop()
    if failure_count:
        sys.exit(-1)
Example #5
Source File: catalog.py From LearningApacheSpark with MIT License | 6 votes |
def _test():
    """Doctest driver for pyspark.sql.catalog; exits -1 on failure."""
    import os
    import doctest
    from pyspark.sql import SparkSession
    import pyspark.sql.catalog

    os.chdir(os.environ["SPARK_HOME"])
    globs = pyspark.sql.catalog.__dict__.copy()
    spark = (
        SparkSession.builder
        .master("local[4]")
        .appName("sql.catalog tests")
        .getOrCreate()
    )
    globs['sc'] = spark.sparkContext
    globs['spark'] = spark
    failure_count, test_count = doctest.testmod(
        pyspark.sql.catalog, globs=globs,
        optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
    spark.stop()
    if failure_count:
        sys.exit(-1)
Example #6
Source File: udf.py From LearningApacheSpark with MIT License | 6 votes |
def _test():
    """Doctest driver for pyspark.sql.udf; exits -1 on failure."""
    import doctest
    from pyspark.sql import SparkSession
    import pyspark.sql.udf

    globs = pyspark.sql.udf.__dict__.copy()
    spark = (
        SparkSession.builder
        .master("local[4]")
        .appName("sql.udf tests")
        .getOrCreate()
    )
    globs['spark'] = spark
    failure_count, test_count = doctest.testmod(
        pyspark.sql.udf, globs=globs,
        optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
    spark.stop()
    if failure_count:
        sys.exit(-1)
Example #7
Source File: functions.py From LearningApacheSpark with MIT License | 6 votes |
def _test():
    """Doctest driver for pyspark.sql.functions; exits -1 on failure."""
    import doctest
    from pyspark.sql import Row, SparkSession
    import pyspark.sql.functions

    globs = pyspark.sql.functions.__dict__.copy()
    spark = (
        SparkSession.builder
        .master("local[4]")
        .appName("sql.functions tests")
        .getOrCreate()
    )
    sc = spark.sparkContext
    globs['sc'] = sc
    globs['spark'] = spark
    globs['df'] = spark.createDataFrame(
        [Row(name='Alice', age=2), Row(name='Bob', age=5)])
    failure_count, test_count = doctest.testmod(
        pyspark.sql.functions, globs=globs,
        optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
    spark.stop()
    if failure_count:
        sys.exit(-1)
Example #8
Source File: test_integration.py From cr8 with MIT License | 6 votes |
def load_tests(loader, tests, ignore):
    """unittest ``load_tests`` hook: attach the README.rst doctests,
    backed by a running node and an ``sh`` shell helper in the globals."""
    env = os.environ.copy()
    env['CR8_NO_TQDM'] = 'True'
    node.start()
    assert node.http_host, "http_url must be available"
    # Pre-bake a subprocess.run wrapper so doctests can call sh('...').
    run_shell = functools.partial(
        subprocess.run,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        timeout=60,
        shell=True,
        env=env,
    )
    suite = doctest.DocFileSuite(
        os.path.join('..', 'README.rst'),
        globs={'sh': run_shell},
        optionflags=doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS,
        setUp=setup,
        tearDown=teardown,
        parser=Parser(),
    )
    tests.addTests(suite)
    return tests
Example #9
Source File: _test.py From guildai with Apache License 2.0 | 6 votes |
def run_test_file(filename, globs):
    """Run one guild test file with the standard set of option flags."""
    # Stock doctest flags combined with guild's custom output-rewriting
    # flags (path normalization, platform and py2/py3 shims).
    flags = (
        _report_first_flag()
        | doctest.ELLIPSIS
        | doctest.NORMALIZE_WHITESPACE
        | NORMALIZE_PATHS
        | WINDOWS
        | STRIP_U
        | STRIP_L
        | STRIP_ANSI_FMT
        | PY2
        | PY3
    )
    return run_test_file_with_config(filename, globs=globs, optionflags=flags)
Example #10
Source File: test_cli.py From maas with GNU Affero General Public License v3.0 | 6 votes |
def test_print_whats_next(self):
    """print_whats_next() should name the profile and suggest --help."""
    profile = {
        "name": factory.make_name("profile"),
        "url": factory.make_name("url"),
    }
    # Capture stdout so the printed banner can be matched.
    stdout = self.patch(sys, "stdout", StringIO())
    cli.cmd_login.print_whats_next(profile)
    # NOTE(review): exact line breaks of this template were lost in the
    # page extraction; NORMALIZE_WHITESPACE below makes the match
    # insensitive to that — confirm against the original file.
    expected = dedent(
        """\
        You are now logged in to the MAAS server at %(url)s
        with the profile name '%(name)s'.

        For help with the available commands, try:

          maas %(name)s --help
        """
    ) % profile
    observed = stdout.getvalue()
    flags = doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE
    self.assertThat(observed, DocTestMatches(expected, flags))
Example #11
Source File: ipdoctest.py From Computable with MIT License | 6 votes |
def makeTest(self, obj, parent):
    """Look for doctests in the given object, which will be a
    function, method or class.
    """
    # Always compare with whitespace normalization and ellipsis.
    optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
    for test in self.finder.find(obj, module=getmodule(parent)):
        if not test.examples:
            # Skip doctest objects that carry no runnable examples.
            continue
        yield DocTestCase(
            test, obj=obj, optionflags=optionflags, checker=self.checker
        )
Example #12
Source File: test_interface.py From python-for-android with Apache License 2.0 | 6 votes |
def test_suite():
    """Build the unittest suite: unit tests plus README doctests."""
    suite = unittest.makeSuite(InterfaceTests)
    suite.addTest(doctest.DocTestSuite("zope.interface.interface"))
    if sys.version_info >= (2, 4):
        suite.addTest(doctest.DocTestSuite())
        flags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
        # Same configuration for both the English and Russian READMEs.
        for readme in ('../README.txt', '../README.ru.txt'):
            suite.addTest(doctest.DocFileSuite(
                readme,
                globs={'__name__': '__main__'},
                optionflags=flags,
            ))
    return suite
Example #13
Source File: session.py From tidb-docker-compose with Apache License 2.0 | 6 votes |
def _test():
    """Doctest driver for pyspark.sql.session; exits -1 on failure."""
    import os
    import doctest
    from pyspark.context import SparkContext
    from pyspark.sql import Row
    import pyspark.sql.session

    os.chdir(os.environ["SPARK_HOME"])
    globs = pyspark.sql.session.__dict__.copy()
    sc = SparkContext('local[4]', 'PythonTest')
    globs['sc'] = sc
    globs['spark'] = SparkSession(sc)
    rows = [Row(field1=1, field2="row1"),
            Row(field1=2, field2="row2"),
            Row(field1=3, field2="row3")]
    globs['rdd'] = rdd = sc.parallelize(rows)
    globs['df'] = rdd.toDF()
    failure_count, test_count = doctest.testmod(
        pyspark.sql.session, globs=globs,
        optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
    globs['sc'].stop()
    if failure_count:
        # Builtin exit(), as in the original variant of this driver.
        exit(-1)
Example #14
Source File: session.py From tidb-docker-compose with Apache License 2.0 | 6 votes |
def _test():
    """Run the doctests in pyspark.sql.session against a local Spark.

    Exits the interpreter with status -1 if any doctest fails.
    """
    import os
    # BUG FIX: sys.exit() at the bottom needs ``sys`` in scope; every
    # other dependency is imported function-locally, so import it here
    # rather than relying on a module-level import we cannot see.
    import sys
    import doctest
    from pyspark.context import SparkContext
    from pyspark.sql import Row
    import pyspark.sql.session

    os.chdir(os.environ["SPARK_HOME"])
    globs = pyspark.sql.session.__dict__.copy()
    sc = SparkContext('local[4]', 'PythonTest')
    globs['sc'] = sc
    globs['spark'] = SparkSession(sc)
    globs['rdd'] = rdd = sc.parallelize(
        [Row(field1=1, field2="row1"),
         Row(field1=2, field2="row2"),
         Row(field1=3, field2="row3")])
    globs['df'] = rdd.toDF()
    (failure_count, test_count) = doctest.testmod(
        pyspark.sql.session, globs=globs,
        optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
    globs['sc'].stop()
    if failure_count:
        sys.exit(-1)
Example #15
Source File: test_blue.py From schedula with European Union Public License 1.1 | 5 votes |
def runTest(self):
    """Run the doctests in schedula.utils.blue; all must pass."""
    import doctest
    import schedula.utils.blue as utl

    flags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
    failures, total = doctest.testmod(utl, optionflags=flags)
    self.assertGreater(total, 0, (failures, total))
    self.assertEqual(failures, 0, (failures, total))
Example #16
Source File: test_base.py From schedula with European Union Public License 1.1 | 5 votes |
def runTest(self):
    """Run the doctests in schedula.utils.base; all must pass."""
    import doctest
    import schedula.utils.base as utl

    flags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
    failures, total = doctest.testmod(utl, optionflags=flags)
    self.assertGreater(total, 0, (failures, total))
    self.assertEqual(failures, 0, (failures, total))
Example #17
Source File: test_io.py From schedula with European Union Public License 1.1 | 5 votes |
def runTest(self):
    """Run the doctests in schedula.utils.io; all must pass."""
    import doctest
    import schedula.utils.io as utl

    flags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
    failures, total = doctest.testmod(utl, optionflags=flags)
    self.assertGreater(total, 0, (failures, total))
    self.assertEqual(failures, 0, (failures, total))
Example #18
Source File: test_asy.py From schedula with European Union Public License 1.1 | 5 votes |
def runTest(self):
    """Run the doctests in schedula.utils.asy; all must pass."""
    import doctest
    import schedula.utils.asy as asy

    flags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
    failures, total = doctest.testmod(asy, optionflags=flags)
    self.assertGreater(total, 0, (failures, total))
    self.assertEqual(failures, 0, (failures, total))
Example #19
Source File: test_gen.py From schedula with European Union Public License 1.1 | 5 votes |
def runTest(self):
    """Run the doctests in schedula.utils.gen; all must pass."""
    import doctest
    import schedula.utils.gen as utl

    flags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
    failures, total = doctest.testmod(utl, optionflags=flags)
    self.assertGreater(total, 0, (failures, total))
    self.assertEqual(failures, 0, (failures, total))
Example #20
Source File: test_dispatcher.py From schedula with European Union Public License 1.1 | 5 votes |
def runTest(self):
    """Run the doctests in schedula.dispatcher; all must pass."""
    import doctest
    import schedula.dispatcher as dsp

    flags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
    failures, total = doctest.testmod(dsp, optionflags=flags)
    self.assertGreater(total, 0, (failures, total))
    self.assertEqual(failures, 0, (failures, total))
Example #21
Source File: test_alg.py From schedula with European Union Public License 1.1 | 5 votes |
def runTest(self):
    """Run the doctests in schedula.utils.alg; all must pass."""
    import doctest
    import schedula.utils.alg as dsp

    flags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
    failures, total = doctest.testmod(dsp, optionflags=flags)
    self.assertGreater(total, 0, (failures, total))
    self.assertEqual(failures, 0, (failures, total))
Example #22
Source File: testdocs.py From PyCRS with MIT License | 5 votes |
def test(**kwargs): doctest.NORMALIZE_WHITESPACE = 1 verbosity = kwargs.get('verbose', 0) if verbosity == 0: print('Running doctests...') # ignore py2-3 unicode differences import re class Py23DocChecker(doctest.OutputChecker): def check_output(self, want, got, optionflags): if sys.version_info[0] == 2: got = re.sub("u'(.*?)'", "'\\1'", got) got = re.sub('u"(.*?)"', '"\\1"', got) res = doctest.OutputChecker.check_output(self, want, got, optionflags) return res def summarize(self): doctest.OutputChecker.summarize(True) # run tests runner = doctest.DocTestRunner(checker=Py23DocChecker(), verbose=verbosity) with open('README.md') as r: doc = r.read() test = doctest.DocTestParser().get_doctest(string=doc, globs={}, name="__init__", filename="__init__.py", lineno=0) failure_count, test_count = runner.run(test) # print results if verbosity: runner.summarize(True) else: if failure_count == 0: print('All test passed successfully') elif failure_count > 0: runner.summarize(verbosity) return failure_count
Example #23
Source File: test_readme.py From schedula with European Union Public License 1.1 | 5 votes |
def runTest(self):
    """Run the doctests in ../README.rst; all must pass."""
    import doctest

    failures, total = doctest.testfile(
        '../README.rst',
        optionflags=doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS,
    )
    self.assertGreater(total, 0, (failures, total))
    self.assertEqual(failures, 0, (failures, total))
Example #24
Source File: testing.py From numdifftools with BSD 3-Clause "New" or "Revised" License | 5 votes |
def test_docstrings(name=''):
    """Run the doctests of the calling module, reporting under *name*."""
    # np.set_printoptions(precision=6)
    import doctest
    if not name:
        # Default to the caller's file name for the progress message.
        name = inspect.stack()[1][1]
    print('Testing docstrings in {}'.format(name))
    doctest.testmod(
        optionflags=doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS)
Example #25
Source File: test_1_doctest_crystal.py From PyChemia with MIT License | 5 votes |
def test_lattice():
    """
    DocTests (pychemia.crystal.lattice)                          :
    """
    import pychemia.crystal.lattice
    results = doctest.testmod(
        pychemia.crystal.lattice, verbose=True,
        optionflags=doctest.NORMALIZE_WHITESPACE)
    assert results.failed == 0
Example #26
Source File: __init__.py From python-libmaas with GNU Affero General Public License v3.0 | 5 votes |
def assertDocTestMatches(self, expected, observed, flags=None):
    """See if `observed` matches `expected`, a doctest sample.

    By default uses the doctest flags `NORMALIZE_WHITESPACE` and
    `ELLIPSIS`.
    """
    effective = (
        doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
        if flags is None else flags
    )
    self.assertThat(observed, DocTestMatches(expected, effective))
Example #27
Source File: test_doctest.py From test_driven_python with MIT License | 5 votes |
def load_tests(loader, tests, pattern):
    """unittest hook: attach the stock module's doctests and the
    readme.txt doctest file to the discovered test suite."""
    doctest_globs = {
        "datetime": datetime,
        "Stock": stock.Stock,
    }
    tests.addTests(doctest.DocTestSuite(
        stock, globs=doctest_globs, setUp=setup_stock_doctest))
    options = doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE
    tests.addTests(doctest.DocFileSuite(
        "readme.txt", package="stock_alerter", optionflags=options))
    return tests
Example #28
Source File: test_tags.py From maas with GNU Affero General Public License v3.0 | 5 votes |
def assertDocTestMatches(self, expected, observed):
    """Assert that *observed* matches the dedented doctest sample."""
    flags = doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE
    matcher = DocTestMatches(dedent(expected), flags)
    return self.assertThat(observed, matcher)
Example #29
Source File: testutils.py From odl with Mozilla Public License 2.0 | 5 votes |
def run_doctests(skip_if=False, **kwargs):
    """Run all doctests in the current module.

    This function calls ``doctest.testmod()``, by default with the options
    ``optionflags=doctest.NORMALIZE_WHITESPACE`` and
    ``extraglobs={'odl': odl, 'np': np}``. This can be changed with
    keyword arguments.

    Parameters
    ----------
    skip_if : bool
        For ``True``, skip the doctests in this module.
    kwargs :
        Extra keyword arguments passed on to the ``doctest.testmod``
        function.
    """
    from doctest import testmod, NORMALIZE_WHITESPACE, SKIP
    from packaging.version import parse as parse_version
    import odl
    import numpy as np

    flags = kwargs.pop('optionflags', NORMALIZE_WHITESPACE)
    if skip_if:
        flags |= SKIP
    extraglobs = kwargs.pop('extraglobs', {'odl': odl, 'np': np})

    if run_from_ipython():
        try:
            import spyder
        except ImportError:
            pass
        else:
            # Old Spyder + IPython combination could silently prevent
            # doctests from running; warn rather than fail.
            if parse_version(spyder.__version__) < parse_version('3.1.4'):
                warnings.warn('A bug with IPython and Spyder < 3.1.4 '
                              'sometimes causes doctests to fail to run. '
                              'Please upgrade Spyder or use another '
                              'interpreter if the doctests do not work.',
                              RuntimeWarning)

    testmod(optionflags=flags, extraglobs=extraglobs, **kwargs)
Example #30
Source File: shapefile.py From BlenderGIS with GNU General Public License v3.0 | 5 votes |
def test(**kwargs): import doctest doctest.NORMALIZE_WHITESPACE = 1 verbosity = kwargs.get('verbose', 0) if verbosity == 0: print('Running doctests...') # ignore py2-3 unicode differences import re class Py23DocChecker(doctest.OutputChecker): def check_output(self, want, got, optionflags): if sys.version_info[0] == 2: got = re.sub("u'(.*?)'", "'\\1'", got) got = re.sub('u"(.*?)"', '"\\1"', got) res = doctest.OutputChecker.check_output(self, want, got, optionflags) return res def summarize(self): doctest.OutputChecker.summarize(True) # run tests runner = doctest.DocTestRunner(checker=Py23DocChecker(), verbose=verbosity) with open("README.md","rb") as fobj: test = doctest.DocTestParser().get_doctest(string=fobj.read().decode("utf8").replace('\r\n','\n'), globs={}, name="README", filename="README.md", lineno=0) failure_count, test_count = runner.run(test) # print results if verbosity: runner.summarize(True) else: if failure_count == 0: print('All test passed successfully') elif failure_count > 0: runner.summarize(verbosity) return failure_count