Python dockerfile_parse.DockerfileParser() Examples

The following are 30 code examples of dockerfile_parse.DockerfileParser(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module dockerfile_parse, or try the search function.
Example #1
Source File: test_parser.py    From dockerfile-parse with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_context_structure_mixed_top_arg(self, tmpdir):
        """ARG context per instruction: values before FROM come from
        defaults/build_args, FROM resets the context, and re-declared
        ARGs resolve afterwards."""
        parser = DockerfileParser(
            str(tmpdir.realpath()),
            build_args={"version": "8", "key": "value❤"},
            env_replace=True)
        parser.content = dedent("""\
            ARG image=centos
            ARG version=latest
            FROM $image:$version
            ARG image
            ARG key
            """)
        contexts = parser.context_structure

        # One context per instruction line, in order.
        expected_args = [
            {"image": "centos"},
            {"image": "centos", "version": "8"},
            {},  # FROM resets the ARG context
            {"image": "centos"},
            {"image": "centos", "key": "value❤"},
        ]
        assert len(contexts) == len(expected_args)
        for context, expected in zip(contexts, expected_args):
            assert context.get_values(context_type='ARG') == expected
Example #2
Source File: test_plugin.py    From atomic-reactor with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def mock_workflow(tmpdir):
    """Build a DockerBuildWorkflow whose builder is fully mocked out.

    Stubs DockerfileParser.content and every builder attribute the plugins
    under test touch, so no real docker interaction happens.

    :param tmpdir: temporary directory fixture (unused here, kept for callers)
    :return: DockerBuildWorkflow with a mocked builder attached
    """
    if MOCK:
        mock_docker()

    workflow = DockerBuildWorkflow('test-image', source=SOURCE)
    # Plain attribute assignment instead of setattr() with constant names
    # (flake8-bugbear B010); behavior is identical.
    workflow.builder = X()
    flexmock(DockerfileParser, content='df_content')
    # Builder methods the plugins call, stubbed to canned results.
    workflow.builder.get_built_image_info = flexmock()
    flexmock(workflow.builder, get_built_image_info={'Id': 'some'})
    workflow.builder.ensure_not_built = flexmock()
    flexmock(workflow.builder, ensure_not_built=None)
    workflow.builder.image_id = 'image-id'
    workflow.builder.source = flexmock(
        dockerfile_path='dockerfile-path',
        path='path',
        config=flexmock(image_build_method=None),
    )
    workflow.builder.df_path = 'df_path'
    workflow.builder.image = flexmock()
    workflow.builder.image.to_str = lambda: 'image'
    workflow.builder.base_image = flexmock()
    workflow.builder.base_image.to_str = lambda: 'base-image'

    return workflow
Example #3
Source File: test_parser.py    From dockerfile-parse with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_get_parent_env(self, tmpdir):
        """ENVs inherited via parent_env resolve $references but are not
        counted in .envs."""
        workdir = str(tmpdir.realpath())
        parent_env = {"bar": "baz❤"}
        parser = DockerfileParser(workdir, env_replace=True, parent_env=parent_env)
        parser.lines = [
            "FROM parent\n",
            "ENV foo=\"$bar\"\n",
            "LABEL label=\"$foo $bar\"\n"
        ]

        # .envs reflects only ENVs defined in *this* Dockerfile; parent_env
        # exists solely so inherited references like $bar can be substituted.
        assert len(parser.envs) == 1
        assert parser.envs.get('foo') == 'baz❤'
        assert len(parser.labels) == 1
        assert parser.labels.get('label') == 'baz❤ baz❤'
Example #4
Source File: test_inner.py    From atomic-reactor with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_source_not_removed_for_exit_plugins():
    """The build source must still be available when exit plugins run."""
    # Stub Dockerfile parsing and the inside builder so no real docker is used.
    flexmock(DockerfileParser, content='df_content')
    this_file = inspect.getfile(PreRaises)
    mock_docker()
    fake_builder = MockInsideBuilder()
    flexmock(InsideBuilder).new_instances(fake_builder)
    watch_exit = Watcher()
    watch_buildstep = Watcher()
    # 'uses_source' runs at exit and would fail if the source were removed.
    workflow = DockerBuildWorkflow('test-image', source=SOURCE,
                                   exit_plugins=[{'name': 'uses_source',
                                                  'args': {
                                                      'watcher': watch_exit,
                                                  }}],
                                   buildstep_plugins=[{'name': 'buildstep_watched',
                                                       'args': {
                                                           'watcher': watch_buildstep,
                                                       }}],
                                   plugin_files=[this_file])

    workflow.build_docker_image()

    # Make sure that the plugin was actually run
    assert watch_exit.was_called()
Example #5
Source File: test_parser.py    From dockerfile-parse with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_dockerfileparser(self, dfparser, tmpdir):
        """Content and lines round-trip through the parser and stay unicode."""
        df_content = dedent("""\
            FROM fedora
            LABEL label={0}""".format(NON_ASCII))
        df_lines = ["FROM fedora\n", "LABEL label={0}".format(NON_ASCII)]

        # Setting .content must be reflected in both .content and .lines.
        dfparser.content = ""
        dfparser.content = df_content
        assert dfparser.content == df_content
        assert dfparser.lines == df_lines
        # BUG FIX: `assert [isinstance(...) for ...]` asserted on a non-empty
        # list, which is always truthy and could never fail; use all().
        assert all(isinstance(line, six.text_type) for line in dfparser.lines)

        # Setting .lines must round-trip the same way.
        dfparser.content = ""
        dfparser.lines = df_lines
        assert dfparser.content == df_content
        assert dfparser.lines == df_lines
        assert all(isinstance(line, six.text_type) for line in dfparser.lines)

        # Parsing an on-disk Dockerfile written as UTF-8 yields the same view.
        dockerfile = os.path.join(str(tmpdir), 'Dockerfile')
        with open(dockerfile, 'wb') as fp:
            fp.write(df_content.encode('utf-8'))
        dfparser = DockerfileParser(dockerfile)
        assert dfparser.content == df_content
        assert dfparser.lines == df_lines
        assert all(isinstance(line, six.text_type) for line in dfparser.lines)
Example #6
Source File: test_parser.py    From dockerfile-parse with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_get_build_args(self, tmpdir):
        """Only ARGs re-declared after FROM count in .args; their values
        come from pre-FROM defaults or the supplied build_args."""
        workdir = str(tmpdir.realpath())
        build_args = {"bar": "baz❤"}
        parser = DockerfileParser(workdir, env_replace=True, build_args=build_args)
        parser.lines = [
            "ARG foo=\"baz❤\"\n",
            "ARG not=\"used\"\n",
            "FROM parent\n",
            "ARG foo\n",
            "ARG bar\n",
            "LABEL label=\"$foo $bar\"\n"
        ]

        # .args counts only the ARGs declared in *this* Dockerfile after
        # FROM; build_args merely supplies values used during parsing.
        assert len(parser.args) == 2
        assert parser.args.get('foo') == 'baz❤'
        assert parser.args.get('bar') == 'baz❤'
        assert len(parser.labels) == 1
        assert parser.labels.get('label') == 'baz❤ baz❤'
Example #7
Source File: test_buildah.py    From atomic-reactor with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_failed_build(workflow):
    """A failing buildah process yields a failed BuildResult carrying its output."""
    cmd_output = "spam spam spam spam spam spam spam baked beans spam spam spam and spam\n"
    cmd_error = "Nobody expects the Spanish Inquisition!\n"
    # Fake the buildah subprocess: emits output+error on stdout, exits non-zero.
    ib_process = flexmock(
        stdout=StringIO(cmd_output + cmd_error),
        poll=lambda: True,
        returncode=1,
    )
    flexmock(subprocess).should_receive('Popen').and_return(ib_process)

    flexmock(DockerfileParser, content='df_content')
    fake_builder = MockInsideBuilder(image_id='abcde')
    flexmock(InsideBuilder).new_instances(fake_builder)
    with pytest.raises(PluginFailedException):
        workflow.build_docker_image()

    # Both streams end up in the logs; the error also becomes the fail reason.
    assert isinstance(workflow.build_result, BuildResult)
    assert workflow.build_result.is_failed()
    assert cmd_output in workflow.build_result.logs
    assert cmd_error in workflow.build_result.logs
    assert cmd_error in workflow.build_result.fail_reason
    assert workflow.build_result.skip_layer_squash is False
Example #8
Source File: test_imagebuilder.py    From atomic-reactor with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_failed_build(workflow):
    """A failing imagebuilder process yields a failed BuildResult carrying its output."""
    cmd_output = "spam spam spam spam spam spam spam baked beans spam spam spam and spam\n"
    cmd_error = "Nobody expects the Spanish Inquisition!\n"
    # Fake the imagebuilder subprocess: emits output+error on stdout, exits non-zero.
    ib_process = flexmock(
        stdout=StringIO(cmd_output + cmd_error),
        poll=lambda: True,
        returncode=1,
    )
    flexmock(subprocess).should_receive('Popen').and_return(ib_process)

    flexmock(DockerfileParser, content='df_content')
    fake_builder = MockInsideBuilder(image_id='abcde')
    flexmock(InsideBuilder).new_instances(fake_builder)
    with pytest.raises(PluginFailedException):
        workflow.build_docker_image()

    # Both streams end up in the logs; the error also becomes the fail reason.
    assert isinstance(workflow.build_result, BuildResult)
    assert workflow.build_result.is_failed()
    assert cmd_output in workflow.build_result.logs
    assert cmd_error in workflow.build_result.logs
    assert cmd_error in workflow.build_result.fail_reason
    assert workflow.build_result.skip_layer_squash is False
Example #9
Source File: test_buildah.py    From atomic-reactor with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_popen_cmd(docker_tasker, workflow, image_id):
    """
    tests buildah build plugin working
    """
    flexmock(DockerfileParser, content='df_content')
    fake_builder = MockInsideBuilder(image_id=image_id)
    fake_builder.tasker = docker_tasker
    mock_docker_tasker(docker_tasker)
    flexmock(InsideBuilder).new_instances(fake_builder)

    # Replace buildah's Popen with an `echo -n` that reproduces canned output.
    cmd_output = "spam spam spam spam spam spam spam baked beans spam spam spam and spam"
    real_popen = subprocess.Popen
    flexmock(subprocess, Popen=lambda *_, **kw: real_popen(['echo', '-n', cmd_output], **kw))
    workflow.build_docker_image()

    # Successful buildah_bud build: valid sha256 image id, squash skipped,
    # one exported image, and the subprocess output captured in the logs.
    assert isinstance(workflow.buildstep_result['buildah_bud'], BuildResult)
    assert workflow.build_result == workflow.buildstep_result['buildah_bud']
    assert not workflow.build_result.is_failed()
    assert workflow.build_result.image_id.startswith('sha256:')
    assert workflow.build_result.image_id.count(':') == 1
    assert workflow.build_result.skip_layer_squash
    assert len(workflow.exported_image_sequence) == 1
    assert cmd_output in workflow.build_result.logs
Example #10
Source File: test_parser.py    From dockerfile-parse with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_constructor_cache(self, tmpdir):
        """Constructing with cache_content=True caches the file contents."""
        workdir = str(tmpdir.realpath())
        writer = DockerfileParser(workdir)
        writer.lines = ["From fedora:latest\n", "LABEL a b\n"]

        # Second positional argument is cache_content.
        cached = DockerfileParser(workdir, True)
        assert cached.cached_content
Example #11
Source File: bangtext.py    From binaryanalysis-ng with GNU Affero General Public License v3.0 5 votes vote down vote up
def unpack_dockerfile(fileresult, scanenvironment, offset, unpackdir):
    '''Verify a Dockerfile.

    dockerfile_parse only accepts a file named "Dockerfile", so files with
    other names are first copied into a temporary directory under that name.

    :param fileresult: scan result object for the candidate file
    :param scanenvironment: scan environment (paths, temp directory)
    :param offset: offset of the data in the file (reported back on error)
    :param unpackdir: unused here; part of the unpacker interface
    :return: dict with 'status' plus either scan results or an 'error' dict
    '''
    filesize = fileresult.filesize
    filename_full = scanenvironment.unpack_path(fileresult.filename)
    unpackedfilesandlabels = []
    labels = []

    renamed = False
    dockerdir = None
    if not filename_full.name.endswith('Dockerfile'):
        dockerdir = pathlib.Path(tempfile.mkdtemp(dir=scanenvironment.temporarydirectory))
        shutil.copy(filename_full, dockerdir / 'Dockerfile')
        dockerfileparser = dockerfile_parse.DockerfileParser(str(dockerdir / 'Dockerfile'))
        renamed = True
    else:
        dockerfileparser = dockerfile_parse.DockerfileParser(str(filename_full))

    try:
        # Accessing .content forces the parse; any failure means the file
        # is not a valid Dockerfile.
        dockerfileparser.content
    except Exception:
        unpackingerror = {'offset': offset, 'fatal': False,
                          'reason': 'not a valid Dockerfile'}
        return {'status': False, 'error': unpackingerror}
    finally:
        # Clean up the temporary copy on both success and failure paths
        # (previously duplicated in two branches).
        if renamed:
            shutil.rmtree(dockerdir)

    labels.append('dockerfile')
    return {'status': True, 'length': filesize, 'labels': labels,
            'filesandlabels': unpackedfilesandlabels}
Example #12
Source File: fixtures.py    From dockerfile-parse with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def dfparser(tmpdir, request):
    """Fixture building a DockerfileParser from either a file object or a
    temporary directory.

    :param tmpdir: already existing fixture defined in pytest
    :param request: parameter, (use_fileobj, cache_content) args to DockerfileParser
    :return: DockerfileParser instance
    """
    use_fileobj, cache_content = request.param
    if use_fileobj:
        # In-memory Dockerfile backed by a bytes buffer.
        return DockerfileParser(fileobj=six.BytesIO(), cache_content=cache_content)
    # On-disk Dockerfile inside the pytest temporary directory.
    return DockerfileParser(path=str(tmpdir.realpath()), cache_content=cache_content)
Example #13
Source File: test_parser.py    From dockerfile-parse with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_dockerfileparser_exceptions(self, tmpdir):
        """Reading or writing content/lines raises IOError when the
        Dockerfile directory does not exist."""
        df_content = dedent("""\
            FROM fedora
            LABEL label={0}""".format(NON_ASCII))
        df_lines = ["FROM fedora\n", "LABEL label={0}".format(NON_ASCII)]

        # Point the parser at a directory that was never created.
        parser = DockerfileParser(os.path.join(str(tmpdir), 'no-directory'))
        with pytest.raises(IOError):
            assert parser.content
        with pytest.raises(IOError):
            parser.content = df_content
        with pytest.raises(IOError):
            assert parser.lines
        with pytest.raises(IOError):
            parser.lines = df_lines
Example #14
Source File: test_parser.py    From dockerfile-parse with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_internal_exceptions(self, tmpdir):
        """Internal helpers reject unknown instruction names with ValueError."""
        parser = DockerfileParser(str(tmpdir))
        with pytest.raises(ValueError):
            parser._instruction_getter('FOO', env_replace=True)
        with pytest.raises(ValueError):
            parser._instructions_setter('FOO', {})
        with pytest.raises(ValueError):
            parser._modify_instruction_label_env('FOO', 'key', 'value')
Example #15
Source File: throughput-test-harness.py    From osbs-client with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def bump_release(df_path, branch):
    """Increment the numeric suffix of the Dockerfile's Release label.

    :param df_path: path to the Dockerfile to modify in place
    :param branch: unused; kept for call-site compatibility
    :return: the new release string written to the label
    :raises RuntimeError: if the label is missing or has no numeric suffix
    """
    parser = DockerfileParser(df_path)
    oldrelease = parser.labels["Release"]
    if not oldrelease:
        raise RuntimeError("Dockerfile has no Release label")

    m = re.match(r"(.*\D)?(\d+)", oldrelease)
    if not m:
        raise RuntimeError("Release does not end with number")

    num = int(m.group(2))
    # BUG FIX: group(1) is None when the release is all digits (e.g. "3"),
    # which previously produced strings like "None004"; use an empty prefix.
    prefix = m.group(1) or ""
    newrelease = "{}{:03d}".format(prefix, num + 1)

    parser.labels["Release"] = newrelease
    return newrelease
Example #16
Source File: test_parser.py    From dockerfile-parse with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_get_build_args_from_scratch(self, tmpdir):
        """A FROM scratch Dockerfile defines no ARGs even with build_args set."""
        parser = DockerfileParser(str(tmpdir.realpath()),
                                  env_replace=True, build_args={"bar": "baz"})
        parser.lines = ["FROM scratch\n"]

        # .args is empty, never equal to a list, and remains hashable.
        assert not parser.args
        assert not (parser.args == ['bar', 'baz'])
        assert hash(parser.args)
Example #17
Source File: test_parser.py    From dockerfile-parse with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_get_parent_env_from_scratch(self, tmpdir):
        """A FROM scratch Dockerfile inherits nothing from parent_env."""
        parser = DockerfileParser(str(tmpdir.realpath()),
                                  env_replace=True, parent_env={"bar": "baz"})
        parser.lines = ["FROM scratch\n"]

        # .envs is empty, never equal to a list, and remains hashable.
        assert not parser.envs
        assert not (parser.envs == ['bar', 'baz'])
        assert hash(parser.envs)
Example #18
Source File: test_parser.py    From dockerfile-parse with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_path_and_fileobj_together(self):
        """Passing both path and fileobj is ambiguous and must be rejected."""
        with pytest.raises(ValueError):
            DockerfileParser(path='.', fileobj=six.StringIO())
Example #19
Source File: test_parser.py    From dockerfile-parse with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_remove_whitespace(self, tmpdir):
        """
        Verify keys are parsed correctly even if there is no final newline.

        """
        # Write a Dockerfile that deliberately lacks a trailing newline.
        with open(os.path.join(str(tmpdir), 'Dockerfile'), 'w') as fp:
            fp.write('FROM scratch')
        workdir = str(tmpdir.realpath())
        writer = DockerfileParser(workdir)
        writer.labels['foo'] = 'bar ❤'

        # Re-read with cache_content=True and check nothing was mangled.
        reader = DockerfileParser(workdir, True)
        assert reader.baseimage == 'scratch'
        assert reader.labels['foo'] == 'bar ❤'
Example #20
Source File: pr_from_new_release.py    From mattermost-openshift with GNU General Public License v3.0 5 votes vote down vote up
def main():
    """Check the latest Mattermost release and open a PR updating the Dockerfile."""
    # clone the repo
    cleanup(LOCAL_WORK_COPY)
    try:
        r = Repo.clone_from(git_url, LOCAL_WORK_COPY)
    except GitCommandError as git_error:
        print(git_error)
        exit(-1)

    # Latest release version comes from the GitHub releases Atom feed
    # (titles look like "v5.x.y", hence the [1:] to strip the "v").
    d = feedparser.parse(
        'https://github.com/mattermost/mattermost-server/releases.atom')
    release_version = d.entries[0].title[1:]

    # lets read the dockerfile of the current master
    dfp = DockerfileParser()

    with open('./mattermost-openshift-workdir/Dockerfile') as f:
        dfp.content = f.read()

    # BUG FIX: dockerfile_version was referenced below even when the ENV was
    # absent, raising NameError; fail fast with a clear message instead.
    if 'MATTERMOST_VERSION' not in dfp.envs:
        print("Dockerfile has no MATTERMOST_VERSION, aborting...")
        exit(-1)
    dockerfile_version = dfp.envs['MATTERMOST_VERSION']

    # Lets check if we got a new release
    if semver.compare(release_version, dockerfile_version) == 1:
        print("Updating from %s to %s" % (dockerfile_version, release_version))

        target_branch = 'bots-life/update-to-' + release_version

        if not pr_in_progress(target_branch):
            patch_and_push(dfp, r, target_branch, release_version)
            cleanup(LOCAL_WORK_COPY)

            create_pr_to_master(target_branch)
        else:
            print("There is an open PR for %s, aborting..." %
                  (target_branch))

    else:
        print("we are even with Mattermost %s, no need to update" %
              release_version)
Example #21
Source File: __init__.py    From osbs-client with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def get_repo_info(git_uri, git_ref, git_branch=None, depth=None):
    """Clone the repo at the given ref and gather its Dockerfile and config.

    :param git_uri: git repository URI to clone
    :param git_ref: commit/ref to check out
    :param git_branch: optional branch name
    :param depth: requested clone depth; replaced by the actual commit depth
    :return: RepoInfo bundling the parsed Dockerfile, repo config and tags config
    """
    with checkout_git_repo(git_uri, commit=git_ref, branch=git_branch,
                           depth=depth) as code_dir_info:
        code_dir = code_dir_info.repo_path
        depth = code_dir_info.commit_depth
        # os.path.join() with a single argument was a no-op; use the path directly.
        dfp = DockerfileParser(code_dir, cache_content=True)
        config = RepoConfiguration(git_uri=git_uri, git_ref=git_ref, git_branch=git_branch,
                                   dir_path=code_dir, depth=depth)
        tags_config = AdditionalTagsConfig(dir_path=code_dir,
                                           tags=config.container.get('tags', set()))
    repo_info = RepoInfo(dfp, config, tags_config)
    return repo_info
Example #22
Source File: throughput-test-harness.py    From osbs-client with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def set_initial_release(df_path, branch):
    """Seed the Dockerfile's Release label with an iteration counter.

    :param df_path: path to the Dockerfile to modify in place
    :param branch: branch name embedded in the release string
    :return: the new release string written to the label
    """
    parser = DockerfileParser(df_path)
    current = parser.labels.get("Release", "1")
    release = "{}.{}.iteration001".format(current, branch)
    parser.labels["Release"] = release
    return release
Example #23
Source File: target.py    From colin with GNU General Public License v3.0 5 votes vote down vote up
def __init__(self, target, **_):
        """Wrap *target* in a DockerfileParser.

        :param target: dockerfile path, or a file-like object (io.IOBase)
        """
        super().__init__()
        self.target_name = target
        logger.debug("Target is a dockerfile.")
        if isinstance(target, io.IOBase):
            logger.debug("Target is a dockerfile loaded from the file-like object.")
            self.instance = DockerfileParser(fileobj=target)
        else:
            # NOTE(review): this file handle is never explicitly closed; it
            # stays open for the parser's lifetime — confirm that is intended.
            self.instance = DockerfileParser(fileobj=open(target))
Example #24
Source File: test_docker_api.py    From atomic-reactor with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_build(docker_tasker, workflow, is_failed, image_id):
    """
    tests docker build api plugin working
    """
    # Stub Dockerfile parsing, docker, and the inside builder.
    flexmock(DockerfileParser, content='df_content')
    mock_docker()
    fake_builder = MockInsideBuilder(image_id=image_id)
    fake_builder.tasker = docker_tasker
    mock_docker_tasker(docker_tasker)
    flexmock(InsideBuilder).new_instances(fake_builder)

    # Drive the success/failure path via the parametrized is_failed flag.
    flexmock(CommandResult).should_receive('is_failed').and_return(is_failed)
    error = "error message"
    error_detail = "{u'message': u\"%s\"}" % error
    if is_failed:
        flexmock(CommandResult, error=error, error_detail=error_detail)
        with pytest.raises(PluginFailedException):
            workflow.build_docker_image()
    else:
        workflow.build_docker_image()

    assert isinstance(workflow.buildstep_result['docker_api'], BuildResult)
    assert workflow.build_result == workflow.buildstep_result['docker_api']
    assert workflow.build_result.is_failed() == is_failed

    if is_failed:
        # The recorded plugin error carries the message without escaping noise.
        assert workflow.build_result.fail_reason == error
        assert '\\' not in workflow.plugins_errors['docker_api']
        assert error in workflow.plugins_errors['docker_api']
    else:
        # Success produces a well-formed sha256 image id.
        assert workflow.build_result.image_id.startswith('sha256:')
        assert workflow.build_result.image_id.count(':') == 1
Example #25
Source File: test_imagebuilder.py    From atomic-reactor with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_popen_cmd(docker_tasker, workflow, image_id):
    """
    tests imagebuilder build plugin working
    """
    flexmock(DockerfileParser, content='df_content')
    fake_builder = MockInsideBuilder(image_id=image_id)
    fake_builder.tasker = docker_tasker
    mock_docker_tasker(docker_tasker)
    flexmock(InsideBuilder).new_instances(fake_builder)

    real_popen = subprocess.Popen

    # Reconstruct the exact imagebuilder command line the plugin should run.
    process_args = ['imagebuilder', '-t', fake_builder.image.to_str()]
    for argname, argval in fake_builder.buildargs.items():
        process_args.append('--build-arg')
        process_args.append('%s=%s' % (argname, argval))
    process_args.append(fake_builder.df_dir)

    # Replace Popen with an `echo` of its own args so we can assert on them.
    flexmock(subprocess, Popen=lambda *args, **kw: real_popen(['echo', '-n', str(args)], **kw))
    workflow.build_docker_image()

    # Successful imagebuilder build: valid sha256 id, squash skipped, and
    # the echoed command line captured in the logs.
    assert isinstance(workflow.buildstep_result['imagebuilder'], BuildResult)
    assert workflow.build_result == workflow.buildstep_result['imagebuilder']
    assert not workflow.build_result.is_failed()
    assert workflow.build_result.image_id.startswith('sha256:')
    assert workflow.build_result.image_id.count(':') == 1
    assert workflow.build_result.skip_layer_squash
    assert len(workflow.exported_image_sequence) == 1
    assert str((process_args, )) in workflow.build_result.logs
Example #26
Source File: test_inner.py    From atomic-reactor with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_show_version(has_version, caplog):
    """
    Test atomic-reactor print version of osbs-client used to build the build json
    if available
    """
    VERSION = "1.0"
    flexmock(DockerfileParser, content='df_content')
    this_file = inspect.getfile(PreRaises)

    mock_docker()
    fake_builder = MockInsideBuilder()
    flexmock(InsideBuilder).new_instances(fake_builder)

    watch_buildstep = Watcher()

    caplog.clear()

    # Minimal plugin configuration; client_version is added only when the
    # parametrized has_version flag is set.
    params = {
        'prebuild_plugins': [],
        'buildstep_plugins': [{'name': 'buildstep_watched',
                               'args': {'watcher': watch_buildstep}}],
        'prepublish_plugins': [],
        'postbuild_plugins': [],
        'exit_plugins': [],
        'plugin_files': [this_file],
    }
    if has_version:
        params['client_version'] = VERSION

    workflow = DockerBuildWorkflow('test-image', source=MOCK_SOURCE, **params)
    workflow.build_docker_image()
    # The debug message must appear exactly when a client version was given.
    expected_log_message = "build json was built by osbs-client {}".format(VERSION)
    assert any(
        expected_log_message in record.message
        for record in caplog.records
        if record.levelno == logging.DEBUG
    ) == has_version
Example #27
Source File: test_inner.py    From atomic-reactor with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_plugin_errors(plugins, should_fail, should_log, caplog):
    """
    Try bad plugin configuration.
    """
    flexmock(DockerfileParser, content='df_content')
    flexmock(DockerApiPlugin).should_receive('run').and_return(DUMMY_BUILD_RESULT)
    this_file = inspect.getfile(PreRaises)
    mock_docker()
    fake_builder = MockInsideBuilder()
    flexmock(InsideBuilder).new_instances(fake_builder)

    caplog.clear()
    workflow = DockerBuildWorkflow('test-image', source=MOCK_SOURCE,
                                   plugin_files=[this_file],
                                   **plugins)

    # Find the 'watcher' parameter
    watchers = [conf.get('args', {}).get('watcher')
                for plugin in plugins.values()
                for conf in plugin]
    watcher = [x for x in watchers if x][0]

    if should_fail:
        with pytest.raises(PluginFailedException):
            workflow.build_docker_image()

        # The watched plugin never ran, and every recorded error maps a
        # plugin-name string to a reason string.
        assert not watcher.was_called()
        assert workflow.plugins_errors
        assert all([is_string_type(plugin)
                    for plugin in workflow.plugins_errors])
        assert all([is_string_type(reason)
                    for reason in workflow.plugins_errors.values()])
    else:
        workflow.build_docker_image()
        assert watcher.was_called()
        assert not workflow.plugins_errors

    # Error-level log records appear exactly when should_log is set.
    if should_log:
        assert any(record.levelno == logging.ERROR for record in caplog.records)
    else:
        assert all(record.levelno != logging.ERROR for record in caplog.records)
Example #28
Source File: test_inner.py    From atomic-reactor with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_workflow_compat(caplog):
    """
    Some of our plugins have changed from being run post-build to
    being run at exit. Let's test what happens when we try running an
    exit plugin as a post-build plugin.
    """
    flexmock(DockerfileParser, content='df_content')
    this_file = inspect.getfile(PreWatched)
    mock_docker()
    fake_builder = MockInsideBuilder()
    flexmock(InsideBuilder).new_instances(fake_builder)
    watch_exit = Watcher()
    watch_buildstep = Watcher()

    caplog.clear()

    # 'store_logs_to_file' is an exit plugin deliberately configured in the
    # post-build phase to exercise the compatibility path.
    workflow = DockerBuildWorkflow('test-image', source=MOCK_SOURCE,
                                   postbuild_plugins=[{'name': 'store_logs_to_file',
                                                       'args': {
                                                           'watcher': watch_exit
                                                       }}],

                                   buildstep_plugins=[{'name': 'buildstep_watched',
                                                       'args': {
                                                           'watcher': watch_buildstep
                                                       }}],
                                   plugin_files=[this_file])

    workflow.build_docker_image()
    # The plugin still runs, and compatibility handling logs no errors.
    assert watch_exit.was_called()
    for record in caplog.records:
        assert record.levelno != logging.ERROR
Example #29
Source File: test_inner.py    From atomic-reactor with BSD 3-Clause "New" or "Revised" License 4 votes vote down vote up
def test_buildstep_alias(buildstep_alias, buildstep_plugin):
    """
    Verifies that buildstep plugin is changed when buildstep_alias is defined
    """
    flexmock(DockerfileParser, content='df_content')
    this_file = inspect.getfile(PreRaises)
    mock_docker()
    fake_builder = MockInsideBuilder()
    flexmock(InsideBuilder).new_instances(fake_builder)

    prebuild_plugins = [{'name': 'reactor_config'}]
    buildstep_plugins = []
    postbuild_plugins = []
    prepublish_plugins = []
    exit_plugins = []

    # Reactor configuration is consumed from the environment by the
    # reactor_config plugin.
    os.environ['REACTOR_CONFIG'] = dedent("""\
        version: 1
        koji:
          hub_url: /
          root_url: ''
          auth: {}
        """)
    os.environ['USER_PARAMS'] = "{}"
    if buildstep_alias:
        os.environ['REACTOR_CONFIG'] += dedent("""\
        buildstep_alias:
          docker_api: imagebuilder
        """)

    try:
        workflow = DockerBuildWorkflow('test-image', source=MOCK_SOURCE,
                                       prebuild_plugins=prebuild_plugins,
                                       buildstep_plugins=buildstep_plugins,
                                       prepublish_plugins=prepublish_plugins,
                                       postbuild_plugins=postbuild_plugins,
                                       exit_plugins=exit_plugins,
                                       plugin_files=[this_file])

        workflow.build_docker_image()
    finally:
        # BUG FIX: pop the env var even when the build raises, so a failing
        # test cannot leak REACTOR_CONFIG into subsequent tests.
        os.environ.pop('REACTOR_CONFIG', None)

    assert buildstep_plugin in workflow.buildstep_result
    assert isinstance(workflow.buildstep_result[buildstep_plugin], BuildResult)
Example #30
Source File: test_inner.py    From atomic-reactor with BSD 3-Clause "New" or "Revised" License 4 votes vote down vote up
def test_workflow_plugin_results(buildstep_plugin, buildstep_raises):
    """
    Verifies the results of plugins in different phases
    are stored properly.
    It also verifies failed and remote BuildResult is handled properly.
    """

    flexmock(DockerfileParser, content='df_content')
    this_file = inspect.getfile(PreRaises)
    mock_docker()
    fake_builder = MockInsideBuilder()
    flexmock(InsideBuilder).new_instances(fake_builder)

    # One plugin per phase, each storing a '<name>_result' value.
    prebuild_plugins = [{'name': 'pre_build_value'}]
    buildstep_plugins = [{'name': buildstep_plugin}]
    postbuild_plugins = [{'name': 'post_build_value'}]
    prepublish_plugins = [{'name': 'pre_publish_value'}]
    exit_plugins = [{'name': 'exit_value'}]

    workflow = DockerBuildWorkflow('test-image', source=MOCK_SOURCE,
                                   prebuild_plugins=prebuild_plugins,
                                   buildstep_plugins=buildstep_plugins,
                                   prepublish_plugins=prepublish_plugins,
                                   postbuild_plugins=postbuild_plugins,
                                   exit_plugins=exit_plugins,
                                   plugin_files=[this_file])

    if buildstep_raises:
        with pytest.raises(PluginFailedException):
            workflow.build_docker_image()
    else:
        workflow.build_docker_image()

    assert workflow.prebuild_results == {'pre_build_value': 'pre_build_value_result'}
    assert isinstance(workflow.buildstep_result[buildstep_plugin], BuildResult)

    # A failed buildstep skips the post-build and pre-publish phases ...
    if buildstep_raises:
        assert workflow.postbuild_results == {}
        assert workflow.prepub_results == {}
    else:
        assert workflow.postbuild_results == {'post_build_value': 'post_build_value_result'}
        assert workflow.prepub_results == {'pre_publish_value': 'pre_publish_value_result'}

    # ... but exit plugins always run.
    assert workflow.exit_results == {'exit_value': 'exit_value_result'}