Python h5py.__version__ Examples

The following are 15 code examples of h5py.__version__ (note that it is a module attribute holding the version string, not a callable). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module h5py, or try the search function.
Example #1
Source File: test_h5netcdf.py    From h5netcdf with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_optional_netcdf4_attrs(tmp_local_or_remote_netcdf):
    """Datasets attached as HDF5 dimension scales should be read back by
    h5netcdf as named netCDF dimensions, even when the optional netCDF4
    bookkeeping attributes are absent."""
    h5 = get_hdf5_module(tmp_local_or_remote_netcdf)
    with h5.File(tmp_local_or_remote_netcdf, 'w') as f:
        foo_data = np.arange(50).reshape(5, 10)
        f.create_dataset('foo', data=foo_data)
        f.create_dataset('x', data=np.arange(5))
        f.create_dataset('y', data=np.arange(10))
        # h5py 2.10.0 deprecated DimensionManager.create_scale in favour of
        # Dataset.make_scale.  Wrap both operands in LooseVersion: comparing
        # a plain str against a LooseVersion only works through the reflected
        # operator and is fragile.
        if LooseVersion(h5py.__version__) < LooseVersion('2.10.0'):
            f['foo'].dims.create_scale(f['x'])
            f['foo'].dims.create_scale(f['y'])
        else:
            f['x'].make_scale()
            f['y'].make_scale()
        f['foo'].dims[0].attach_scale(f['x'])
        f['foo'].dims[1].attach_scale(f['y'])
    with h5netcdf.File(tmp_local_or_remote_netcdf, 'r') as ds:
        assert ds['foo'].dimensions == ('x', 'y')
        assert ds.dimensions == {'x': 5, 'y': 10}
        assert array_equal(ds['foo'], foo_data)
Example #2
Source File: test_h5netcdf.py    From h5netcdf with BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_invalid_netcdf_malformed_dimension_scales(tmp_local_or_remote_netcdf):
    """A variable with only some of its dimensions attached to scales is
    malformed; opening it with phony_dims='sort' must raise ValueError."""
    h5 = get_hdf5_module(tmp_local_or_remote_netcdf)
    with h5.File(tmp_local_or_remote_netcdf, 'w') as f:
        foo_data = np.arange(125).reshape(5, 5, 5)
        f.create_dataset('foo1', data=foo_data)
        f.create_dataset('x', data=np.arange(5))
        f.create_dataset('y', data=np.arange(5))
        f.create_dataset('z', data=np.arange(5))

        # Wrap both operands in LooseVersion: comparing a plain str against
        # a LooseVersion only works through the reflected operator.
        if LooseVersion(h5py.__version__) < LooseVersion('2.10.0'):
            f['foo1'].dims.create_scale(f['x'])
            f['foo1'].dims.create_scale(f['y'])
            f['foo1'].dims.create_scale(f['z'])
        else:
            f['x'].make_scale()
            f['y'].make_scale()
            f['z'].make_scale()
        # Deliberately attach only the first dimension to make the file
        # malformed from a netCDF perspective.
        f['foo1'].dims[0].attach_scale(f['x'])

    with raises(ValueError):
        with h5netcdf.File(tmp_local_or_remote_netcdf, 'r',
                           phony_dims='sort') as ds:
            pass
Example #3
Source File: sys_utils.py    From VASNet with MIT License 5 votes vote down vote up
def ge_pkg_versions():
    """Collect version information for the runtime environment.

    Returns:
        dict: dependency name -> version string/tuple, covering the NVIDIA
        driver, CUDA, cuDNN, platform, Python and key Python packages.
    """
    dep_versions = {}
    dep_versions['display'] = run_command('cat /proc/driver/nvidia/version')

    dep_versions['cuda'] = 'NA'
    # CUDA_HOME, when set, overrides the default toolkit location.
    cuda_home = os.environ.get('CUDA_HOME', '/usr/local/cuda')

    # os.path.join avoids the double slash the old string concatenation
    # produced ('/usr/local/cuda//version.txt').
    version_file = os.path.join(cuda_home, 'version.txt')
    if os.path.isfile(version_file):
        dep_versions['cuda'] = run_command('cat ' + version_file)

    dep_versions['cudnn'] = torch.backends.cudnn.version()
    dep_versions['platform'] = platform.platform()
    dep_versions['python'] = sys.version_info[:3]
    dep_versions['torch'] = torch.__version__
    dep_versions['numpy'] = np.__version__
    dep_versions['h5py'] = h5py.__version__
    dep_versions['json'] = json.__version__
    dep_versions['ortools'] = ortools.__version__
    dep_versions['torchvision'] = pkg_resources.get_distribution("torchvision").version

    return dep_versions
Example #4
Source File: data_sanitisation.py    From ont_fast5_api with Mozilla Public License 2.0 5 votes vote down vote up
def check_version_compatibility():
    """Raise EnvironmentError for h5py/numpy combinations with known
    string decoding bugs (h5py<2.7 together with numpy>=1.13)."""
    if parse_version(h5py.__version__) < parse_version("2.7") \
            and parse_version(np.__version__) >= parse_version("1.13"):
        # 'verions' typo fixed in the user-facing message.
        raise EnvironmentError("Incompatible h5py=={} and numpy=={} versions detected. \n"
                               "Array reading/decoding may not proceed as expected. \n"
                               "Please upgrade to the latest compatible versions"
                               "".format(h5py.__version__, np.__version__))
Example #5
Source File: data_sanitisation.py    From ont_fast5_api with Mozilla Public License 2.0 5 votes vote down vote up
def _sanitize_data_for_writing(data):
    """Encode python/unicode strings as byte-strings for HDF5 writing.

    Handles three cases: plain str, homogeneous unicode ndarrays, and
    structured (mixed-dtype) ndarrays whose unicode fields are converted
    column by column.  Any other input is returned unchanged.
    """
    # To make the interface more user friendly we encode python strings as
    # byte-strings when writing datasets
    check_version_compatibility()
    if isinstance(data, str):
        # Plain python-strings can be encoded trivially
        return data.encode()
    elif isinstance(data, np.ndarray) and data.dtype.kind == 'U':
        # dtype.kind is a one-character code; comparing it to 'U' replaces
        # the old `== np.dtype(np.unicode)` which relied on the np.unicode
        # alias (removed in NumPy 1.20) and on permissive dtype equality.
        # If the array is all of one type, unicode-string, we can encode with numpy
        return data.astype('S')
    elif isinstance(data, np.ndarray) and len(data.dtype) > 1:
        # If the array is of mixed types we have to set the encoding column by column
        encoded_dtypes = []
        for field_name in data.dtype.names:
            field_dtype, field_byte_index = data.dtype.fields[field_name]
            if field_dtype.kind == 'U':
                # Unicode itemsize is 4 bytes/char; divide by the alignment
                # to recover the character length for the byte-string dtype.
                str_len = field_dtype.itemsize // field_dtype.alignment
                field_dtype = np.dtype("|S{}".format(str_len))
            encoded_dtypes.append((field_name, field_dtype))
        try:
            return data.astype(encoded_dtypes)
        except (ValueError, UnicodeEncodeError):
            if parse_version(h5py.__version__) < parse_version("2.7"):
                raise UnicodeError("Cannot encode array with types: {}.\n"
                                   "There are known bugs in h5py<2.7 which yield non-deterministic results when decoding "
                                   "arrays with empty strings and additional bugs with compatibility between "
                                   "h5py<2.7 and numpy>=1.13 when decoding arrays with  mixed/padded data types.\n"
                                   "Please try upgrading to the latest h5py and numpy versions"
                                   "".format(encoded_dtypes))
            else:
                raise
    return data
Example #6
Source File: data_sanitisation.py    From ont_fast5_api with Mozilla Public License 2.0 5 votes vote down vote up
def _sanitize_data_for_reading(data):
    """Decode byte-strings into unicode strings when reading datasets.

    Handles h5py Datasets (materialised first), plain bytes, homogeneous
    byte-string ndarrays, and structured (mixed-dtype) ndarrays whose
    byte-string fields are decoded column by column.  Any other input is
    returned unchanged.
    """
    # To make the interface more user friendly we decode byte-strings into unicode strings when reading datasets
    check_version_compatibility()
    if isinstance(data, h5py.Dataset):
        data = data[()]

    if isinstance(data, bytes):
        # Plain byte-strings can be decoded trivially
        return data.decode()
    elif isinstance(data, np.ndarray) and data.dtype.kind == 'S':
        # If the array is all of one type, byte-string, we can decode with numpy
        return np.char.decode(data)
    elif isinstance(data, np.ndarray) and len(data.dtype) > 1:
        # If the array is of mixed types we have to decode column by column
        decoded_dtypes = []
        for field_name in data.dtype.names:
            field_dtype, field_byte_index = data.dtype.fields[field_name]
            if field_dtype.kind == 'S':
                field_dtype = np.dtype("<U{}".format(field_dtype.itemsize))
            decoded_dtypes.append((field_name, field_dtype))
        try:
            return data.astype(decoded_dtypes)
        except (UnicodeDecodeError, SystemError):
            # On h5py==2.6 we can't decode padded string-arrays properly - we should advise users to upgrade
            if parse_version(h5py.__version__) < parse_version("2.7"):
                # Message fixed: this is the *decoding* path ('encode' was a
                # copy-paste from the writing counterpart) and 'deteministic'
                # was a typo.
                raise UnicodeError("Cannot decode array with types: {}.\n"
                                   "There are known bugs in h5py<2.7 which yield non-deterministic results when decoding "
                                   "arrays with empty strings and additional bugs with compatibility between "
                                   "h5py<2.7 and numpy>=1.13 when decoding arrays with  mixed/padded data types.\n"
                                   "Please try upgrading to the latest h5py and numpy versions".format(decoded_dtypes))
            else:
                raise
    return data
Example #7
Source File: test_data_sanitisation.py    From ont_fast5_api with Mozilla Public License 2.0 5 votes vote down vote up
def test_real_example_file(self):
        """Sanitising a real RLE basecall table converts the byte-string
        'base' column to unicode while leaving numeric columns untouched."""
        fast5_path = os.path.join(test_data, 'rle_basecall_table', 'rle_example.fast5')
        expected_dtypes = [('base', '<U1'),  # After cleaning this is a unicode string
                           ('scale', '<f4'),
                           ('shape', '<f4'),
                           ('weight', '<f4'),
                           ('index', '<u4'),
                           ('runlength', '<u4')]

        with MultiFast5File(fast5_path, 'r') as mf5:
            for read in mf5.get_reads():
                actual_data = read.handle['Analyses/Basecall_1D_000/BaseCalled_template/RunlengthBasecall']

                for field, expected_type in expected_dtypes:
                    if field == 'base':
                        # Before cleaning the 'base' column is of type byte-string length=1
                        self.assertEqual(dtype('|S1'), actual_data[field].dtype)
                    else:
                        self.assertEqual(dtype(expected_type), actual_data[field].dtype)

                try:
                    clean_data = _sanitize_data_for_reading(actual_data)
                    self.assertEqual(dtype(expected_dtypes), clean_data.dtype)
                except UnicodeError:
                    if parse_version(h5py.__version__) >= parse_version("2.7"):
                        raise
                    # h5py==2.6 often fails to decode these arrays correctly
Example #8
Source File: utils.py    From anvio with GNU General Public License v3.0 5 votes vote down vote up
def check_h5py_module():
    """To make sure we do have the h5py module.

       The reason this function is here is because we removed h5py from anvi'o dependencies,
       but some migration scripts may still need it if the user has very old databases. In
       those cases the user must install it manually.

       Raises:
           ConfigError: if `h5py` cannot be imported."""

    try:
        import h5py
        h5py.__version__
    # Narrowed from a bare `except:` which also swallowed KeyboardInterrupt
    # and SystemExit; a missing/broken module surfaces as ImportError.
    except ImportError:
        raise ConfigError("Please install the Python module `h5py` manually for this migration task to continue. "
                          "The reason why the standard anvi'o installation did not install module is complicated, "
                          "and really unimportant. If you run `pip install h5py` in your Python virtual environment "
                          "for anvi'o, and try running the migration program again things should be alright.")
Example #9
Source File: _on_demand_imports.py    From unyt with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def __version__(self):
        """Lazily resolve astropy's version string, caching the result.

        Returns a NotAModule placeholder when astropy is not installed."""
        if self._version is None:
            try:
                import astropy
            except ImportError:
                self._version = NotAModule(self._name)
            else:
                self._version = astropy.__version__
        return self._version
Example #10
Source File: _on_demand_imports.py    From unyt with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def __version__(self):
        """Lazily resolve h5py's version string, caching the result.

        Returns a NotAModule placeholder when h5py is not installed."""
        if self._version is not None:
            return self._version
        try:
            import h5py
            self._version = h5py.__version__
        except ImportError:
            self._version = NotAModule(self._name)
        return self._version
Example #11
Source File: _on_demand_imports.py    From unyt with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def __version__(self):
        """Lazily resolve matplotlib's version string, caching the result.

        Returns a NotAModule placeholder when matplotlib is not installed."""
        if self._version is None:
            try:
                import matplotlib
                resolved = matplotlib.__version__
            except ImportError:
                resolved = NotAModule(self._name)
            self._version = resolved
        return self._version
Example #12
Source File: core.py    From h5netcdf with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def _create_dim_scales(self):
        """Create all necessary HDF5 dimension scales.

        Walks this group's dimensions in creation order, materialising a
        backing dataset for any dimension without one, tagging it with the
        netCDF-4 bookkeeping attributes, and registering it as an HDF5
        dimension scale.  Recurses into all subgroups.
        """
        dim_order = self._dim_order.maps[0]
        for dim in sorted(dim_order, key=lambda d: dim_order[d]):
            if dim not in self._h5group:
                size = self._current_dim_sizes[dim]
                kwargs = {}
                if self._dim_sizes[dim] is None:
                    # Unlimited dimension: make the backing dataset resizable.
                    kwargs["maxshape"] = (None,)
                self._h5group.create_dataset(
                    name=dim, shape=(size,), dtype='S1', **kwargs)

            h5ds = self._h5group[dim]
            h5ds.attrs['_Netcdf4Dimid'] = dim_order[dim]

            if len(h5ds.shape) > 1:
                # Multidimensional coordinate variable: record the ids of the
                # dimensions it spans.
                dims = self._variables[dim].dimensions
                coord_ids = np.array([dim_order[d] for d in dims], 'int32')
                h5ds.attrs['_Netcdf4Coordinates'] = coord_ids

            # TODO: don't re-create scales if they already exist. With the
            # current version of h5py, this would require using the low-level
            # h5py.h5ds.is_scale interface to detect pre-existing scales.
            scale_name = dim if dim in self.variables else NOT_A_VARIABLE
            # Wrap both operands in LooseVersion: comparing a plain str to a
            # LooseVersion only works through the reflected operator and is
            # fragile.  create_scale was deprecated in h5py 2.10.0.
            if LooseVersion(h5py.__version__) < LooseVersion('2.10.0'):
                h5ds.dims.create_scale(h5ds, scale_name)
            else:
                h5ds.make_scale(scale_name)

        for subgroup in self.groups.values():
            subgroup._create_dim_scales()
Example #13
Source File: test_h5netcdf.py    From h5netcdf with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_fileobj():
    """h5netcdf round-trip through file-like objects (TemporaryFile and
    BytesIO), which h5py supports from 2.9.0 onwards."""
    # Wrap both operands in LooseVersion (str vs LooseVersion only works via
    # the reflected operator).  The skip message previously claimed
    # '> 2.9.0' while the condition skips anything *below* 2.9.0.
    if LooseVersion(h5py.__version__) < LooseVersion('2.9.0'):
        pytest.skip('h5py >= 2.9.0 required to test file-like objects')
    fileobj = tempfile.TemporaryFile()
    write_h5netcdf(fileobj)
    read_h5netcdf(fileobj, h5netcdf)
    fileobj = io.BytesIO()
    write_h5netcdf(fileobj)
    read_h5netcdf(fileobj, h5netcdf)
Example #14
Source File: test_h5netcdf.py    From h5netcdf with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def test_invalid_netcdf4_mixed(tmp_local_or_remote_netcdf):
    """Mixed valid/invalid netCDF-4 content: phony dimension handling must
    work for both 'sort' and 'access' orders, and accessing the dimensions
    of a variable without scales must raise ValueError."""
    h5 = get_hdf5_module(tmp_local_or_remote_netcdf)
    with h5.File(tmp_local_or_remote_netcdf, 'w') as f:
        var, var2 = create_invalid_netcdf_data()
        for k, v in var.items():
            f.create_dataset(k, data=v)
        for k, v in var2.items():
            f.create_dataset(k, data=np.arange(v))

        # Wrap both operands in LooseVersion: comparing a plain str against
        # a LooseVersion only works through the reflected operator.
        if LooseVersion(h5py.__version__) < LooseVersion('2.10.0'):
            f['foo2'].dims.create_scale(f['x1'])
            f['foo2'].dims.create_scale(f['y1'])
            f['foo2'].dims.create_scale(f['z1'])
        else:
            f['x1'].make_scale()
            f['y1'].make_scale()
            f['z1'].make_scale()
        f['foo2'].dims[0].attach_scale(f['x1'])
        f['foo2'].dims[1].attach_scale(f['y1'])
        f['foo2'].dims[2].attach_scale(f['z1'])

    with h5netcdf.File(tmp_local_or_remote_netcdf, 'r',
                       phony_dims='sort') as ds:
        var = ds.variables
        check_invalid_netcdf4_mixed(var, 3)

    with h5netcdf.File(tmp_local_or_remote_netcdf, 'r',
                       phony_dims='access') as ds:
        var = ds.variables
        check_invalid_netcdf4_mixed(var, 0)

    with netCDF4.Dataset(tmp_local_or_remote_netcdf, 'r') as ds:
        var = ds.variables
        check_invalid_netcdf4_mixed(var, 3)

    with h5netcdf.File(tmp_local_or_remote_netcdf, 'r') as ds:
        with raises(ValueError):
            ds.variables['foo1'].dimensions
Example #15
Source File: imaging.py    From sima with GNU General Public License v2.0 5 votes vote down vote up
def _todict(self):
        """Returns the dataset as a dictionary, useful for saving"""
        state = {'savedir': abspath(self.savedir)}
        for attr in ('channel_names', 'num_frames', 'frame_shape',
                     'num_sequences'):
            state[attr] = getattr(self, attr)
        state['__version__'] = sima.__version__
        return state