Python netCDF4.date2num() Examples
The following are 30 code examples of netCDF4.date2num(), taken from open-source projects; the project and source file for each example are noted above it. You may also want to check out all available functions and classes of the netCDF4 module.
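Before the examples: netCDF4.date2num() converts a datetime.datetime (or a sequence of them) into numeric time values measured against a CF-style units string such as "hours since 1970-01-01 00:00:00", with an optional calendar argument. A minimal sketch of the round trip, using an illustrative units string rather than one from the projects below:

    import datetime as dt
    import netCDF4

    units = "hours since 1970-01-01 00:00:00"  # CF-style time units (illustrative)
    times = [dt.datetime(2019, 1, 1), dt.datetime(2019, 1, 1, 6)]

    # Convert datetimes to numeric offsets from the epoch in the units string
    nums = netCDF4.date2num(times, units, calendar="standard")
    print(nums)  # hours since the epoch, e.g. 429528.0 for 2019-01-01

    # And back again; num2date returns datetime-like objects
    dates = netCDF4.num2date(nums, units, calendar="standard")
    print(dates)

The examples that follow mostly use this pattern to fill netCDF time variables, compute time indices, or convert between calendars.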
Example #1
Source File: netcdf_data.py From Ocean-Data-Map-Project with GNU General Public License v3.0 | 6 votes |
def convert_to_timestamp(self, date: str):
    """Converts ISO 8601 Extended date, to the corresponding dataset time index. """

    # Time is in ISO 8601 Extended format
    # Get time index from dataset
    time_range = [dateutil.parser.parse(x) for x in date.split(',')]
    time_var = self.time_variable
    time_range[0] = time_range[0].replace(tzinfo=None)
    time_range = [netCDF4.date2num(
        x, time_var.attrs['units']) for x in time_range]
    time_range = [np.where(time_var.values == x)[0] for x in time_range]

    if len(time_range) == 1:  # Single Date
        return int(str(time_range[0][0]))
    else:  # Multiple Dates
        date_formatted = {}
        i = 0
        for x in date.split(','):  # x is a single date
            new_date = {x: int(str(time_range[i][0]))}
            date_formatted.update(new_date)  # Add Next pair
            i += 1
        return date_formatted
Example #2
Source File: test_disk.py From forest with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_valid_times(self):
    units = "hours since 1970-01-01 00:00:00"
    times = {
        "time_0": [dt.datetime(2019, 1, 1)],
        "time_1": [dt.datetime(2019, 1, 1, 3)]}
    with netCDF4.Dataset(self.path, "w") as dataset:
        um = tutorial.UM(dataset)
        for name, values in times.items():
            var = um.times(name, length=len(values))
            var[:] = netCDF4.date2num(values, units=var.units)
        var = um.pressures("pressure", length=1)
        var[:] = 1000.
        var = um.longitudes(length=1)
        var[:] = 125.
        var = um.latitudes(length=1)
        var[:] = 45.
        dims = ("time_1", "pressure", "longitude", "latitude")
        var = um.relative_humidity(dims)
        var[:] = 100.
    variable = "relative_humidity"
    result = unified_model.read_valid_times(self.path, variable)
    expect = times["time_1"]
    np.testing.assert_array_equal(expect, result)
Example #3
Source File: ncConverter.py From PCR-GLOBWB_model with GNU General Public License v3.0 | 6 votes |
def dataList2NetCDF(self, ncFileName, shortVarNameList, varFieldList, timeStamp, posCnt = None):

    rootgrp = nc.Dataset(ncFileName, 'a')

    date_time = rootgrp.variables['time']
    if posCnt == None: posCnt = len(date_time)

    for shortVarName in shortVarNameList:

        date_time[posCnt] = nc.date2num(timeStamp, date_time.units, date_time.calendar)
        varField = varFieldList[shortVarName]

        # flip variable if necessary (to follow cf_convention)
        if self.netcdf_y_orientation_follow_cf_convention:
            varField = np.flipud(varField)

        rootgrp.variables[shortVarName][posCnt, :, :] = varField

    rootgrp.sync()
    rootgrp.close()
Example #4
Source File: ncConverter.py From PCR-GLOBWB_model with GNU General Public License v3.0 | 6 votes |
def dataList2NetCDF(self, ncFileName, shortVarNameList, varFieldList, timeStamp, posCnt = None):

    rootgrp = nc.Dataset(ncFileName, 'a')

    date_time = rootgrp.variables['time']
    if posCnt == None: posCnt = len(date_time)

    for shortVarName in shortVarNameList:

        date_time[posCnt] = nc.date2num(timeStamp, date_time.units, date_time.calendar)
        varField = varFieldList[shortVarName]

        # flip variable if necessary (to follow cf_convention)
        if self.netcdf_y_orientation_follow_cf_convention:
            varField = np.flipud(varField)

        rootgrp.variables[shortVarName][posCnt, :, :] = varField

    rootgrp.sync()
    rootgrp.close()
Example #5
Source File: test_db_main.py From forest with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_main_saves_axis_information(self):
    times = [dt.datetime(2019, 1, 1), dt.datetime(2019, 1, 1)]
    pressures = [1000, 900]
    with netCDF4.Dataset(self.netcdf_file, "w") as dataset:
        dataset.createDimension("dim0", len(times))
        obj = dataset.createVariable("time", "d", ("dim0",))
        obj.units = self.units
        obj[:] = netCDF4.date2num(times, self.units)
        obj = dataset.createVariable("pressure", "d", ("dim0",))
        obj[:] = pressures
        obj = dataset.createVariable("air_temperature", "f", ("dim0",))
        obj.um_stash_source = "m01s16i203"
        obj.coordinates = "time pressure"
    main.main([
        "--database", self.database_file,
        self.netcdf_file
    ])
    connection = sqlite3.connect(self.database_file)
    cursor = connection.cursor()
    cursor.execute("SELECT v.time_axis, v.pressure_axis FROM variable AS v")
    result = cursor.fetchall()
    expect = [(0, 0)]
    self.assertEqual(expect, result)
Example #6
Source File: test_db_main.py From forest with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_main_saves_reference_time(self):
    reference_time = dt.datetime(2019, 1, 1)
    with netCDF4.Dataset(self.netcdf_file, "w") as dataset:
        obj = dataset.createVariable("forecast_reference_time", "d", ())
        obj[:] = netCDF4.date2num(reference_time, self.units)
        obj.units = self.units
    main.main([
        "--database", self.database_file,
        self.netcdf_file
    ])
    connection = sqlite3.connect(self.database_file)
    cursor = connection.cursor()
    cursor.execute("SELECT reference FROM file")
    result = cursor.fetchall()
    expect = [(str(reference_time),)]
    self.assertEqual(expect, result)
Example #7
Source File: test_db_main.py From forest with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_main_saves_times_in_database(self):
    times = [
        dt.datetime(2019, 1, 1, 12),
        dt.datetime(2019, 1, 1, 13)]
    with netCDF4.Dataset(self.netcdf_file, "w") as dataset:
        dataset.createDimension("time", len(times))
        obj = dataset.createVariable("time", "d", ("time",))
        obj.units = self.units
        obj[:] = netCDF4.date2num(times, self.units)
        obj = dataset.createVariable("air_temperature", "f", ("time",))
        obj.um_stash_source = "m01s16i203"
    main.main([
        "--database", self.database_file,
        self.netcdf_file
    ])
    connection = sqlite3.connect(self.database_file)
    cursor = connection.cursor()
    cursor.execute("SELECT DISTINCT value FROM time")
    result = cursor.fetchall()
    expect = [("2019-01-01 12:00:00",), ("2019-01-01 13:00:00",)]
    self.assertEqual(expect, result)
Example #8
Source File: conftest.py From pynco with MIT License | 6 votes |
def testfileglobal(tempsrcdir):
    """Create a bunch of sample monthly netcdf files with real times"""
    dates = [datetime.datetime.now()]
    filename = os.path.join(tempsrcdir, "global.nc")
    dataset = netCDF4.Dataset(filename, "w")
    random_field = np.random.rand(1, 180, 360)  # 1degree resolution
    shape = random_field.shape
    dataset.createDimension("lat", shape[1])
    dataset.createDimension("lon", shape[2])
    dataset.createDimension("time", len(dates))
    var = dataset.createVariable("random", "f8", ("time", "lat", "lon"))
    lon = dataset.createVariable("lon", "f8", ("lon",))
    lat = dataset.createVariable("lat", "f8", ("lat",))
    time = dataset.createVariable("time", "f8", ("time",))
    time.units = _UNITS_STD_TIME
    time.calendar = _CALENDAR_NO_LEAP
    var[:, :, :] = random_field
    time[:] = netCDF4.date2num(dates, _UNITS_STD_TIME, calendar=_CALENDAR_NO_LEAP)
    lat[:] = np.linspace(-90.0, 90.0, shape[1])
    lon[:] = np.linspace(-180.0, 180, shape[2])
    dataset.close()
    return filename
Example #9
Source File: conftest.py From pynco with MIT License | 6 votes |
def testfile85(random_field, tempsrcdir):
    """Create a bunch of sample monthly netcdf files with real times"""
    dates = [
        datetime.datetime(1985, 1, 1) + datetime.timedelta(days=d)
        for d in range(0, 365)
    ]
    filename = os.path.join(tempsrcdir, "85.nc")
    dataset = netCDF4.Dataset(filename, "w")
    shape = random_field.shape
    dataset.createDimension("dim0", shape[0])
    dataset.createDimension("dim1", shape[1])
    dataset.createDimension("time", len(dates))
    var = dataset.createVariable("random", "f8", ("time", "dim0", "dim1"))
    time = dataset.createVariable("time", "f8", ("time",))
    time.units = _UNITS_STD_TIME
    time.calendar = _CALENDAR_NO_LEAP
    var[:, :, :] = random_field
    time[:] = netCDF4.date2num(dates, _UNITS_STD_TIME, calendar=_CALENDAR_NO_LEAP)
    dataset.close()
    return filename
Example #10
Source File: conftest.py From pynco with MIT License | 6 votes |
def testfiles8589(random_field, tempsrcdir):
    """Create a bunch of sample monthly netcdf files with real times"""
    filelist = []
    for year in range(1985, 1990):
        date = datetime.datetime(year, 1, 1)
        filename = date.strftime("%y.nc")
        filename = os.path.join(tempsrcdir, filename)
        dataset = netCDF4.Dataset(filename, "w")
        shape = random_field.shape
        dataset.createDimension("dim0", shape[0])
        dataset.createDimension("dim1", shape[1])
        dataset.createDimension("time")
        var = dataset.createVariable("random", "f8", ("time", "dim0", "dim1"))
        time = dataset.createVariable("time", "f8", ("time",))
        time.units = _UNITS_STD_TIME
        time.calendar = _CALENDAR_NO_LEAP
        var[0, :, :] = random_field
        time[:] = netCDF4.date2num(date, _UNITS_STD_TIME, calendar=_CALENDAR_NO_LEAP)
        dataset.close()
        filelist.append(filename)
    return filelist
Example #11
Source File: conftest.py From pynco with MIT License | 6 votes |
def monthly_filelist(random_field, monthlydatetimelist, tempsrcdir):
    """Create a bunch of sample monthly netcdf files with real times"""
    file_list = []
    for date in monthlydatetimelist:
        filename = date.strftime(_DATESTR_FORMAT_MONTHLY)
        filename = os.path.join(tempsrcdir, filename)
        dataset = netCDF4.Dataset(filename, "w")
        shape = random_field.shape
        dataset.createDimension("dim0", shape[0])
        dataset.createDimension("dim1", shape[1])
        dataset.createDimension("time", 1)
        var = dataset.createVariable("random", "f8", ("time", "dim0", "dim1"))
        time = dataset.createVariable("time", "f8", ("time",))
        time.units = _UNITS_STD_TIME
        time.calendar = _CALENDAR_NO_LEAP
        var[:, :, :] = random_field
        time[:] = netCDF4.date2num(date, _UNITS_STD_TIME, calendar=_CALENDAR_NO_LEAP)
        dataset.close()
        file_list.append(filename)
    return file_list
Example #12
Source File: settings.py From lisflood-code with European Union Public License 1.2 | 5 votes |
def calendar(date_in, calendar_type='proleptic_gregorian'):
    """ Get date or number of steps from input.

    Get date from input string using one of the available formats or get time step number from input number or string.
    Used to get the date from CalendarDayStart (input) in the settings xml

    :param date_in: string containing a date in one of the available formats or time step number as number or string
    :param calendar_type:
    :rtype: datetime object or float number
    :returns: date as datetime or time step number as float
    :raises ValueError: stop if input is not a step number AND it is in wrong date format
    """
    try:
        # try reading step number from number or string
        return float(date_in)
    except ValueError:
        # try reading a date in one of available formats
        try:
            # units used for date type conversion (datetime.datetime -> calendar-specific if needed)
            _t_units = "hours since 1970-01-01 00:00:00"
            date = parse_time_string(date_in, dayfirst=True)[0]  # datetime.datetime type
            step = date2num(date, _t_units, calendar_type)  # float type
            return num2date(step, _t_units, calendar_type)  # calendar-dependent type from netCDF4.netcdftime._netcdftime module
        except:
            # if cannot read input then stop
            msg = "Wrong step or date format in XML settings file\n Input {}".format(date_in)
            raise LisfloodError(msg)
Example #13
Source File: lib.py From seapy with MIT License | 5 votes |
def date2num(dates, nc, tvar=None):
    """
    Convert the datetime vector to number for the given netcdf files
    considering the units and the calendar type used. This is a wrapper
    to the netCDF4.date2num function to account for calendar strangeness
    in ROMS

    Parameters
    ----------
    dates : array of datetime.datetime
        Values to convert
    nc : netCDF4.Dataset,
        netcdf input file
    tvar : string, optional
        time variable to load. If not specified, it will find the
        time variable from predefined

    Returns
    -------
    ndarray,
        Array of values in the correct units/calendar of the netCDF file
    """
    tvar = tvar if tvar else get_timevar(nc)
    calendar, _ = _get_calendar(nc.variables[tvar])

    # Convert the times
    return netCDF4.date2num(dates, nc.variables[tvar].units, calendar=calendar)
Example #14
Source File: ncConverter.py From PCR-GLOBWB_model with GNU General Public License v3.0 | 5 votes |
def data2NetCDF(self, ncFileName, shortVarName, varField, timeStamp, posCnt = None):

    rootgrp = nc.Dataset(ncFileName, 'a')

    date_time = rootgrp.variables['time']
    if posCnt == None: posCnt = len(date_time)
    date_time[posCnt] = nc.date2num(timeStamp, date_time.units, date_time.calendar)

    # flip variable if necessary (to follow cf_convention)
    if self.netcdf_y_orientation_follow_cf_convention:
        varField = np.flipud(varField)

    rootgrp.variables[shortVarName][posCnt, :, :] = varField

    rootgrp.sync()
    rootgrp.close()
Example #15
Source File: ncConverter.py From PCR-GLOBWB_model with GNU General Public License v3.0 | 5 votes |
def data2NetCDF(self, ncFileName, shortVarName, varField, timeStamp, posCnt = None):

    rootgrp = nc.Dataset(ncFileName, 'a')

    date_time = rootgrp.variables['time']
    if posCnt == None: posCnt = len(date_time)
    date_time[posCnt] = nc.date2num(timeStamp, date_time.units, date_time.calendar)

    # flip variable if necessary (to follow cf_convention)
    if self.netcdf_y_orientation_follow_cf_convention:
        varField = np.flipud(varField)

    rootgrp.variables[shortVarName][posCnt, :, :] = varField

    rootgrp.sync()
    rootgrp.close()
Example #16
Source File: _files.py From pseudonetcdf with GNU Lesser General Public License v3.0 | 5 votes |
def time2idx(self, time, dim='time', timekey=None, **kwds):
    """
    Convert datetime objects to dimension indices

    Parameters
    ----------
    time : array-like
        array of datetime.datetime objects
    dim : str
        dimension name for val2idx
    timekey : str
        time variable key. None defaults to dim
    kwds : mappable
        see val2idx

    Returns
    -------
    idx : array-like
        time index (0-based)
    """
    if timekey is None:
        timekey = dim
    time = np.asarray(time)
    nums = self.date2num(time, timekey=timekey)
    return self.val2idx(dim=dim, val=nums, **kwds)
Example #17
Source File: ecmwf_macc.py From pvlib-python with BSD 3-Clause "New" or "Revised" License | 5 votes |
def interp_data(self, latitude, longitude, utc_time, param):
    """
    Interpolate ``param`` values to ``utc_time`` using indices nearest to
    (``latitude, longitude``).

    Parameters
    ----------
    latitude : float
        Latitude in degrees
    longitude : float
        Longitude in degrees
    utc_time : datetime.datetime or datetime.date
        Naive or UTC date or datetime to interpolate
    param : str
        Name of the parameter to interpolate from the data

    Returns
    -------
    Interpolated ``param`` value at (``utc_time, latitude, longitude``)

    Examples
    --------
    Use this to get a single value of a parameter in the data at a
    specific time and set of (latitude, longitude) coordinates.

    >>> from datetime import datetime
    >>> from pvlib.iotools import ecmwf_macc
    >>> data = ecmwf_macc.ECMWF_MACC('aod_tcwv_20121101.nc')
    >>> dt = datetime(2012, 11, 1, 11, 33, 1)
    >>> data.interp_data(38.2, -122.1, dt, 'aod550')
    """
    nctime = self.data['time']  # time
    ilat, ilon = self.get_nearest_indices(latitude, longitude)
    # time index before
    before = netCDF4.date2index(utc_time, nctime, select='before')
    fbefore = self.data[param][before, ilat, ilon]
    fafter = self.data[param][before + 1, ilat, ilon]
    dt_num = netCDF4.date2num(utc_time, nctime.units)
    time_ratio = (dt_num - nctime[before]) / self.delta_time
    return fbefore + (fafter - fbefore) * time_ratio
Example #18
Source File: _files.py From pseudonetcdf with GNU Lesser General Public License v3.0 | 5 votes |
def date2num(self, time, timekey='time'):
    """
    Parameters
    ----------
    time : array-like
        array of datetime.datetime objects
    timekey : str
        time variable key which requires units and should have calendar.
        If calendar is missing, standard is the default. default 'time'

    Returns
    -------
    num : array-like
        time in relative time as defined by units of time variable
        (i.e., timekey) which defaults to 'time'
    """
    from netCDF4 import date2num
    try:
        from datetime import timezone
        utc = timezone.utc
    except ImportError:
        from datetime import tzinfo
        utc = tzinfo.utc
    time = np.asarray(time)
    # netCDF4 date2num is timezone naive; assumes UTC when not
    # specified and converts to UTC internally
    # so, if a tzinfo is involved, it should be removed
    if any([t.tzinfo is not None for t in time[:]]):
        time = np.array([
            t.astimezone(utc).replace(tzinfo=None)
            for t in time[:]
        ])
    timeunits = self.variables[timekey].units.strip()
    calendar = getattr(self.variables[timekey], 'calendar', 'standard')
    num = date2num(time, timeunits, calendar.strip())
    return num
Example #19
Source File: test_db_future.py From forest with BSD 3-Clause "New" or "Revised" License | 5 votes |
def define(self, path):
    units = "hours since 1970-01-01 00:00:00"
    with netCDF4.Dataset(path, "w") as dataset:
        dataset.createDimension("x", 2)
        dataset.createDimension("y", 2)
        obj = dataset.createVariable("time", "d", ())
        obj[:] = netCDF4.date2num(dt.datetime(2019, 1, 1), units=units)
        obj = dataset.createVariable("x", "f", ("x",))
        obj[:] = [0, 10]
        obj = dataset.createVariable("y", "f", ("y",))
        obj[:] = [0, 10]
        obj = dataset.createVariable("air_temperature", "f", ("y", "x"))
        obj.um_stash_source = "m01s16i203"
        obj.coordinates = "time"
Example #20
Source File: test_drivers_eida50.py From forest with BSD 3-Clause "New" or "Revised" License | 5 votes |
def set_times(dataset, times):
    units = "seconds since 1970-01-01 00:00:00"
    dataset.createDimension("time", len(times))
    var = dataset.createVariable("time", "d", ("time",))
    var.units = units
    var[:] = netCDF4.date2num(times, units=units)
Example #21
Source File: test_drivers_eida50.py From forest with BSD 3-Clause "New" or "Revised" License | 5 votes |
def _eida50(dataset, times, lons=[0], lats=[0]):
    """Helper to define EIDA50 formatted file"""
    dataset.createDimension("time", len(times))
    dataset.createDimension("longitude", len(lons))
    dataset.createDimension("latitude", len(lats))
    units = "hours since 1970-01-01 00:00:00"
    var = dataset.createVariable(
        "time", "d", ("time",))
    var.axis = "T"
    var.units = units
    var.standard_name = "time"
    var.calendar = "gregorian"
    var[:] = netCDF4.date2num(times, units=units)
    var = dataset.createVariable(
        "longitude", "f", ("longitude",))
    var.axis = "X"
    var.units = "degrees_east"
    var.standard_name = "longitude"
    var[:] = lons
    var = dataset.createVariable(
        "latitude", "f", ("latitude",))
    var.axis = "Y"
    var.units = "degrees_north"
    var.standard_name = "latitude"
    var[:] = lats
    var = dataset.createVariable(
        "data", "f", ("time", "latitude", "longitude"))
    var.standard_name = "toa_brightness_temperature"
    var.long_name = "toa_brightness_temperature"
    var.units = "K"
    var[:] = 0
Example #22
Source File: test_profile.py From forest with BSD 3-Clause "New" or "Revised" License | 5 votes |
def variable_surface(
        dataset, variable, times, longitudes, latitudes, values):
    dataset.createDimension("latitude", len(latitudes))
    dataset.createDimension("longitude", len(longitudes))
    dataset.createDimension("time", len(times))
    var = dataset.createVariable(
        "longitude", "d", ("longitude",))
    var.axis = "X"
    var.units = "degrees_east"
    var.standard_name = "longitude"
    var[:] = longitudes
    var = dataset.createVariable(
        "latitude", "d", ("latitude",))
    var.axis = "Y"
    var.units = "degrees_north"
    var.standard_name = "latitude"
    var[:] = latitudes
    units = "hours since 1970-01-01 00:00:00"
    var = dataset.createVariable(
        "time", "d", ("time",))
    var.units = units
    var[:] = netCDF4.date2num(times, units=units)
    var = dataset.createVariable(
        "forecast_reference_time", "d", ())
    units = "hours since 1970-01-01 00:00:00"
    var.units = units
    var[:] = netCDF4.date2num(times, units=units)[0]
    var = dataset.createVariable(
        variable, "f", ("time", "latitude", "longitude"))
    var.units = "Pa"
    var.grid_mapping = "latitude_longitude"
    var.coordinates = "forecast_period forecast_reference_time"
    var[:] = values
Example #23
Source File: test_drivers_unified_model.py From forest with BSD 3-Clause "New" or "Revised" License | 5 votes |
def insert_times(dataset, times):
    if "time" not in dataset.dimensions:
        dataset.createDimension("time", len(times))
    units = "seconds since 1970-01-01 00:00:00 utc"
    var = dataset.createVariable("time", "f", ("time",))
    var.units = units
    var[:] = netCDF4.date2num(times, units=units)
Example #24
Source File: test_disk.py From forest with BSD 3-Clause "New" or "Revised" License | 5 votes |
def test_locator_given_time_pressure_format(self):
    pattern = self.path
    reference_time = dt.datetime(2019, 1, 1)
    times = [dt.datetime(2019, 1, 2), dt.datetime(2019, 1, 2, 3)]
    pressures = [1000, 950, 850]
    with netCDF4.Dataset(self.path, "w") as dataset:
        um = tutorial.UM(dataset)
        dataset.createDimension("longitude", 1)
        dataset.createDimension("latitude", 1)
        var = um.times("time", length=len(times))
        var[:] = netCDF4.date2num(times, units=var.units)
        um.forecast_reference_time(reference_time)
        var = um.pressures("pressure", length=len(pressures))
        var[:] = pressures
        dims = ("time", "pressure", "longitude", "latitude")
        coordinates = "forecast_period_1 forecast_reference_time"
        var = um.relative_humidity(dims, coordinates=coordinates)
        var[:] = 100.
    variable = "relative_humidity"
    initial_time = reference_time
    valid_time = times[1]
    pressure = pressures[2]
    locator = unified_model.Locator([self.path])
    _, result = locator.locate(
        pattern,
        variable,
        initial_time,
        valid_time,
        pressure)
    expect = (1, 2)
    self.assertEqual(expect, result)
Example #25
Source File: test_disk.py From forest with BSD 3-Clause "New" or "Revised" License | 5 votes |
def test_locator_given_dim0_format(self):
    pattern = self.path
    times = [dt.datetime(2019, 1, 1), dt.datetime(2019, 1, 2)]
    with netCDF4.Dataset(self.path, "w") as dataset:
        um = tutorial.UM(dataset)
        dataset.createDimension("longitude", 1)
        dataset.createDimension("latitude", 1)
        var = um.times("time", length=len(times), dim_name="dim0")
        var[:] = netCDF4.date2num(times, units=var.units)
        um.forecast_reference_time(times[0])
        var = um.pressures("pressure", length=len(times), dim_name="dim0")
        var[:] = 1000.
        dims = ("dim0", "longitude", "latitude")
        coordinates = "forecast_period_1 forecast_reference_time pressure time"
        var = um.relative_humidity(dims, coordinates=coordinates)
        var[:] = 100.
    variable = "relative_humidity"
    initial_time = dt.datetime(2019, 1, 1)
    valid_time = dt.datetime(2019, 1, 2)
    locator = unified_model.Locator([self.path])
    _, result = locator.locate(
        pattern,
        variable,
        initial_time,
        valid_time,
        pressure=1000.0001)
    expect = (1,)
    self.assertEqual(expect, result)
Example #26
Source File: test_series.py From forest with BSD 3-Clause "New" or "Revised" License | 5 votes |
def variable_4d(
        dataset, variable, times, pressures, longitudes, latitudes, values):
    dataset.createDimension("latitude", len(latitudes))
    dataset.createDimension("longitude", len(longitudes))
    dataset.createDimension("time_1", len(times))
    dataset.createDimension("pressure", len(pressures))
    var = dataset.createVariable(
        "longitude", "d", ("longitude",))
    var.axis = "X"
    var.units = "degrees_east"
    var.standard_name = "longitude"
    var[:] = longitudes
    var = dataset.createVariable(
        "latitude", "d", ("latitude",))
    var.axis = "Y"
    var.units = "degrees_north"
    var.standard_name = "latitude"
    var[:] = latitudes
    units = "hours since 1970-01-01 00:00:00"
    var = dataset.createVariable(
        "pressure", "d", ("pressure",))
    var[:] = pressures
    var = dataset.createVariable(
        "time_1", "d", ("time_1",))
    var.units = units
    var[:] = netCDF4.date2num(times, units=units)
    var = dataset.createVariable(
        variable, "f", ("time_1", "pressure", "latitude", "longitude"))
    var.units = "K"
    var.grid_mapping = "latitude_longitude"
    var.coordinates = "forecast_period_1 forecast_reference_time"
    var[:] = values
Example #27
Source File: test_series.py From forest with BSD 3-Clause "New" or "Revised" License | 5 votes |
def variable_3d_scalar_time(
        dataset, variable, time, pressures, longitudes, latitudes, values):
    dataset.createDimension("latitude", len(latitudes))
    dataset.createDimension("longitude", len(longitudes))
    dataset.createDimension("pressure", len(pressures))
    var = dataset.createVariable(
        "longitude", "d", ("longitude",))
    var.axis = "X"
    var.units = "degrees_east"
    var.standard_name = "longitude"
    var[:] = longitudes
    var = dataset.createVariable(
        "latitude", "d", ("latitude",))
    var.axis = "Y"
    var.units = "degrees_north"
    var.standard_name = "latitude"
    var[:] = latitudes
    units = "hours since 1970-01-01 00:00:00"
    var = dataset.createVariable(
        "pressure", "d", ("pressure",))
    var[:] = pressures
    var = dataset.createVariable(
        "time", "d", ())
    var.units = units
    var[:] = netCDF4.date2num(time, units=units)
    var = dataset.createVariable(
        variable, "f", ("pressure", "latitude", "longitude"))
    var.units = "%"
    var.grid_mapping = "latitude_longitude"
    var.coordinates = "forecast_period forecast_reference_time time"
    var[:] = values
Example #28
Source File: test_series.py From forest with BSD 3-Clause "New" or "Revised" License | 5 votes |
def variable_surface(
        dataset, variable, times, longitudes, latitudes, values):
    dataset.createDimension("latitude", len(latitudes))
    dataset.createDimension("longitude", len(longitudes))
    dataset.createDimension("time", len(times))
    var = dataset.createVariable(
        "longitude", "d", ("longitude",))
    var.axis = "X"
    var.units = "degrees_east"
    var.standard_name = "longitude"
    var[:] = longitudes
    var = dataset.createVariable(
        "latitude", "d", ("latitude",))
    var.axis = "Y"
    var.units = "degrees_north"
    var.standard_name = "latitude"
    var[:] = latitudes
    units = "hours since 1970-01-01 00:00:00"
    var = dataset.createVariable(
        "time", "d", ("time",))
    var.units = units
    var[:] = netCDF4.date2num(times, units=units)
    var = dataset.createVariable(
        variable, "f", ("time", "latitude", "longitude"))
    var.units = "Pa"
    var.grid_mapping = "latitude_longitude"
    var.coordinates = "forecast_period forecast_reference_time"
    var[:] = values
Example #29
Source File: core.py From forest with BSD 3-Clause "New" or "Revised" License | 5 votes |
def forecast_reference_time(self, time, name="forecast_reference_time"):
    dataset = self.dataset
    var = dataset.createVariable(name, "d", ())
    var.units = self.units
    var.standard_name = name
    var.calendar = "gregorian"
    var[:] = netCDF4.date2num(time, units=self.units)
Example #30
Source File: core.py From forest with BSD 3-Clause "New" or "Revised" License | 5 votes |
def build_um(build_dir):
    nx, ny = 100, 100
    x = np.linspace(0, 45, nx)
    y = np.linspace(0, 45, ny)
    X, Y = np.meshgrid(x, y)
    Z_0 = np.sqrt(X**2 + Y**2)
    Z_1 = Z_0 + 5.
    reference = dt.datetime(2019, 4, 17)
    times = [dt.datetime(2019, 4, 17, 12, 45),
             dt.datetime(2019, 4, 17, 13, 45)]
    path = os.path.join(build_dir, UM_FILE)
    print("writing: {}".format(path))
    with netCDF4.Dataset(path, "w") as dataset:
        formatter = UM(dataset)
        var = formatter.longitudes(nx)
        var[:] = x
        var = formatter.latitudes(ny)
        var[:] = y
        var = formatter.times("time", length=len(times), dim_name="dim0")
        var[:] = netCDF4.date2num(times, units=var.units)
        formatter.forecast_reference_time(times[0])
        var = formatter.pressures("pressure", length=len(times), dim_name="dim0")
        var[:] = 1000.
        dims = ("dim0", "longitude", "latitude")
        coordinates = "forecast_period_1 forecast_reference_time pressure time"
        var = formatter.relative_humidity(dims, coordinates=coordinates)
        var[0] = Z_0.T
        var[1] = Z_1.T