org.apache.hadoop.metrics.spi.Util Java Examples
The following examples show how to use org.apache.hadoop.metrics.spi.Util, a small helper class in Hadoop's legacy metrics framework. Its most frequently used method is Util.parse(String specs, int defaultPort), which turns a comma-separated list of host[:port] specifications into a list of InetSocketAddress objects, applying the default port to any entry that omits one. The project and license for each snippet are noted above the code.
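Before the full examples, a minimal standalone sketch of Util.parse itself (the class name and host names below are hypothetical, chosen only for illustration):

import java.net.InetSocketAddress;
import java.util.List;

import org.apache.hadoop.metrics.spi.Util;

public class UtilParseDemo {
  public static void main(String[] args) {
    // Comma-separated "host[:port]" specs; entries without an explicit
    // port receive the default port passed as the second argument.
    List<InetSocketAddress> servers =
        Util.parse("gmond-a.example.com:8649,gmond-b.example.com", 8649);
    for (InetSocketAddress addr : servers) {
      System.out.println(addr);
    }
  }
}

This prints one address per entry; the second entry picks up the default port 8649 because its spec omits one.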
Example #1
Source File: GangliaContext.java From RDFS with Apache License 2.0
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  parseAndSetPeriod(PERIOD_PROPERTY);

  // Util.parse splits the comma-separated "servers" attribute into a list
  // of InetSocketAddress targets, using DEFAULT_PORT for entries that
  // omit an explicit port.
  metricsServers = Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);

  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);

  try {
    datagramSocket = new DatagramSocket();
  } catch (SocketException se) {
    se.printStackTrace();
  }
}
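The attributes read above (servers, period, units, slope, tmax, dmax) come from the metrics configuration file. A hedged sketch of the kind of hadoop-metrics.properties entries that would feed this context (the "dfs" prefix, host names, and values are placeholders, not taken from this project):

# hypothetical hadoop-metrics.properties snippet
dfs.class=org.apache.hadoop.metrics.ganglia.GangliaContext
dfs.period=10
dfs.servers=gmond-a.example.com:8649,gmond-b.example.com:8649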
Example #2
Source File: GangliaContext.java From hadoop with Apache License 2.0
@Override
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  parseAndSetPeriod(PERIOD_PROPERTY);
  metricsServers = Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);
  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);

  // Optional multicast delivery; fall back to the default TTL when no
  // TTL attribute is configured.
  multicastEnabled = Boolean.parseBoolean(getAttribute(MULTICAST_PROPERTY));
  String multicastTtlValue = getAttribute(MULTICAST_TTL_PROPERTY);
  if (multicastEnabled) {
    if (multicastTtlValue == null) {
      multicastTtl = DEFAULT_MULTICAST_TTL;
    } else {
      multicastTtl = Integer.parseInt(multicastTtlValue);
    }
  }

  try {
    if (multicastEnabled) {
      LOG.info("Enabling multicast for Ganglia with TTL " + multicastTtl);
      datagramSocket = new MulticastSocket();
      ((MulticastSocket) datagramSocket).setTimeToLive(multicastTtl);
    } else {
      datagramSocket = new DatagramSocket();
    }
  } catch (IOException e) {
    LOG.error(e);
  }
}
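Assuming the multicast attributes follow the same per-context naming convention as the other properties read here (the attribute suffixes and values below are an assumption, not confirmed by this snippet), enabling multicast delivery would look roughly like:

# hypothetical multicast settings for the same context prefix
dfs.multicast=true
dfs.multicast.ttl=3

When the TTL attribute is absent, the code above falls back to DEFAULT_MULTICAST_TTL.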
Example #3
Source File: GangliaContext.java From big-c with Apache License 2.0
@Override
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  parseAndSetPeriod(PERIOD_PROPERTY);
  metricsServers = Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);
  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);

  multicastEnabled = Boolean.parseBoolean(getAttribute(MULTICAST_PROPERTY));
  String multicastTtlValue = getAttribute(MULTICAST_TTL_PROPERTY);
  if (multicastEnabled) {
    if (multicastTtlValue == null) {
      multicastTtl = DEFAULT_MULTICAST_TTL;
    } else {
      multicastTtl = Integer.parseInt(multicastTtlValue);
    }
  }

  try {
    if (multicastEnabled) {
      LOG.info("Enabling multicast for Ganglia with TTL " + multicastTtl);
      datagramSocket = new MulticastSocket();
      ((MulticastSocket) datagramSocket).setTimeToLive(multicastTtl);
    } else {
      datagramSocket = new DatagramSocket();
    }
  } catch (IOException e) {
    LOG.error(e);
  }
}
Example #4
Source File: GangliaContext.java From hadoop-gpu with Apache License 2.0
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);

  // This older version parses the period attribute inline rather than
  // calling parseAndSetPeriod; non-numeric or non-positive values are
  // rejected with a MetricsException.
  String periodStr = getAttribute(PERIOD_PROPERTY);
  if (periodStr != null) {
    int period = 0;
    try {
      period = Integer.parseInt(periodStr);
    } catch (NumberFormatException nfe) {
    }
    if (period <= 0) {
      throw new MetricsException("Invalid period: " + periodStr);
    }
    setPeriod(period);
  }

  metricsServers = Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);
  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);

  try {
    datagramSocket = new DatagramSocket();
  } catch (SocketException se) {
    se.printStackTrace();
  }
}
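The inline period handling in Example #4 is what the newer versions in Examples #1-#3 delegate to parseAndSetPeriod. A rough sketch of that helper, assuming it simply mirrors the inline logic above (this is an illustrative reconstruction, not the verbatim AbstractMetricsContext source):

// Hypothetical reconstruction of the helper used by later versions.
protected void parseAndSetPeriod(String attributeName) {
  String periodStr = getAttribute(attributeName);
  if (periodStr != null) {
    int period = 0;
    try {
      period = Integer.parseInt(periodStr);
    } catch (NumberFormatException nfe) {
      // ignore; the range check below rejects the value
    }
    if (period <= 0) {
      throw new MetricsException("Invalid period: " + periodStr);
    }
    setPeriod(period);
  }
}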