org.apache.hadoop.metrics.ContextFactory Java Examples
The following examples show how to use
org.apache.hadoop.metrics.ContextFactory.
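Before the individual examples, here is a minimal sketch of how ContextFactory is typically obtained and used to configure a metrics context programmatically. The wrapper class ContextFactoryUsage, the context name "myContext", and the attribute values are illustrative placeholders rather than code from any of the projects below; in a real deployment these attributes normally come from hadoop-metrics.properties, which ContextFactory.getFactory() loads.

import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsUtil;

public class ContextFactoryUsage {
  public static void main(String[] args) throws Exception {
    // Obtain the singleton factory; attributes set on it are what a
    // context's init() later reads back via getAttribute().
    ContextFactory factory = ContextFactory.getFactory();
    factory.setAttribute("myContext.class",
        "org.apache.hadoop.metrics.spi.NullContextWithUpdateThread");
    factory.setAttribute("myContext.period", "10");

    // MetricsUtil consults the factory to instantiate the configured
    // context class and initialize it with this factory.
    MetricsContext context = MetricsUtil.getContext("myContext");
    System.out.println("Created context: " + context.getClass().getName());
  }
}

The TestGangliaContext examples below follow the same pattern, setting "gangliaContext.multicast" and related attributes on the factory before calling init().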
Example #1
Source File: CompositeContext.java From RDFS with Apache License 2.0 | 6 votes |
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  int nKids;
  try {
    String sKids = getAttribute(ARITY_LABEL);
    nKids = Integer.valueOf(sKids);
  } catch (Exception e) {
    LOG.error("Unable to initialize composite metric " + contextName +
        ": could not init arity", e);
    return;
  }
  for (int i = 0; i < nKids; ++i) {
    MetricsContext ctxt = MetricsUtil.getContext(
        String.format(SUB_FMT, contextName, i), contextName);
    if (null != ctxt) {
      subctxt.add(ctxt);
    }
  }
}
Example #2
Source File: GangliaContext31.java From big-c with Apache License 2.0 | 6 votes |
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);

  LOG.debug("Initializing the GangliaContext31 for Ganglia 3.1 metrics.");

  // Take the hostname from the DNS class.
  Configuration conf = new Configuration();

  if (conf.get("slave.host.name") != null) {
    hostName = conf.get("slave.host.name");
  } else {
    try {
      hostName = DNS.getDefaultHost(
          conf.get("dfs.datanode.dns.interface", "default"),
          conf.get("dfs.datanode.dns.nameserver", "default"));
    } catch (UnknownHostException uhe) {
      LOG.error(uhe);
      hostName = "UNKNOWN.example.com";
    }
  }
}
Example #3
Source File: CompositeContext.java From big-c with Apache License 2.0 | 6 votes |
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  int nKids;
  try {
    String sKids = getAttribute(ARITY_LABEL);
    nKids = Integer.parseInt(sKids);
  } catch (Exception e) {
    LOG.error("Unable to initialize composite metric " + contextName +
        ": could not init arity", e);
    return;
  }
  for (int i = 0; i < nKids; ++i) {
    MetricsContext ctxt = MetricsUtil.getContext(
        String.format(SUB_FMT, contextName, i), contextName);
    if (null != ctxt) {
      subctxt.add(ctxt);
    }
  }
}
Example #4
Source File: JMXContext.java From RDFS with Apache License 2.0 | 6 votes |
@Override
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  initAllowedRecords();

  String periodStr = getAttribute(PERIOD_PROPERTY);
  if (periodStr != null) {
    int period = 0;
    try {
      period = Integer.parseInt(periodStr);
    } catch (NumberFormatException nfe) {
      // Ignored: period stays <= 0 and the check below rejects the value.
    }
    if (period <= 0) {
      throw new MetricsException("Invalid period: " + periodStr);
    }
    setPeriod(period);
  }
}
Example #5
Source File: FileContext.java From RDFS with Apache License 2.0 | 6 votes |
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);

  fileName = getAttribute(FILE_NAME_PROPERTY);
  String recordDatePattern = getAttribute(RECORD_DATE_PATTERN_PROPERTY);
  if (recordDatePattern == null) {
    recordDatePattern = DEFAULT_RECORD_DATE_PATTERN;
  }
  recordDateFormat = new SimpleDateFormat(recordDatePattern);
  fileSuffixDateFormat = new SimpleDateFormat(FILE_SUFFIX_DATE_PATTERN);

  Calendar currentDate = Calendar.getInstance();
  if (fileName != null) {
    file = new File(getFullFileName(currentDate));
  }
  lastRecordDate = currentDate;

  parseAndSetPeriod(PERIOD_PROPERTY);
}
Example #6
Source File: GangliaContext.java From RDFS with Apache License 2.0 | 6 votes |
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  parseAndSetPeriod(PERIOD_PROPERTY);

  metricsServers = Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);

  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);

  try {
    datagramSocket = new DatagramSocket();
  } catch (SocketException se) {
    se.printStackTrace();
  }
}
Example #7
Source File: NullContextWithUpdateThread.java From hadoop-gpu with Apache License 2.0 | 6 votes |
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);

  // If period is specified, use it, otherwise the default is good enough
  String periodStr = getAttribute(PERIOD_PROPERTY);
  if (periodStr != null) {
    int period = 0;
    try {
      period = Integer.parseInt(periodStr);
    } catch (NumberFormatException nfe) {
      // Ignored: period stays <= 0 and the check below rejects the value.
    }
    if (period <= 0) {
      throw new MetricsException("Invalid period: " + periodStr);
    }
    setPeriod(period);
  }
}
Example #8
Source File: GangliaContext31.java From hadoop with Apache License 2.0 | 6 votes |
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);

  LOG.debug("Initializing the GangliaContext31 for Ganglia 3.1 metrics.");

  // Take the hostname from the DNS class.
  Configuration conf = new Configuration();

  if (conf.get("slave.host.name") != null) {
    hostName = conf.get("slave.host.name");
  } else {
    try {
      hostName = DNS.getDefaultHost(
          conf.get("dfs.datanode.dns.interface", "default"),
          conf.get("dfs.datanode.dns.nameserver", "default"));
    } catch (UnknownHostException uhe) {
      LOG.error(uhe);
      hostName = "UNKNOWN.example.com";
    }
  }
}
Example #9
Source File: CompositeContext.java From hadoop with Apache License 2.0 | 6 votes |
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  int nKids;
  try {
    String sKids = getAttribute(ARITY_LABEL);
    nKids = Integer.parseInt(sKids);
  } catch (Exception e) {
    LOG.error("Unable to initialize composite metric " + contextName +
        ": could not init arity", e);
    return;
  }
  for (int i = 0; i < nKids; ++i) {
    MetricsContext ctxt = MetricsUtil.getContext(
        String.format(SUB_FMT, contextName, i), contextName);
    if (null != ctxt) {
      subctxt.add(ctxt);
    }
  }
}
Example #10
Source File: CompositeContext.java From hadoop-gpu with Apache License 2.0 | 6 votes |
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  int nKids;
  try {
    String sKids = getAttribute(ARITY_LABEL);
    nKids = Integer.valueOf(sKids);
  } catch (Exception e) {
    LOG.error("Unable to initialize composite metric " + contextName +
        ": could not init arity", e);
    return;
  }
  for (int i = 0; i < nKids; ++i) {
    MetricsContext ctxt = MetricsUtil.getContext(
        String.format(SUB_FMT, contextName, i), contextName);
    if (null != ctxt) {
      subctxt.add(ctxt);
    }
  }
}
Example #11
Source File: FileContext.java From hadoop-gpu with Apache License 2.0 | 6 votes |
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);

  String fileName = getAttribute(FILE_NAME_PROPERTY);
  if (fileName != null) {
    file = new File(fileName);
  }

  String periodStr = getAttribute(PERIOD_PROPERTY);
  if (periodStr != null) {
    int period = 0;
    try {
      period = Integer.parseInt(periodStr);
    } catch (NumberFormatException nfe) {
      // Ignored: period stays <= 0 and the check below rejects the value.
    }
    if (period <= 0) {
      throw new MetricsException("Invalid period: " + periodStr);
    }
    setPeriod(period);
  }
}
Example #12
Source File: TestGangliaContext.java From big-c with Apache License 2.0 | 5 votes |
@Test
public void testShouldCreateMulticastSocket() throws Exception {
  GangliaContext context = new GangliaContext();
  ContextFactory factory = ContextFactory.getFactory();
  factory.setAttribute("gangliaContext.multicast", "true");
  context.init("gangliaContext", factory);
  assertTrue("Did not create MulticastSocket",
      context.datagramSocket instanceof MulticastSocket);
  MulticastSocket multicastSocket = (MulticastSocket) context.datagramSocket;
  assertEquals("Did not set default TTL", multicastSocket.getTimeToLive(), 1);
}
Example #13
Source File: ClusterManagerTestable.java From RDFS with Apache License 2.0 | 5 votes |
public ClusterManagerTestable(CoronaConf conf, boolean callbackSession)
    throws IOException {
  this.conf = conf;
  initLegalTypes();

  ContextFactory.resetFactory();
  setNoEmitMetricsContext();
  metrics = new ClusterManagerMetrics(getTypes());

  sessionManager = new SessionManagerTestable(this);
  nodeManager = new NodeManagerTestable(this, conf);
  if (callbackSession) {
    sessionNotifier = new CallbackSessionNotifier(sessionManager, this, metrics);
  } else {
    sessionNotifier = new FakeSessionNotifier(sessionManager, this, metrics);
  }
  sessionHistoryManager = new SessionHistoryManager();
  sessionHistoryManager.setConf(conf);

  configManager = new FakeConfigManager();
  scheduler = new SchedulerTestable(nodeManager, sessionManager,
      sessionNotifier, getTypes(), metrics, configManager, conf);
  scheduler.setConf(conf);
  scheduler.start();

  sessionManager.setConf(conf);
  nodeManager.setConf(conf);
  startTime = clock.getTime();
}
Example #14
Source File: MiniCoronaCluster.java From RDFS with Apache License 2.0 | 5 votes |
private MiniCoronaCluster(Builder builder) throws IOException {
  ContextFactory.resetFactory();
  setNoEmitMetricsContext();

  if (builder.racks != null && builder.hosts != null) {
    if (builder.racks.length != builder.hosts.length) {
      throw new IllegalArgumentException(
          "The number of hosts and racks must be the same");
    }
  }
  this.conf = builder.conf != null ? builder.conf : new JobConf();
  this.namenode = builder.namenode;
  this.ugi = builder.ugi;
  this.conf.set(CoronaConf.CM_ADDRESS, "localhost:0");
  this.conf.set(CoronaConf.CPU_TO_RESOURCE_PARTITIONING,
      TstUtils.std_cpu_to_resource_partitioning);
  this.clusterManagerPort = startClusterManager(this.conf);
  this.conf.set(CoronaConf.PROXY_JOB_TRACKER_ADDRESS, "localhost:0");
  pjt = ProxyJobTracker.startProxyTracker(new CoronaConf(conf));
  this.proxyJobTrackerPort = pjt.getRpcPort();
  configureJobConf(conf, builder.namenode, clusterManagerPort,
      proxyJobTrackerPort, builder.ugi);
  for (int i = 0; i < builder.numTaskTrackers; ++i) {
    String host = builder.hosts == null ?
        "host" + i + ".foo.com" : builder.hosts[i];
    String rack = builder.racks == null ?
        NetworkTopology.DEFAULT_RACK : builder.racks[i];
    startTaskTracker(host, rack, i, builder.numDir);
  }
  waitTaskTrackers();
}
Example #15
Source File: TestGangliaContext.java From big-c with Apache License 2.0 | 5 votes |
@Test
public void testCloseShouldCloseTheSocketWhichIsCreatedByInit() throws Exception {
  AbstractMetricsContext context = new GangliaContext();
  context.init("gangliaContext", ContextFactory.getFactory());
  GangliaContext gangliaContext = (GangliaContext) context;
  assertFalse("Socket already closed", gangliaContext.datagramSocket.isClosed());
  context.close();
  assertTrue("Socket not closed", gangliaContext.datagramSocket.isClosed());
}
Example #16
Source File: TestGangliaContext.java From big-c with Apache License 2.0 | 5 votes |
@Test
public void testShouldSetMulticastSocketTtl() throws Exception {
  GangliaContext context = new GangliaContext();
  ContextFactory factory = ContextFactory.getFactory();
  factory.setAttribute("gangliaContext.multicast", "true");
  factory.setAttribute("gangliaContext.multicast.ttl", "10");
  context.init("gangliaContext", factory);
  MulticastSocket multicastSocket = (MulticastSocket) context.datagramSocket;
  assertEquals("Did not set TTL", multicastSocket.getTimeToLive(), 10);
}
Example #17
Source File: TestGangliaContext.java From big-c with Apache License 2.0 | 5 votes |
@Test
public void testShouldCreateDatagramSocketIfMulticastIsDisabled() throws Exception {
  GangliaContext context = new GangliaContext();
  ContextFactory factory = ContextFactory.getFactory();
  factory.setAttribute("gangliaContext.multicast", "false");
  context.init("gangliaContext", factory);
  assertFalse("Created MulticastSocket",
      context.datagramSocket instanceof MulticastSocket);
}
Example #18
Source File: GangliaContext.java From hadoop-gpu with Apache License 2.0 | 5 votes |
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);

  String periodStr = getAttribute(PERIOD_PROPERTY);
  if (periodStr != null) {
    int period = 0;
    try {
      period = Integer.parseInt(periodStr);
    } catch (NumberFormatException nfe) {
      // Ignored: period stays <= 0 and the check below rejects the value.
    }
    if (period <= 0) {
      throw new MetricsException("Invalid period: " + periodStr);
    }
    setPeriod(period);
  }

  metricsServers = Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);

  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);

  try {
    datagramSocket = new DatagramSocket();
  } catch (SocketException se) {
    se.printStackTrace();
  }
}
Example #19
Source File: GangliaContext.java From hadoop with Apache License 2.0 | 5 votes |
@Override
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  parseAndSetPeriod(PERIOD_PROPERTY);

  metricsServers = Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);

  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);
  multicastEnabled = Boolean.parseBoolean(getAttribute(MULTICAST_PROPERTY));
  String multicastTtlValue = getAttribute(MULTICAST_TTL_PROPERTY);
  if (multicastEnabled) {
    if (multicastTtlValue == null) {
      multicastTtl = DEFAULT_MULTICAST_TTL;
    } else {
      multicastTtl = Integer.parseInt(multicastTtlValue);
    }
  }

  try {
    if (multicastEnabled) {
      LOG.info("Enabling multicast for Ganglia with TTL " + multicastTtl);
      datagramSocket = new MulticastSocket();
      ((MulticastSocket) datagramSocket).setTimeToLive(multicastTtl);
    } else {
      datagramSocket = new DatagramSocket();
    }
  } catch (IOException e) {
    LOG.error(e);
  }
}
Example #20
Source File: TestGangliaContext.java From hadoop with Apache License 2.0 | 5 votes |
@Test
public void testCloseShouldCloseTheSocketWhichIsCreatedByInit() throws Exception {
  AbstractMetricsContext context = new GangliaContext();
  context.init("gangliaContext", ContextFactory.getFactory());
  GangliaContext gangliaContext = (GangliaContext) context;
  assertFalse("Socket already closed", gangliaContext.datagramSocket.isClosed());
  context.close();
  assertTrue("Socket not closed", gangliaContext.datagramSocket.isClosed());
}
Example #21
Source File: TestGangliaContext.java From hadoop with Apache License 2.0 | 5 votes |
@Test
public void testShouldCreateDatagramSocketIfMulticastIsDisabled() throws Exception {
  GangliaContext context = new GangliaContext();
  ContextFactory factory = ContextFactory.getFactory();
  factory.setAttribute("gangliaContext.multicast", "false");
  context.init("gangliaContext", factory);
  assertFalse("Created MulticastSocket",
      context.datagramSocket instanceof MulticastSocket);
}
Example #22
Source File: TestGangliaContext.java From hadoop with Apache License 2.0 | 5 votes |
@Test
public void testShouldSetMulticastSocketTtl() throws Exception {
  GangliaContext context = new GangliaContext();
  ContextFactory factory = ContextFactory.getFactory();
  factory.setAttribute("gangliaContext.multicast", "true");
  factory.setAttribute("gangliaContext.multicast.ttl", "10");
  context.init("gangliaContext", factory);
  MulticastSocket multicastSocket = (MulticastSocket) context.datagramSocket;
  assertEquals("Did not set TTL", multicastSocket.getTimeToLive(), 10);
}
Example #23
Source File: TestGangliaContext.java From hadoop with Apache License 2.0 | 5 votes |
@Test
public void testShouldCreateMulticastSocket() throws Exception {
  GangliaContext context = new GangliaContext();
  ContextFactory factory = ContextFactory.getFactory();
  factory.setAttribute("gangliaContext.multicast", "true");
  context.init("gangliaContext", factory);
  assertTrue("Did not create MulticastSocket",
      context.datagramSocket instanceof MulticastSocket);
  MulticastSocket multicastSocket = (MulticastSocket) context.datagramSocket;
  assertEquals("Did not set default TTL", multicastSocket.getTimeToLive(), 1);
}
Example #24
Source File: GangliaContext.java From big-c with Apache License 2.0 | 5 votes |
@Override
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  parseAndSetPeriod(PERIOD_PROPERTY);

  metricsServers = Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);

  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);
  multicastEnabled = Boolean.parseBoolean(getAttribute(MULTICAST_PROPERTY));
  String multicastTtlValue = getAttribute(MULTICAST_TTL_PROPERTY);
  if (multicastEnabled) {
    if (multicastTtlValue == null) {
      multicastTtl = DEFAULT_MULTICAST_TTL;
    } else {
      multicastTtl = Integer.parseInt(multicastTtlValue);
    }
  }

  try {
    if (multicastEnabled) {
      LOG.info("Enabling multicast for Ganglia with TTL " + multicastTtl);
      datagramSocket = new MulticastSocket();
      ((MulticastSocket) datagramSocket).setTimeToLive(multicastTtl);
    } else {
      datagramSocket = new DatagramSocket();
    }
  } catch (IOException e) {
    LOG.error(e);
  }
}
Example #25
Source File: AbstractMetricsContext.java From RDFS with Apache License 2.0 | 4 votes |
/**
 * Returns the factory by which this context was created.
 */
public ContextFactory getContextFactory() {
  return factory;
}
Example #26
Source File: AbstractMetricsContext.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Returns the factory by which this context was created.
 */
public ContextFactory getContextFactory() {
  return factory;
}
Example #27
Source File: NullContextWithUpdateThread.java From hadoop with Apache License 2.0 | 4 votes |
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  parseAndSetPeriod(PERIOD_PROPERTY);
}
Example #28
Source File: NoEmitMetricsContext.java From hadoop with Apache License 2.0 | 4 votes |
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  parseAndSetPeriod(PERIOD_PROPERTY);
}
Example #29
Source File: AbstractMetricsContext.java From hadoop-gpu with Apache License 2.0 | 4 votes |
/**
 * Returns the factory by which this context was created.
 */
public ContextFactory getContextFactory() {
  return factory;
}
Example #30
Source File: AbstractMetricsContext.java From hadoop-gpu with Apache License 2.0 | 4 votes |
/**
 * Initializes the context.
 */
public void init(String contextName, ContextFactory factory) {
  this.contextName = contextName;
  this.factory = factory;
}
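The base-class init() above only records the context name and factory; the concrete contexts in the earlier examples build on it by calling super.init() and then reading their configuration back from the factory. The sketch below is hypothetical (LoggingContext and its print-to-stdout behaviour are not part of Hadoop) and only illustrates that shared pattern; it assumes the usual AbstractMetricsContext hooks, parseAndSetPeriod() for the period attribute and emitRecord() for sending records.

import java.io.IOException;

import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
import org.apache.hadoop.metrics.spi.OutputRecord;

// Hypothetical custom context: call super.init(), then read configuration
// back from the ContextFactory, as the examples above do.
public class LoggingContext extends AbstractMetricsContext {

  private static final String PERIOD_PROPERTY = "period";

  @Override
  public void init(String contextName, ContextFactory factory) {
    super.init(contextName, factory);    // stores contextName and factory
    parseAndSetPeriod(PERIOD_PROPERTY);  // reads "<contextName>.period"
  }

  @Override
  protected void emitRecord(String contextName, String recordName,
                            OutputRecord outRec) throws IOException {
    // A real context would push these values to a backend; this one prints them.
    for (String metricName : outRec.getMetricNames()) {
      System.out.println(contextName + "." + recordName + "." + metricName
          + " = " + outRec.getMetric(metricName));
    }
  }
}

Wiring such a context in would be a matter of pointing the "<contextName>.class" attribute (or the corresponding hadoop-metrics.properties entry) at it, as in the sketch after the introduction above.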