org.apache.hadoop.metrics.jvm.JvmMetrics Java Examples
The following examples show how to use
org.apache.hadoop.metrics.jvm.JvmMetrics.
You can vote up the examples you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: NameNodeMetrics.java From RDFS with Apache License 2.0 | 6 votes |
public NameNodeMetrics(Configuration conf, NameNode nameNode) { String sessionId = conf.get("session.id"); // Initiate Java VM metrics JvmMetrics.init("NameNode", sessionId); // Now the Mbean for the name node - this alos registers the MBean namenodeActivityMBean = new NameNodeActivtyMBean(registry); // Create a record for NameNode metrics MetricsContext metricsContext = MetricsUtil.getContext("dfs"); metricsRecord = MetricsUtil.createRecord(metricsContext, "namenode"); metricsRecord.setTag("sessionId", sessionId); metricsContext.registerUpdater(this); log.info("Initializing NameNodeMeterics using context object:" + metricsContext.getClass().getName()); }
Example #2
Source File: HighTideNodeMetrics.java From RDFS with Apache License 2.0 | 6 votes |
public HighTideNodeMetrics(Configuration conf, HighTideNode hightideNode) { String sessionId = conf.get("session.id"); // Initiate Java VM metrics JvmMetrics.init("HighTideNode", sessionId); // Now the Mbean for the name node - this also registers the MBean hightidenodeActivityMBean = new HighTideNodeActivityMBean(registry); // Create a record for HighTideNode metrics MetricsContext metricsContext = MetricsUtil.getContext("dfs"); metricsRecord = MetricsUtil.createRecord(metricsContext, "hightidenode"); metricsRecord.setTag("sessionId", sessionId); metricsContext.registerUpdater(this); LOG.info("Initializing HighTideNodeMetrics using context object:" + metricsContext.getClass().getName()); }
Example #3
Source File: NameNodeMetrics.java From hadoop-gpu with Apache License 2.0 | 6 votes |
public NameNodeMetrics(Configuration conf, NameNode nameNode) { String sessionId = conf.get("session.id"); // Initiate Java VM metrics JvmMetrics.init("NameNode", sessionId); // Now the Mbean for the name node - this alos registers the MBean namenodeActivityMBean = new NameNodeActivtyMBean(registry); // Create a record for NameNode metrics MetricsContext metricsContext = MetricsUtil.getContext("dfs"); metricsRecord = MetricsUtil.createRecord(metricsContext, "namenode"); metricsRecord.setTag("sessionId", sessionId); metricsContext.registerUpdater(this); log.info("Initializing NameNodeMeterics using context object:" + metricsContext.getClass().getName()); }
Example #4
Source File: LocalJobRunnerMetrics.java From hadoop with Apache License 2.0 | 5 votes |
public LocalJobRunnerMetrics(JobConf conf) { String sessionId = conf.getSessionId(); // Initiate JVM Metrics JvmMetrics.init("JobTracker", sessionId); // Create a record for map-reduce metrics MetricsContext context = MetricsUtil.getContext("mapred"); // record name is jobtracker for compatibility metricsRecord = MetricsUtil.createRecord(context, "jobtracker"); metricsRecord.setTag("sessionId", sessionId); context.registerUpdater(this); }
Example #5
Source File: LocalJobRunnerMetrics.java From big-c with Apache License 2.0 | 5 votes |
public LocalJobRunnerMetrics(JobConf conf) { String sessionId = conf.getSessionId(); // Initiate JVM Metrics JvmMetrics.init("JobTracker", sessionId); // Create a record for map-reduce metrics MetricsContext context = MetricsUtil.getContext("mapred"); // record name is jobtracker for compatibility metricsRecord = MetricsUtil.createRecord(context, "jobtracker"); metricsRecord.setTag("sessionId", sessionId); context.registerUpdater(this); }
Example #6
Source File: PigTests.java From datafu with Apache License 2.0 | 5 votes |
@org.testng.annotations.BeforeClass public void beforeClass() { Logger.getRootLogger().removeAllAppenders(); Logger.getRootLogger().addAppender(new ConsoleAppender(new PatternLayout(PatternLayout.TTCC_CONVERSION_PATTERN))); Logger.getRootLogger().setLevel(Level.INFO); Logger.getLogger(JvmMetrics.class).setLevel(Level.OFF); System.setProperty("pig.import.search.path", System.getProperty("user.dir") + File.separator + "src" + File.separator + "main" + File.separator + "resources"); // Test files will be created in the following sub-directory new File(System.getProperty("user.dir") + File.separator + "build", "test-files").mkdir(); }
Example #7
Source File: JobTrackerMetricsInst.java From RDFS with Apache License 2.0 | 5 votes |
/**
 * Creates JobTracker metrics instrumentation: JVM metric reporting and a
 * "jobtracker" record registered in the "mapred" metrics context.
 *
 * @param tracker the JobTracker being instrumented (passed to the superclass)
 * @param conf    the job configuration supplying the session id
 */
public JobTrackerMetricsInst(JobTracker tracker, JobConf conf) {
  super(tracker, conf);
  final String sessionId = conf.getSessionId();
  // Start JVM metric reporting for this JobTracker process.
  JvmMetrics.init("JobTracker", sessionId);
  // Periodic metric updates flow through registerUpdater(this).
  MetricsContext mapredContext = MetricsUtil.getContext("mapred");
  metricsRecord = MetricsUtil.createRecord(mapredContext, "jobtracker");
  metricsRecord.setTag("sessionId", sessionId);
  mapredContext.registerUpdater(this);
}
Example #8
Source File: TaskTrackerMetricsInst.java From RDFS with Apache License 2.0 | 5 votes |
/**
 * Builds per-TaskTracker metrics instrumentation: JVM metric reporting and
 * a "tasktracker" record registered in the "mapred" metrics context.
 *
 * @param t the TaskTracker being instrumented, passed to the superclass
 */
public TaskTrackerMetricsInst(TaskTracker t) {
  super(t);
  // NOTE(review): reads the inherited field "tt" rather than the parameter
  // "t" — presumably super(t) stores t into tt; confirm in the superclass.
  JobConf conf = tt.getJobConf();
  String sessionId = conf.getSessionId();
  // Initiate Java VM Metrics
  JvmMetrics.init("TaskTracker", sessionId);
  // Create a record for Task Tracker metrics
  MetricsContext context = MetricsUtil.getContext("mapred");
  metricsRecord = MetricsUtil.createRecord(context, "tasktracker"); //guaranteed never null
  metricsRecord.setTag("sessionId", sessionId);
  // Periodic metric refreshes are driven through this updater registration.
  context.registerUpdater(this);
}
Example #9
Source File: DataNodeMetrics.java From RDFS with Apache License 2.0 | 5 votes |
public DataNodeMetrics(Configuration conf, String storageId) { String sessionId = conf.get("session.id"); // Initiate reporting of Java VM metrics JvmMetrics.init("DataNode", sessionId); // Now the MBean for the data node datanodeActivityMBean = new DataNodeActivityMBean(registry, storageId); // Create record for DataNode metrics MetricsContext context = MetricsUtil.getContext("dfs"); metricsRecord = MetricsUtil.createRecord(context, "datanode"); metricsRecord.setTag("sessionId", sessionId); context.registerUpdater(this); }
Example #10
Source File: JobTrackerMetricsInst.java From hadoop-gpu with Apache License 2.0 | 5 votes |
/**
 * Builds JobTracker metrics instrumentation: JVM metric reporting plus a
 * "jobtracker"-named record in the "mapred" metrics context.
 *
 * @param tracker the JobTracker being instrumented (passed to the superclass)
 * @param conf    the job configuration supplying the session id
 */
public JobTrackerMetricsInst(JobTracker tracker, JobConf conf) {
  super(tracker, conf);
  final String sessionId = conf.getSessionId();
  // JVM-level metrics are reported under the "JobTracker" process name.
  JvmMetrics.init("JobTracker", sessionId);
  MetricsContext ctx = MetricsUtil.getContext("mapred");
  metricsRecord = MetricsUtil.createRecord(ctx, "jobtracker");
  metricsRecord.setTag("sessionId", sessionId);
  // Register so the context calls back for periodic metric updates.
  ctx.registerUpdater(this);
}
Example #11
Source File: TaskTrackerMetricsInst.java From hadoop-gpu with Apache License 2.0 | 5 votes |
/**
 * Builds per-TaskTracker metrics instrumentation: JVM metric reporting and
 * a "tasktracker" record registered in the "mapred" metrics context.
 *
 * @param t the TaskTracker being instrumented, passed to the superclass
 */
public TaskTrackerMetricsInst(TaskTracker t) {
  super(t);
  // NOTE(review): reads the inherited field "tt" rather than the parameter
  // "t" — presumably super(t) stores t into tt; confirm in the superclass.
  JobConf conf = tt.getJobConf();
  String sessionId = conf.getSessionId();
  // Initiate Java VM Metrics
  JvmMetrics.init("TaskTracker", sessionId);
  // Create a record for Task Tracker metrics
  MetricsContext context = MetricsUtil.getContext("mapred");
  metricsRecord = MetricsUtil.createRecord(context, "tasktracker"); //guaranteed never null
  metricsRecord.setTag("sessionId", sessionId);
  // Periodic metric refreshes are driven through this updater registration.
  context.registerUpdater(this);
}
Example #12
Source File: DataNodeMetrics.java From hadoop-gpu with Apache License 2.0 | 5 votes |
public DataNodeMetrics(Configuration conf, String storageId) { String sessionId = conf.get("session.id"); // Initiate reporting of Java VM metrics JvmMetrics.init("DataNode", sessionId); // Now the MBean for the data node datanodeActivityMBean = new DataNodeActivityMBean(registry, storageId); // Create record for DataNode metrics MetricsContext context = MetricsUtil.getContext("dfs"); metricsRecord = MetricsUtil.createRecord(context, "datanode"); metricsRecord.setTag("sessionId", sessionId); context.registerUpdater(this); }
Example #13
Source File: SecondaryNameNode.java From RDFS with Apache License 2.0 | 4 votes |
/** * Initialize SecondaryNameNode. */ private void initialize(Configuration conf) throws IOException { // initiate Java VM metrics JvmMetrics.init("SecondaryNameNode", conf.get("session.id")); // Create connection to the namenode. shouldRun = true; nameNodeAddr = NameNode.getAddress(conf); this.conf = conf; this.namenode = (NamenodeProtocol) RPC.waitForProxy(NamenodeProtocol.class, NamenodeProtocol.versionID, nameNodeAddr, conf); // initialize checkpoint directories fsName = getInfoServer(); checkpointDirs = FSImage.getCheckpointDirs(conf, "/tmp/hadoop/dfs/namesecondary"); checkpointEditsDirs = FSImage.getCheckpointEditsDirs(conf, "/tmp/hadoop/dfs/namesecondary"); checkpointImage = new CheckpointStorage(conf); checkpointImage.recoverCreate(checkpointDirs, checkpointEditsDirs); // Initialize other scheduling parameters from the configuration checkpointPeriod = conf.getLong("fs.checkpoint.period", 3600); checkpointSize = conf.getLong("fs.checkpoint.size", 4194304); // initialize the webserver for uploading files. String infoAddr = NetUtils.getServerAddress(conf, "dfs.secondary.info.bindAddress", "dfs.secondary.info.port", "dfs.secondary.http.address"); InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(infoAddr); infoBindAddress = infoSocAddr.getHostName(); int tmpInfoPort = infoSocAddr.getPort(); infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort, tmpInfoPort == 0, conf); infoServer.setAttribute("name.system.image", checkpointImage); this.infoServer.setAttribute("name.conf", conf); infoServer.addInternalServlet("getimage", "/getimage", GetImageServlet.class); infoServer.start(); // The web-server port can be ephemeral... 
ensure we have the correct info infoPort = infoServer.getPort(); conf.set("dfs.secondary.http.address", infoBindAddress + ":" +infoPort); LOG.info("Secondary Web-server up at: " + infoBindAddress + ":" +infoPort); LOG.warn("Checkpoint Period :" + checkpointPeriod + " secs " + "(" + checkpointPeriod/60 + " min)"); LOG.warn("Log Size Trigger :" + checkpointSize + " bytes " + "(" + checkpointSize/1024 + " KB)"); }
Example #14
Source File: SecondaryNameNode.java From hadoop-gpu with Apache License 2.0 | 4 votes |
/** * Initialize SecondaryNameNode. */ private void initialize(Configuration conf) throws IOException { // initiate Java VM metrics JvmMetrics.init("SecondaryNameNode", conf.get("session.id")); // Create connection to the namenode. shouldRun = true; nameNodeAddr = NameNode.getAddress(conf); this.conf = conf; this.namenode = (NamenodeProtocol) RPC.waitForProxy(NamenodeProtocol.class, NamenodeProtocol.versionID, nameNodeAddr, conf); // initialize checkpoint directories fsName = getInfoServer(); checkpointDirs = FSImage.getCheckpointDirs(conf, "/tmp/hadoop/dfs/namesecondary"); checkpointEditsDirs = FSImage.getCheckpointEditsDirs(conf, "/tmp/hadoop/dfs/namesecondary"); checkpointImage = new CheckpointStorage(); checkpointImage.recoverCreate(checkpointDirs, checkpointEditsDirs); // Initialize other scheduling parameters from the configuration checkpointPeriod = conf.getLong("fs.checkpoint.period", 3600); checkpointSize = conf.getLong("fs.checkpoint.size", 4194304); // initialize the webserver for uploading files. String infoAddr = NetUtils.getServerAddress(conf, "dfs.secondary.info.bindAddress", "dfs.secondary.info.port", "dfs.secondary.http.address"); InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(infoAddr); infoBindAddress = infoSocAddr.getHostName(); int tmpInfoPort = infoSocAddr.getPort(); infoServer = new HttpServer("secondary", infoBindAddress, tmpInfoPort, tmpInfoPort == 0, conf); infoServer.setAttribute("name.system.image", checkpointImage); this.infoServer.setAttribute("name.conf", conf); infoServer.addInternalServlet("getimage", "/getimage", GetImageServlet.class); infoServer.start(); // The web-server port can be ephemeral... 
ensure we have the correct info infoPort = infoServer.getPort(); conf.set("dfs.secondary.http.address", infoBindAddress + ":" +infoPort); LOG.info("Secondary Web-server up at: " + infoBindAddress + ":" +infoPort); LOG.warn("Checkpoint Period :" + checkpointPeriod + " secs " + "(" + checkpointPeriod/60 + " min)"); LOG.warn("Log Size Trigger :" + checkpointSize + " bytes " + "(" + checkpointSize/1024 + " KB)"); }