org.apache.hive.service.server.HiveServer2 Java Examples
The following examples show how to use org.apache.hive.service.server.HiveServer2.
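Most of the examples below follow the same embedded-server lifecycle: construct a HiveServer2, call init(HiveConf) and start(), run queries against it, and eventually stop() it. The following is only a minimal sketch of that pattern, assuming the default HiveConf on the classpath points at a disposable metastore; real tests usually override the metastore URL and Thrift port, as the examples below do.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.server.HiveServer2;

public class EmbeddedHiveServer2Sketch {
    public static void main(String[] args) {
        // Assumption: the default HiveConf is acceptable for a throwaway server.
        HiveConf conf = new HiveConf();
        HiveServer2 server = new HiveServer2();
        server.init(conf);   // wires up the internal services (CLIService, Thrift endpoint, ...)
        server.start();      // starts listening on hive.server2.thrift.port
        try {
            // ... run JDBC or CLIService calls against the server here ...
        } finally {
            server.stop();   // shuts the services down again
        }
    }
}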
Example #1
Source File: HiveClientTest.java From garmadon with Apache License 2.0
@Before
public void setup() throws IOException {
    hdfsTemp = Files.createTempDirectory("hdfs");
    derbyDBPath = Files.createTempDirectory("derbyDB");

    HiveConf hiveConf = new HiveConf();
    hiveConf.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.toString(),
        "jdbc:derby:;databaseName=" + derbyDBPath.toString() + "/derbyDB" + ";create=true");

    ServerSocket s = new ServerSocket(0);
    port = String.valueOf(s.getLocalPort());
    hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT.varname, port);

    // Required to avoid NoSuchMethodError org.apache.hive.service.cli.operation.LogDivertAppender.setWriter
    hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED.varname, "false");

    hiveServer2 = new HiveServer2();
    hiveServer2.init(hiveConf);

    s.close();
    hiveServer2.start();
}
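Once a server like the one above is running on its randomly chosen port, a test would typically talk to it over JDBC. The snippet below is a hypothetical follow-up for illustration only: the port field is the one populated in setup(), and the user name is a placeholder (Example #11 below uses the same URL form).

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

// Hypothetical follow-up: connect to the embedded server started in setup().
private void queryEmbeddedServer() throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    try (Connection connection = DriverManager.getConnection("jdbc:hive2://localhost:" + port, "user", "");
         Statement statement = connection.createStatement()) {
        statement.execute("SHOW DATABASES");
    }
}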
Example #2
Source File: HiveTester.java From transport with BSD 2-Clause "Simplified" License
private void createHiveServer() {
    HiveServer2 server = new HiveServer2();
    server.init(new HiveConf());
    for (Service service : server.getServices()) {
        if (service instanceof CLIService) {
            _client = (CLIService) service;
        }
    }
    Preconditions.checkNotNull(_client, "CLI service not found in local Hive server");
    try {
        _sessionHandle = _client.openSession(null, null, null);
        _functionRegistry = SessionState.getRegistryForWrite();
        // "map_from_entries" UDF is required to create maps with non-primitive key types
        _functionRegistry.registerGenericUDF("map_from_entries", MapFromEntriesWrapper.class);
        // TODO: This is a hack. Hive's public API does not have a way to register an already created GenericUDF object
        // It only accepts a class name after which the parameterless constructor of the class is called to create a
        // GenericUDF object. This does not work for HiveTestStdUDFWrapper as it accepts the UDF classes as parameters.
        // However, Hive has an internal method which does allow passing GenericUDF objects instead of classes.
        _functionRegistryAddFunctionMethod =
            _functionRegistry.getClass().getDeclaredMethod("addFunction", String.class, FunctionInfo.class);
        _functionRegistryAddFunctionMethod.setAccessible(true);
    } catch (HiveSQLException | NoSuchMethodException e) {
        throw new RuntimeException(e);
    }
}
Example #3
Source File: BlurSerDeTest.java From incubator-retired-blur with Apache License 2.0
@SuppressWarnings("resource")
private int waitForStartupAndGetPort(HiveServer2 hiveServer2) throws InterruptedException {
    while (true) {
        // thriftCLIService->server->serverTransport_->serverSocket_
        Thread.sleep(100);
        Object o1 = getObject(hiveServer2, "thriftCLIService");
        if (o1 == null) {
            continue;
        }
        Object o2 = getObject(o1, "server");
        if (o2 == null) {
            continue;
        }
        Object o3 = getObject(o2, "serverTransport_");
        if (o3 == null) {
            continue;
        }
        Object o4 = getObject(o3, "serverSocket_");
        if (o4 == null) {
            continue;
        }
        ServerSocket socket = (ServerSocket) o4;
        return socket.getLocalPort();
    }
}
Example #4
Source File: HiveServer2Core.java From beeju with Apache License 2.0
public void initialise() throws InterruptedException {
    beejuCore.setHiveVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
        RelaxedSQLStdHiveAuthorizerFactory.class.getName());
    hiveServer2 = new HiveServer2();
    hiveServer2.init(beejuCore.conf());
    hiveServer2.start();
    waitForHiveServer2StartUp();
    jdbcConnectionUrl = "jdbc:hive2://localhost:" + port + "/" + beejuCore.databaseName();
}
Example #5
Source File: HiveTestService.java From hudi with Apache License 2.0
public HiveServer2 start() throws IOException {
    Objects.requireNonNull(workDir, "The work dir must be set before starting cluster.");

    if (hadoopConf == null) {
        hadoopConf = HoodieTestUtils.getDefaultHadoopConf();
    }

    String localHiveLocation = getHiveLocation(workDir);
    if (clean) {
        LOG.info("Cleaning Hive cluster data at: " + localHiveLocation + " and starting fresh.");
        File file = new File(localHiveLocation);
        FileIOUtils.deleteDirectory(file);
    }

    HiveConf serverConf = configureHive(hadoopConf, localHiveLocation);

    executorService = Executors.newSingleThreadExecutor();
    tServer = startMetaStore(bindIP, metastorePort, serverConf);

    serverConf.set("hive.in.test", "true");
    hiveServer = startHiveServer(serverConf);

    String serverHostname;
    if (bindIP.equals("0.0.0.0")) {
        serverHostname = "localhost";
    } else {
        serverHostname = bindIP;
    }
    if (!waitForServerUp(serverConf, serverHostname, metastorePort, CONNECTION_TIMEOUT)) {
        throw new IOException("Waiting for startup of standalone server");
    }

    LOG.info("Hive Minicluster service started.");
    return hiveServer;
}
Example #6
Source File: HiveLocalServer2.java From hadoop-mini-clusters with Apache License 2.0
@Override
public void start() throws Exception {
    hiveServer2 = new HiveServer2();
    LOG.info("HIVESERVER2: Starting HiveServer2 on port: {}", hiveServer2Port);
    configure();
    hiveServer2.init(hiveConf);
    hiveServer2.start();
}
Example #7
Source File: HiveServerContainer.java From HiveRunner with Apache License 2.0
/**
 * Will start the HiveServer.
 *
 * @param testConfig Specific test case properties. Will be merged with the HiveConf of the context
 * @param hiveVars HiveVars to pass on to the HiveServer for this session
 */
public void init(Map<String, String> testConfig, Map<String, String> hiveVars) {
    context.init();

    HiveConf hiveConf = context.getHiveConf();

    // merge test case properties with hive conf before HiveServer is started.
    for (Map.Entry<String, String> property : testConfig.entrySet()) {
        hiveConf.set(property.getKey(), property.getValue());
    }

    try {
        hiveServer2 = new HiveServer2();
        hiveServer2.init(hiveConf);

        // Locate the ClIService in the HiveServer2
        for (Service service : hiveServer2.getServices()) {
            if (service instanceof CLIService) {
                client = (CLIService) service;
            }
        }

        Preconditions.checkNotNull(client, "ClIService was not initialized by HiveServer2");
        sessionHandle = client.openSession("noUser", "noPassword", null);
        SessionState sessionState = client.getSessionManager().getSession(sessionHandle).getSessionState();
        currentSessionState = sessionState;
        currentSessionState.setHiveVariables(hiveVars);
    } catch (Exception e) {
        throw new IllegalStateException("Failed to create HiveServer :" + e.getMessage(), e);
    }

    // Ping hive server before we do anything more with it! If validation
    // is switched on, this will fail if metastorage is not set up properly
    pingHiveServer();
}
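Examples #2 and #7 both capture the embedded server's CLIService and a session handle, which lets a test run statements directly against that service instead of going through JDBC. The following is a rough sketch only, since the exact CLIService method signatures differ between Hive releases; it assumes the client and sessionHandle fields captured above.

// Sketch only: assumes the "client" and "sessionHandle" fields from the example above, plus
// imports of org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.RowSet
// and java.util.HashMap. Exact CLIService signatures vary between Hive versions.
private void runShowTables() throws HiveSQLException {
    OperationHandle operation =
        client.executeStatement(sessionHandle, "SHOW TABLES", new HashMap<String, String>());
    RowSet rows = client.fetchResults(operation);
    for (Object[] row : rows) {
        System.out.println(row[0]);   // first column holds the table name
    }
    client.closeOperation(operation);
}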
Example #8
Source File: HiveServer2Core.java From beeju with Apache License 2.0
public HiveServer2 getHiveServer2() {
    return hiveServer2;
}
Example #9
Source File: HiveTestService.java From hudi with Apache License 2.0
private HiveServer2 startHiveServer(HiveConf serverConf) {
    HiveServer2 hiveServer = new HiveServer2();
    hiveServer.init(serverConf);
    hiveServer.start();
    return hiveServer;
}
Example #10
Source File: InternalHiveServerRunner.java From sqoop-on-spark with Apache License 2.0
public InternalHiveServerRunner(String hostname, int port) throws Exception {
    super(hostname, port);
    hiveServer2 = new HiveServer2();
}
Example #11
Source File: HIVERangerAuthorizerTest.java From ranger with Apache License 2.0
@org.junit.BeforeClass
public static void setup() throws Exception {
    // Get a random port
    ServerSocket serverSocket = new ServerSocket(0);
    port = serverSocket.getLocalPort();
    serverSocket.close();

    HiveConf conf = new HiveConf();

    // Warehouse
    File warehouseDir = new File("./target/hdfs/warehouse").getAbsoluteFile();
    conf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, warehouseDir.getPath());

    // Scratchdir
    File scratchDir = new File("./target/hdfs/scratchdir").getAbsoluteFile();
    conf.set("hive.exec.scratchdir", scratchDir.getPath());

    // REPL DUMP target folder
    File replRootDir = new File("./target/user/hive").getAbsoluteFile();
    conf.set("hive.repl.rootdir", replRootDir.getPath());

    // Create a temporary directory for the Hive metastore
    File metastoreDir = new File("./metastore_db/").getAbsoluteFile();
    conf.set(HiveConf.ConfVars.METASTORECONNECTURLKEY.varname,
        String.format("jdbc:derby:;databaseName=%s;create=true", metastoreDir.getPath()));

    conf.set(HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL.varname, "true");
    conf.set(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT.varname, "" + port);
    conf.set(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false");
    conf.set(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_PORT.varname, "0");
    conf.set(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE.varname, "mr");

    hiveServer = new HiveServer2();
    hiveServer.init(conf);
    hiveServer.start();

    Class.forName("org.apache.hive.jdbc.HiveDriver");

    // Create database
    String initialUrl = "jdbc:hive2://localhost:" + port;
    Connection connection = DriverManager.getConnection(initialUrl, "admin", "admin");
    Statement statement = connection.createStatement();

    statement.execute("CREATE DATABASE IF NOT EXISTS rangerauthz with dbproperties ('repl.source.for'='1,2,3')");
    statement.execute("CREATE DATABASE IF NOT EXISTS demo");

    statement.close();
    connection.close();

    // Load data into HIVE
    String url = "jdbc:hive2://localhost:" + port + "/rangerauthz";
    connection = DriverManager.getConnection(url, "admin", "admin");
    statement = connection.createStatement();
    // statement.execute("CREATE TABLE WORDS (word STRING, count INT)");
    statement.execute("create table if not exists words (word STRING, count INT) row format delimited fields terminated by '\t' stored as textfile");

    // Copy "wordcount.txt" to "target" to avoid overwriting it during load
    File inputFile = new File(HIVERangerAuthorizerTest.class.getResource("../../../../../wordcount.txt").toURI());
    Path outputPath = Paths.get(inputFile.toPath().getParent().getParent().toString() + File.separator + "wordcountout.txt");
    Files.copy(inputFile.toPath(), outputPath);

    statement.execute("LOAD DATA INPATH '" + outputPath + "' OVERWRITE INTO TABLE words");

    // Just test to make sure it's working
    ResultSet resultSet = statement.executeQuery("SELECT * FROM words where count == '100'");
    if (resultSet.next()) {
        Assert.assertEquals("Mr.", resultSet.getString(1));
    } else {
        Assert.fail("No ResultSet found");
    }

    statement.close();
    connection.close();

    // Enable ranger authorization after the initial db setup and table creating is done.
    conf.set(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, "true");
    conf.set(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, "true");
    conf.set(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER.varname,
        "org.apache.ranger.authorization.hive.authorizer.RangerHiveAuthorizerFactory");
}
Example #12
Source File: BlurSerDeTest.java From incubator-retired-blur with Apache License 2.0
private int runLoad(boolean disableMrUpdate)
    throws IOException, InterruptedException, ClassNotFoundException, SQLException {
    Configuration configuration = miniCluster.getMRConfiguration();
    writeSiteFiles(configuration);
    HiveConf hiveConf = new HiveConf(configuration, getClass());
    hiveConf.set("hive.server2.thrift.port", "0");
    HiveServer2 hiveServer2 = new HiveServer2();
    hiveServer2.init(hiveConf);
    hiveServer2.start();
    int port = waitForStartupAndGetPort(hiveServer2);

    Class.forName(HiveDriver.class.getName());
    String userName = UserGroupInformation.getCurrentUser().getShortUserName();
    Connection connection = DriverManager.getConnection("jdbc:hive2://localhost:" + port, userName, "");

    UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
    run(connection, "set blur.user.name=" + currentUser.getUserName());
    run(connection, "set blur.mr.update.disabled=" + disableMrUpdate);
    run(connection, "set hive.metastore.warehouse.dir=" + WAREHOUSE.toURI().toString());

    run(connection, "create database if not exists testdb");
    run(connection, "use testdb");

    run(connection, "CREATE TABLE if not exists testtable ROW FORMAT SERDE 'org.apache.blur.hive.BlurSerDe' "
        + "WITH SERDEPROPERTIES ( 'blur.zookeeper.connection'='" + miniCluster.getZkConnectionString() + "', "
        + "'blur.table'='" + TEST + "', 'blur.family'='" + FAM + "' ) "
        + "STORED BY 'org.apache.blur.hive.BlurHiveStorageHandler'");

    run(connection, "desc testtable");

    String createLoadTable = buildCreateLoadTable(connection);
    run(connection, createLoadTable);
    File dbDir = new File(WAREHOUSE, "testdb.db");
    File tableDir = new File(dbDir, "loadtable");
    int totalRecords = 100;
    generateData(tableDir, totalRecords);

    run(connection, "select * from loadtable");
    run(connection, "set " + BlurSerDe.BLUR_BLOCKING_APPLY + "=true");
    run(connection, "insert into table testtable select * from loadtable");
    connection.close();
    hiveServer2.stop();
    return totalRecords;
}
Example #13
Source File: InternalHiveServer.java From incubator-sentry with Apache License 2.0
public InternalHiveServer(HiveConf conf) throws Exception {
    super(conf, getHostname(conf), getPort(conf));
    hiveServer2 = new HiveServer2();
    this.conf = conf;
}
Example #14
Source File: HiveService.java From kite with Apache License 2.0
private HiveServer2 startHiveServer(HiveConf serverConf) {
    HiveServer2 hiveServer = new HiveServer2();
    hiveServer.init(serverConf);
    hiveServer.start();
    return hiveServer;
}