org.apache.hive.service.cli.HiveSQLException Java Examples
The following examples show how to use
org.apache.hive.service.cli.HiveSQLException.
You can go to the original project or source file by following the links above each example.
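HiveSQLException is the checked exception thrown by HiveServer2's CLI/Thrift layer, and it extends java.sql.SQLException, so the usual SQLState and error-code accessors are available on it. As a minimal sketch of the pattern most examples below follow — the helper name is ours, and `client`/`sessionHandle` are assumed to be set up as in Example #1:

import java.util.Collections;

import org.apache.hive.service.cli.CLIService;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationHandle;
import org.apache.hive.service.cli.SessionHandle;

// Minimal sketch: run a statement and translate the checked HiveSQLException
// into an unchecked exception, as several of the examples below do.
final class HiveSQLExceptionSketch {
  static OperationHandle execute(CLIService client, SessionHandle session, String sql) {
    try {
      return client.executeStatement(session, sql, Collections.emptyMap());
    } catch (HiveSQLException e) {
      // e.getSQLState() / e.getErrorCode() are inherited from java.sql.SQLException
      throw new RuntimeException("Failed to execute: " + sql, e);
    }
  }
}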
Example #1
Source File: HiveTester.java From transport with BSD 2-Clause "Simplified" License
private void createHiveServer() {
  HiveServer2 server = new HiveServer2();
  server.init(new HiveConf());
  for (Service service : server.getServices()) {
    if (service instanceof CLIService) {
      _client = (CLIService) service;
    }
  }
  Preconditions.checkNotNull(_client, "CLI service not found in local Hive server");
  try {
    _sessionHandle = _client.openSession(null, null, null);
    _functionRegistry = SessionState.getRegistryForWrite();
    // "map_from_entries" UDF is required to create maps with non-primitive key types
    _functionRegistry.registerGenericUDF("map_from_entries", MapFromEntriesWrapper.class);
    // TODO: This is a hack. Hive's public API does not have a way to register an already created GenericUDF object.
    // It only accepts a class name, after which the parameterless constructor of the class is called to create a
    // GenericUDF object. This does not work for HiveTestStdUDFWrapper as it accepts the UDF classes as parameters.
    // However, Hive has an internal method which does allow passing GenericUDF objects instead of classes.
    _functionRegistryAddFunctionMethod =
        _functionRegistry.getClass().getDeclaredMethod("addFunction", String.class, FunctionInfo.class);
    _functionRegistryAddFunctionMethod.setAccessible(true);
  } catch (HiveSQLException | NoSuchMethodException e) {
    throw new RuntimeException(e);
  }
}
Example #2
Source File: HiveTester.java From transport with BSD 2-Clause "Simplified" License
/**
 * Consumes any exceptions generated by the Hive engine to reveal the underlying exception thrown by the UDF.
 */
private RuntimeException fetchUnderlyingException(Throwable e) {
  while (true) {
    if (e instanceof HiveSQLException || e instanceof HiveException) {
      e = e.getCause();
    } else if (e instanceof IOException && e.getCause() instanceof HiveException) {
      e = e.getCause();
    } else {
      break;
    }
  }
  if (e instanceof RuntimeException) {
    return (RuntimeException) e;
  } else {
    return new RuntimeException(e);
  }
}
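This helper is put to work in Example #3 below: the HiveSQLException thrown by executeStatement is handed to fetchUnderlyingException, so the test surfaces the exception raised by the UDF itself rather than Hive's wrapper.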
Example #3
Source File: HiveTester.java From transport with BSD 2-Clause "Simplified" License
@Override
public void assertFunctionCall(String functionCallString, Object expectedOutputData, Object expectedOutputType) {
  String query = "SELECT " + functionCallString;
  try {
    // Execute the SQL statement and fetch the result
    OperationHandle handle = _client.executeStatement(_sessionHandle, query, null);
    if (handle.hasResultSet()) {
      RowSet rowSet = _client.fetchResults(handle);
      if (rowSet.numRows() > 1 || rowSet.numColumns() > 1) {
        throw new RuntimeException(
            "Expected 1 row and 1 column in query output. Received " + rowSet.numRows() + " rows and "
                + rowSet.numColumns() + " columns.\nQuery: \"" + query + "\"");
      }
      Object[] row = rowSet.iterator().next();
      Object result = row[0];
      Assert.assertEquals(result, expectedOutputData, "UDF output does not match");
      // Get the output data type and convert them to TypeInfo to compare
      ColumnDescriptor outputColumnDescriptor = _client.getResultSetMetadata(handle).getColumnDescriptors().get(0);
      Assert.assertEquals(
          TypeInfoUtils.getTypeInfoFromTypeString(outputColumnDescriptor.getTypeName().toLowerCase()),
          TypeInfoUtils.getTypeInfoFromObjectInspector((ObjectInspector) expectedOutputType),
          "UDF output type does not match");
    } else {
      throw new RuntimeException("Query did not return any rows. Query: \"" + query + "\"");
    }
  } catch (HiveSQLException e) {
    throw fetchUnderlyingException(e);
  }
}
Example #4
Source File: AbstractHiveSaveTest.java From elasticsearch-hadoop with Apache License 2.0
@Test(expected = HiveSQLException.class)
public void testCreateWithDuplicates() throws Exception {
  // load the raw data as a native, managed table
  // and then insert its content into the external one
  String localTable = createTable("createsourceduplicate");
  String load = loadData("createsourceduplicate");

  // create external table
  String ddl = "CREATE EXTERNAL TABLE createsaveduplicate ("
      + "id BIGINT, "
      + "name STRING, "
      + "links STRUCT<url:STRING, picture:STRING>) "
      + tableProps(resource("hive-createsave", "data", targetVersion),
          "'" + ConfigurationOptions.ES_MAPPING_ID + "'='id'",
          "'" + ConfigurationOptions.ES_WRITE_OPERATION + "'='create'");

  String selectTest = "SELECT s.name, struct(s.url, s.picture) FROM createsourceduplicate s";

  // transfer data
  String insert = "INSERT OVERWRITE TABLE createsaveduplicate "
      + "SELECT s.id, s.name, named_struct('url', s.url, 'picture', s.picture) FROM createsourceduplicate s";

  System.out.println(ddl);
  System.out.println(server.execute(ddl));
  System.out.println(server.execute(localTable));
  System.out.println(server.execute(load));
  System.out.println(server.execute(selectTest));
  System.out.println(server.execute(insert));
}
Example #5
Source File: AbstractHiveSaveTest.java From elasticsearch-hadoop with Apache License 2.0
@Test(expected = HiveSQLException.class)
public void testUpdateWithoutUpsert() throws Exception {
  // load the raw data as a native, managed table
  // and then insert its content into the external one
  String localTable = createTable("updatewoupsertsource");
  String load = loadData("updatewoupsertsource");

  // create external table
  String ddl = "CREATE EXTERNAL TABLE updatewoupsertsave ("
      + "id BIGINT, "
      + "name STRING, "
      + "links STRUCT<url:STRING, picture:STRING>) "
      + tableProps(resource("hive-updatewoupsertsave", "data", targetVersion),
          "'" + ConfigurationOptions.ES_MAPPING_ID + "'='id'",
          "'" + ConfigurationOptions.ES_WRITE_OPERATION + "'='update'");

  String selectTest = "SELECT s.name, struct(s.url, s.picture) FROM updatewoupsertsource s";

  // transfer data
  String insert = "INSERT OVERWRITE TABLE updatewoupsertsave "
      + "SELECT s.id, s.name, named_struct('url', s.url, 'picture', s.picture) FROM updatewoupsertsource s";

  System.out.println(ddl);
  System.out.println(server.execute(ddl));
  System.out.println(server.execute(localTable));
  System.out.println(server.execute(load));
  System.out.println(server.execute(selectTest));
  System.out.println(server.execute(insert));
}
Example #6
Source File: AbstractHiveSaveJsonTest.java From elasticsearch-hadoop with Apache License 2.0
@Test(expected = HiveSQLException.class)
public void testCreateWithDuplicates() throws Exception {
  // load the raw data as a native, managed table
  // and then insert its content into the external one
  String localTable = createTable("jsoncreatesourceduplicate");
  String load = loadData("jsoncreatesourceduplicate");

  // create external table
  String ddl = "CREATE EXTERNAL TABLE jsoncreatesaveduplicate ("
      + "json STRING) "
      + tableProps(resource("json-hive-createsave", "data", targetVersion),
          "'" + ConfigurationOptions.ES_MAPPING_ID + "'='number'",
          "'" + ConfigurationOptions.ES_WRITE_OPERATION + "'='create'");

  String selectTest = "SELECT s.json FROM jsoncreatesourceduplicate s";

  // transfer data
  String insert = "INSERT OVERWRITE TABLE jsoncreatesaveduplicate "
      + "SELECT s.json FROM jsoncreatesourceduplicate s";

  System.out.println(ddl);
  System.out.println(server.execute(ddl));
  System.out.println(server.execute(localTable));
  System.out.println(server.execute(load));
  System.out.println(server.execute(selectTest));
  System.out.println(server.execute(insert));
}
Example #7
Source File: AbstractHiveSaveJsonTest.java From elasticsearch-hadoop with Apache License 2.0
@Test(expected = HiveSQLException.class)
public void testUpdateWithoutUpsert() throws Exception {
  // load the raw data as a native, managed table
  // and then insert its content into the external one
  String localTable = createTable("jsonupdatewoupsertsource");
  String load = loadData("jsonupdatewoupsertsource");

  // create external table
  String ddl = "CREATE EXTERNAL TABLE jsonupdatewoupsertsave ("
      + "json STRING) "
      + tableProps(resource("json-hive-updatewoupsertsave", "data", targetVersion),
          "'" + ConfigurationOptions.ES_MAPPING_ID + "'='number'",
          "'" + ConfigurationOptions.ES_WRITE_OPERATION + "'='update'");

  String selectTest = "SELECT s.json FROM jsonupdatewoupsertsource s";

  // transfer data
  String insert = "INSERT OVERWRITE TABLE jsonupdatewoupsertsave "
      + "SELECT s.json FROM jsonupdatewoupsertsource s";

  System.out.println(ddl);
  System.out.println(server.execute(ddl));
  System.out.println(server.execute(localTable));
  System.out.println(server.execute(load));
  System.out.println(server.execute(selectTest));
  System.out.println(server.execute(insert));
}
Example #8
Source File: TestDbColumnLevelMetaDataOps.java From incubator-sentry with Apache License 2.0
private ResultSet executeQueryWithLog(String query) throws Exception {
  ResultSet rs;
  try {
    LOGGER.info("Running " + query);
    rs = statement.executeQuery(query);
    return rs;
  } catch (HiveSQLException ex) {
    LOGGER.error("Privilege exception occurs when running : " + query);
    throw ex;
  }
}
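Unlike the CLIService-based examples above, this Sentry test drives Hive through plain JDBC, so HiveSQLException surfaces from Statement.executeQuery (the catch is legal because HiveSQLException extends java.sql.SQLException). A minimal sketch of the setup such a test assumes — the connection URL and credentials are placeholders, not values from the original test, and the snippet belongs inside a method declared to throw Exception:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

// Hypothetical wiring for a test like the one above; URL and user are placeholders.
Class.forName("org.apache.hive.jdbc.HiveDriver");
Connection connection = DriverManager.getConnection("jdbc:hive2://localhost:10000/default", "hive", "");
Statement statement = connection.createStatement();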
Example #9
Source File: HiveAuthzBindingSessionHook.java From incubator-sentry with Apache License 2.0
/**
 * The session hook for sentry authorization that sets the required session level configuration:
 * 1. Setup the sentry hooks - semantic, exec and filter hooks
 * 2. Set additional config properties required for auth
 *    set HIVE_EXTENDED_ENITITY_CAPTURE = true
 *    set SCRATCHDIRPERMISSION = 700
 * 3. Add sensitive config parameters to the config restrict list so that they can't be
 *    overridden by users
 */
@Override
public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException {
  // Add sentry hooks to the session configuration
  HiveConf sessionConf = sessionHookContext.getSessionConf();
  appendConfVar(sessionConf, ConfVars.SEMANTIC_ANALYZER_HOOK.varname, SEMANTIC_HOOK);

  HiveAuthzConf authzConf = HiveAuthzBindingHook.loadAuthzConf(sessionConf);
  String commandWhitelist =
      authzConf.get(HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST,
          HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT);
  sessionConf.setVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, commandWhitelist);
  sessionConf.setVar(ConfVars.SCRATCHDIRPERMISSION, SCRATCH_DIR_PERMISSIONS);
  sessionConf.setBoolVar(ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY, true);

  // set user name
  sessionConf.set(HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME, sessionHookContext.getSessionUser());
  sessionConf.set(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, sessionHookContext.getSessionUser());
  sessionConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.sentry.binding.hive.HiveAuthzBindingSessionHook$SentryHiveAuthorizerFactory");

  // Set MR ACLs to session user
  appendConfVar(sessionConf, JobContext.JOB_ACL_VIEW_JOB, sessionHookContext.getSessionUser());
  appendConfVar(sessionConf, JobContext.JOB_ACL_MODIFY_JOB, sessionHookContext.getSessionUser());

  // setup restrict list
  sessionConf.addToRestrictList(ACCESS_RESTRICT_LIST);
}
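A hook like this is not called directly by user code: HiveServer2 instantiates registered session hooks and invokes run() each time a client opens a new session. The usual wiring (stated here from general HiveServer2 knowledge, not from this source file) is to list the hook class in the hive.server2.session.hook property of hive-site.xml.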
Example #10
Source File: HiveAuthzBindingSessionHookV2.java From incubator-sentry with Apache License 2.0
/**
 * The session hook for sentry authorization that sets the required session level configuration:
 * 1. Setup the sentry hooks - semantic, exec and filter hooks
 * 2. Set additional config properties required for auth
 *    set HIVE_EXTENDED_ENITITY_CAPTURE = true
 *    set SCRATCHDIRPERMISSION = 700
 * 3. Add sensitive config parameters to the config restrict list so that they can't be
 *    overridden by users
 */
@Override
public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException {
  // Add sentry hooks to the session configuration
  HiveConf sessionConf = sessionHookContext.getSessionConf();
  appendConfVar(sessionConf, ConfVars.SEMANTIC_ANALYZER_HOOK.varname, SEMANTIC_HOOK);

  // enable sentry authorization V2
  sessionConf.setBoolean(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, true);
  sessionConf.setBoolean(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, false);
  sessionConf.set(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER.varname,
      "org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator");

  // grant all privileges for table to its owner
  sessionConf.setVar(ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS, "");

  // Enable compiler to capture transform URI referred in the query
  sessionConf.setBoolVar(ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY, true);

  // set security command list
  HiveAuthzConf authzConf = HiveAuthzBindingHook.loadAuthzConf(sessionConf);
  String commandWhitelist =
      authzConf.get(HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST,
          HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT);
  sessionConf.setVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, commandWhitelist);

  // set additional configuration properties required for auth
  sessionConf.setVar(ConfVars.SCRATCHDIRPERMISSION, SCRATCH_DIR_PERMISSIONS);

  // setup restrict list
  sessionConf.addToRestrictList(ACCESS_RESTRICT_LIST);

  // set user name
  sessionConf.set(HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME, sessionHookContext.getSessionUser());
  sessionConf.set(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, sessionHookContext.getSessionUser());

  // Set MR ACLs to session user
  appendConfVar(sessionConf, JobContext.JOB_ACL_VIEW_JOB, sessionHookContext.getSessionUser());
  appendConfVar(sessionConf, JobContext.JOB_ACL_MODIFY_JOB, sessionHookContext.getSessionUser());
}
Example #11
Source File: HiveServerContainer.java From HiveRunner with Apache License 2.0
public List<Object[]> executeStatement(String hiveql) {
  try {
    OperationHandle handle = client.executeStatement(sessionHandle, hiveql, new HashMap<>());
    List<Object[]> resultSet = new ArrayList<>();
    if (handle.hasResultSet()) {
      /*
       * fetchResults will by default return 100 rows per fetch (hive 14). For big result sets
       * we need to continuously fetch the result set until all rows are fetched.
       */
      RowSet rowSet;
      while ((rowSet = client.fetchResults(handle)) != null && rowSet.numRows() > 0) {
        for (Object[] row : rowSet) {
          resultSet.add(row.clone());
        }
      }
    }
    LOGGER.debug("ResultSet:\n"
        + Joiner.on("\n").join(Iterables.transform(resultSet, new Function<Object[], String>() {
          @Nullable
          @Override
          public String apply(@Nullable Object[] objects) {
            return Joiner.on(", ").useForNull("null").join(objects);
          }
        })));
    return resultSet;
  } catch (HiveSQLException e) {
    throw new IllegalArgumentException("Failed to executeQuery Hive query " + hiveql + ": " + e.getMessage(), e);
  }
}
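Note that the checked HiveSQLException is rethrown here as an unchecked IllegalArgumentException with the original exception preserved as the cause. Example #12 below relies on exactly that contract: it catches the IllegalArgumentException and rethrows getCause() so that @Test(expected = HiveSQLException.class) can assert on the original exception.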
Example #12
Source File: HiveServerContainerTest.java From HiveRunner with Apache License 2.0
@Test(expected = HiveSQLException.class)
public void testInvalidQuery() throws Throwable {
  try {
    container.executeStatement("use foo");
  } catch (IllegalArgumentException e) {
    // executeStatement wraps the HiveSQLException; rethrow the original cause
    throw e.getCause();
  }
}
Example #13
Source File: HiveAuthzBindingSessionHook.java From incubator-sentry with Apache License 2.0
@Override
public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException {
  // Delegate to the wrapped session hook
  underlyingHook.run(sessionHookContext);
}
Example #14
Source File: AbstractHiveReadJsonTest.java From elasticsearch-hadoop with Apache License 2.0
@Test(expected = HiveSQLException.class)
public void basicLoadWithNoGoodCandidateField() throws Exception {
  String create = "CREATE EXTERNAL TABLE jsonartistsread" + testInstance
      + " (refuse INT, garbage INT) "
      + tableProps(resource("json-hive-artists", "data", targetVersion), "'es.output.json' = 'true'");

  String select = "SELECT * FROM jsonartistsread" + testInstance;

  server.execute(create);
  server.execute(select);
  fail("Should have broken because there are no String fields in the table schema to place the JSON data.");
}