org.apache.hadoop.crypto.CryptoInputStream Java Examples
The following examples show how to use
org.apache.hadoop.crypto.CryptoInputStream.
You can vote up the examples you find useful or vote down the ones you don't,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example #1
Source File: CryptoUtils.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Wraps a given InputStream with a CryptoInputStream. The size of the data
 * buffer required for the stream is specified by the
 * "mapreduce.job.encrypted-intermediate-data.buffer.kb" Job configuration
 * variable.
 *
 * If the value of 'length' is > -1, The InputStream is additionally
 * wrapped in a LimitInputStream. CryptoStreams are late buffering in nature.
 * This means they will always try to read ahead if they can. The
 * LimitInputStream will ensure that the CryptoStream does not read past the
 * provided length from the given Input Stream.
 *
 * @param conf configuration consulted for the encrypted-spill flag, buffer
 *             size, and crypto codec
 * @param in underlying stream, positioned at the 8-byte offset header
 * @param length number of readable bytes in 'in', or -1 when unbounded
 * @return a decrypting stream, or 'in' unchanged when spill encryption is off
 * @throws IOException if the offset or IV header cannot be read fully
 */
public static InputStream wrapIfNecessary(Configuration conf, InputStream in,
    long length) throws IOException {
  if (isEncryptedSpillEnabled(conf)) {
    int bufferSize = getBufferSize(conf);
    if (length > -1) {
      // Cap reads so the crypto stream's read-ahead cannot run past the
      // caller-supplied length.
      in = new LimitInputStream(in, length);
    }
    // The stream begins with an 8-byte offset header...
    byte[] offsetArray = new byte[8];
    IOUtils.readFully(in, offsetArray, 0, 8);
    long offset = ByteBuffer.wrap(offsetArray).getLong();
    CryptoCodec cryptoCodec = CryptoCodec.getInstance(conf);
    // ...followed immediately by the IV, sized to the cipher's block size.
    byte[] iv =
        new byte[cryptoCodec.getCipherSuite().getAlgorithmBlockSize()];
    IOUtils.readFully(in, iv, 0,
        cryptoCodec.getCipherSuite().getAlgorithmBlockSize());
    if (LOG.isDebugEnabled()) {
      LOG.debug("IV read from ["
          + Base64.encodeBase64URLSafeString(iv) + "]");
    }
    // The stream offset fed to CryptoInputStream accounts for padding added
    // at write time (cryptoPadding), keeping the cipher counter aligned.
    return new CryptoInputStream(in, cryptoCodec, bufferSize,
        getEncryptionKey(), iv, offset + cryptoPadding(conf));
  } else {
    return in;
  }
}
Example #2
Source File: DataTransferSaslUtil.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Create IOStreamPair of {@link org.apache.hadoop.crypto.CryptoInputStream}
 * and {@link org.apache.hadoop.crypto.CryptoOutputStream}
 *
 * @param conf the configuration
 * @param cipherOption negotiated cipher option
 * @param out underlying output stream
 * @param in underlying input stream
 * @param isServer is server side
 * @return IOStreamPair the stream pair
 * @throws IOException for any error
 */
public static IOStreamPair createStreamPair(Configuration conf,
    CipherOption cipherOption, OutputStream out, InputStream in,
    boolean isServer) throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Creating IOStreamPair of CryptoInputStream and "
        + "CryptoOutputStream.");
  }
  CryptoCodec codec =
      CryptoCodec.getInstance(conf, cipherOption.getCipherSuite());
  byte[] inKey = cipherOption.getInKey();
  byte[] inIv = cipherOption.getInIv();
  byte[] outKey = cipherOption.getOutKey();
  byte[] outIv = cipherOption.getOutIv();
  // The server reads with the "in" material and writes with the "out"
  // material; the client uses them the other way around.
  byte[] readKey = isServer ? inKey : outKey;
  byte[] readIv = isServer ? inIv : outIv;
  byte[] writeKey = isServer ? outKey : inKey;
  byte[] writeIv = isServer ? outIv : inIv;
  InputStream wrappedIn = new CryptoInputStream(in, codec, readKey, readIv);
  OutputStream wrappedOut =
      new CryptoOutputStream(out, codec, writeKey, writeIv);
  return new IOStreamPair(wrappedIn, wrappedOut);
}
Example #3
Source File: DFSClient.java From hadoop with Apache License 2.0 | 6 votes |
/**
 * Wraps the stream in a CryptoInputStream if the underlying file is
 * encrypted.
 *
 * @param dfsis raw stream over the (possibly encrypted) file bytes
 * @return a stream that transparently decrypts when the file is encrypted
 * @throws IOException if key decryption or stream construction fails
 */
public HdfsDataInputStream createWrappedInputStream(DFSInputStream dfsis)
    throws IOException {
  final FileEncryptionInfo feInfo = dfsis.getFileEncryptionInfo();
  if (feInfo != null) {
    // File is encrypted, wrap the stream in a crypto stream.
    // Currently only one version, so no special logic based on the version #
    // (called only for its validation side effect).
    getCryptoProtocolVersion(feInfo);
    final CryptoCodec codec = getCryptoCodec(conf, feInfo);
    // Unwrap the encrypted data-encryption key into usable key material.
    final KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
    final CryptoInputStream cryptoIn =
        new CryptoInputStream(dfsis, codec, decrypted.getMaterial(),
            feInfo.getIV());
    return new HdfsDataInputStream(cryptoIn);
  } else {
    // No FileEncryptionInfo so no encryption.
    return new HdfsDataInputStream(dfsis);
  }
}
Example #4
Source File: CryptoUtils.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Wraps a given InputStream with a CryptoInputStream. The size of the data
 * buffer required for the stream is specified by the
 * "mapreduce.job.encrypted-intermediate-data.buffer.kb" Job configuration
 * variable.
 *
 * If the value of 'length' is > -1, The InputStream is additionally
 * wrapped in a LimitInputStream. CryptoStreams are late buffering in nature.
 * This means they will always try to read ahead if they can. The
 * LimitInputStream will ensure that the CryptoStream does not read past the
 * provided length from the given Input Stream.
 *
 * @param conf configuration consulted for the encrypted-spill flag, buffer
 *             size, and crypto codec
 * @param in underlying stream, positioned at the 8-byte offset header
 * @param length number of readable bytes in 'in', or -1 when unbounded
 * @return a decrypting stream, or 'in' unchanged when spill encryption is off
 * @throws IOException if the offset or IV header cannot be read fully
 */
public static InputStream wrapIfNecessary(Configuration conf, InputStream in,
    long length) throws IOException {
  if (isEncryptedSpillEnabled(conf)) {
    int bufferSize = getBufferSize(conf);
    if (length > -1) {
      // Cap reads so the crypto stream's read-ahead cannot run past the
      // caller-supplied length.
      in = new LimitInputStream(in, length);
    }
    // The stream begins with an 8-byte offset header...
    byte[] offsetArray = new byte[8];
    IOUtils.readFully(in, offsetArray, 0, 8);
    long offset = ByteBuffer.wrap(offsetArray).getLong();
    CryptoCodec cryptoCodec = CryptoCodec.getInstance(conf);
    // ...followed immediately by the IV, sized to the cipher's block size.
    byte[] iv =
        new byte[cryptoCodec.getCipherSuite().getAlgorithmBlockSize()];
    IOUtils.readFully(in, iv, 0,
        cryptoCodec.getCipherSuite().getAlgorithmBlockSize());
    if (LOG.isDebugEnabled()) {
      LOG.debug("IV read from ["
          + Base64.encodeBase64URLSafeString(iv) + "]");
    }
    // The stream offset fed to CryptoInputStream accounts for padding added
    // at write time (cryptoPadding), keeping the cipher counter aligned.
    return new CryptoInputStream(in, cryptoCodec, bufferSize,
        getEncryptionKey(), iv, offset + cryptoPadding(conf));
  } else {
    return in;
  }
}
Example #5
Source File: DataTransferSaslUtil.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Create IOStreamPair of {@link org.apache.hadoop.crypto.CryptoInputStream}
 * and {@link org.apache.hadoop.crypto.CryptoOutputStream}
 *
 * @param conf the configuration
 * @param cipherOption negotiated cipher option
 * @param out underlying output stream
 * @param in underlying input stream
 * @param isServer is server side
 * @return IOStreamPair the stream pair
 * @throws IOException for any error
 */
public static IOStreamPair createStreamPair(Configuration conf,
    CipherOption cipherOption, OutputStream out, InputStream in,
    boolean isServer) throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Creating IOStreamPair of CryptoInputStream and "
        + "CryptoOutputStream.");
  }
  CryptoCodec codec =
      CryptoCodec.getInstance(conf, cipherOption.getCipherSuite());
  byte[] inKey = cipherOption.getInKey();
  byte[] inIv = cipherOption.getInIv();
  byte[] outKey = cipherOption.getOutKey();
  byte[] outIv = cipherOption.getOutIv();
  // The server reads with the "in" material and writes with the "out"
  // material; the client uses them the other way around.
  byte[] readKey = isServer ? inKey : outKey;
  byte[] readIv = isServer ? inIv : outIv;
  byte[] writeKey = isServer ? outKey : inKey;
  byte[] writeIv = isServer ? outIv : inIv;
  InputStream wrappedIn = new CryptoInputStream(in, codec, readKey, readIv);
  OutputStream wrappedOut =
      new CryptoOutputStream(out, codec, writeKey, writeIv);
  return new IOStreamPair(wrappedIn, wrappedOut);
}
Example #6
Source File: DFSClient.java From big-c with Apache License 2.0 | 6 votes |
/**
 * Wraps the stream in a CryptoInputStream if the underlying file is
 * encrypted.
 *
 * @param dfsis raw stream over the (possibly encrypted) file bytes
 * @return a stream that transparently decrypts when the file is encrypted
 * @throws IOException if key decryption or stream construction fails
 */
public HdfsDataInputStream createWrappedInputStream(DFSInputStream dfsis)
    throws IOException {
  final FileEncryptionInfo feInfo = dfsis.getFileEncryptionInfo();
  if (feInfo != null) {
    // File is encrypted, wrap the stream in a crypto stream.
    // Currently only one version, so no special logic based on the version #
    // (called only for its validation side effect).
    getCryptoProtocolVersion(feInfo);
    final CryptoCodec codec = getCryptoCodec(conf, feInfo);
    // Unwrap the encrypted data-encryption key into usable key material.
    final KeyVersion decrypted = decryptEncryptedDataEncryptionKey(feInfo);
    final CryptoInputStream cryptoIn =
        new CryptoInputStream(dfsis, codec, decrypted.getMaterial(),
            feInfo.getIV());
    return new HdfsDataInputStream(cryptoIn);
  } else {
    // No FileEncryptionInfo so no encryption.
    return new HdfsDataInputStream(dfsis);
  }
}
Example #7
Source File: ProxiedDFSClient.java From spliceengine with GNU Affero General Public License v3.0 | 6 votes |
/**
 * Wraps the stream in a CryptoInputStream if the underlying file is
 * encrypted; otherwise returns a plain HdfsDataInputStream over it.
 *
 * @param dfsis raw stream over the (possibly encrypted) file bytes
 * @return a stream that transparently decrypts when the file is encrypted
 * @throws IOException if key decryption or stream construction fails
 */
public HdfsDataInputStream createWrappedInputStream(DFSInputStream dfsis)
    throws IOException {
  final FileEncryptionInfo feInfo = dfsis.getFileEncryptionInfo();
  if (feInfo != null) {
    // File is encrypted, wrap the stream in a crypto stream.
    // Currently only one version, so no special logic based on the version #
    // (called only for its validation side effect).
    getCryptoProtocolVersion(feInfo);
    final CryptoCodec codec = getCryptoCodec(getConfiguration(), feInfo);
    // Unlike DFSClient's version, decryption here also takes the stream —
    // presumably so the proxy can resolve the key remotely; confirm against
    // decryptEncryptedDataEncryptionKey's definition.
    final KeyProvider.KeyVersion decrypted =
        decryptEncryptedDataEncryptionKey(dfsis, feInfo);
    final CryptoInputStream cryptoIn =
        new CryptoInputStream(dfsis, codec, decrypted.getMaterial(),
            feInfo.getIV());
    return new HdfsDataInputStream(cryptoIn);
  } else {
    // No FileEncryptionInfo so no encryption.
    return new HdfsDataInputStream(dfsis);
  }
}
Example #8
Source File: RpcClient.java From hadoop-ozone with Apache License 2.0 | 5 votes |
/**
 * Builds an OzoneInputStream for a key, layering decryption when the key
 * carries HDFS-style encryption info or a GDPR secret in its metadata.
 *
 * @param keyInfo key metadata, including optional encryption info
 * @param retryFunction refresh callback used by KeyInputStream on retry
 * @return a readable stream, decrypting transparently where applicable
 * @throws IOException on read setup failure or any GDPR cipher error
 */
private OzoneInputStream createInputStream(
    OmKeyInfo keyInfo, Function<OmKeyInfo, OmKeyInfo> retryFunction)
    throws IOException {
  LengthInputStream lengthInputStream = KeyInputStream
      .getFromOmKeyInfo(keyInfo, xceiverClientManager,
          verifyChecksum, retryFunction);
  FileEncryptionInfo feInfo = keyInfo.getFileEncryptionInfo();
  if (feInfo != null) {
    // KMS-managed encryption: fetch the data-encryption key and decrypt
    // via a CryptoInputStream over the unwrapped stream.
    final KeyProvider.KeyVersion decrypted = getDEK(feInfo);
    final CryptoInputStream cryptoIn =
        new CryptoInputStream(lengthInputStream.getWrappedStream(),
            OzoneKMSUtil.getCryptoCodec(conf, feInfo),
            decrypted.getMaterial(), feInfo.getIV());
    return new OzoneInputStream(cryptoIn);
  } else {
    try{
      GDPRSymmetricKey gk;
      Map<String, String> keyInfoMetadata = keyInfo.getMetadata();
      if(Boolean.valueOf(keyInfoMetadata.get(OzoneConsts.GDPR_FLAG))){
        // GDPR "right to be forgotten": key material lives in the key's
        // own metadata rather than a KMS.
        gk = new GDPRSymmetricKey(
            keyInfoMetadata.get(OzoneConsts.GDPR_SECRET),
            keyInfoMetadata.get(OzoneConsts.GDPR_ALGORITHM)
        );
        gk.getCipher().init(Cipher.DECRYPT_MODE, gk.getSecretKey());
        // NOTE(review): this branch wraps lengthInputStream itself, while
        // the KMS branch wraps getWrappedStream() — confirm the asymmetry
        // is intentional.
        return new OzoneInputStream(
            new CipherInputStream(lengthInputStream, gk.getCipher()));
      }
    }catch (Exception ex){
      // Any cipher/metadata problem surfaces as an IOException to callers.
      throw new IOException(ex);
    }
  }
  // Neither KMS-encrypted nor GDPR-flagged: return the plain stream.
  return new OzoneInputStream(lengthInputStream.getWrappedStream());
}
Example #9
Source File: HdfsDataInputStream.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Creates a stream over an encrypted HDFS file.
 *
 * @param in decrypting stream that must ultimately wrap a DFSInputStream
 * @throws IOException if the superclass fails to wrap the stream
 */
public HdfsDataInputStream(CryptoInputStream in) throws IOException {
  super(in);
  // Enforce the invariant that the crypto layer sits directly on HDFS.
  if (!(in.getWrappedStream() instanceof DFSInputStream)) {
    throw new IllegalArgumentException(
        "CryptoInputStream should wrap a DFSInputStream");
  }
}
Example #10
Source File: HdfsDataInputStream.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Returns the backing DFSInputStream, looking through the crypto layer
 * when this stream decrypts an encrypted file.
 */
private DFSInputStream getDFSInputStream() {
  final Object source = (in instanceof CryptoInputStream)
      ? ((CryptoInputStream) in).getWrappedStream()
      : in;
  return (DFSInputStream) source;
}
Example #11
Source File: CryptoFSDataInputStream.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Creates a decrypting stream over {@code in} with an explicit data
 * buffer size.
 *
 * @param in underlying stream of encrypted bytes
 * @param codec cipher implementation used for decryption
 * @param bufferSize size in bytes of the crypto stream's data buffer
 * @param key key material for the cipher
 * @param iv initialization vector for the cipher
 * @throws IOException if the crypto stream cannot be created
 */
public CryptoFSDataInputStream(FSDataInputStream in, CryptoCodec codec,
    int bufferSize, byte[] key, byte[] iv) throws IOException {
  super(new CryptoInputStream(in, codec, bufferSize, key, iv));
}
Example #12
Source File: CryptoFSDataInputStream.java From hadoop with Apache License 2.0 | 4 votes |
/**
 * Creates a decrypting stream over {@code in} using the crypto stream's
 * default buffer size.
 *
 * @param in underlying stream of encrypted bytes
 * @param codec cipher implementation used for decryption
 * @param key key material for the cipher
 * @param iv initialization vector for the cipher
 * @throws IOException if the crypto stream cannot be created
 */
public CryptoFSDataInputStream(FSDataInputStream in, CryptoCodec codec,
    byte[] key, byte[] iv) throws IOException {
  super(new CryptoInputStream(in, codec, key, iv));
}
Example #13
Source File: HdfsDataInputStream.java From big-c with Apache License 2.0 | 4 votes |
/**
 * Creates a stream over an encrypted HDFS file.
 *
 * @param in decrypting stream that must ultimately wrap a DFSInputStream
 * @throws IOException if the superclass fails to wrap the stream
 */
public HdfsDataInputStream(CryptoInputStream in) throws IOException {
  super(in);
  // Enforce the invariant that the crypto layer sits directly on HDFS.
  if (!(in.getWrappedStream() instanceof DFSInputStream)) {
    throw new IllegalArgumentException(
        "CryptoInputStream should wrap a DFSInputStream");
  }
}
Example #14
Source File: HdfsDataInputStream.java From big-c with Apache License 2.0 | 4 votes |
/**
 * Returns the backing DFSInputStream, looking through the crypto layer
 * when this stream decrypts an encrypted file.
 */
private DFSInputStream getDFSInputStream() {
  final Object source = (in instanceof CryptoInputStream)
      ? ((CryptoInputStream) in).getWrappedStream()
      : in;
  return (DFSInputStream) source;
}
Example #15
Source File: CryptoFSDataInputStream.java From big-c with Apache License 2.0 | 4 votes |
/**
 * Creates a decrypting stream over {@code in} with an explicit data
 * buffer size.
 *
 * @param in underlying stream of encrypted bytes
 * @param codec cipher implementation used for decryption
 * @param bufferSize size in bytes of the crypto stream's data buffer
 * @param key key material for the cipher
 * @param iv initialization vector for the cipher
 * @throws IOException if the crypto stream cannot be created
 */
public CryptoFSDataInputStream(FSDataInputStream in, CryptoCodec codec,
    int bufferSize, byte[] key, byte[] iv) throws IOException {
  super(new CryptoInputStream(in, codec, bufferSize, key, iv));
}
Example #16
Source File: CryptoFSDataInputStream.java From big-c with Apache License 2.0 | 4 votes |
/**
 * Creates a decrypting stream over {@code in} using the crypto stream's
 * default buffer size.
 *
 * @param in underlying stream of encrypted bytes
 * @param codec cipher implementation used for decryption
 * @param key key material for the cipher
 * @param iv initialization vector for the cipher
 * @throws IOException if the crypto stream cannot be created
 */
public CryptoFSDataInputStream(FSDataInputStream in, CryptoCodec codec,
    byte[] key, byte[] iv) throws IOException {
  super(new CryptoInputStream(in, codec, key, iv));
}