@@ -20,8 +20,6 @@
 package org.apache.hadoop.hive.llap.io.api.impl;

 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.s3a.S3AFileSystem;
-import org.apache.hadoop.fs.s3a.S3AInputPolicy;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedSupport;
 import org.apache.hadoop.hive.ql.io.BatchToRowInputFormat;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -65,9 +63,6 @@
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hive.common.util.HiveStringUtils;

-import static org.apache.hadoop.hive.common.FileUtils.isS3a;
-import static org.apache.hadoop.hive.ql.io.HiveInputFormat.isRandomAccessInputFormat;
-
 public class LlapInputFormat implements InputFormat<NullWritable, VectorizedRowBatch>,
     VectorizedInputFormatInterface, SelfDescribingInputFormatInterface,
     AvoidSplitCombination {
@@ -107,10 +102,6 @@ public RecordReader<NullWritable, VectorizedRowBatch> getRecordReader(
     FileSplit fileSplit = (FileSplit) split;
     reporter.setStatus(fileSplit.toString());
     FileSystem splitFileSystem = fileSplit.getPath().getFileSystem(job);
-    if (isS3a(splitFileSystem) && isRandomAccessInputFormat(sourceInputFormat)) {
-      LlapIoImpl.LOG.debug("Changing S3A input policy to RANDOM");
-      ((S3AFileSystem) splitFileSystem).setInputPolicy(S3AInputPolicy.Random);
-    }
     try {
       // At this entry point, we are going to assume that these are logical table columns.
       // Perhaps we should go thru the code and clean this up to be more explicit; for now, we
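For reference, a minimal sketch of the behavior this PR removes, stitched together from the deleted lines (the class and method names here are invented for illustration; both static helpers it calls are also deleted by this PR):

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.hadoop.fs.s3a.S3AInputPolicy;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;

import static org.apache.hadoop.hive.common.FileUtils.isS3a;
import static org.apache.hadoop.hive.ql.io.HiveInputFormat.isRandomAccessInputFormat;

class RemovedS3aPolicySwitch {
  // Reconstruction of the deleted block: when the split lives on S3A and the
  // reader does seek+read (ORC, vectorized Parquet), flip the filesystem's
  // input policy to RANDOM before creating the record reader.
  static void maybeUseRandomPolicy(FileSplit split, JobConf job, InputFormat<?, ?> format)
      throws IOException {
    FileSystem fs = split.getPath().getFileSystem(job);
    if (isS3a(fs) && isRandomAccessInputFormat(format)) {
      // getFileSystem() normally returns the process-wide cached instance, so
      // this mutation is visible to every other reader of the same bucket --
      // a plausible reason for reverting the change.
      ((S3AFileSystem) fs).setInputPolicy(S3AInputPolicy.Random);
    }
  }
}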
5 changes: 0 additions & 5 deletions ql/pom.xml
@@ -321,11 +321,6 @@
       <artifactId>hadoop-yarn-client</artifactId>
       <optional>true</optional>
     </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-aws</artifactId>
-      <version>${hadoop.version}</version>
-    </dependency>
     <dependency>
       <groupId>org.apache.orc</groupId>
       <artifactId>orc-tools</artifactId>
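With the compile-time references to S3AFileSystem and S3AInputPolicy gone from both input formats, ql no longer needs hadoop-aws on its compile classpath, so the dependency is dropped here as well.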
25 changes: 0 additions & 25 deletions ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -24,8 +24,6 @@
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.s3a.S3AFileSystem;
-import org.apache.hadoop.fs.s3a.S3AInputPolicy;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.StringInternUtils;
 import org.apache.hadoop.hive.common.ValidTxnWriteIdList;
@@ -40,8 +38,6 @@
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
-import org.apache.hadoop.hive.ql.io.parquet.VectorizedParquetInputFormat;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler;
@@ -93,7 +89,6 @@
 import java.util.concurrent.Future;

 import static java.lang.Integer.min;
-import static org.apache.hadoop.hive.common.FileUtils.isS3a;

 /**
  * HiveInputFormat is a parameterized InputFormat which looks at the path name
@@ -384,19 +379,6 @@ public static InputFormat<WritableComparable, Writable> getInputFormatFromCache(
     return instance;
   }

-  /**
-   * Returns true if the inputFormat performs random seek+read
-   * @param inputFormat
-   * @return
-   */
-  public static boolean isRandomAccessInputFormat(InputFormat inputFormat) {
-    if (inputFormat instanceof OrcInputFormat ||
-        inputFormat instanceof VectorizedParquetInputFormat) {
-      return true;
-    }
-    return false;
-  }
-
   @Override
   public RecordReader getRecordReader(InputSplit split, JobConf job,
       Reporter reporter) throws IOException {
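The deleted helper above classified ORC and vectorized Parquet readers as seek-heavy, which is what kept the OrcInputFormat and VectorizedParquetInputFormat imports removed earlier in this file; its only two call sites, in LlapInputFormat and in getRecordReader below, are removed by this same PR.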
@@ -449,13 +431,6 @@ public RecordReader getRecordReader(InputSplit split, JobConf job,
       innerReader = HiveIOExceptionHandlerUtil
           .handleRecordReaderCreationException(e, job);
     }
-
-    FileSystem splitFileSystem = splitPath.getFileSystem(job);
-    if (isS3a(splitFileSystem) && isRandomAccessInputFormat(inputFormat)) {
-      LOG.debug("Changing S3A input policy to RANDOM");
-      ((S3AFileSystem) splitFileSystem).setInputPolicy(S3AInputPolicy.Random);
-    }
-
     HiveRecordReader<K,V> rr = new HiveRecordReader(innerReader, job);
     rr.initIOContext(hsplit, job, inputFormatClass, innerReader);
     return rr;

This file was deleted.
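If the RANDOM read pattern is still wanted after this revert, the S3A connector can be steered through configuration instead of by mutating a shared FileSystem. A minimal sketch, assuming the standard hadoop-aws seek-policy key fs.s3a.experimental.input.fadvise (values: normal, sequential, random) -- this is not something the PR adds:

import org.apache.hadoop.conf.Configuration;

public class S3aFadviseExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // S3AFileSystem reads this option when an instance is initialized with
    // this configuration. Note that the FileSystem cache may hand back an
    // instance created earlier with a different setting, so the policy is
    // effectively per-filesystem-instance, not per-read.
    conf.set("fs.s3a.experimental.input.fadvise", "random");
  }
}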