@@ -103,7 +103,7 @@ private CloseableIterable<Record> open(FileScanTask task) {

       case ORC:
         ORC.ReadBuilder orc = ORC.read(input)
-            .schema(projection)
+            .project(projection)
             .createReaderFunc(fileSchema -> GenericOrcReader.buildReader(projection, fileSchema))
             .split(task.start(), task.length());
@@ -46,7 +46,7 @@ protected Record writeAndRead(String desc,
     }

     Iterable<Record> records = ORC.read(Files.localInput(file))
-        .schema(readSchema)
+        .project(readSchema)
         .createReaderFunc(fileSchema -> GenericOrcReader.buildReader(readSchema, fileSchema))
         .build();
4 changes: 2 additions & 2 deletions orc/src/main/java/org/apache/iceberg/orc/ORC.java
@@ -148,8 +148,8 @@ public ReadBuilder split(long newStart, long newLength) {
       return this;
     }

-    public ReadBuilder schema(org.apache.iceberg.Schema projectSchema) {
-      this.schema = projectSchema;
+    public ReadBuilder project(Schema newSchema) {
+      this.schema = newSchema;
       return this;
     }
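For reference, reading an ORC file with the renamed builder now looks like the sketch below, mirroring the call chain in the test change above; `file` and `readSchema` are placeholder names for a local data file and the projection schema:

import org.apache.iceberg.Files;
import org.apache.iceberg.data.Record;
import org.apache.iceberg.data.orc.GenericOrcReader;
import org.apache.iceberg.orc.ORC;

// Minimal read sketch; `file` and `readSchema` are illustrative names.
Iterable<Record> records = ORC.read(Files.localInput(file))
    .project(readSchema)
    .createReaderFunc(fileSchema -> GenericOrcReader.buildReader(readSchema, fileSchema))
    .build();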
@@ -269,7 +269,7 @@ private static TypeDescription buildOrcProjection(Integer fieldId, Type type, bo
           break;
         case MAP:
           Types.MapType map = (Types.MapType) type;
-          TypeDescription keyType = buildOrcProjection(map.keyId(), map.keyType(), true, mapping);
+          TypeDescription keyType = buildOrcProjection(map.keyId(), map.keyType(), isRequired, mapping);
           TypeDescription valueType = buildOrcProjection(map.valueId(), map.valueType(), map.isValueRequired(),
               mapping);
           orcType = TypeDescription.createMap(keyType, valueType);
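This change projects map keys with the enclosing map field's own required flag instead of hard-coding them as required. A minimal sketch of a schema where this matters, assuming illustrative field ids; it mirrors the NestedRecord test bean added at the end of this PR:

import org.apache.iceberg.Schema;
import org.apache.iceberg.types.Types;

// An optional map field: before this change its key was still projected
// as required even though "info" itself is optional. Field ids 1-5 are
// illustrative.
Schema schema = new Schema(
    Types.NestedField.required(1, "id", Types.IntegerType.get()),
    Types.NestedField.optional(2, "data", Types.StringType.get()),
    Types.NestedField.optional(3, "info",
        Types.MapType.ofOptional(4, 5, Types.StringType.get(), Types.StringType.get())));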
@@ -185,7 +185,7 @@ private CloseableIterable<InternalRow> newOrcIterable(
       FileScanTask task,
       Schema readSchema) {
     return ORC.read(location)
-        .schema(readSchema)
+        .project(readSchema)
         .split(task.start(), task.length())
         .createReaderFunc(SparkOrcReader::new)
         .caseSensitive(caseSensitive)
14 changes: 13 additions & 1 deletion spark/src/main/java/org/apache/iceberg/spark/source/Writer.java
@@ -52,8 +52,10 @@
 import org.apache.iceberg.io.FileIO;
 import org.apache.iceberg.io.LocationProvider;
 import org.apache.iceberg.io.OutputFile;
+import org.apache.iceberg.orc.ORC;
 import org.apache.iceberg.parquet.Parquet;
 import org.apache.iceberg.spark.data.SparkAvroWriter;
+import org.apache.iceberg.spark.data.SparkOrcWriter;
 import org.apache.iceberg.spark.data.SparkParquetWriters;
 import org.apache.iceberg.util.PropertyUtil;
 import org.apache.iceberg.util.Tasks;
@@ -309,6 +311,14 @@ public FileAppender<InternalRow> newAppender(OutputFile file, FileFormat fileFor
           .overwrite()
           .build();

+      case ORC:
+        return ORC.write(file)
+            .createWriterFunc(SparkOrcWriter::new)
+            .setAll(properties)
+            .schema(dsSchema)
+            .overwrite()
+            .build();
+
       default:
         throw new UnsupportedOperationException("Cannot write unknown format: " + fileFormat);
     }
@@ -389,7 +399,9 @@ private abstract static class BaseWriter implements DataWriter<InternalRow> {
     public abstract void write(InternalRow row) throws IOException;

     public void writeInternal(InternalRow row) throws IOException {
-      if (currentRows % ROWS_DIVISOR == 0 && currentAppender.length() >= targetFileSize) {
+      // TODO: the ORC appender does not yet support a target file size
+      if (format.equals(FileFormat.PARQUET) &&
+          currentRows % ROWS_DIVISOR == 0 && currentAppender.length() >= targetFileSize) {
         closeCurrent();
         openCurrent();
       }
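For reference, the new ORC branch builds an appender the same way the Parquet branch does; a minimal sketch of the appender it returns, where `file`, `properties`, and `dsSchema` reuse the names from the surrounding newAppender(...) method rather than introducing new API:

import org.apache.iceberg.io.FileAppender;
import org.apache.iceberg.orc.ORC;
import org.apache.iceberg.spark.data.SparkOrcWriter;
import org.apache.spark.sql.catalyst.InternalRow;

// Builds an ORC appender for Spark InternalRow data; `file`, `properties`,
// and `dsSchema` come from the enclosing newAppender(...) context.
FileAppender<InternalRow> appender = ORC.write(file)
    .createWriterFunc(SparkOrcWriter::new)
    .setAll(properties)
    .schema(dsSchema)
    .overwrite()
    .build();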
@@ -49,7 +49,7 @@ protected void writeAndValidate(Schema schema) throws IOException {
     }

     try (CloseableIterable<InternalRow> reader = ORC.read(Files.localInput(testFile))
-        .schema(schema)
+        .project(schema)
         .createReaderFunc(SparkOrcReader::new)
         .build()) {
       final Iterator<InternalRow> actualRows = reader.iterator();
@@ -0,0 +1,82 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iceberg.spark.source;

import com.google.common.base.Objects;
import java.util.Map;


public class NestedRecord {
private Integer id;
private String data;
private Map<String, String> info;

public NestedRecord() {
}

NestedRecord(Integer id, String data, Map<String, String> info) {
this.id = id;
this.data = data;
this.info = info;
}

public Integer getId() {
return id;
}

public void setId(Integer id) {
this.id = id;
}

public String getData() {
return data;
}

public void setData(String data) {
this.data = data;
}

public Map<String, String> getInfo() {
return info;
}

public void setInfo(Map<String, String> info) {
this.info = info;
}

@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}

NestedRecord record = (NestedRecord) o;
return Objects.equal(id, record.id) && Objects.equal(data, record.data) && Objects.equal(info, record.info);
}

@Override
public int hashCode() {
return Objects.hashCode(id, data, info);
}
}
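A quick usage sketch of the bean above; the values are illustrative:

import com.google.common.collect.ImmutableMap;

// Two beans with equal fields compare equal and hash identically.
NestedRecord first = new NestedRecord(1, "a", ImmutableMap.of("k", "v"));
NestedRecord second = new NestedRecord(1, "a", ImmutableMap.of("k", "v"));
assert first.equals(second) && first.hashCode() == second.hashCode();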
