Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iceberg.expressions;

import java.util.List;
import java.util.stream.Collectors;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.Schema;
import org.apache.iceberg.StructLike;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.types.Types;

public class AggregateEvaluator {
public static AggregateEvaluator create(Schema schema, Expression... aggregates) {
return create(schema, aggregates);
}

public static AggregateEvaluator create(Schema schema, List<Expression> aggregates) {
return create(schema.asStruct(), aggregates);
}

private static AggregateEvaluator create(Types.StructType struct, List<Expression> aggregates) {
List<BoundAggregate<?, ?>> boundAggregates =
aggregates.stream()
.map(expr -> Binder.bind(struct, expr))
.map(bound -> (BoundAggregate<?, ?>) bound)
.collect(Collectors.toList());

return new AggregateEvaluator(boundAggregates);
}

private final List<BoundAggregate.Aggregator<?>> aggregators;
private final Types.StructType resultType;

private AggregateEvaluator(List<BoundAggregate<?, ?>> aggregates) {
ImmutableList.Builder<BoundAggregate.Aggregator<?>> aggregatorsBuilder =
ImmutableList.builder();
List<Types.NestedField> resultFields = Lists.newArrayList();
for (int pos = 0; pos < aggregates.size(); pos += 1) {
BoundAggregate<?, ?> aggregate = aggregates.get(pos);
aggregatorsBuilder.add(aggregates.get(pos).newAggregator());
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I guess here we could also reuse aggregate?

resultFields.add(Types.NestedField.optional(pos, aggregate.describe(), aggregate.type()));
}

this.aggregators = aggregatorsBuilder.build();
this.resultType = Types.StructType.of(resultFields);
}

public void update(StructLike struct) {
for (BoundAggregate.Aggregator<?> aggregator : aggregators) {
aggregator.update(struct);
}
}

public void update(DataFile file) {
for (BoundAggregate.Aggregator<?> aggregator : aggregators) {
aggregator.update(file);
}
}

public Types.StructType resultType() {
return resultType;
}

public StructLike result() {
Object[] results =
aggregators.stream().map(BoundAggregate.Aggregator::result).toArray(Object[]::new);
return new ArrayStructLike(results);
}

private static class ArrayStructLike implements StructLike {
private final Object[] values;

private ArrayStructLike(Object[] values) {
this.values = values;
}

public int size() {
return values.length;
}

@Override
public <T> T get(int pos, Class<T> javaClass) {
return javaClass.cast(values[pos]);
}

@Override
public <T> void set(int pos, T value) {
values[pos] = value;
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@
*/
package org.apache.iceberg.expressions;

import java.util.Map;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.StructLike;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
Expand All @@ -29,7 +31,18 @@ protected BoundAggregate(Operation op, BoundTerm<T> term) {

@Override
public C eval(StructLike struct) {
throw new UnsupportedOperationException(this.getClass().getName() + " does not implement eval");
throw new UnsupportedOperationException(
this.getClass().getName() + " does not implement eval(StructLike)");
}

C eval(DataFile file) {
throw new UnsupportedOperationException(
this.getClass().getName() + " does not implement eval(DataFile)");
}

Aggregator<C> newAggregator() {
throw new UnsupportedOperationException(
this.getClass().getName() + " does not implement newAggregator()");
}

@Override
Expand All @@ -44,4 +57,85 @@ public Type type() {
return term().type();
}
}

public String describe() {
switch (op()) {
case COUNT_STAR:
return "count(*)";
case COUNT:
return "count(" + ExpressionUtil.describe(term()) + ")";
case MAX:
return "max(" + ExpressionUtil.describe(term()) + ")";
case MIN:
return "min(" + ExpressionUtil.describe(term()) + ")";
default:
throw new UnsupportedOperationException("Unsupported aggregate type: " + op());
}
}

<V> V safeGet(Map<Integer, V> map, int key) {
return safeGet(map, key, null);
}

<V> V safeGet(Map<Integer, V> map, int key, V defaultValue) {
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

should this belong to some util class or possibly null isnt allowed

if (map != null) {
return map.getOrDefault(key, defaultValue);
}

return null;
}

  /**
   * Accumulates values for one aggregate across rows and/or data file metrics.
   *
   * @param <R> the Java result type produced by the aggregate
   */
  interface Aggregator<R> {
    /** Accumulates a single row. */
    void update(StructLike struct);

    /** Accumulates a data file's metrics. */
    void update(DataFile file);

    /** Returns the current aggregated result. */
    R result();
  }

abstract static class NullSafeAggregator<T, R> implements Aggregator<R> {
private final BoundAggregate<T, R> aggregate;
private boolean isNull = false;

NullSafeAggregator(BoundAggregate<T, R> aggregate) {
this.aggregate = aggregate;
}

protected abstract void update(R value);

protected abstract R current();

@Override
public void update(StructLike struct) {
if (!isNull) {
R value = aggregate.eval(struct);
if (value == null) {
this.isNull = true;
} else {
update(value);
}
}
}

@Override
public void update(DataFile file) {
if (!isNull) {
R value = aggregate.eval(file);
if (value == null) {
this.isNull = true;
} else {
update(value);
}
}
}

@Override
public R result() {
if (isNull) {
return null;
}

return result();
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Do you mean return current();?

}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iceberg.expressions;

import org.apache.iceberg.DataFile;
import org.apache.iceberg.StructLike;

/**
 * Base class for count aggregates. Subclasses define how much a single row or a data
 * file contributes to the count via {@link #countFor(StructLike)} and
 * {@link #countFor(DataFile)}.
 *
 * @param <T> the Java type of values produced by the bound term
 */
public class CountAggregate<T> extends BoundAggregate<T, Long> {
  protected CountAggregate(Operation op, BoundTerm<T> term) {
    super(op, term);
  }

  @Override
  public Long eval(StructLike struct) {
    return countFor(struct);
  }

  @Override
  public Long eval(DataFile file) {
    return countFor(file);
  }

  /** Returns the count contribution of a single row; overridden by concrete counts. */
  protected Long countFor(StructLike row) {
    throw new UnsupportedOperationException(
        this.getClass().getName() + " does not implement countFor(StructLike)");
  }

  /** Returns the count contribution of a data file's metrics, or null when unknown. */
  protected Long countFor(DataFile file) {
    throw new UnsupportedOperationException(
        this.getClass().getName() + " does not implement countFor(DataFile)");
  }

  @Override
  public Aggregator<Long> newAggregator() {
    return new CountAggregator<>(this);
  }

  private static class CountAggregator<T> extends NullSafeAggregator<T, Long> {
    // primitive accumulator avoids boxing a new Long on every update
    private long count = 0L;

    CountAggregator(BoundAggregate<T, Long> aggregate) {
      super(aggregate);
    }

    @Override
    protected void update(Long value) {
      count += value;
    }

    @Override
    protected Long current() {
      return count;
    }
  }
}
53 changes: 53 additions & 0 deletions api/src/main/java/org/apache/iceberg/expressions/CountNonNull.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iceberg.expressions;

import org.apache.iceberg.DataFile;
import org.apache.iceberg.StructLike;
import org.apache.iceberg.types.Types;

public class CountNonNull<T> extends CountAggregate<T> {
private final int fieldId;

protected CountNonNull(BoundTerm<T> term) {
super(Operation.COUNT, term);
Types.NestedField field = term.ref().field();
this.fieldId = field.fieldId();
}

@Override
protected Long countFor(StructLike row) {
return term().eval(row) != null ? 1L : 0L;
}

@Override
protected Long countFor(DataFile file) {
// NaN value counts were not required in v1 and were included in value counts
return safeAdd(safeGet(file.valueCounts(), fieldId), safeGet(file.nanValueCounts(), fieldId, 0L));
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Shall we subtract the nullValueCounts?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, you're right. This will include NaN and null values:

Map from column id to number of values in the column (including null and NaN values)

That means we should actually not add the NaN count.

}

private Long safeAdd(Long left, Long right) {
if (left != null && right != null) {
return left + right;
}

return null;
}
}
44 changes: 44 additions & 0 deletions api/src/main/java/org/apache/iceberg/expressions/CountStar.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iceberg.expressions;

import org.apache.iceberg.DataFile;
import org.apache.iceberg.StructLike;

public class CountStar<T> extends CountAggregate<T> {
protected CountStar(BoundTerm<T> term) {
super(Operation.COUNT_STAR, term);
}

@Override
protected Long countFor(StructLike row) {
return 1L;
}

@Override
protected Long countFor(DataFile file) {
long count = file.recordCount();
if (count < 0) {
return null;
}
Comment on lines +38 to +40
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Curious when would this ever be negative? or is this just for this logic to be defensive against bad metadata?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Some imported Avro files had incorrect metadata several versions ago. I don't think it is widespread, but it is good to handle it.


return count;
}
}
Loading