@@ -18,13 +18,15 @@

 package org.apache.hudi.avro;

-import java.util.HashMap;
-import org.apache.avro.Schema;
 import org.apache.hudi.common.BloomFilter;
+
+import org.apache.avro.Schema;
 import org.apache.parquet.avro.AvroWriteSupport;
 import org.apache.parquet.hadoop.api.WriteSupport;
 import org.apache.parquet.schema.MessageType;

+import java.util.HashMap;
+
 /**
  * Wrap AvroWriterSupport for plugging in the bloom filter.
  */
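Taken together, the hunks in this change apply one convention across the codebase: org.apache.hudi imports first, third-party imports (com.*, org.apache.avro.*, org.apache.hadoop.*, org.apache.parquet.*, org.apache.log4j.*) second, and javax.*/java.* imports last, with a blank line between groups. A minimal sketch of the resulting layout; the package and class below are illustrative, not part of this change:

package org.apache.hudi.example; // hypothetical package, for illustration only

// Group 1: org.apache.hudi.* imports come first.
import org.apache.hudi.common.BloomFilter;

// Group 2: third-party imports (com.*, org.apache.avro.*, org.apache.parquet.*, ...).
import org.apache.avro.Schema;

// Group 3: javax.* and java.* imports come last.
import java.util.HashMap;
import java.util.Map;

public class ImportOrderExample {
  // Trivial members so every import above is actually used.
  private final Map<String, Schema> schemaByField = new HashMap<>();
  private BloomFilter keyFilter;
}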
@@ -18,21 +18,23 @@

 package org.apache.hudi.avro;

+import org.apache.hudi.common.util.collection.Pair;
+import org.apache.hudi.exception.HoodieException;
+import org.apache.hudi.exception.HoodieIOException;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableMap;
+import org.apache.avro.Schema;
+import org.apache.avro.Schema.Type;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericRecord;
+
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import org.apache.avro.Schema;
-import org.apache.avro.Schema.Type;
-import org.apache.avro.generic.GenericData;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.hudi.common.util.collection.Pair;
-import org.apache.hudi.exception.HoodieException;
-import org.apache.hudi.exception.HoodieIOException;

 /**
  * Converts Json record to Avro Generic Record
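The imports in this hunk outline the converter's job: Jackson parses the JSON text, and the values are copied into an Avro GenericData.Record. A simplified sketch of that pattern, handling flat records of primitive fields only (the real MercifulJsonConverter presumably also deals with nested and union types):

import com.fasterxml.jackson.databind.ObjectMapper;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;

import java.io.IOException;
import java.util.Map;

public class SimpleJsonToAvro {
  private static final ObjectMapper MAPPER = new ObjectMapper();

  // Parse the JSON into a map, then copy each schema field across.
  public static GenericRecord convert(String json, Schema schema) throws IOException {
    @SuppressWarnings("unchecked")
    Map<String, Object> fields = MAPPER.readValue(json, Map.class);
    GenericRecord record = new GenericData.Record(schema);
    for (Schema.Field field : schema.getFields()) {
      record.put(field.name(), fields.get(field.name()));
    }
    return record;
  }
}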
@@ -18,16 +18,19 @@

 package org.apache.hudi.common;

+import org.apache.hudi.exception.HoodieIndexException;
+
+import org.apache.hadoop.util.bloom.Key;
+import org.apache.hadoop.util.hash.Hash;
+
+import javax.xml.bind.DatatypeConverter;
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
-import javax.xml.bind.DatatypeConverter;
-import org.apache.hadoop.util.bloom.Key;
-import org.apache.hadoop.util.hash.Hash;
-import org.apache.hudi.exception.HoodieIndexException;

 /**
  * A Bloom filter implementation built on top of {@link org.apache.hadoop.util.bloom.BloomFilter}.
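The import set above (Hadoop's Key and Hash, DatatypeConverter, the Data*Stream classes) suggests how the wrapper works: keys are hashed into Hadoop's BloomFilter, and the filter is serialized to a hex string for storage. A minimal sketch of that pattern, not Hudi's actual class; the sizing constants are arbitrary:

import org.apache.hadoop.util.bloom.BloomFilter;
import org.apache.hadoop.util.bloom.Key;
import org.apache.hadoop.util.hash.Hash;

import javax.xml.bind.DatatypeConverter;

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class SimpleBloomFilter {
  // 1024 bits, 5 hash functions, murmur hashing; sizes are illustrative.
  private final BloomFilter filter = new BloomFilter(1024, 5, Hash.MURMUR_HASH);

  public void add(String key) {
    filter.add(new Key(key.getBytes(StandardCharsets.UTF_8)));
  }

  public boolean mightContain(String key) {
    return filter.membershipTest(new Key(key.getBytes(StandardCharsets.UTF_8)));
  }

  // Serialize the filter as a hex string, mirroring the DatatypeConverter
  // and DataOutputStream imports in the hunk above.
  public String serializeToString() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (DataOutputStream dos = new DataOutputStream(baos)) {
      filter.write(dos);
    }
    return DatatypeConverter.printHexBinary(baos.toByteArray());
  }
}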
@@ -18,12 +18,13 @@

 package org.apache.hudi.common;

-import java.io.Serializable;
-import java.util.List;
 import org.apache.hudi.common.model.HoodieCleaningPolicy;
 import org.apache.hudi.common.table.timeline.HoodieInstant;
 import org.apache.hudi.common.util.Option;

+import java.io.Serializable;
+import java.util.List;
+
 /**
  * Collects stats about a single partition clean operation
  */
@@ -18,21 +18,23 @@

 package org.apache.hudi.common;

+import org.apache.hudi.avro.MercifulJsonConverter;
+import org.apache.hudi.common.model.HoodieRecordPayload;
+import org.apache.hudi.common.util.FileIOUtils;
+import org.apache.hudi.common.util.Option;
+import org.apache.hudi.exception.HoodieException;
+
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.avro.Schema;
+import org.apache.avro.generic.IndexedRecord;
+
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.zip.Deflater;
 import java.util.zip.DeflaterOutputStream;
 import java.util.zip.InflaterInputStream;
-import org.apache.avro.Schema;
-import org.apache.avro.generic.IndexedRecord;
-import org.apache.hudi.avro.MercifulJsonConverter;
-import org.apache.hudi.common.model.HoodieRecordPayload;
-import org.apache.hudi.common.util.FileIOUtils;
-import org.apache.hudi.common.util.Option;
-import org.apache.hudi.exception.HoodieException;

 public class HoodieJsonPayload implements HoodieRecordPayload<HoodieJsonPayload> {

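The Deflater/Inflater imports imply the payload keeps its JSON compressed in memory. A sketch of the compress/decompress round trip those imports point at (helper names are illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;

public final class JsonCompression {
  static byte[] compress(String json) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    // Closing the DeflaterOutputStream finishes the deflate stream.
    try (DeflaterOutputStream dos =
        new DeflaterOutputStream(baos, new Deflater(Deflater.BEST_COMPRESSION))) {
      dos.write(json.getBytes(StandardCharsets.UTF_8));
    }
    return baos.toByteArray();
  }

  static String decompress(byte[] bytes) throws IOException {
    try (InflaterInputStream iis = new InflaterInputStream(new ByteArrayInputStream(bytes));
        ByteArrayOutputStream out = new ByteArrayOutputStream()) {
      byte[] buf = new byte[1024];
      int n;
      while ((n = iis.read(buf)) != -1) {
        out.write(buf, 0, n);
      }
      return new String(out.toByteArray(), StandardCharsets.UTF_8);
    }
  }
}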
@@ -18,11 +18,12 @@

 package org.apache.hudi.common;

+import org.apache.hadoop.fs.FileStatus;
+
 import java.io.Serializable;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
-import org.apache.hadoop.fs.FileStatus;

 /**
  * Collects stats about a single partition clean operation
@@ -18,11 +18,12 @@

 package org.apache.hudi.common;

+import org.apache.hadoop.conf.Configuration;
+
 import java.io.IOException;
 import java.io.ObjectInputStream;
 import java.io.ObjectOutputStream;
 import java.io.Serializable;
-import org.apache.hadoop.conf.Configuration;

 public class SerializableConfiguration implements Serializable {

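Hadoop's Configuration is not java.io.Serializable, and the ObjectInputStream/ObjectOutputStream imports point at the standard workaround: custom writeObject/readObject hooks that delegate to Configuration's Writable-style write and readFields. A sketch of that pattern, under the assumption this is what the class does:

import org.apache.hadoop.conf.Configuration;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

public class SerializableConfigurationSketch implements Serializable {
  private transient Configuration configuration;

  public SerializableConfigurationSketch(Configuration configuration) {
    this.configuration = configuration;
  }

  public Configuration get() {
    return configuration;
  }

  // Java serialization hooks: Configuration knows how to write itself
  // via the Hadoop Writable contract, so delegate to it.
  private void writeObject(ObjectOutputStream out) throws IOException {
    out.defaultWriteObject();
    configuration.write(out);
  }

  private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    configuration = new Configuration(false);
    configuration.readFields(in);
  }
}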
@@ -18,15 +18,13 @@

 package org.apache.hudi.common.io.storage;

-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.TimeoutException;
+import org.apache.hudi.common.storage.StorageSchemes;
+import org.apache.hudi.common.util.ConsistencyGuard;
+import org.apache.hudi.common.util.FSUtils;
+import org.apache.hudi.common.util.NoOpConsistencyGuard;
+import org.apache.hudi.exception.HoodieException;
+import org.apache.hudi.exception.HoodieIOException;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.ContentSummary;
@@ -51,12 +49,16 @@
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;
-import org.apache.hudi.common.storage.StorageSchemes;
-import org.apache.hudi.common.util.ConsistencyGuard;
-import org.apache.hudi.common.util.FSUtils;
-import org.apache.hudi.common.util.NoOpConsistencyGuard;
-import org.apache.hudi.exception.HoodieException;
-import org.apache.hudi.exception.HoodieIOException;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.TimeoutException;

 /**
  * HoodieWrapperFileSystem wraps the default file system. It holds state about the open streams in the file system to
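Per the (truncated) Javadoc, the wrapper's main trick is remembering which output streams it has handed out. A much-simplified sketch of that bookkeeping idea; the real class wraps the entire Hadoop FileSystem API rather than a single method:

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class StreamTrackingFileSystem {
  private final FileSystem delegate;
  private final ConcurrentMap<String, FSDataOutputStream> openStreams = new ConcurrentHashMap<>();

  public StreamTrackingFileSystem(FileSystem delegate) {
    this.delegate = delegate;
  }

  // Delegate the create, but remember the stream by path so its state
  // (e.g. bytes written) can be queried later without touching storage.
  public FSDataOutputStream create(Path path) throws IOException {
    FSDataOutputStream stream = delegate.create(path);
    openStreams.put(path.toString(), stream);
    return stream;
  }

  public FSDataOutputStream getOpenStream(Path path) {
    return openStreams.get(path.toString());
  }
}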
@@ -18,13 +18,15 @@

 package org.apache.hudi.common.io.storage;

+import org.apache.hudi.common.util.ConsistencyGuard;
+import org.apache.hudi.exception.HoodieException;
+
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
+
 import java.io.IOException;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicLong;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.Path;
-import org.apache.hudi.common.util.ConsistencyGuard;
-import org.apache.hudi.exception.HoodieException;

 /**
  * Wrapper over <code>FSDataOutputStream</code> to keep track of the size of the written bytes. This gives a cheap way
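The AtomicLong import gives away the mechanism: count bytes as they pass through write(), so the current size is available without asking the file system. A minimal sketch over a plain OutputStream (the ConsistencyGuard and FSDataOutputStream details are omitted):

import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.concurrent.atomic.AtomicLong;

public class SizeTrackingOutputStream extends FilterOutputStream {
  private final AtomicLong bytesWritten = new AtomicLong(0);

  public SizeTrackingOutputStream(OutputStream out) {
    super(out);
  }

  @Override
  public void write(int b) throws IOException {
    out.write(b);
    bytesWritten.incrementAndGet();
  }

  @Override
  public void write(byte[] b, int off, int len) throws IOException {
    out.write(b, off, len);
    bytesWritten.addAndGet(len);
  }

  // Cheap size lookup, no round trip to the file system.
  public long getBytesWritten() {
    return bytesWritten.get();
  }
}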
@@ -18,17 +18,19 @@

 package org.apache.hudi.common.model;

+import org.apache.hudi.avro.model.HoodieCompactionOperation;
+import org.apache.hudi.common.util.FSUtils;
+import org.apache.hudi.common.util.Option;
+
+import org.apache.hadoop.fs.Path;
+
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.stream.Collectors;
-import org.apache.hadoop.fs.Path;
-import org.apache.hudi.avro.model.HoodieCompactionOperation;
-import org.apache.hudi.common.util.FSUtils;
-import org.apache.hudi.common.util.Option;

 /**
  * Encapsulates all the needed information about a compaction and make a decision whether this compaction is effective
@@ -18,10 +18,11 @@

 package org.apache.hudi.common.model;

+import org.apache.hudi.common.util.Option;
+
 import org.apache.avro.Schema;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.generic.IndexedRecord;
-import org.apache.hudi.common.util.Option;

 /**
  * Empty payload used for deletions
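The Javadoc states the whole contract: a deletion is a payload that never yields a record. A minimal illustration of the idea, with a simplified signature rather than the exact HoodieRecordPayload interface:

import org.apache.hudi.common.util.Option;

import org.apache.avro.Schema;
import org.apache.avro.generic.IndexedRecord;

public class EmptyPayloadSketch {
  // Returning Option.empty() tells the writer there is nothing to insert,
  // which is how a deletion is expressed.
  public Option<IndexedRecord> getInsertValue(Schema schema) {
    return Option.empty();
  }
}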
@@ -18,11 +18,12 @@

 package org.apache.hudi.common.model;

+import org.apache.hudi.common.util.Option;
+
 import java.io.Serializable;
 import java.util.Objects;
 import java.util.TreeSet;
 import java.util.stream.Stream;
-import org.apache.hudi.common.util.Option;

 /**
  * Within a file group, a slice is a combination of data file written at a commit time and list of log files, containing
@@ -18,14 +18,16 @@

 package org.apache.hudi.common.model;

-import java.io.IOException;
-import org.apache.avro.Schema;
-import org.apache.avro.generic.GenericRecord;
-import org.apache.avro.generic.IndexedRecord;
 import org.apache.hudi.common.util.HoodieAvroUtils;
 import org.apache.hudi.common.util.Option;
 import org.apache.hudi.exception.HoodieIOException;
+
+import org.apache.avro.Schema;
+import org.apache.avro.generic.GenericRecord;
+import org.apache.avro.generic.IndexedRecord;
+
+import java.io.IOException;

 /**
  * This is a payload to wrap a existing Hoodie Avro Record. Useful to create a HoodieRecord over existing GenericRecords
  * in a hoodie datasets (useful in compactions)
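The import mix (HoodieAvroUtils plus the Avro record types) reflects the usual payload pattern: hold the record as Avro bytes and rehydrate it against a schema on read. A hedged sketch of that round trip; the HoodieAvroUtils method names are assumed from the imports, not verified against this revision:

import org.apache.hudi.common.util.HoodieAvroUtils;
import org.apache.hudi.common.util.Option;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.IndexedRecord;

import java.io.IOException;

public class BytesBackedPayload {
  private final byte[] recordBytes;

  public BytesBackedPayload(GenericRecord record) throws IOException {
    // Store the record in its compact Avro binary form.
    this.recordBytes = HoodieAvroUtils.avroToBytes(record);
  }

  public Option<IndexedRecord> getInsertValue(Schema schema) throws IOException {
    // Rehydrate the bytes against the (possibly evolved) reader schema.
    return Option.of(HoodieAvroUtils.bytesToAvro(recordBytes, schema));
  }
}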
@@ -18,21 +18,23 @@

 package org.apache.hudi.common.model;

+import org.apache.hudi.common.util.FSUtils;
+
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.PropertyAccessor;
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.log4j.LogManager;
+import org.apache.log4j.Logger;
+
 import java.io.IOException;
 import java.io.Serializable;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import org.apache.hudi.common.util.FSUtils;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;

 /**
  * All the metadata that gets stored along with a commit.
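The Jackson annotation and DeserializationFeature imports show how commit metadata travels as JSON: an ObjectMapper that reads fields directly and tolerates unknown properties, so older readers can still parse metadata written by newer code. A sketch of that mapper setup (class and method names illustrative):

import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;

public class MetadataJson {
  private static ObjectMapper newMapper() {
    ObjectMapper mapper = new ObjectMapper();
    // Serialize all fields regardless of getters or visibility.
    mapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY);
    // Ignore fields added by newer writers instead of failing.
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    return mapper;
  }

  public static String toJson(Object metadata) throws IOException {
    return newMapper().writerWithDefaultPrettyPrinter().writeValueAsString(metadata);
  }

  public static <T> T fromJson(String json, Class<T> clazz) throws IOException {
    return newMapper().readValue(json, clazz);
  }
}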
@@ -18,11 +18,13 @@

 package org.apache.hudi.common.model;

-import java.io.Serializable;
-import java.util.Objects;
+import org.apache.hudi.common.util.FSUtils;
+
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
-import org.apache.hudi.common.util.FSUtils;
+
+import java.io.Serializable;
+import java.util.Objects;

 public class HoodieDataFile implements Serializable {

@@ -18,15 +18,16 @@

 package org.apache.hudi.common.model;

+import org.apache.hudi.common.table.HoodieTimeline;
+import org.apache.hudi.common.table.timeline.HoodieInstant;
+import org.apache.hudi.common.util.Option;
+
 import java.io.Serializable;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.stream.Stream;
-import org.apache.hudi.common.table.HoodieTimeline;
-import org.apache.hudi.common.table.timeline.HoodieInstant;
-import org.apache.hudi.common.util.Option;

 /**
  * A set of data/base files + set of log files, that make up an unit for all operations
@@ -19,6 +19,7 @@
 package org.apache.hudi.common.model;

 import com.google.common.base.Objects;
+
 import java.io.Serializable;

 /**
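A lone com.google.common.base.Objects import next to Serializable usually signals value semantics: equals and hashCode built from Guava's Objects helpers. A sketch of that pattern; the field names are a guess at what a Hudi key carries:

import com.google.common.base.Objects;

import java.io.Serializable;

public class KeySketch implements Serializable {
  private final String recordKey;
  private final String partitionPath;

  public KeySketch(String recordKey, String partitionPath) {
    this.recordKey = recordKey;
    this.partitionPath = partitionPath;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    KeySketch that = (KeySketch) o;
    // Guava's Objects.equal handles nulls on either side.
    return Objects.equal(recordKey, that.recordKey)
        && Objects.equal(partitionPath, that.partitionPath);
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(recordKey, partitionPath);
  }
}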
@@ -18,14 +18,16 @@

 package org.apache.hudi.common.model;

+import org.apache.hudi.common.util.FSUtils;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.Comparator;
 import java.util.Objects;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hudi.common.util.FSUtils;

 /**
  * Abstracts a single log file. Contains methods to extract metadata like the fileId, version and extension from the log