@@ -221,6 +221,12 @@ public interface HdfsClientConfigKeys {
"dfs.encrypt.data.transfer.cipher.key.bitlength";
int DFS_ENCRYPT_DATA_TRANSFER_CIPHER_KEY_BITLENGTH_DEFAULT = 128;

public static final String
DFS_DATA_TRANSFER_MAX_PACKET_SIZE =
"dfs.data.transfer.max.packet.size";
public static final int DFS_DATA_TRANSFER_MAX_PACKET_SIZE_DEFAULT =
16 * 1024 * 1024;

Reviewer comment (Member) on the new declarations above: nit: fix the checkstyle complaint (likely the redundant public static final modifiers; the neighboring interface constants omit them).

String DFS_TRUSTEDCHANNEL_RESOLVER_CLASS =
"dfs.trustedchannel.resolver.class";

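As a quick illustration (a sketch, not part of the patch; the class name is hypothetical), this is how a caller would typically resolve the new key with its default using the constants added above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;

// Hypothetical helper, shown only to illustrate the key/default convention.
public class MaxPacketSizeLookup {
  public static void main(String[] args) {
    // HdfsConfiguration picks up hdfs-default.xml and hdfs-site.xml from the classpath.
    Configuration conf = new HdfsConfiguration();
    int maxPacketSize = conf.getInt(
        HdfsClientConfigKeys.DFS_DATA_TRANSFER_MAX_PACKET_SIZE,
        HdfsClientConfigKeys.DFS_DATA_TRANSFER_MAX_PACKET_SIZE_DEFAULT);
    System.out.println("effective max packet size = " + maxPacketSize + " bytes");
  }
}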
@@ -25,6 +25,9 @@
import java.nio.channels.ReadableByteChannel;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.util.DirectBufferPool;
import org.apache.hadoop.io.IOUtils;

@@ -45,7 +48,7 @@ public class PacketReceiver implements Closeable {
* The max size of any single packet. This prevents OOMEs when
* invalid data is sent.
*/
- public static final int MAX_PACKET_SIZE = 16 * 1024 * 1024;
+ public static final int MAX_PACKET_SIZE;

static final Logger LOG = LoggerFactory.getLogger(PacketReceiver.class);

@@ -74,6 +77,13 @@ public class PacketReceiver implements Closeable {
*/
private PacketHeader curHeader;

static {
Configuration conf = new HdfsConfiguration();
MAX_PACKET_SIZE = conf.getInt(HdfsClientConfigKeys.
DFS_DATA_TRANSFER_MAX_PACKET_SIZE,
HdfsClientConfigKeys.DFS_DATA_TRANSFER_MAX_PACKET_SIZE_DEFAULT);
}

public PacketReceiver(boolean useDirectBuffers) {
this.useDirectBuffers = useDirectBuffers;
reallocPacketBuf(PacketHeader.PKT_LENGTHS_LEN);
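One consequence of the static initializer above, sketched below (not part of the patch; the class name is hypothetical): MAX_PACKET_SIZE is resolved exactly once, from a fresh HdfsConfiguration, when PacketReceiver is first loaded, so it only sees values present in the default configuration resources (for example an hdfs-site.xml on the classpath); setting the key on a Configuration object afterwards has no effect on the constant.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.protocol.datatransfer.PacketReceiver;  // assumed package

// Hypothetical demo of when the limit gets fixed.
public class StaticInitTimingDemo {
  public static void main(String[] args) {
    // Referencing the constant loads PacketReceiver; the limit is fixed here.
    int limit = PacketReceiver.MAX_PACKET_SIZE;

    // A later programmatic override does not change the already-loaded constant.
    Configuration conf = new HdfsConfiguration();
    conf.setInt(HdfsClientConfigKeys.DFS_DATA_TRANSFER_MAX_PACKET_SIZE,
        32 * 1024 * 1024);
    System.out.println("MAX_PACKET_SIZE is still " + limit + " bytes");
  }
}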
@@ -4421,6 +4421,14 @@
</description>
</property>

<property>
<name>dfs.data.transfer.max.packet.size</name>
<value>16777216</value>
<description>
The max size in bytes of any single packet. This prevents OOMEs when invalid data is sent.
</description>
</property>

<property>
<name>dfs.datanode.balance.max.concurrent.moves</name>
<value>100</value>
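For reference, 16777216 bytes is 16 * 1024 * 1024 (16 MiB), the same value that was previously hard-coded, so out-of-the-box behavior is unchanged. The sketch below (not part of the patch; the class name is hypothetical) shows that this documented default is only a fallback: a site-level or programmatic setting of the property takes precedence.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;

// Hypothetical demo of default vs. override precedence.
public class PacketSizeDefaultDemo {
  public static void main(String[] args) {
    Configuration conf = new HdfsConfiguration();
    // With the hdfs-default.xml from this change on the classpath and no
    // override, this prints 16777216.
    System.out.println(conf.getInt("dfs.data.transfer.max.packet.size", 0));
    // An hdfs-site.xml entry or an explicit set wins over the default.
    conf.setInt("dfs.data.transfer.max.packet.size", 32 * 1024 * 1024);
    System.out.println(conf.getInt("dfs.data.transfer.max.packet.size", 0));
  }
}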
@@ -24,6 +24,7 @@
import java.nio.ByteBuffer;

import org.apache.hadoop.hdfs.AppendTestUtil;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.junit.Test;
import org.mockito.Mockito;

@@ -56,7 +57,13 @@ private static byte[] remainingAsArray(ByteBuffer buf) {
buf.get(b);
return b;
}


@Test
public void testPacketSize() {
assertEquals(PacketReceiver.MAX_PACKET_SIZE,
HdfsClientConfigKeys.DFS_DATA_TRANSFER_MAX_PACKET_SIZE_DEFAULT);
}

@Test
public void testReceiveAndMirror() throws IOException {
PacketReceiver pr = new PacketReceiver(false);
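The new testPacketSize assertion pins PacketReceiver.MAX_PACKET_SIZE to the compile-time default, which holds as long as nothing on the test classpath overrides dfs.data.transfer.max.packet.size. A possible variant, sketched here (not part of the patch; the test name is hypothetical), compares against whatever a fresh HdfsConfiguration resolves, so it would also pass with an override in place:

// Would additionally need:
//   import org.apache.hadoop.conf.Configuration;
//   import org.apache.hadoop.hdfs.HdfsConfiguration;
@Test
public void testPacketSizeMatchesConfiguration() {
  Configuration conf = new HdfsConfiguration();
  int expected = conf.getInt(
      HdfsClientConfigKeys.DFS_DATA_TRANSFER_MAX_PACKET_SIZE,
      HdfsClientConfigKeys.DFS_DATA_TRANSFER_MAX_PACKET_SIZE_DEFAULT);
  assertEquals(expected, PacketReceiver.MAX_PACKET_SIZE);
}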