Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -148,14 +148,14 @@ public void testClientConnectionCloseMidStream() throws Exception {

// await stream handler is ready and request full content
var handler = ctx.awaitRestChannelAccepted(opaqueId);
assertBusy(() -> assertEquals(1, handler.stream.chunkQueue().size()));
assertBusy(() -> assertEquals(1, handler.stream.queueSize()));

// enable auto-read to receive channel close event
handler.stream.channel().config().setAutoRead(true);

// terminate connection and wait resources are released
ctx.clientChannel.close();
assertBusy(() -> assertEquals(0, handler.stream.chunkQueue().size()));
assertBusy(() -> assertEquals(0, handler.stream.queueSize()));
}
}

Expand All @@ -170,11 +170,11 @@ public void testServerCloseConnectionMidStream() throws Exception {

// await stream handler is ready and request full content
var handler = ctx.awaitRestChannelAccepted(opaqueId);
assertBusy(() -> assertEquals(1, handler.stream.chunkQueue().size()));
assertBusy(() -> assertEquals(1, handler.stream.queueSize()));

// terminate connection on server and wait resources are released
handler.channel.request().getHttpChannel().close();
assertBusy(() -> assertEquals(0, handler.stream.chunkQueue().size()));
assertBusy(() -> assertEquals(0, handler.stream.queueSize()));
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,140 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

package org.elasticsearch.http.netty4;

import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.cookie.Cookie;
import io.netty.handler.codec.http.cookie.ServerCookieDecoder;
import io.netty.handler.codec.http.cookie.ServerCookieEncoder;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.http.HttpRequest;
import org.elasticsearch.http.HttpResponse;
import org.elasticsearch.rest.ChunkedRestResponseBodyPart;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestStatus;

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Base implementation of {@link Netty4HttpRequest} that adapts a Netty
 * {@link io.netty.handler.codec.http.HttpRequest} to the Elasticsearch HTTP abstractions.
 * Subclasses supply the body handling (fully aggregated vs. streamed).
 */
public abstract non-sealed class Netty4AbstractHttpRequest implements Netty4HttpRequest {

    private final io.netty.handler.codec.http.HttpRequest request;
    // Pipelining sequence number, echoed into responses so they can be re-ordered on the wire.
    private final int sequence;

    Netty4AbstractHttpRequest(int sequence, io.netty.handler.codec.http.HttpRequest request) {
        this.request = request;
        this.sequence = sequence;
    }

    @Override
    public RestRequest.Method method() {
        return translateRequestMethod(request.method());
    }

    @Override
    public String uri() {
        return request.uri();
    }

    /** Returns a live, case-insensitive, read-only map view over the Netty headers (no copy). */
    @Override
    public final Map<String, List<String>> getHeaders() {
        return new Netty4HttpHeadersMap(request.headers());
    }

    @Override
    public HttpRequest removeHeader(String header) {
        request.headers().remove(header);
        return this;
    }

    /**
     * Decodes the {@code Cookie} header with the strict RFC 6265 decoder and re-encodes each
     * cookie; returns an empty list when the header is absent or contains no cookies.
     */
    @Override
    public List<String> strictCookies() {
        String cookieString = request.headers().get(HttpHeaderNames.COOKIE);
        if (cookieString != null) {
            Set<Cookie> cookies = ServerCookieDecoder.STRICT.decode(cookieString);
            if (cookies.isEmpty() == false) {
                return ServerCookieEncoder.STRICT.encode(cookies);
            }
        }
        return Collections.emptyList();
    }

    /**
     * Maps the Netty protocol version to the Elasticsearch enum.
     *
     * @throws IllegalArgumentException for anything other than HTTP/1.0 or HTTP/1.1
     */
    @Override
    public HttpVersion protocolVersion() {
        if (request.protocolVersion().equals(io.netty.handler.codec.http.HttpVersion.HTTP_1_0)) {
            return HttpRequest.HttpVersion.HTTP_1_0;
        } else if (request.protocolVersion().equals(io.netty.handler.codec.http.HttpVersion.HTTP_1_1)) {
            return HttpRequest.HttpVersion.HTTP_1_1;
        } else {
            throw new IllegalArgumentException("Unexpected http protocol version: " + request.protocolVersion());
        }
    }

    @Override
    public Netty4FullHttpResponse createResponse(RestStatus status, BytesReference contentRef) {
        return new Netty4FullHttpResponse(sequence, request.protocolVersion(), status, contentRef);
    }

    @Override
    public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPart firstBodyPart) {
        return new Netty4ChunkedHttpResponse(sequence, request.protocolVersion(), status, firstBodyPart);
    }

    @Override
    public io.netty.handler.codec.http.HttpRequest nettyRequest() {
        return request;
    }

    @Override
    public int sequence() {
        return sequence;
    }

    /**
     * Translates a Netty {@link HttpMethod} constant to the corresponding
     * {@link RestRequest.Method}. Uses identity comparison, which is valid for the
     * well-known {@link HttpMethod} singletons compared here.
     *
     * @throws IllegalArgumentException for any method outside the supported set
     */
    public static RestRequest.Method translateRequestMethod(HttpMethod httpMethod) {
        if (httpMethod == HttpMethod.GET) {
            return RestRequest.Method.GET;
        }
        if (httpMethod == HttpMethod.POST) {
            return RestRequest.Method.POST;
        }
        if (httpMethod == HttpMethod.PUT) {
            return RestRequest.Method.PUT;
        }
        if (httpMethod == HttpMethod.DELETE) {
            return RestRequest.Method.DELETE;
        }
        if (httpMethod == HttpMethod.HEAD) {
            return RestRequest.Method.HEAD;
        }
        if (httpMethod == HttpMethod.OPTIONS) {
            return RestRequest.Method.OPTIONS;
        }
        if (httpMethod == HttpMethod.PATCH) {
            return RestRequest.Method.PATCH;
        }
        if (httpMethod == HttpMethod.TRACE) {
            return RestRequest.Method.TRACE;
        }
        if (httpMethod == HttpMethod.CONNECT) {
            return RestRequest.Method.CONNECT;
        }
        throw new IllegalArgumentException("Unexpected http method: " + httpMethod);
    }

    /** Wraps the given Netty headers in a read-only, case-insensitive map view without copying. */
    public static Map<String, List<String>> getHttpHeadersAsMap(HttpHeaders httpHeaders) {
        return new Netty4HttpHeadersMap(httpHeaders);
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

package org.elasticsearch.http.netty4;

import io.netty.handler.codec.http.FullHttpRequest;

import org.elasticsearch.http.HttpBody;
import org.elasticsearch.http.HttpRequest;
import org.elasticsearch.transport.netty4.Netty4Utils;

/**
 * An HTTP request whose body has been fully aggregated into a single buffer.
 * Wraps a Netty {@link FullHttpRequest}; header, method, and protocol handling
 * are inherited from {@link Netty4AbstractHttpRequest}.
 */
public non-sealed class Netty4FullHttpRequest extends Netty4AbstractHttpRequest implements Netty4HttpRequest {

    private final FullHttpRequest request;
    // Fully-buffered body view backed by the Netty request's content buffer.
    private final HttpBody.Full body;

    public Netty4FullHttpRequest(int sequence, FullHttpRequest request) {
        super(sequence, request);
        this.request = request;
        // Body is a view over request.content(); its lifetime is tied to the request's
        // ref-counted buffer and is ended via release() below.
        this.body = HttpBody.fromReleasableBytesReference(Netty4Utils.toReleasableBytesReference(request.content()));
    }

    // Shallow copy: shares the underlying Netty request and body with {@code other}.
    // NOTE(review): no additional retain happens here — presumably exactly one of the two
    // instances is expected to call release(); confirm against callers.
    public Netty4FullHttpRequest(Netty4FullHttpRequest other) {
        super(other.sequence(), other.request);
        this.request = other.request;
        this.body = other.body;
    }

    @Override
    public HttpBody body() {
        return body;
    }

    @Override
    public HttpRequest removeHeader(String header) {
        super.removeHeader(header);
        return this;
    }

    // Fully-aggregated requests carry no decoding failure; always null.
    @Override
    public Exception getInboundException() {
        return null;
    }

    @Override
    public void release() {
        // Close the body wrapper first, then drop the request's own reference.
        // NOTE(review): both calls may decrement the same content buffer's refCount —
        // confirm the intended reference accounting in toReleasableBytesReference.
        body.close();
        request.release();
    }

    @Override
    public Netty4FullHttpRequest releaseAndCopy() {
        // Deep-copy the request (including content) before releasing the original,
        // so the returned instance owns an independent, unreleased buffer.
        var copy = request.copy();
        release();
        return new Netty4FullHttpRequest(sequence(), copy);
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

package org.elasticsearch.http.netty4;

import io.netty.handler.codec.http.HttpHeaders;

import java.util.AbstractMap;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

/**
* A wrapper of {@link HttpHeaders} that implements a map to prevent copying unnecessarily. This class does not support modifications and
* due to the underlying implementation, it performs case insensitive lookups of key to values.
* <p>
* It is important to note that this implementation does have some downsides in that each invocation of the {@link #values()} and
* {@link #entrySet()} methods will perform a copy of the values in the HttpHeaders rather than returning a view of the underlying values.
*/
/**
 * Read-only {@link Map} view over Netty {@link HttpHeaders}, avoiding an up-front copy.
 * Lookups are case-insensitive per the underlying implementation. All mutating map
 * operations throw {@link UnsupportedOperationException}.
 * <p>
 * Note that {@link #values()} and {@link #entrySet()} materialize fresh collections on
 * every call rather than returning live views of the headers.
 */
class Netty4HttpHeadersMap implements Map<String, List<String>> {

    private final HttpHeaders httpHeaders;

    Netty4HttpHeadersMap(HttpHeaders httpHeaders) {
        this.httpHeaders = httpHeaders;
    }

    @Override
    public int size() {
        return httpHeaders.size();
    }

    @Override
    public boolean isEmpty() {
        return httpHeaders.isEmpty();
    }

    @Override
    public boolean containsKey(Object key) {
        return key instanceof String name && httpHeaders.contains(name);
    }

    @Override
    public boolean containsValue(Object value) {
        if (value instanceof List == false) {
            return false;
        }
        for (String name : httpHeaders.names()) {
            if (value.equals(httpHeaders.getAll(name))) {
                return true;
            }
        }
        return false;
    }

    @Override
    public List<String> get(Object key) {
        if (key instanceof String name) {
            return httpHeaders.getAll(name);
        }
        return null;
    }

    @Override
    public List<String> put(String key, List<String> value) {
        throw new UnsupportedOperationException("modifications are not supported");
    }

    @Override
    public List<String> remove(Object key) {
        throw new UnsupportedOperationException("modifications are not supported");
    }

    @Override
    public void putAll(Map<? extends String, ? extends List<String>> m) {
        throw new UnsupportedOperationException("modifications are not supported");
    }

    @Override
    public void clear() {
        throw new UnsupportedOperationException("modifications are not supported");
    }

    @Override
    public Set<String> keySet() {
        return httpHeaders.names();
    }

    @Override
    public Collection<List<String>> values() {
        return httpHeaders.names()
            .stream()
            .map(httpHeaders::getAll)
            .map(Collections::unmodifiableList)
            .toList();
    }

    @Override
    public Set<Entry<String, List<String>>> entrySet() {
        return httpHeaders.names().stream().map(this::entryFor).collect(Collectors.toSet());
    }

    // Builds one immutable entry for the given header name, copying its values.
    private Entry<String, List<String>> entryFor(String name) {
        return new AbstractMap.SimpleImmutableEntry<>(name, httpHeaders.getAll(name));
    }
}
Loading