Skip to content
Merged
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@
package org.apache.hadoop.hbase.http;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import javax.servlet.Filter;
import javax.servlet.FilterChain;
Expand All @@ -27,13 +29,15 @@
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;

import org.apache.yetus.audience.InterfaceAudience;

@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class ClickjackingPreventionFilter implements Filter {
private FilterConfig filterConfig;
private static final String DEFAULT_XFRAMEOPTIONS = "DENY";

@Override
public void init(FilterConfig filterConfig) throws ServletException {
Expand All @@ -51,4 +55,11 @@ public void doFilter(ServletRequest req, ServletResponse res, FilterChain chain)
@Override
public void destroy() {
// Intentional no-op: this filter holds no resources that need cleanup.
}

/**
 * Builds the default init parameters for this filter.
 *
 * @param conf configuration to read the X-Frame-Options mode from; key
 *     {@code hbase.http.filter.xframeoptions.mode}, falling back to {@code DENY}
 * @return a mutable map with the single {@code xframeoptions} entry
 */
public static Map<String, String> getDefaultParameters(Configuration conf) {
    String mode = conf.get("hbase.http.filter.xframeoptions.mode", DEFAULT_XFRAMEOPTIONS);
    Map<String, String> defaults = new HashMap<>();
    defaults.put("xframeoptions", mode);
    return defaults;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -596,10 +596,15 @@ private void initializeWebServer(String name, String hostName,
addDefaultApps(contexts, appDir, conf);

addGlobalFilter("safety", QuotingInputFilter.class.getName(), null);
Map<String, String> params = new HashMap<>();
params.put("xframeoptions", conf.get("hbase.http.filter.xframeoptions.mode", "DENY"));

addGlobalFilter("clickjackingprevention",
ClickjackingPreventionFilter.class.getName(), params);
ClickjackingPreventionFilter.class.getName(),
ClickjackingPreventionFilter.getDefaultParameters(conf));

addGlobalFilter("securityheaders",
SecurityHeadersFilter.class.getName(),
SecurityHeadersFilter.getDefaultParameters(conf));

final FilterInitializer[] initializers = getFilterInitializers(conf);
if (initializers != null) {
conf = new Configuration(conf);
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.hbase.http;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We can change it to Private, as this class is used internally only. It looks to me that the original class ClickjackingPreventionFilter was already used internally only. So I think it's okay to rename this class. What do you think? @petersomogyi

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Do we have to rename the class? Best if we don't; then the patch can safely go back to shipping branches.

This seems like only sticking point. Otherwise patch looks nice.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

LP(Config) is supposed to mean the class name is exposed in user configs. If the user can no longer specify which filters then it'd be fine to make it Private. Otherwise we'd need a layer of indirection so that folks don't give classnames.

I think it'd also be fine for this to go to shipping branches if we expressly checked for the old classname in configs and did the appropriate thing to put the same protection in place.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I rechecked the source code and I found that implementations of Servlet filters in the hbase project are basically LP(Config). It seems to me that we assume that the implementations are used from "hbase.rest.filter.classes". If so, we should not rename the class name or we need a layer of indirection as Sean mentioned.

public class SecurityHeadersFilter implements Filter {
  private static final Logger LOG =
      LoggerFactory.getLogger(SecurityHeadersFilter.class);
  // Blank defaults mean the corresponding optional header is not emitted.
  private static final String DEFAULT_HSTS = "";
  private static final String DEFAULT_CSP = "";
  private FilterConfig filterConfig;

  @Override
  public void init(FilterConfig filterConfig) throws ServletException {
    this.filterConfig = filterConfig;
    LOG.info("Added security headers filter");
  }

  @Override
  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
      throws IOException, ServletException {
    final HttpServletResponse httpResponse = (HttpServletResponse) response;
    // Hardening headers that are always set, regardless of configuration.
    httpResponse.addHeader("X-Content-Type-Options", "nosniff");
    httpResponse.addHeader("X-XSS-Protection", "1; mode=block");
    // Opt-in headers: only added when the init parameter is non-blank.
    addHeaderIfConfigured(httpResponse, "Strict-Transport-Security", "hsts");
    addHeaderIfConfigured(httpResponse, "Content-Security-Policy", "csp");
    chain.doFilter(request, response);
  }

  /**
   * Adds {@code headerName} to the response with the value of init parameter
   * {@code paramName}, if that parameter is set to a non-blank value.
   */
  private void addHeaderIfConfigured(HttpServletResponse response, String headerName,
      String paramName) {
    String value = filterConfig.getInitParameter(paramName);
    if (StringUtils.isNotBlank(value)) {
      response.addHeader(headerName, value);
    }
  }

  @Override
  public void destroy() {
    // Intentional no-op: nothing to release.
  }

  /**
   * Builds the default init parameters for this filter from the given configuration.
   *
   * @param conf source for {@code hbase.http.filter.hsts.value} and
   *     {@code hbase.http.filter.csp.value}; both default to the empty string
   * @return a mutable map with the {@code hsts} and {@code csp} entries
   */
  public static Map<String, String> getDefaultParameters(Configuration conf) {
    Map<String, String> defaults = new HashMap<>();
    defaults.put("hsts", conf.get("hbase.http.filter.hsts.value", DEFAULT_HSTS));
    defaults.put("csp", conf.get("hbase.http.filter.csp.value", DEFAULT_CSP));
    return defaults;
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.http;

import static org.apache.hadoop.hbase.http.HttpServerFunctionalTest.createTestServer;
import static org.apache.hadoop.hbase.http.HttpServerFunctionalTest.getServerURL;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.assertThat;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.hamcrest.core.Is;
import org.hamcrest.core.IsEqual;
import org.junit.After;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({HttpServerFunctionalTest.class, MediumTests.class})
public class TestSecurityHeadersFilter {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestSecurityHeadersFilter.class);

  private static URL baseUrl;
  private HttpServer http;

  @After
  public void tearDown() throws Exception {
    http.stop();
  }

  /** Opens an HTTP connection to the root path of the server under test. */
  private HttpURLConnection connectToRoot() throws IOException {
    return (HttpURLConnection) new URL(baseUrl, "/").openConnection();
  }

  @Test
  public void testDefaultValues() throws Exception {
    http = createTestServer();
    http.start();
    baseUrl = getServerURL(http);

    HttpURLConnection conn = connectToRoot();
    assertThat(conn.getResponseCode(), equalTo(HttpURLConnection.HTTP_OK));

    // Hardening headers must always be present with their fixed values.
    assertThat("Header 'X-Content-Type-Options' is missing",
        conn.getHeaderField("X-Content-Type-Options"), is(not((String) null)));
    assertThat(conn.getHeaderField("X-Content-Type-Options"), equalTo("nosniff"));
    assertThat("Header 'X-XSS-Protection' is missing",
        conn.getHeaderField("X-XSS-Protection"), is(not((String) null)));
    assertThat("Header 'X-XSS-Protection' has invalid value",
        conn.getHeaderField("X-XSS-Protection"), equalTo("1; mode=block"));

    // HSTS and CSP are opt-in: with no configuration they must be absent.
    assertThat("Header 'Strict-Transport-Security' should be missing from response,"
            + "but it's present",
        conn.getHeaderField("Strict-Transport-Security"), is((String) null));
    assertThat("Header 'Content-Security-Policy' should be missing from response,"
            + "but it's present",
        conn.getHeaderField("Content-Security-Policy"), is((String) null));
  }

  @Test
  public void testHstsAndCspSettings() throws IOException {
    Configuration conf = new Configuration();
    conf.set("hbase.http.filter.hsts.value", "max-age=63072000;includeSubDomains;preload");
    conf.set("hbase.http.filter.csp.value",
        "default-src https: data: 'unsafe-inline' 'unsafe-eval'");
    http = createTestServer(conf);
    http.start();
    baseUrl = getServerURL(http);

    HttpURLConnection conn = connectToRoot();
    assertThat(conn.getResponseCode(), equalTo(HttpURLConnection.HTTP_OK));

    assertThat("Header 'Strict-Transport-Security' is missing from Rest response",
        conn.getHeaderField("Strict-Transport-Security"), is(not((String) null)));
    assertThat("Header 'Strict-Transport-Security' has invalid value",
        conn.getHeaderField("Strict-Transport-Security"),
        equalTo("max-age=63072000;includeSubDomains;preload"));

    assertThat("Header 'Content-Security-Policy' is missing from Rest response",
        conn.getHeaderField("Content-Security-Policy"), is(not((String) null)));
    assertThat("Header 'Content-Security-Policy' has invalid value",
        conn.getHeaderField("Content-Security-Policy"),
        equalTo("default-src https: data: 'unsafe-inline' 'unsafe-eval'"));
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,59 +18,56 @@

package org.apache.hadoop.hbase.rest;

import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.EnumSet;
import java.util.concurrent.ArrayBlockingQueue;

import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import javax.servlet.DispatcherType;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.http.ClickjackingPreventionFilter;
import org.apache.hadoop.hbase.http.HttpServerUtil;
import org.apache.hadoop.hbase.http.InfoServer;
import org.apache.hadoop.hbase.http.SecurityHeadersFilter;
import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.rest.filter.AuthFilter;
import org.apache.hadoop.hbase.rest.filter.GzipFilter;
import org.apache.hadoop.hbase.rest.filter.RestCsrfPreventionFilter;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.DNS;
import org.apache.hadoop.hbase.http.HttpServerUtil;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.hbase.util.VersionInfo;

import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
import org.apache.hbase.thirdparty.org.apache.commons.cli.PosixParser;

import org.apache.yetus.audience.InterfaceAudience;
import org.eclipse.jetty.http.HttpVersion;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.SslConnectionFactory;
import org.eclipse.jetty.jmx.MBeanContainer;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.SecureRequestCustomizer;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.SslConnectionFactory;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.eclipse.jetty.jmx.MBeanContainer;
import org.eclipse.jetty.servlet.FilterHolder;

import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.DispatcherType;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
import org.apache.hbase.thirdparty.org.apache.commons.cli.PosixParser;

/**
* Main class for launching REST gateway as a servlet hosted by Jetty.
Expand Down Expand Up @@ -137,6 +134,23 @@ void addCSRFFilter(ServletContextHandler ctxHandler, Configuration conf) {
}
}

/**
 * Registers the {@link ClickjackingPreventionFilter} on every path, with its
 * default parameters (the X-Frame-Options mode) taken from {@code conf}.
 */
private void addClickjackingPreventionFilter(ServletContextHandler ctxHandler,
    Configuration conf) {
  registerGlobalFilter(ctxHandler, "clickjackingprevention",
      ClickjackingPreventionFilter.class.getName(),
      ClickjackingPreventionFilter.getDefaultParameters(conf));
}

/**
 * Registers the {@link SecurityHeadersFilter} on every path, with its default
 * parameters (HSTS and CSP values) taken from {@code conf}.
 */
private void addSecurityHeadersFilter(ServletContextHandler ctxHandler, Configuration conf) {
  registerGlobalFilter(ctxHandler, "securityheaders",
      SecurityHeadersFilter.class.getName(),
      SecurityHeadersFilter.getDefaultParameters(conf));
}

/**
 * Shared helper: builds a {@link FilterHolder} and attaches it to the context
 * for all paths and all dispatcher types.
 *
 * @param ctxHandler context to attach the filter to
 * @param name       unique filter name
 * @param className  fully-qualified filter class name
 * @param parameters filter init parameters
 */
private static void registerGlobalFilter(ServletContextHandler ctxHandler, String name,
    String className, Map<String, String> parameters) {
  FilterHolder holder = new FilterHolder();
  holder.setName(name);
  holder.setClassName(className);
  holder.setInitParameters(parameters);
  ctxHandler.addFilter(holder, PATH_SPEC_ANY, EnumSet.allOf(DispatcherType.class));
}

// login the server principal (if using secure Hadoop)
private static Pair<FilterHolder, Class<? extends ServletContainer>> loginServerPrincipal(
UserProvider userProvider, Configuration conf) throws Exception {
Expand Down Expand Up @@ -349,6 +363,8 @@ public synchronized void run() throws Exception {
ctxHandler.addFilter(filter, PATH_SPEC_ANY, EnumSet.of(DispatcherType.REQUEST));
}
addCSRFFilter(ctxHandler, conf);
addClickjackingPreventionFilter(ctxHandler, conf);
addSecurityHeadersFilter(ctxHandler, conf);
HttpServerUtil.constrainHttpMethods(ctxHandler, servlet.getConfiguration()
.getBoolean(REST_HTTP_ALLOW_OPTIONS_METHOD, REST_HTTP_ALLOW_OPTIONS_METHOD_DEFAULT));

Expand Down
Loading