-
Notifications
You must be signed in to change notification settings - Fork 106
Validate Splunk configuration before creating task #365
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 3 commits
9f53363
7e1dc39
57a362e
bbe27ef
ce919d2
4872510
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,4 @@ | ||
| { | ||
| "java.configuration.updateBuildConfiguration": "interactive", | ||
| "java.compile.nullAnalysis.mode": "automatic" | ||
| } | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,9 @@ | ||
| package com.splunk.kafka.connect; | ||
|
|
||
| import org.apache.http.impl.client.CloseableHttpClient; | ||
|
|
||
| import com.splunk.hecclient.HecConfig; | ||
|
|
||
| public abstract class AbstractClientWrapper { | ||
| abstract CloseableHttpClient getClient(HecConfig config); | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,17 @@ | ||
| package com.splunk.kafka.connect; | ||
|
|
||
| import org.apache.http.impl.client.CloseableHttpClient; | ||
|
|
||
| import com.splunk.hecclient.Hec; | ||
| import com.splunk.hecclient.HecConfig; | ||
|
|
||
| public class HecClientWrapper extends AbstractClientWrapper { | ||
|
|
||
| @Override | ||
| CloseableHttpClient getClient(HecConfig config) { | ||
| return Hec.createHttpClient(config); | ||
|
|
||
| } | ||
|
|
||
|
|
||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -20,25 +20,50 @@ | |
|
|
||
| import java.util.function.Function; | ||
| import java.util.stream.Collectors; | ||
|
|
||
| import org.apache.avro.Protocol; | ||
|
||
| import org.apache.commons.lang3.StringUtils; | ||
| import org.apache.http.Header; | ||
| import org.apache.http.client.ClientProtocolException; | ||
| import org.apache.http.client.methods.CloseableHttpResponse; | ||
| import org.apache.http.client.methods.HttpPost; | ||
| import org.apache.http.client.methods.HttpUriRequest; | ||
| import org.apache.http.client.protocol.HttpClientContext; | ||
| import org.apache.http.impl.client.CloseableHttpClient; | ||
| import org.apache.http.message.BasicHeader; | ||
| import org.apache.http.protocol.HttpContext; | ||
| import org.apache.http.util.EntityUtils; | ||
| import org.apache.kafka.common.config.Config; | ||
| import org.apache.kafka.common.config.ConfigDef; | ||
| import org.apache.kafka.common.config.ConfigException; | ||
| import org.apache.kafka.common.config.ConfigValue; | ||
| import org.apache.kafka.connect.connector.Task; | ||
| import org.apache.kafka.connect.sink.SinkConnector; | ||
|
|
||
| import java.io.IOException; | ||
| import java.util.ArrayList; | ||
| import java.util.List; | ||
| import java.util.Map; | ||
|
|
||
| import org.slf4j.Logger; | ||
| import org.slf4j.LoggerFactory; | ||
|
|
||
| import com.splunk.hecclient.Event; | ||
| import com.splunk.hecclient.EventBatch; | ||
| import com.splunk.hecclient.JsonEvent; | ||
| import com.splunk.hecclient.JsonEventBatch; | ||
|
|
||
| public final class SplunkSinkConnector extends SinkConnector { | ||
| private static final Logger log = LoggerFactory.getLogger(SplunkSinkConnector.class); | ||
| private Map<String, String> taskConfig; | ||
| private Map<String, ConfigValue> values; | ||
| private List<ConfigValue> validations; | ||
| private AbstractClientWrapper hecAb = new HecClientWrapper(); | ||
|
||
|
|
||
|
|
||
| public void setHecInstance(AbstractClientWrapper hecAb) { | ||
| this.hecAb = hecAb; | ||
| } | ||
|
|
||
| @Override | ||
| public void start(Map<String, String> taskConfig) { | ||
|
|
@@ -76,14 +101,15 @@ public ConfigDef config() { | |
| return SplunkSinkConnectorConfig.conf(); | ||
| } | ||
|
|
||
|
|
||
| @Override | ||
| public Config validate(final Map<String, String> connectorConfigs) { | ||
| Config config = super.validate(connectorConfigs); | ||
| validations = config.configValues(); | ||
| values = validations.stream().collect(Collectors.toMap(ConfigValue::name, Function.identity())); | ||
|
|
||
| validateKerberosConfigs(connectorConfigs); | ||
| validateHealthCheckForSplunkIndexes(connectorConfigs); | ||
| return new Config(validations); | ||
| } | ||
|
|
||
|
|
@@ -100,9 +126,9 @@ void validateKerberosConfigs(final Map<String, String> configs) { | |
| } | ||
|
|
||
| String errorMessage = String.format( | ||
| "Either both or neither '%s' and '%s' must be set for Kerberos authentication. ", | ||
| KERBEROS_KEYTAB_PATH_CONF, | ||
| KERBEROS_USER_PRINCIPAL_CONF | ||
| "Either both or neither '%s' and '%s' must be set for Kerberos authentication. ", | ||
| KERBEROS_KEYTAB_PATH_CONF, | ||
| KERBEROS_USER_PRINCIPAL_CONF | ||
| ); | ||
| addErrorMessage(KERBEROS_KEYTAB_PATH_CONF, errorMessage); | ||
| addErrorMessage(KERBEROS_USER_PRINCIPAL_CONF, errorMessage); | ||
|
|
@@ -111,4 +137,79 @@ void validateKerberosConfigs(final Map<String, String> configs) { | |
| private void addErrorMessage(String property, String error) { | ||
| values.get(property).addErrorMessage(error); | ||
| } | ||
| } | ||
|
|
||
| private static String[] split(String data, String sep) { | ||
| if (data != null && !data.trim().isEmpty()) { | ||
| return data.trim().split(sep); | ||
| } | ||
| return null; | ||
| } | ||
|
|
||
|
|
||
| private void validateHealthCheckForSplunkIndexes(final Map<String, String> configs) throws ConfigException { | ||
|
||
| SplunkSinkConnectorConfig connectorConfig = new SplunkSinkConnectorConfig(configs); | ||
| String[] indexes = split(connectorConfig.indexes, ","); | ||
| if(indexes.length == 0) { | ||
| healthCheckForSplunkHEC(connectorConfig, ""); | ||
| } else { | ||
| for (String index : indexes) { | ||
| healthCheckForSplunkHEC(connectorConfig, index); | ||
| } | ||
| } | ||
| } | ||
|
|
||
| private void healthCheckForSplunkHEC(SplunkSinkConnectorConfig connectorConfig, String index) throws ConfigException { | ||
| Header[] headers; | ||
| headers = new Header[1]; | ||
| headers[0] = new BasicHeader("Authorization", String.format("Splunk %s", connectorConfig.splunkToken)); | ||
|
||
| String endpoint = "/services/collector"; | ||
| String url = connectorConfig.splunkURI + endpoint; | ||
| final HttpPost httpPost = new HttpPost(url); | ||
| httpPost.setHeaders(headers); | ||
| EventBatch batch = new JsonEventBatch(); | ||
| Event event = new JsonEvent("a:a", null); | ||
|
||
| event.setIndex(index); | ||
| batch.add(event); | ||
| httpPost.setEntity(batch.getHttpEntity()); | ||
|
|
||
| CloseableHttpClient httpClient = hecAb.getClient(connectorConfig.getHecConfig()); | ||
| executeHttpRequest(httpPost, httpClient); | ||
| } | ||
|
|
||
|
|
||
| private void executeHttpRequest(final HttpUriRequest req, CloseableHttpClient httpClient) throws ConfigException { | ||
| CloseableHttpResponse resp = null; | ||
| HttpContext context; | ||
| context = HttpClientContext.create(); | ||
| try { | ||
| resp = httpClient.execute(req, context); | ||
| } catch (ClientProtocolException ex) { | ||
| throw new ConfigException("Invalid splunk SSL configuration detected while validating configuration",ex); | ||
| } catch (IOException ex) { | ||
| throw new ConfigException("Invalid Splunk Configurations",ex); | ||
| } | ||
| try { | ||
| String respPayload = EntityUtils.toString(resp.getEntity(), "utf-8"); | ||
| if (respPayload.contains("Incorrect index")){ | ||
| throw new ConfigException("Incorrect Index detected while validating configuration"); | ||
| } | ||
| else if (respPayload.contains("Invalid token")){ | ||
| throw new ConfigException("Incorrect HEC token detected while validating configuration"); | ||
| } | ||
| } catch(ConfigException ex){ | ||
| throw ex; | ||
| } | ||
| catch (Exception ex) { | ||
| throw new ConfigException("failed to process http payload",ex); | ||
| } finally { | ||
| try { | ||
| resp.close(); | ||
| } catch (Exception ex) { | ||
| throw new ConfigException("failed to close http response",ex); | ||
| } | ||
|
|
||
| } | ||
| } | ||
|
|
||
|
|
||
| } | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,20 @@ | ||
| package com.splunk.kafka.connect; | ||
|
|
||
| import org.apache.http.impl.client.CloseableHttpClient; | ||
|
|
||
| import com.splunk.hecclient.CloseableHttpClientMock; | ||
| import com.splunk.hecclient.Hec; | ||
| import com.splunk.hecclient.HecConfig; | ||
|
|
||
| public class MockHecClientWrapper extends AbstractClientWrapper{ | ||
| public CloseableHttpClientMock client = new CloseableHttpClientMock(); | ||
|
|
||
| @Override | ||
| CloseableHttpClient getClient(HecConfig config) { | ||
| // TODO Auto-generated method stub | ||
| if (config==null){} | ||
|
|
||
| return client; | ||
| } | ||
|
|
||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -22,14 +22,18 @@ | |
| import static org.junit.Assert.assertFalse; | ||
| import static org.junit.Assert.assertTrue; | ||
|
|
||
| import org.apache.http.impl.client.CloseableHttpClient; | ||
| import org.apache.kafka.common.config.Config; | ||
| import org.apache.kafka.common.config.ConfigDef; | ||
| import org.apache.kafka.common.config.ConfigException; | ||
| import org.apache.kafka.common.config.ConfigValue; | ||
| import org.apache.kafka.connect.connector.Task; | ||
| import org.apache.kafka.connect.sink.SinkConnector; | ||
| import org.junit.Assert; | ||
| import org.junit.jupiter.api.Assertions; | ||
| import org.junit.jupiter.api.Test; | ||
|
|
||
| import com.splunk.hecclient.CloseableHttpClientMock; | ||
|
|
||
| import java.util.*; | ||
|
|
||
|
|
@@ -74,6 +78,10 @@ public void testValidKerberosBothEmpty() { | |
| final Map<String, String> configs = new HashMap<>(); | ||
| addNecessaryConfigs(configs); | ||
| SinkConnector connector = new SplunkSinkConnector(); | ||
| configs.put("topics", "b"); | ||
| configs.put("splunk.indexes", "b"); | ||
| MockHecClientWrapper clientInstance = new MockHecClientWrapper(); | ||
| ((SplunkSinkConnector) connector).setHecInstance(clientInstance); | ||
| Config result = connector.validate(configs); | ||
| assertNoErrors(result); | ||
| } | ||
|
|
@@ -85,6 +93,10 @@ public void testValidKerberosBothSet() { | |
| configs.put(KERBEROS_USER_PRINCIPAL_CONF, TEST_KERB_PRINCIPAL); | ||
| configs.put(KERBEROS_KEYTAB_PATH_CONF, TEST_KERB_KEYTAB_PATH); | ||
| SinkConnector connector = new SplunkSinkConnector(); | ||
| configs.put("topics", "b"); | ||
| configs.put("splunk.indexes", "b"); | ||
| MockHecClientWrapper clientInstance = new MockHecClientWrapper(); | ||
| ((SplunkSinkConnector) connector).setHecInstance(clientInstance); | ||
| Config result = connector.validate(configs); | ||
| assertNoErrors(result); | ||
| } | ||
|
|
@@ -95,6 +107,10 @@ public void testInvalidKerberosOnlyPrincipalSet() { | |
| addNecessaryConfigs(configs); | ||
| configs.put(KERBEROS_USER_PRINCIPAL_CONF, TEST_KERB_PRINCIPAL); | ||
| SplunkSinkConnector connector = new SplunkSinkConnector(); | ||
| configs.put("topics", "b"); | ||
| configs.put("splunk.indexes", "b"); | ||
| MockHecClientWrapper clientInstance = new MockHecClientWrapper(); | ||
| ((SplunkSinkConnector) connector).setHecInstance(clientInstance); | ||
| Config result = connector.validate(configs); | ||
| assertHasErrorMessage(result, KERBEROS_USER_PRINCIPAL_CONF, "must be set"); | ||
| assertHasErrorMessage(result, KERBEROS_KEYTAB_PATH_CONF, "must be set"); | ||
|
|
@@ -106,11 +122,54 @@ public void testInvalidKerberosOnlyKeytabSet() { | |
| addNecessaryConfigs(configs); | ||
| configs.put(KERBEROS_KEYTAB_PATH_CONF, TEST_KERB_KEYTAB_PATH); | ||
| SplunkSinkConnector connector = new SplunkSinkConnector(); | ||
| configs.put("topics", "b"); | ||
| configs.put("splunk.indexes", "b"); | ||
| MockHecClientWrapper clientInstance = new MockHecClientWrapper(); | ||
| ((SplunkSinkConnector) connector).setHecInstance(clientInstance); | ||
| Config result = connector.validate(configs); | ||
| assertHasErrorMessage(result, KERBEROS_USER_PRINCIPAL_CONF, "must be set"); | ||
| assertHasErrorMessage(result, KERBEROS_KEYTAB_PATH_CONF, "must be set"); | ||
| } | ||
|
|
||
| @Test | ||
| public void testInvalidToken() { | ||
| final Map<String, String> configs = new HashMap<>(); | ||
| addNecessaryConfigs(configs); | ||
| SplunkSinkConnector connector = new SplunkSinkConnector(); | ||
| configs.put("topics", "b"); | ||
| configs.put("splunk.indexes", "b"); | ||
| MockHecClientWrapper clientInstance = new MockHecClientWrapper(); | ||
| clientInstance.client.setResponse(CloseableHttpClientMock.inValidToken); | ||
| ((SplunkSinkConnector) connector).setHecInstance(clientInstance); | ||
| Assertions.assertThrows(ConfigException.class, ()->connector.validate(configs)); | ||
| } | ||
|
|
||
| @Test | ||
| public void testInvalidIndex() { | ||
| final Map<String, String> configs = new HashMap<>(); | ||
| addNecessaryConfigs(configs); | ||
| SplunkSinkConnector connector = new SplunkSinkConnector(); | ||
| configs.put("topics", "b"); | ||
| configs.put("splunk.indexes", "b"); | ||
| MockHecClientWrapper clientInstance = new MockHecClientWrapper(); | ||
| clientInstance.client.setResponse(CloseableHttpClientMock.inValidIndex); | ||
| ((SplunkSinkConnector) connector).setHecInstance(clientInstance); | ||
| Assertions.assertThrows(ConfigException.class, ()->connector.validate(configs)); | ||
| } | ||
|
|
||
| @Test | ||
| public void testValidSplunkConfigurations() { | ||
| final Map<String, String> configs = new HashMap<>(); | ||
| addNecessaryConfigs(configs); | ||
| SplunkSinkConnector connector = new SplunkSinkConnector(); | ||
| configs.put("topics", "b"); | ||
| configs.put("splunk.indexes", "b"); | ||
| MockHecClientWrapper clientInstance = new MockHecClientWrapper(); | ||
| clientInstance.client.setResponse(CloseableHttpClientMock.success); | ||
| ((SplunkSinkConnector) connector).setHecInstance(clientInstance); | ||
| Assertions.assertDoesNotThrow(()->connector.validate(configs)); | ||
| } | ||
|
|
||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Can you also add tests for an invalid host, and invalid SSL configuration (i.e. HTTP request on HTTPS server and vice-versa)? |
||
| private void addNecessaryConfigs(Map<String, String> configs) { | ||
| configs.put(URI_CONF, TEST_URI); | ||
| configs.put(TOKEN_CONF, "blah"); | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Please remove unwanted files