diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java b/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
index 6be0ff221e5..deb13db607a 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/query/render/ClusterBlueprintRenderer.java
@@ -45,7 +45,6 @@
 import org.apache.ambari.server.controller.internal.ExportBlueprintRequest;
 import org.apache.ambari.server.controller.internal.RequestImpl;
 import org.apache.ambari.server.controller.internal.ResourceImpl;
-import org.apache.ambari.server.controller.internal.Stack;
 import org.apache.ambari.server.controller.spi.ClusterController;
 import org.apache.ambari.server.controller.spi.NoSuchParentResourceException;
 import org.apache.ambari.server.controller.spi.NoSuchResourceException;
@@ -56,6 +55,7 @@
 import org.apache.ambari.server.controller.spi.UnsupportedPropertyException;
 import org.apache.ambari.server.controller.utilities.PredicateBuilder;
 import org.apache.ambari.server.state.SecurityType;
+import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.topology.AmbariContext;
 import org.apache.ambari.server.topology.ClusterTopology;
 import org.apache.ambari.server.topology.ClusterTopologyImpl;
@@ -68,6 +68,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.Iterables;
+
 /**
  * Renderer which renders a cluster resource as a blueprint.
  */
@@ -195,9 +197,12 @@ private Resource createBlueprintResource(TreeNode clusterNode) {
     BlueprintConfigurationProcessor configProcessor = new BlueprintConfigurationProcessor(topology);
     configProcessor.doUpdateForBlueprintExport();
 
-    Stack stack = topology.getBlueprint().getStack();
-    blueprintResource.setProperty("Blueprints/stack_name", stack.getName());
-    blueprintResource.setProperty("Blueprints/stack_version", stack.getVersion());
+    Set stackIds = topology.getBlueprint().getStackIds();
+    if (stackIds.size() == 1) {
+      StackId stackId = Iterables.getOnlyElement(stackIds);
+      blueprintResource.setProperty("Blueprints/stack_name", stackId.getStackName());
+      blueprintResource.setProperty("Blueprints/stack_version", stackId.getStackVersion());
+    }
 
     if (topology.isClusterKerberosEnabled()) {
       Map securityConfigMap = new LinkedHashMap<>();
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java
index ddd746b2cd8..94ba563ed1f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorBlueprintProcessor.java
@@ -18,7 +18,6 @@
 package org.apache.ambari.server.api.services.stackadvisor;
 
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -29,7 +28,7 @@
 import org.apache.ambari.server.api.services.stackadvisor.recommendations.RecommendationResponse;
 import org.apache.ambari.server.api.services.stackadvisor.recommendations.RecommendationResponse.BlueprintConfigurations;
 import org.apache.ambari.server.controller.internal.ConfigurationTopologyException;
-import org.apache.ambari.server.controller.internal.Stack;
+import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.ValueAttributesInfo;
 import org.apache.ambari.server.topology.AdvisedConfiguration;
 import org.apache.ambari.server.topology.Blueprint;
@@ -77,26 +76,27 @@ public static void init(StackAdvisorHelper instance) {
    * @param userProvidedConfigurations User configurations of cluster provided in Blueprint + Cluster template
    */
   public void adviseConfiguration(ClusterTopology clusterTopology, Map> userProvidedConfigurations) throws ConfigurationTopologyException {
-    StackAdvisorRequest request = createStackAdvisorRequest(clusterTopology, StackAdvisorRequestType.CONFIGURATIONS);
-    try {
-      RecommendationResponse response = stackAdvisorHelper.recommend(request);
-      addAdvisedConfigurationsToTopology(response, clusterTopology, userProvidedConfigurations);
-    } catch (StackAdvisorException e) {
-      throw new ConfigurationTopologyException(RECOMMENDATION_FAILED, e);
-    } catch (IllegalArgumentException e) {
-      throw new ConfigurationTopologyException(INVALID_RESPONSE, e);
+    for (StackId stackId : clusterTopology.getBlueprint().getStackIds()) {
+      StackAdvisorRequest request = createStackAdvisorRequest(clusterTopology, stackId, StackAdvisorRequestType.CONFIGURATIONS);
+      try {
+        RecommendationResponse response = stackAdvisorHelper.recommend(request);
+        addAdvisedConfigurationsToTopology(response, clusterTopology, userProvidedConfigurations);
+      } catch (StackAdvisorException e) {
+        throw new ConfigurationTopologyException(RECOMMENDATION_FAILED, e);
+      } catch (IllegalArgumentException e) {
+        throw new ConfigurationTopologyException(INVALID_RESPONSE, e);
+      }
     }
   }
 
-  private StackAdvisorRequest createStackAdvisorRequest(ClusterTopology clusterTopology, StackAdvisorRequestType requestType) {
-    Stack stack = clusterTopology.getBlueprint().getStack(); // TODO: implement multi-stack
+  private StackAdvisorRequest createStackAdvisorRequest(ClusterTopology clusterTopology, StackId stackId, StackAdvisorRequestType requestType) {
     Map> hgComponentsMap = gatherHostGroupComponents(clusterTopology);
     Map> hgHostsMap = gatherHostGroupBindings(clusterTopology);
     Map> componentHostsMap = gatherComponentsHostsMap(hgComponentsMap, hgHostsMap);
 
     return StackAdvisorRequest.StackAdvisorRequestBuilder
-      .forStack(stack.getName(), stack.getVersion())
-      .forServices(new ArrayList<>(clusterTopology.getBlueprint().getServices()))
+      .forStack(stackId)
+      .forServices(clusterTopology.getBlueprint().getStack().getServices(stackId))
       .forHosts(gatherHosts(clusterTopology))
       .forHostsGroupBindings(gatherHostGroupBindings(clusterTopology))
      .forHostComponents(gatherHostGroupComponents(clusterTopology))
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java
index 3e5599c4c69..f826ff310a2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelper.java
@@ -33,7 +33,6 @@
 import org.apache.ambari.server.api.services.stackadvisor.recommendations.RecommendationResponse;
 import org.apache.ambari.server.api.services.stackadvisor.validations.ValidationResponse;
 import org.apache.ambari.server.configuration.Configuration;
-
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -122,9 +121,7 @@ public synchronized RecommendationResponse recommend(StackAdvisorRequest request
     throws StackAdvisorException {
     requestId = generateRequestId();
 
-    // TODO, need to pass the service Name that was modified.
-    // For now, hardcode
-    String serviceName = "ZOOKEEPER";
+    String serviceName = request.getServices().stream().findAny().orElse(null);
     ServiceInfo.ServiceAdvisorType serviceAdvisorType = getServiceAdvisorType(request.getStackName(), request.getStackVersion(), serviceName);
 
     StackAdvisorCommand command = createRecommendationCommand(serviceName, request);
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
index b30eec63b4e..be76cdc1d76 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRequest.java
@@ -29,6 +29,7 @@
 import org.apache.ambari.server.api.services.stackadvisor.recommendations.RecommendationResponse;
 import org.apache.ambari.server.state.ChangedConfigInfo;
+import org.apache.ambari.server.state.StackId;
 import org.apache.commons.lang.StringUtils;
 
 import com.google.common.base.Preconditions;
@@ -146,6 +147,10 @@ public static StackAdvisorRequestBuilder forStack(String stackName, String stack
       return new StackAdvisorRequestBuilder(stackName, stackVersion);
     }
 
+    public static StackAdvisorRequestBuilder forStack(StackId stackId) {
+      return new StackAdvisorRequestBuilder(stackId.getStackName(), stackId.getStackVersion());
+    }
+
     public StackAdvisorRequestBuilder ofType(StackAdvisorRequestType requestType) {
       this.instance.requestType = requestType;
       return this;
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRunner.java b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRunner.java
index 3ba9f6b1a01..6e7bf43292b 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRunner.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorRunner.java
@@ -52,7 +52,7 @@ public class StackAdvisorRunner {
    */
   public void runScript(ServiceInfo.ServiceAdvisorType serviceAdvisorType, StackAdvisorCommandType saCommandType, File actionDirectory) throws StackAdvisorException {
-    LOG.info(String.format("StackAdvisorRunner. serviceAdvisorType=%s, actionDirectory=%s, command=%s", serviceAdvisorType.toString(), actionDirectory,
+    LOG.info(String.format("StackAdvisorRunner. serviceAdvisorType=%s, actionDirectory=%s, command=%s", serviceAdvisorType, actionDirectory,
       saCommandType));
 
     String outputFile = actionDirectory + File.separator + "stackadvisor.out";
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
index 5bad0f246eb..4a0d8d16d85 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariServer.java
@@ -63,7 +63,6 @@
 import org.apache.ambari.server.controller.internal.AbstractControllerResourceProvider;
 import org.apache.ambari.server.controller.internal.AmbariPrivilegeResourceProvider;
 import org.apache.ambari.server.controller.internal.BaseClusterRequest;
-import org.apache.ambari.server.controller.internal.BlueprintResourceProvider;
 import org.apache.ambari.server.controller.internal.ClusterPrivilegeResourceProvider;
 import org.apache.ambari.server.controller.internal.ClusterResourceProvider;
 import org.apache.ambari.server.controller.internal.HostResourceProvider;
@@ -940,8 +939,6 @@ public void performStaticInjection() {
     SecurityFilter.init(injector.getInstance(Configuration.class));
     StackDefinedPropertyProvider.init(injector);
     AbstractControllerResourceProvider.init(injector.getInstance(ResourceProviderFactory.class));
-    BlueprintResourceProvider.init(injector.getInstance(BlueprintFactory.class),
-        injector.getInstance(BlueprintDAO.class), injector.getInstance(SecurityConfigurationFactory.class), ambariMetaInfo);
     StackDependencyResourceProvider.init(ambariMetaInfo);
     ClusterResourceProvider.init(injector.getInstance(TopologyManager.class),
         injector.getInstance(TopologyRequestFactoryImpl.class), injector.getInstance(SecurityConfigurationFactory
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
index b99fcf1fee0..357f1b61d86 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ControllerModule.java
@@ -63,6 +63,7 @@
 import org.apache.ambari.server.configuration.Configuration.ConnectionPoolType;
 import org.apache.ambari.server.configuration.Configuration.DatabaseType;
 import org.apache.ambari.server.controller.internal.AlertTargetResourceProvider;
+import org.apache.ambari.server.controller.internal.BlueprintResourceProvider;
 import org.apache.ambari.server.controller.internal.ClusterSettingResourceProvider;
 import org.apache.ambari.server.controller.internal.ClusterStackVersionResourceProvider;
 import org.apache.ambari.server.controller.internal.ComponentResourceProvider;
@@ -168,6 +169,8 @@
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.apache.ambari.server.state.svccomphost.ServiceComponentHostImpl;
 import org.apache.ambari.server.topology.BlueprintFactory;
+import org.apache.ambari.server.topology.BlueprintValidator;
+import org.apache.ambari.server.topology.BlueprintValidatorImpl;
 import org.apache.ambari.server.topology.PersistedState;
 import org.apache.ambari.server.topology.PersistedStateImpl;
 import org.apache.ambari.server.topology.SecurityConfigurationFactory;
@@ -506,6 +509,7 @@ private void installFactories() {
         .implement(ResourceProvider.class, Names.named("alertTarget"), AlertTargetResourceProvider.class)
.implement(ResourceProvider.class, Names.named("viewInstance"), ViewInstanceResourceProvider.class) .implement(ResourceProvider.class, Names.named("rootServiceHostComponentConfiguration"), RootServiceComponentConfigurationResourceProvider.class) + .implement(ResourceProvider.class, Names.named(BlueprintResourceProvider.NAME), BlueprintResourceProvider.class) .build(ResourceProviderFactory.class)); install(new FactoryModuleBuilder().implement( @@ -537,6 +541,7 @@ private void installFactories() { bind(RegistryFactory.class).to(RegistryFactoryImpl.class); bind(HostRoleCommandFactory.class).to(HostRoleCommandFactoryImpl.class); bind(SecurityHelper.class).toInstance(SecurityHelperImpl.getInstance()); + bind(BlueprintValidator.class).to(BlueprintValidatorImpl.class); bind(BlueprintFactory.class); install(new FactoryModuleBuilder().implement(AmbariEvent.class, Names.named("userCreated"), UserCreatedEvent.class).build(AmbariEventFactory.class)); diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java index ec5f656dd4c..7c316aa4bf8 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/ResourceProviderFactory.java @@ -22,6 +22,7 @@ import javax.inject.Named; import org.apache.ambari.server.controller.internal.AlertTargetResourceProvider; +import org.apache.ambari.server.controller.internal.BlueprintResourceProvider; import org.apache.ambari.server.controller.internal.ClusterStackVersionResourceProvider; import org.apache.ambari.server.controller.internal.UpgradeResourceProvider; import org.apache.ambari.server.controller.internal.ViewInstanceResourceProvider; @@ -89,4 +90,9 @@ public interface ResourceProviderFactory { @Named("viewInstance") ViewInstanceResourceProvider getViewInstanceResourceProvider(); + @Named(BlueprintResourceProvider.NAME) + BlueprintResourceProvider getBlueprintResourceProvider( + AmbariManagementController managementController + ); + } diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java index 212a372aeb8..2d1d1a1fa91 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/AbstractControllerResourceProvider.java @@ -226,7 +226,7 @@ public static ResourceProvider getResourceProvider(Resource.Type type, case HostComponentProcess: return new HostComponentProcessResourceProvider(managementController); case Blueprint: - return new BlueprintResourceProvider(managementController); + return resourceProviderFactory.getBlueprintResourceProvider(managementController); case KerberosDescriptor: return resourceProviderFactory.getKerberosDescriptorResourceProvider(managementController); case Recommendation: diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java index 5b755328f57..8da1f51ee2c 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java +++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java @@ -58,6 +58,7 @@ import com.google.common.base.Predicates; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; import com.google.common.collect.Sets; @@ -65,6 +66,7 @@ * Updates configuration properties based on cluster topology. This is done when exporting * a blueprint and when a cluster is provisioned via a blueprint. */ +// TODO move to topology package public class BlueprintConfigurationProcessor { private static final Logger LOG = LoggerFactory.getLogger(BlueprintConfigurationProcessor.class); @@ -437,7 +439,7 @@ public Set doUpdateForClusterCreate() throws ConfigurationTopologyExcept private void trimProperties(Configuration clusterConfig, ClusterTopology clusterTopology) { Blueprint blueprint = clusterTopology.getBlueprint(); - Stack stack = blueprint.getStack(); + StackDefinition stack = blueprint.getStack(); Map> configTypes = clusterConfig.getFullProperties(); for (String configType : configTypes.keySet()) { @@ -448,7 +450,7 @@ private void trimProperties(Configuration clusterConfig, ClusterTopology cluster } } - private void trimPropertyValue(Configuration clusterConfig, Stack stack, String configType, Map properties, String propertyName) { + private void trimPropertyValue(Configuration clusterConfig, StackDefinition stack, String configType, Map properties, String propertyName) { if (propertyName != null && properties.get(propertyName) != null) { TrimmingStrategy trimmingStrategy = @@ -2880,7 +2882,7 @@ private Collection setupHDFSProxyUsers(Configuration configuration, Set< * @param configTypesUpdated * @param stack */ - private void addExcludedConfigProperties(Configuration configuration, Set configTypesUpdated, Stack stack) { + private void addExcludedConfigProperties(Configuration configuration, Set configTypesUpdated, StackDefinition stack) { Collection blueprintServices = clusterTopology.getBlueprint().getServices(); LOG.debug("Handling excluded properties for blueprint services: {}", blueprintServices); @@ -2971,38 +2973,41 @@ private static void setRetryConfiguration(Configuration configuration, Set configTypesUpdated) throws ConfigurationTopologyException { ConfigHelper configHelper = clusterTopology.getAmbariContext().getConfigHelper(); - Stack stack = clusterTopology.getBlueprint().getStack(); - String stackName = stack.getName(); - String stackVersion = stack.getVersion(); - StackId stackId = new StackId(stackName, stackVersion); - - Set properties = Sets.newHashSet(ConfigHelper.CLUSTER_ENV_STACK_NAME_PROPERTY, - ConfigHelper.CLUSTER_ENV_STACK_ROOT_PROPERTY, ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY, - ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY, - ConfigHelper.CLUSTER_ENV_STACK_PACKAGES_PROPERTY); + Set properties = ImmutableSet.of( + ConfigHelper.CLUSTER_ENV_STACK_NAME_PROPERTY, + ConfigHelper.CLUSTER_ENV_STACK_ROOT_PROPERTY, + ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY, + ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY, + ConfigHelper.CLUSTER_ENV_STACK_PACKAGES_PROPERTY + ); try { - Map> defaultStackProperties = configHelper.getDefaultStackProperties(stackId); - Map clusterEnvDefaultProperties = defaultStackProperties.get(CLUSTER_ENV_CONFIG_TYPE_NAME); - - for( String property : properties ){ - if (clusterEnvDefaultProperties.containsKey(property)) { - configuration.setProperty(CLUSTER_ENV_CONFIG_TYPE_NAME, property, - 
clusterEnvDefaultProperties.get(property)); + for (StackId stackId : clusterTopology.getBlueprint().getStackIds()) { + Map> defaultStackProperties = configHelper.getDefaultStackProperties(stackId); + if (defaultStackProperties.containsKey(CLUSTER_ENV_CONFIG_TYPE_NAME)) { + Map clusterEnvDefaultProperties = defaultStackProperties.get(CLUSTER_ENV_CONFIG_TYPE_NAME); + + for (String property : properties) { + if (clusterEnvDefaultProperties.containsKey(property)) { + configuration.setProperty(CLUSTER_ENV_CONFIG_TYPE_NAME, property, + clusterEnvDefaultProperties.get(property) + ); + + // make sure to include the configuration type as being updated + configTypesUpdated.add(CLUSTER_ENV_CONFIG_TYPE_NAME); + } + } - // make sure to include the configuration type as being updated - configTypesUpdated.add(CLUSTER_ENV_CONFIG_TYPE_NAME); + break; } } - } catch( AmbariException ambariException ){ - throw new ConfigurationTopologyException("Unable to retrieve the stack tools and features", - ambariException); + } catch (AmbariException e) { + throw new ConfigurationTopologyException("Unable to retrieve the stack tools and features", e); } } @@ -3101,7 +3106,7 @@ private static class StackPropertyTypeFilter implements PropertyFilter { */ @Override public boolean isPropertyIncluded(String propertyName, String propertyValue, String configType, ClusterTopology topology) { - Stack stack = topology.getBlueprint().getStack(); + StackDefinition stack = topology.getBlueprint().getStack(); final String serviceName = stack.getServiceForConfigType(configType); return !(stack.isPasswordProperty(serviceName, configType, propertyName) || stack.isKerberosPrincipalNameProperty(serviceName, configType, propertyName)); @@ -3198,7 +3203,7 @@ private static abstract class DependencyFilter implements PropertyFilter { */ @Override public boolean isPropertyIncluded(String propertyName, String propertyValue, String configType, ClusterTopology topology) { - Stack stack = topology.getBlueprint().getStack(); + StackDefinition stack = topology.getBlueprint().getStack(); Configuration configuration = topology.getConfiguration(); final String serviceName = stack.getServiceForConfigType(configType); diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java index 1a4221e378e..760d90f89c0 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintResourceProvider.java @@ -20,10 +20,7 @@ import static java.util.stream.Collectors.toList; -import java.io.IOException; -import java.io.UncheckedIOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -33,6 +30,8 @@ import java.util.Map; import java.util.Set; +import javax.inject.Inject; + import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.DuplicateResourceException; import org.apache.ambari.server.api.services.AmbariMetaInfo; @@ -60,20 +59,23 @@ import org.apache.ambari.server.state.StackInfo; import org.apache.ambari.server.topology.Blueprint; import org.apache.ambari.server.topology.BlueprintFactory; +import org.apache.ambari.server.topology.BlueprintValidator; import org.apache.ambari.server.topology.GPLLicenseNotAcceptedException; import 
org.apache.ambari.server.topology.InvalidTopologyException; import org.apache.ambari.server.topology.MpackInstance; import org.apache.ambari.server.topology.SecurityConfiguration; import org.apache.ambari.server.topology.SecurityConfigurationFactory; +import org.apache.ambari.server.utils.JsonUtils; import org.apache.ambari.server.utils.SecretReference; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Sets; +import com.google.common.collect.ImmutableSet; +import com.google.inject.assistedinject.Assisted; /** @@ -82,21 +84,19 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvider { private static final Logger LOG = LoggerFactory.getLogger(BlueprintResourceProvider.class); + public static final String NAME = "blueprint"; // ----- Property ID constants --------------------------------------------- - // Blueprints - public static final String BLUEPRINT_NAME_PROPERTY_ID = - PropertyHelper.getPropertyId("Blueprints", "blueprint_name"); - public static final String STACK_NAME_PROPERTY_ID = - PropertyHelper.getPropertyId("Blueprints", "stack_name"); - public static final String STACK_VERSION_PROPERTY_ID = - PropertyHelper.getPropertyId("Blueprints", "stack_version"); + public static final String RESPONSE_KEY = "Blueprints"; + public static final String BLUEPRINTS_PROPERTY_ID = "Blueprints"; - public static final String BLUEPRINT_SECURITY_PROPERTY_ID = - PropertyHelper.getPropertyId("Blueprints", "security"); - public static final String BLUEPRINTS_PROPERTY_ID = "Blueprints"; + // Blueprints + public static final String BLUEPRINT_NAME_PROPERTY_ID = RESPONSE_KEY + PropertyHelper.EXTERNAL_PATH_SEP + "blueprint_name"; + public static final String STACK_NAME_PROPERTY_ID = RESPONSE_KEY + PropertyHelper.EXTERNAL_PATH_SEP + "stack_name"; + public static final String STACK_VERSION_PROPERTY_ID = RESPONSE_KEY + PropertyHelper.EXTERNAL_PATH_SEP + "stack_version"; + public static final String BLUEPRINT_SECURITY_PROPERTY_ID = RESPONSE_KEY + PropertyHelper.EXTERNAL_PATH_SEP + "security"; // Host Groups public static final String HOST_GROUP_PROPERTY_ID = "host_groups"; @@ -107,8 +107,8 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide public static final String COMPONENT_PROPERTY_ID ="components"; public static final String COMPONENT_NAME_PROPERTY_ID ="name"; public static final String COMPONENT_PROVISION_ACTION_PROPERTY_ID = "provision_action"; - protected static final String COMPONENT_MPACK_INSTANCE_PROPERTY = "mpack_instance"; - protected static final String COMPONENT_SERVICE_INSTANCE_PROPERTY = "service_instance"; + public static final String COMPONENT_MPACK_INSTANCE_PROPERTY = "mpack_instance"; + public static final String COMPONENT_SERVICE_INSTANCE_PROPERTY = "service_instance"; // Configurations public static final String CONFIGURATION_PROPERTY_ID = "configurations"; @@ -116,8 +116,10 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide // Setting public static final String SETTING_PROPERTY_ID = "settings"; + public static final String VALIDATE_TOPOLOGY_PROPERTY_ID = "validate_topology"; public static final String PROPERTIES_PROPERTY_ID = "properties"; public static final String PROPERTIES_ATTRIBUTES_PROPERTY_ID = 
"properties_attributes"; + public static final String SCHEMA_IS_NOT_SUPPORTED_MESSAGE = "Configuration format provided in Blueprint is not supported"; public static final String REQUEST_BODY_EMPTY_ERROR_MESSAGE = @@ -130,50 +132,51 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide "Configuration Maps must hold a single configuration type each"; public static final String MPACK_INSTANCES_PROPERTY_ID = "mpack_instances"; - // Primary Key Fields - private static Set pkPropertyIds = - new HashSet<>(Arrays.asList(new String[]{ - BLUEPRINT_NAME_PROPERTY_ID})); - /** * The key property ids for a Blueprint resource. */ - private static Map keyPropertyIds = ImmutableMap.builder() - .put(Resource.Type.Blueprint, BLUEPRINT_NAME_PROPERTY_ID) - .build(); + private static final Map KEY_PROPERTY_IDS = ImmutableMap.of(Resource.Type.Blueprint, BLUEPRINT_NAME_PROPERTY_ID); + private static final Set PK_PROPERTY_IDS = ImmutableSet.copyOf(KEY_PROPERTY_IDS.values()); /** * The property ids for a Blueprint resource. */ - private static Set propertyIds = Sets.newHashSet( - BLUEPRINT_NAME_PROPERTY_ID, - STACK_NAME_PROPERTY_ID, - STACK_VERSION_PROPERTY_ID, - BLUEPRINT_SECURITY_PROPERTY_ID, - HOST_GROUP_PROPERTY_ID, - CONFIGURATION_PROPERTY_ID, - SETTING_PROPERTY_ID, - MPACK_INSTANCES_PROPERTY_ID); + private static final Set PROPERTY_IDS = ImmutableSet.of( + BLUEPRINT_NAME_PROPERTY_ID, + STACK_NAME_PROPERTY_ID, + STACK_VERSION_PROPERTY_ID, + BLUEPRINT_SECURITY_PROPERTY_ID, + HOST_GROUP_PROPERTY_ID, + HOST_GROUP_PROPERTY_ID + PropertyHelper.EXTERNAL_PATH_SEP + COMPONENT_PROPERTY_ID, + HOST_GROUP_PROPERTY_ID + PropertyHelper.EXTERNAL_PATH_SEP + HOST_GROUP_CARDINALITY_PROPERTY_ID, + CONFIGURATION_PROPERTY_ID, + VALIDATE_TOPOLOGY_PROPERTY_ID, + SETTING_PROPERTY_ID, + MPACK_INSTANCES_PROPERTY_ID + ); /** * Used to create Blueprint instances */ - private static BlueprintFactory blueprintFactory; + private final BlueprintFactory blueprintFactory; /** * Used to create SecurityConfiguration instances */ - private static SecurityConfigurationFactory securityConfigurationFactory; + private final SecurityConfigurationFactory securityConfigurationFactory; /** * Blueprint Data Access Object */ - private static BlueprintDAO blueprintDAO; + private final BlueprintDAO blueprintDAO; + + private final BlueprintValidator validator; /** - * Used to serialize to/from json. + * Used to get stack metainfo. */ - private static ObjectMapper jsonSerializer = new ObjectMapper(); + private final AmbariMetaInfo ambariMetaInfo; + // ----- Constructors ---------------------------------------------------- @@ -182,18 +185,17 @@ public class BlueprintResourceProvider extends AbstractControllerResourceProvide * * @param controller management controller */ - BlueprintResourceProvider(AmbariManagementController controller) { - super(Resource.Type.Blueprint, propertyIds, keyPropertyIds, controller); - } - - /** - * Static initialization. 
- * - * @param factory blueprint factory - * @param dao blueprint data access object - */ - public static void init(BlueprintFactory factory, BlueprintDAO dao, SecurityConfigurationFactory - securityFactory, AmbariMetaInfo metaInfo) { + @Inject + BlueprintResourceProvider( + BlueprintValidator validator, + BlueprintFactory factory, + BlueprintDAO dao, + SecurityConfigurationFactory securityFactory, + AmbariMetaInfo metaInfo, + @Assisted AmbariManagementController controller + ) { + super(Resource.Type.Blueprint, PROPERTY_IDS, KEY_PROPERTY_IDS, controller); + this.validator = validator; blueprintFactory = factory; blueprintDAO = dao; securityConfigurationFactory = securityFactory; @@ -204,7 +206,7 @@ public static void init(BlueprintFactory factory, BlueprintDAO dao, SecurityConf @Override protected Set getPKPropertyIds() { - return new HashSet<>(keyPropertyIds.values()); + return PK_PROPERTY_IDS; } @Override @@ -306,11 +308,6 @@ public Void invoke() throws AmbariException { return getRequestStatus(null); } - /** - * Used to get stack metainfo. - */ - private static AmbariMetaInfo ambariMetaInfo; - // ----- Instance Methods ------------------------------------------------ /** @@ -339,7 +336,7 @@ protected Resource toResource(BlueprintEntity entity, Set requestedIds) Map mapComponentProps = new HashMap<>(); mapComponentProps.put(COMPONENT_NAME_PROPERTY_ID, component.getName()); if (component.getProvisionAction() != null) { - mapComponentProps.put(COMPONENT_PROVISION_ACTION_PROPERTY_ID, component.getProvisionAction().toString()); + mapComponentProps.put(COMPONENT_PROVISION_ACTION_PROPERTY_ID, component.getProvisionAction()); } if (component.getMpackName() != null) { mapComponentProps.put(COMPONENT_MPACK_INSTANCE_PROPERTY, @@ -372,35 +369,13 @@ protected Resource toResource(BlueprintEntity entity, Set requestedIds) Collection> mpacks = entity.getMpackInstances().stream().map(mpackEntity -> { MpackInstance mpack = MpackInstance.fromEntity(mpackEntity); - Map mpackAsMap = fromJson(toJson(mpack), Map.class); - return mpackAsMap; + return JsonUtils.fromJson(JsonUtils.toJson(mpack), new TypeReference>(){}); } ).collect(toList()); setResourceProperty(resource, MPACK_INSTANCES_PROPERTY_ID, mpacks, requestedIds); return resource; } - private static T fromJson(String json, Class valueType) { - if (null == json) { - return null; - } - try { - return jsonSerializer.readValue(json, valueType); - } - catch (IOException ex) { - throw new UncheckedIOException(ex); - } - } - - private static String toJson(Object object) { - try { - return jsonSerializer.writeValueAsString(object); - } - catch (IOException ex) { - throw new UncheckedIOException(ex); - } - } - /** * Populate a list of configuration property maps from a collection of configuration entities. 
* @@ -418,7 +393,7 @@ List>> populateConfigurationList( String type = config.getType(); if(config instanceof BlueprintConfigEntity) { - Map properties = fromJson(config.getConfigData(), Map.class); + Map properties = JsonUtils.fromJson(config.getConfigData(), new TypeReference>(){}); // TODO: use multiple mpacks BlueprintMpackInstanceEntity mpack = @@ -434,15 +409,15 @@ List>> populateConfigurationList( Map> propertiesTypes = metaInfoStack.getConfigPropertiesTypes(type); - SecretReference.replacePasswordsWithReferences(propertiesTypes, properties, type, -1l); + SecretReference.replacePasswordsWithReferences(propertiesTypes, properties, type, -1L); configTypeDefinition.put(PROPERTIES_PROPERTY_ID, properties); } else { - Map properties = fromJson(config.getConfigData(), Map.class); + Map properties = JsonUtils.fromJson(config.getConfigData(), new TypeReference>(){}); configTypeDefinition.put(PROPERTIES_PROPERTY_ID, properties); } - Map> attributes = fromJson(config.getConfigAttributes(), Map.class); + Map> attributes = JsonUtils.fromJson(config.getConfigAttributes(), new TypeReference>>(){}); if (attributes != null && !attributes.isEmpty()) { configTypeDefinition.put(PROPERTIES_ATTRIBUTES_PROPERTY_ID, attributes); } @@ -460,13 +435,13 @@ List>> populateConfigurationList( * * @return list of setting property maps */ - public static List> populateSettingList( + public List> populateSettingList( Collection settings) throws NoSuchResourceException { List> listSettings = new ArrayList<>(); if (settings != null) { for (BlueprintSettingEntity setting : settings) { - List> propertiesList = fromJson(setting.getSettingData(), List.class); + List> propertiesList = JsonUtils.fromJson(setting.getSettingData(), new TypeReference>>(){}); Map settingMap = new HashMap<>(); settingMap.put(setting.getSettingName(), propertiesList); listSettings.add(settingMap); @@ -539,13 +514,12 @@ BlueprintConfigPopulationStrategy decidePopulationStrategy(Map c */ private Command getCreateCommand(final Map properties, final Map requestInfoProps) { return new Command() { - @SuppressWarnings("rawtypes") @Override public Void invoke() throws AmbariException { String rawRequestBody = requestInfoProps.get(Request.REQUEST_INFO_BODY_PROPERTY); Preconditions.checkArgument(!Strings.isNullOrEmpty(rawRequestBody), REQUEST_BODY_EMPTY_ERROR_MESSAGE); - Map rawBodyMap = fromJson(rawRequestBody, Map.class); + Map rawBodyMap = JsonUtils.fromJson(rawRequestBody, new TypeReference>(){}); Object configurationData = rawBodyMap.get(CONFIGURATION_PROPERTY_ID); if (configurationData != null) { @@ -572,22 +546,22 @@ public Void invoke() throws AmbariException { } try { - blueprint.validateRequiredProperties(); + validator.validateRequiredProperties(blueprint); } catch (InvalidTopologyException | GPLLicenseNotAcceptedException e) { throw new IllegalArgumentException("Blueprint configuration validation failed: " + e.getMessage(), e); } - String validateTopology = requestInfoProps.get("validate_topology"); + String validateTopology = requestInfoProps.get(VALIDATE_TOPOLOGY_PROPERTY_ID); if (validateTopology == null || ! validateTopology.equalsIgnoreCase("false")) { try { - blueprint.validateTopology(); + validator.validateTopology(blueprint); } catch (InvalidTopologyException e) { throw new IllegalArgumentException(e.getMessage()); } } LOG.info("Creating Blueprint, name=" + blueprint.getName()); - String blueprintSetting = blueprint.getSetting() == null ? 
"(null)" : toJson(blueprint.getSetting().getProperties()); + String blueprintSetting = blueprint.getSetting() == null ? "(null)" : JsonUtils.toJson(blueprint.getSetting().getProperties()); LOG.info("Blueprint setting=" + blueprintSetting); try { @@ -629,8 +603,8 @@ public void applyConfiguration(Map configuration, BlueprintConfi } } - blueprintConfiguration.setConfigData(toJson(configData)); - blueprintConfiguration.setConfigAttributes(toJson(configAttributes)); + blueprintConfiguration.setConfigData(JsonUtils.toJson(configData)); + blueprintConfiguration.setConfigAttributes(JsonUtils.toJson(configAttributes)); } protected abstract void addProperty(Map configData, diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CompositeStack.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CompositeStack.java new file mode 100644 index 00000000000..75baf300bcf --- /dev/null +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/CompositeStack.java @@ -0,0 +1,306 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.ambari.server.controller.internal; + +import static java.util.stream.Collectors.toMap; +import static java.util.stream.Collectors.toSet; + +import java.util.Collection; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Stream; + +import org.apache.ambari.server.state.AutoDeployInfo; +import org.apache.ambari.server.state.ComponentInfo; +import org.apache.ambari.server.state.ConfigHelper; +import org.apache.ambari.server.state.DependencyInfo; +import org.apache.ambari.server.state.PropertyInfo; +import org.apache.ambari.server.state.StackId; +import org.apache.ambari.server.topology.Cardinality; +import org.apache.ambari.server.topology.Configuration; +import org.apache.commons.lang3.tuple.Pair; + +import com.google.common.collect.ImmutableSet; + +/** Combines multiple mpacks into a single stack. 
*/ +// TODO move to topology package +public class CompositeStack implements StackDefinition { + + private final Set stacks; + + CompositeStack(Set stacks) { + this.stacks = stacks; + } + + @Override + public Set getStacksForService(String serviceName) { + return stacks.stream() + .map(m -> Pair.of(m.getStackId(), m.getServices())) + .filter(p -> p.getRight().contains(serviceName)) + .map(Pair::getLeft) + .collect(toSet()); + } + + @Override + public Set getStacksForComponent(String componentName) { + return stacks.stream() + .map(m -> m.getStacksForComponent(componentName)) + .filter(s -> !s.isEmpty()) + .flatMap(Collection::stream) + .collect(toSet()); + } + + @Override + public Set getServices(StackId stackId) { + return stacks.stream() + .filter(m -> stackId.equals(m.getStackId())) + .findAny() + .flatMap(m -> Optional.of(ImmutableSet.copyOf(m.getServices()))) + .orElse(ImmutableSet.of()); + } + + @Override + public Set getStackIds() { + return stacks.stream() + .map(Stack::getStackId) + .collect(toSet()); + } + + @Override + public Collection getServices() { + return stacks.stream() + .flatMap(s -> s.getServices().stream()) + .collect(toSet()); + } + + @Override + public Collection getComponents(String service) { + return stacks.stream() + .map(s -> s.getComponents(service)) + .filter(Objects::nonNull) + .flatMap(Collection::stream) + .collect(toSet()); + } + + @Override + public Collection getComponents() { + return stacks.stream() + .flatMap(s -> s.getComponents().stream()) + .collect(toSet()); + } + + @Override + public ComponentInfo getComponentInfo(String component) { + return stacks.stream() + .map(m -> m.getComponentInfo(component)) + .filter(Objects::nonNull) + .findAny() + .orElse(null); + } + + @Override + public Collection getAllConfigurationTypes(String service) { + return stacks.stream() + .flatMap(m -> m.getAllConfigurationTypes(service).stream()) + .collect(toSet()); + } + + @Override + public Collection getConfigurationTypes(String service) { + return stacks.stream() + .flatMap(m -> m.getConfigurationTypes(service).stream()) + .collect(toSet()); + } + + @Override + public Set getExcludedConfigurationTypes(String service) { + return stacks.stream() + .flatMap(m -> m.getExcludedConfigurationTypes(service).stream()) + .collect(toSet()); + } + + @Override + public Map getConfigurationProperties(String service, String type) { + return stacks.stream() + .flatMap(m -> m.getConfigurationProperties(service, type).entrySet().stream()) + .collect(toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + @Override + public Map getConfigurationPropertiesWithMetadata(String service, String type) { + return stacks.stream() + .flatMap(m -> m.getConfigurationPropertiesWithMetadata(service, type).entrySet().stream()) + .collect(toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + @Override + public Collection getRequiredConfigurationProperties(String service) { + return stacks.stream() + .flatMap(m -> m.getRequiredConfigurationProperties(service).stream()) + .collect(toSet()); + } + + @Override + public Collection getRequiredConfigurationProperties(String service, PropertyInfo.PropertyType propertyType) { + return stacks.stream() + .flatMap(m -> m.getRequiredConfigurationProperties(service, propertyType).stream()) + .collect(toSet()); + } + + @Override + public boolean isPasswordProperty(String service, String type, String propertyName) { + return stacks.stream() + .anyMatch(s -> s.isPasswordProperty(service, type, propertyName)); + } + + @Override + public Map 
getStackConfigurationProperties(String type) { + return stacks.stream() + .flatMap(m -> m.getStackConfigurationProperties(type).entrySet().stream()) + .collect(toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + @Override + public boolean isKerberosPrincipalNameProperty(String service, String type, String propertyName) { + return stacks.stream() + .anyMatch(s -> s.isKerberosPrincipalNameProperty(service, type, propertyName)); + } + + @Override + public Map> getConfigurationAttributes(String service, String type) { + return stacks.stream() + .flatMap(m -> m.getConfigurationAttributes(service, type).entrySet().stream()) + .collect(toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + @Override + public Map> getStackConfigurationAttributes(String type) { + return stacks.stream() + .flatMap(m -> m.getStackConfigurationAttributes(type).entrySet().stream()) + .collect(toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + @Override + public String getServiceForComponent(String component) { + return stacks.stream() + .map(m -> m.getServiceForComponent(component)) + .filter(Objects::nonNull) + .findAny() + .orElse(null); + } + + @Override + public Collection getServicesForComponents(Collection components) { + return stacks.stream() + .flatMap(m -> m.getServicesForComponents(components).stream()) + .collect(toSet()); + } + + @Override + public String getServiceForConfigType(String config) { + if (ConfigHelper.CLUSTER_ENV.equals(config)) { // for backwards compatibility + return null; + } + return getServicesForConfigType(config) + .findAny() + .orElseThrow(() -> new IllegalArgumentException(Stack.formatMissingServiceForConfigType(config, "ANY"))); + } + + @Override + public Stream getServicesForConfigType(String config) { + if (ConfigHelper.CLUSTER_ENV.equals(config)) { // for backwards compatibility + return Stream.empty(); + } + return stacks.stream() + .map(m -> { + try { + return m.getServiceForConfigType(config); + } catch (IllegalArgumentException e) { + return null; + } + }) + .filter(Objects::nonNull); + } + + @Override + public Collection getDependenciesForComponent(String component) { + return stacks.stream() + .flatMap(m -> m.getDependenciesForComponent(component).stream()) + .collect(toSet()); + } + + @Override + public String getConditionalServiceForDependency(DependencyInfo dependency) { + return stacks.stream() + .map(m -> m.getConditionalServiceForDependency(dependency)) + .filter(Objects::nonNull) + .findAny() + .orElse(null); + } + + @Override + public String getExternalComponentConfig(String component) { + return stacks.stream() + .map(m -> m.getExternalComponentConfig(component)) + .filter(Objects::nonNull) + .findAny() + .orElse(null); + } + + @Override + public Cardinality getCardinality(String component) { + return stacks.stream() + .map(m -> m.getCardinality(component)) + .filter(Objects::nonNull) + .findAny() + .orElse(null); + } + + @Override + public AutoDeployInfo getAutoDeployInfo(String component) { + return stacks.stream() + .map(m -> m.getAutoDeployInfo(component)) + .filter(Objects::nonNull) + .findAny() + .orElse(null); + } + + @Override + public boolean isMasterComponent(String component) { + return stacks.stream() + .anyMatch(s -> s.isMasterComponent(component)); + } + + @Override + public Configuration getConfiguration(Collection services) { + // FIXME probably too costly + return stacks.stream() + .map(m -> m.getConfiguration(services)) + .reduce(Configuration.createEmpty(), Configuration::combine); + } + + @Override + public Configuration 
getConfiguration() { + // FIXME probably too costly + return stacks.stream() + .map(StackDefinition::getConfiguration) + .reduce(Configuration.createEmpty(), Configuration::combine); + } +} diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExportBlueprintRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExportBlueprintRequest.java index ed6b5fd04b9..13ae732cec5 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExportBlueprintRequest.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ExportBlueprintRequest.java @@ -38,6 +38,7 @@ import org.apache.ambari.server.controller.utilities.PropertyHelper; import org.apache.ambari.server.state.DesiredConfig; import org.apache.ambari.server.state.HostConfig; +import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.topology.Blueprint; import org.apache.ambari.server.topology.BlueprintImpl; import org.apache.ambari.server.topology.Component; @@ -50,6 +51,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.google.common.collect.ImmutableSet; + /** * Request to export a blueprint from an existing cluster. */ @@ -132,10 +135,11 @@ private void createBlueprint(Collection exportedHostGroups, S componentList.add(new Component(component)); } - hostGroups.add(new HostGroupImpl(exportedHostGroup.getName(), bpName, Collections.singleton(stack), componentList, + hostGroups.add(new HostGroupImpl(exportedHostGroup.getName(), bpName, stack, componentList, exportedHostGroup.getConfiguration(), String.valueOf(exportedHostGroup.getCardinality()))); } - blueprint = new BlueprintImpl(bpName, hostGroups, stack, configuration, null, null); + ImmutableSet stackIds = ImmutableSet.of(stack.getStackId()); + blueprint = new BlueprintImpl(bpName, hostGroups, stack, stackIds, Collections.emptySet(), configuration, null, null); } private void createHostGroupInfo(Collection exportedHostGroups) { diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PropertyValueTrimmingStrategyDefiner.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PropertyValueTrimmingStrategyDefiner.java index 0d9b095f38c..860a20810c5 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PropertyValueTrimmingStrategyDefiner.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/PropertyValueTrimmingStrategyDefiner.java @@ -60,7 +60,7 @@ private static TrimmingStrategy getTrimmingStrategyByPropertyName(String propert } } - public static TrimmingStrategy defineTrimmingStrategy(Stack stack, String propertyName, String configType) { + public static TrimmingStrategy defineTrimmingStrategy(StackDefinition stack, String propertyName, String configType) { TrimmingStrategy result = null; String service = stack.getServiceForConfigType(configType); if (service != null) { diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java index 1aadf3a7d6f..5e14445c7e5 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ServiceResourceProvider.java @@ -1102,7 +1102,7 @@ private void validateCreateRequests(Set requests, Clusters 
clust LOG.debug("Received a createService request, clusterId={}, serviceName={}, request={}", clusterName, serviceName, request); } - if(!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, getClusterResourceId(clusterName), RoleAuthorization.SERVICE_ADD_DELETE_SERVICES)) { + if (!AuthorizationHelper.isAuthorized(ResourceType.CLUSTER, getClusterResourceId(clusterName), RoleAuthorization.SERVICE_ADD_DELETE_SERVICES)) { throw new AuthorizationException("The user is not authorized to create services"); } @@ -1121,8 +1121,8 @@ private void validateCreateRequests(Set requests, Clusters clust State state = State.valueOf(request.getDesiredState()); if (!state.isValidDesiredState() || state != State.INIT) { throw new IllegalArgumentException("Invalid desired state" - + " only INIT state allowed during creation" - + ", providedDesiredState=" + request.getDesiredState()); + + " only INIT state allowed during creation" + + ", providedDesiredState=" + request.getDesiredState()); } } @@ -1148,7 +1148,7 @@ private void validateCreateRequests(Set requests, Clusters clust if (null == desiredRepositoryVersion) { Set repoIds = new HashSet<>(); - if(desiredStackId != null) { + if (desiredStackId != null) { //Todo : How to filter out the right repoversion entity based on the stack id? List list = repositoryVersionDAO.findByStack(desiredStackId); RepositoryVersionEntity serviceRepo = list.remove(0); @@ -1169,13 +1169,14 @@ private void validateCreateRequests(Set requests, Clusters clust } LOG.info("{} was not specified; the following repository ids were found: {}", - SERVICE_DESIRED_REPO_VERSION_ID_PROPERTY_ID, StringUtils.join(repoIds, ',')); + SERVICE_DESIRED_REPO_VERSION_ID_PROPERTY_ID, StringUtils.join(repoIds, ',') + ); if (CollectionUtils.isEmpty(repoIds)) { throw new IllegalArgumentException("No repositories were found for service installation"); } else if (repoIds.size() > 1) { throw new IllegalArgumentException(String.format("%s was not specified, and the cluster " + - "contains more than one standard-type repository", SERVICE_DESIRED_REPO_VERSION_ID_PROPERTY_ID)); + "contains more than one standard-type repository", SERVICE_DESIRED_REPO_VERSION_ID_PROPERTY_ID)); } else { desiredRepositoryVersion = repoIds.iterator().next(); } @@ -1191,8 +1192,10 @@ private void validateCreateRequests(Set requests, Clusters clust throw new IllegalArgumentException(String.format("Could not find any repositories defined by %d", desiredRepositoryVersion)); } - StackId stackId = repositoryVersion.getStackId(); - //StackId stackId = desiredStackId; //Todo Replace after UI is ready + StackId stackId = desiredStackId; + if (stackId == null) { + stackId = repositoryVersion.getStackId(); + } request.setResolvedRepository(repositoryVersion); diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java index b54dd20467a..77422eb2fa4 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/Stack.java @@ -18,47 +18,44 @@ package org.apache.ambari.server.controller.internal; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Stream; import org.apache.ambari.server.AmbariException; import 
org.apache.ambari.server.controller.AmbariManagementController; import org.apache.ambari.server.controller.ReadOnlyConfigurationResponse; -import org.apache.ambari.server.controller.StackConfigurationRequest; -import org.apache.ambari.server.controller.StackLevelConfigurationRequest; -import org.apache.ambari.server.controller.StackServiceComponentRequest; -import org.apache.ambari.server.controller.StackServiceComponentResponse; -import org.apache.ambari.server.controller.StackServiceRequest; -import org.apache.ambari.server.controller.StackServiceResponse; -import org.apache.ambari.server.orm.entities.StackEntity; import org.apache.ambari.server.state.AutoDeployInfo; import org.apache.ambari.server.state.ComponentInfo; +import org.apache.ambari.server.state.ConfigHelper; import org.apache.ambari.server.state.DependencyInfo; import org.apache.ambari.server.state.PropertyDependencyInfo; import org.apache.ambari.server.state.PropertyInfo; +import org.apache.ambari.server.state.ServiceInfo; +import org.apache.ambari.server.state.StackId; +import org.apache.ambari.server.state.StackInfo; import org.apache.ambari.server.state.ValueAttributesInfo; import org.apache.ambari.server.topology.Cardinality; import org.apache.ambari.server.topology.Configuration; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; + /** - * Encapsulates stack information. + * Encapsulates a single, identifiable stack definition. */ -public class Stack { - /** - * Stack name - */ - private String name; +// TODO move to topology package +public class Stack implements StackDefinition { /** - * Stack version + * Stack info */ - private String version; + private final StackInfo stackInfo; /** * Map of service name to components @@ -130,53 +127,25 @@ public class Stack { private Map> excludedConfigurationTypes = new HashMap<>(); - /** - * Ambari Management Controller, used to obtain Stack definitions - */ - private final AmbariManagementController controller; + public Stack(String name, String version, AmbariManagementController ctrl) throws AmbariException { // FIXME remove or at least change to use metainfo directly + this(ctrl.getAmbariMetaInfo().getStack(name, version)); + } + public Stack(StackInfo stackInfo) { + Preconditions.checkNotNull(stackInfo); + this.stackInfo = stackInfo; - /** - * Constructor. - * - * @param stack - * the stack (not {@code null}). - * @param ambariManagementController - * the management controller (not {@code null}). - * @throws AmbariException - */ - public Stack(StackEntity stack, AmbariManagementController ambariManagementController) throws AmbariException { - this(stack.getStackName(), stack.getStackVersion(), ambariManagementController); - } + parseStackConfigurations(); - /** - * Constructor. 
- * - * @param name stack name - * @param version stack version - * - * @throws AmbariException an exception occurred getting stack information - * for the specified name and version - */ - //todo: don't pass management controller in constructor - public Stack(String name, String version, AmbariManagementController controller) throws AmbariException { - this.name = name; - this.version = version; - this.controller = controller; - - Set stackServices = controller.getStackServices( - Collections.singleton(new StackServiceRequest(name, version, null))); - - for (StackServiceResponse stackService : stackServices) { - String serviceName = stackService.getServiceName(); - parseComponents(serviceName); + for (ServiceInfo stackService : stackInfo.getServices()) { + parseComponents(stackService); parseExcludedConfigurations(stackService); parseConfigurations(stackService); - registerConditionalDependencies(); } - //todo: already done for each service - parseStackConfigurations(); + if (!stackInfo.getServices().isEmpty()) { + registerConditionalDependencies(); + } } /** @@ -185,7 +154,7 @@ public Stack(String name, String version, AmbariManagementController controller) * @return stack name */ public String getName() { - return name; + return stackInfo.getName(); } /** @@ -194,140 +163,107 @@ public String getName() { * @return stack version */ public String getVersion() { - return version; + return stackInfo.getVersion(); } + public StackId getStackId() { + return new StackId(getName(), getVersion()); + } Map getDependencyConditionalServiceMap() { return dependencyConditionalServiceMap; } - /** - * Get services contained in the stack. - * - * @return collection of all services for the stack - */ + @Override + public Set getStackIds() { + return ImmutableSet.of(getStackId()); + } + + @Override + public Set getStacksForService(String serviceName) { + return serviceComponents.keySet().contains(serviceName) + ? ImmutableSet.of(getStackId()) + : ImmutableSet.of(); + } + + @Override + public Set getStacksForComponent(String componentName) { + return componentService.keySet().contains(componentName) + ? ImmutableSet.of(getStackId()) + : ImmutableSet.of(); + } + + @Override + public Set getServices(StackId stackId) { + return stackId.equals(getStackId()) + ? ImmutableSet.copyOf(getServices()) + : ImmutableSet.of(); + } + + @Override public Collection getServices() { return serviceComponents.keySet(); } - /** - * Get components contained in the stack for the specified service. - * - * @param service service name - * - * @return collection of component names for the specified service - */ + @Override public Collection getComponents(String service) { return serviceComponents.get(service); } - /** - * Get all service components - * - * @return map of service to associated components - */ - public Map> getComponents() { - Map> serviceComponents = new HashMap<>(); - for (String service : getServices()) { - Collection components = new HashSet<>(); - components.addAll(getComponents(service)); - serviceComponents.put(service, components); - } - return serviceComponents; + @Override + public Collection getComponents() { + return componentService.keySet(); } - /** - * Get info for the specified component. 
- * - * @param component component name - * - * @return component information for the requested component - * or null if the component doesn't exist in the stack - */ + @Override public ComponentInfo getComponentInfo(String component) { - ComponentInfo componentInfo = null; String service = getServiceForComponent(component); if (service != null) { - try { - componentInfo = controller.getAmbariMetaInfo().getComponent( - getName(), getVersion(), service, component); - } catch (AmbariException e) { - // just return null if component doesn't exist + ServiceInfo serviceInfo = stackInfo.getService(service); + if (serviceInfo != null) { + return serviceInfo.getComponentByName(component); } } - return componentInfo; + return null; } - /** - * Get all configuration types, including excluded types for the specified service. - * - * @param service service name - * - * @return collection of all configuration types for the specified service - */ + @Override public Collection getAllConfigurationTypes(String service) { - return serviceConfigurations.get(service).keySet(); + Map> serviceConfigs = getServiceConfigurations(service); + return serviceConfigs != null ? serviceConfigs.keySet() : ImmutableSet.of(); } - /** - * Get configuration types for the specified service. - * This doesn't include any service excluded types. - * - * @param service service name - * - * @return collection of all configuration types for the specified service - */ + @Override public Collection getConfigurationTypes(String service) { - Set serviceTypes = new HashSet<>(serviceConfigurations.get(service).keySet()); + Set serviceTypes = new HashSet<>(getAllConfigurationTypes(service)); serviceTypes.removeAll(getExcludedConfigurationTypes(service)); - return serviceTypes; } - /** - * Get the set of excluded configuration types for this service. - * - * @param service service name - * - * @return Set of names of excluded config types. Will not return null. - */ + @Override public Set getExcludedConfigurationTypes(String service) { return excludedConfigurationTypes.containsKey(service) ? excludedConfigurationTypes.get(service) : Collections.emptySet(); } - /** - * Get config properties for the specified service and configuration type. - * - * @param service service name - * @param type configuration type - * - * @return map of property names to values for the specified service and configuration type - */ + @Override public Map getConfigurationProperties(String service, String type) { Map configMap = new HashMap<>(); - Map configProperties = serviceConfigurations.get(service).get(type); - if (configProperties != null) { - for (Map.Entry configProperty : configProperties.entrySet()) { - configMap.put(configProperty.getKey(), configProperty.getValue().getValue()); - } + Map configProperties = getConfigurationPropertiesWithMetadata(service, type); + for (Map.Entry configProperty : configProperties.entrySet()) { + configMap.put(configProperty.getKey(), configProperty.getValue().getValue()); } return configMap; } + @Override public Map getConfigurationPropertiesWithMetadata(String service, String type) { - return serviceConfigurations.get(service).get(type); + Map map = getServiceConfigurations(service).get(type); + return map != null ? ImmutableMap.copyOf(map) : ImmutableMap.of(); } - /** - * Get all required config properties for the specified service. 
- * - * @param service service name - * - * @return collection of all required properties for the given service - */ + @Override public Collection getRequiredConfigurationProperties(String service) { Collection requiredConfigProperties = new HashSet<>(); Map> serviceProperties = requiredServiceConfigurations.get(service); @@ -339,14 +275,7 @@ public Collection getRequiredConfigurationProperties(String serv return requiredConfigProperties; } - /** - * Get required config properties for the specified service which belong to the specified property type. - * - * @param service service name - * @param propertyType property type - * - * @return collection of required properties for the given service and property type - */ + @Override public Collection getRequiredConfigurationProperties(String service, PropertyInfo.PropertyType propertyType) { Collection matchingProperties = new HashSet<>(); Map> requiredProperties = requiredServiceConfigurations.get(service); @@ -363,15 +292,17 @@ public Collection getRequiredConfigurationProperties(String serv return matchingProperties; } + @Override public boolean isPasswordProperty(String service, String type, String propertyName) { - return (serviceConfigurations.containsKey(service) && - serviceConfigurations.get(service).containsKey(type) && - serviceConfigurations.get(service).get(type).containsKey(propertyName) && - serviceConfigurations.get(service).get(type).get(propertyName).getPropertyTypes(). + Map> serviceConfigurations = getServiceConfigurations(service); + return (serviceConfigurations.containsKey(type) && + serviceConfigurations.get(type).containsKey(propertyName) && + serviceConfigurations.get(type).get(propertyName).getPropertyTypes(). contains(PropertyInfo.PropertyType.PASSWORD)); } //todo + @Override public Map getStackConfigurationProperties(String type) { Map configMap = new HashMap<>(); Map configProperties = stackConfigurations.get(type); @@ -383,25 +314,19 @@ public Map getStackConfigurationProperties(String type) { return configMap; } + @Override public boolean isKerberosPrincipalNameProperty(String service, String type, String propertyName) { - return (serviceConfigurations.containsKey(service) && - serviceConfigurations.get(service).containsKey(type) && - serviceConfigurations.get(service).get(type).containsKey(propertyName) && - serviceConfigurations.get(service).get(type).get(propertyName).getPropertyTypes(). + Map> serviceConfigurations = getServiceConfigurations(service); + return (serviceConfigurations.containsKey(type) && + serviceConfigurations.get(type).containsKey(propertyName) && + serviceConfigurations.get(type).get(propertyName).getPropertyTypes(). contains(PropertyInfo.PropertyType.KERBEROS_PRINCIPAL)); } - /** - * Get config attributes for the specified service and configuration type. 
- * - * @param service service name - * @param type configuration type - * - * @return map of attribute names to map of property names to attribute values - * for the specified service and configuration type - */ + + @Override public Map> getConfigurationAttributes(String service, String type) { Map> attributesMap = new HashMap<>(); - Map configProperties = serviceConfigurations.get(service).get(type); + Map configProperties = getServiceConfigurations(service).get(type); if (configProperties != null) { for (Map.Entry configProperty : configProperties.entrySet()) { String propertyName = configProperty.getKey(); @@ -411,12 +336,9 @@ public Map> getConfigurationAttributes(String servic String attributeName = propertyAttribute.getKey(); String attributeValue = propertyAttribute.getValue(); if (attributeValue != null) { - Map attributes = attributesMap.get(attributeName); - if (attributes == null) { - attributes = new HashMap<>(); - attributesMap.put(attributeName, attributes); - } - attributes.put(propertyName, attributeValue); + attributesMap + .computeIfAbsent(attributeName, k -> new HashMap<>()) + .put(propertyName, attributeValue); } } } @@ -425,7 +347,12 @@ public Map> getConfigurationAttributes(String servic return attributesMap; } - //todo: + private Map> getServiceConfigurations(String service) { + Map> map = serviceConfigurations.get(service); + return map != null ? ImmutableMap.copyOf(map) : ImmutableMap.of(); + } + + @Override public Map> getStackConfigurationAttributes(String type) { Map> attributesMap = new HashMap<>(); Map configProperties = stackConfigurations.get(type); @@ -437,12 +364,9 @@ public Map> getStackConfigurationAttributes(String t for (Map.Entry propertyAttribute : propertyAttributes.entrySet()) { String attributeName = propertyAttribute.getKey(); String attributeValue = propertyAttribute.getValue(); - Map attributes = attributesMap.get(attributeName); - if (attributes == null) { - attributes = new HashMap<>(); - attributesMap.put(attributeName, attributes); - } - attributes.put(propertyName, attributeValue); + attributesMap + .computeIfAbsent(attributeName, k -> new HashMap<>()) + .put(propertyName, attributeValue); } } } @@ -450,24 +374,12 @@ public Map> getStackConfigurationAttributes(String t return attributesMap; } - /** - * Get the service for the specified component. - * - * @param component component name - * - * @return service name that contains tha specified component - */ + @Override public String getServiceForComponent(String component) { return componentService.get(component); } - /** - * Get the names of the services which contains the specified components. - * - * @param components collection of components - * - * @return collection of services which contain the specified components - */ + @Override public Collection getServicesForComponents(Collection components) { Set services = new HashSet<>(); for (String component : components) { @@ -477,14 +389,11 @@ public Collection getServicesForComponents(Collection components return services; } - /** - * Obtain the service name which corresponds to the specified configuration. 
- * - * @param config configuration type - * - * @return name of service which corresponds to the specified configuration type - */ + @Override public String getServiceForConfigType(String config) { + if (ConfigHelper.CLUSTER_ENV.equals(config)) { // for backwards compatibility + return null; + } for (Map.Entry>> entry : serviceConfigurations.entrySet()) { Map> typeMap = entry.getValue(); String serviceName = entry.getKey(); @@ -492,69 +401,53 @@ public String getServiceForConfigType(String config) { return serviceName; } } - throw new IllegalArgumentException( - "Specified configuration type is not associated with any service: " + config); + throw new IllegalArgumentException(formatMissingServiceForConfigType(config, getStackId().toString())); } - public List getServicesForConfigType(String config) { - List serviceNames = new ArrayList<>(); - for (Map.Entry>> entry : serviceConfigurations.entrySet()) { - Map> typeMap = entry.getValue(); - String serviceName = entry.getKey(); - if (typeMap.containsKey(config) && !getExcludedConfigurationTypes(serviceName).contains(config)) { - serviceNames.add(serviceName); - } - } - return serviceNames; + static String formatMissingServiceForConfigType(String config, String stackId) { + return String.format("Specified configuration type %s is not associated with any service in %s stack.", config, stackId); } - /** - * Return the dependencies specified for the given component. - * - * @param component component to get dependency information for - * - * @return collection of dependency information for the specified component - */ - //todo: full dependency graph + @Override + public Stream getServicesForConfigType(String config) { + return serviceConfigurations.entrySet().stream() + .filter(e -> e.getValue().containsKey(config)) + .filter(e -> !getExcludedConfigurationTypes(e.getKey()).contains(config)) + .map(Map.Entry::getKey); + } + + @Override public Collection getDependenciesForComponent(String component) { return dependencies.containsKey(component) ? dependencies.get(component) : Collections.emptySet(); } - /** - * Get the service, if any, that a component dependency is conditional on. - * - * @param dependency dependency to get conditional service for - * - * @return conditional service for provided component or null if dependency - * is not conditional on a service - */ + @Override public String getConditionalServiceForDependency(DependencyInfo dependency) { return dependencyConditionalServiceMap.get(dependency); } + @Override public String getExternalComponentConfig(String component) { return dbDependencyInfo.get(component); } - /** - * Obtain the required cardinality for the specified component. - */ + @Override public Cardinality getCardinality(String component) { return new Cardinality(cardinalityRequirements.get(component)); } - /** - * Obtain auto-deploy information for the specified component. 
- */ + @Override public AutoDeployInfo getAutoDeployInfo(String component) { return componentAutoDeployInfo.get(component); } + @Override public boolean isMasterComponent(String component) { return masterComponents.contains(component); } + @Override public Configuration getConfiguration(Collection services) { Map>> attributes = new HashMap<>(); Map> properties = new HashMap<>(); @@ -562,12 +455,9 @@ public Configuration getConfiguration(Collection services) { for (String service : services) { Collection serviceConfigTypes = getConfigurationTypes(service); for (String type : serviceConfigTypes) { - Map typeProps = properties.get(type); - if (typeProps == null) { - typeProps = new HashMap<>(); - properties.put(type, typeProps); - } - typeProps.putAll(getConfigurationProperties(service, type)); + properties + .computeIfAbsent(type, k -> new HashMap<>()) + .putAll(getConfigurationProperties(service, type)); Map> stackTypeAttributes = getConfigurationAttributes(service, type); if (!stackTypeAttributes.isEmpty()) { @@ -577,12 +467,9 @@ public Configuration getConfiguration(Collection services) { Map> typeAttributes = attributes.get(type); for (Map.Entry> attribute : stackTypeAttributes.entrySet()) { String attributeName = attribute.getKey(); - Map attributeProps = typeAttributes.get(attributeName); - if (attributeProps == null) { - attributeProps = new HashMap<>(); - typeAttributes.put(attributeName, attributeProps); - } - attributeProps.putAll(attribute.getValue()); + typeAttributes + .computeIfAbsent(attributeName, k -> new HashMap<>()) + .putAll(attribute.getValue()); } } } @@ -590,18 +477,16 @@ public Configuration getConfiguration(Collection services) { return new Configuration(properties, attributes); } - public Configuration getConfiguration() { + @Override + public Configuration getConfiguration() { // TODO get rid of duplication between this and #getConfiguration(Collection) Map>> stackAttributes = new HashMap<>(); Map> stackConfigs = new HashMap<>(); for (String service : getServices()) { for (String type : getAllConfigurationTypes(service)) { - Map typeProps = stackConfigs.get(type); - if (typeProps == null) { - typeProps = new HashMap<>(); - stackConfigs.put(type, typeProps); - } - typeProps.putAll(getConfigurationProperties(service, type)); + stackConfigs + .computeIfAbsent(type, k -> new HashMap<>()) + .putAll(getConfigurationProperties(service, type)); Map> stackTypeAttributes = getConfigurationAttributes(service, type); if (!stackTypeAttributes.isEmpty()) { @@ -611,12 +496,9 @@ public Configuration getConfiguration() { Map> typeAttrs = stackAttributes.get(type); for (Map.Entry> attribute : stackTypeAttributes.entrySet()) { String attributeName = attribute.getKey(); - Map attributes = typeAttrs.get(attributeName); - if (attributes == null) { - attributes = new HashMap<>(); - typeAttrs.put(attributeName, attributes); - } - attributes.putAll(attribute.getValue()); + typeAttrs + .computeIfAbsent(attributeName, k -> new HashMap<>()) + .putAll(attribute.getValue()); } } } @@ -626,20 +508,16 @@ public Configuration getConfiguration() { /** * Parse components for the specified service from the stack definition. 
- * - * @param service service name - * - * @throws AmbariException an exception occurred getting components from the stack definition */ - private void parseComponents(String service) throws AmbariException{ + private void parseComponents(ServiceInfo serviceInfo) { Collection componentSet = new HashSet<>(); - Set components = controller.getStackComponents( - Collections.singleton(new StackServiceComponentRequest(name, version, service, null))); + String service = serviceInfo.getName(); + Collection components = serviceInfo.getComponents(); // stack service components - for (StackServiceComponentResponse component : components) { - String componentName = component.getComponentName(); + for (ComponentInfo component : components) { + String componentName = component.getName(); componentSet.add(componentName); componentService.put(componentName, service); String cardinality = component.getCardinality(); @@ -652,10 +530,7 @@ private void parseComponents(String service) throws AmbariException{ } // populate component dependencies - //todo: remove usage of AmbariMetaInfo - Collection componentDependencies = controller.getAmbariMetaInfo().getComponentDependencies( - name, version, service, componentName); - + Collection componentDependencies = component.getDependencies(); if (componentDependencies != null && ! componentDependencies.isEmpty()) { dependencies.put(componentName, componentDependencies); } @@ -671,50 +546,40 @@ private void parseComponents(String service) throws AmbariException{ * Parse configurations for the specified service from the stack definition. * * @param stackService service to parse the stack configuration for - * - * @throws AmbariException an exception occurred getting configurations from the stack definition */ - private void parseConfigurations(StackServiceResponse stackService) throws AmbariException { - String service = stackService.getServiceName(); + private void parseConfigurations(ServiceInfo stackService) { + String service = stackService.getName(); Map> mapServiceConfig = new HashMap<>(); Map> mapRequiredServiceConfig = new HashMap<>(); - serviceConfigurations.put(service, mapServiceConfig); requiredServiceConfigurations.put(service, mapRequiredServiceConfig); - Set serviceConfigs = controller.getStackConfigurations( - Collections.singleton(new StackConfigurationRequest(name, version, service, null))); - Set stackLevelConfigs = controller.getStackLevelConfigurations( - Collections.singleton(new StackLevelConfigurationRequest(name, version, null))); + Collection serviceConfigs = stackService.getProperties(); + Collection stackLevelConfigs = stackInfo.getProperties(); serviceConfigs.addAll(stackLevelConfigs); // shouldn't have any required properties in stack level configuration - for (ReadOnlyConfigurationResponse config : serviceConfigs) { + for (PropertyInfo pi : serviceConfigs) { + ReadOnlyConfigurationResponse config = pi.convertToResponse(); // TODO get rid of intermediate object ConfigProperty configProperty = new ConfigProperty(config); String type = configProperty.getType(); - Map mapTypeConfig = mapServiceConfig.get(type); - if (mapTypeConfig == null) { - mapTypeConfig = new HashMap<>(); - mapServiceConfig.put(type, mapTypeConfig); - } + Map mapTypeConfig = mapServiceConfig.computeIfAbsent(type, __ -> new HashMap<>()); - mapTypeConfig.put(config.getPropertyName(), configProperty); + String name = config.getPropertyName(); + mapTypeConfig.put(name, configProperty); if (config.isRequired()) { - Map requiredTypeConfig = mapRequiredServiceConfig.get(type); - 
if (requiredTypeConfig == null) { - requiredTypeConfig = new HashMap<>(); - mapRequiredServiceConfig.put(type, requiredTypeConfig); - } - requiredTypeConfig.put(config.getPropertyName(), configProperty); + mapRequiredServiceConfig + .computeIfAbsent(type, __ -> new HashMap<>()) + .put(name, configProperty); } } // So far we added only config types that have properties defined // in stack service definition. Since there might be config types // with no properties defined we need to add those separately - Set configTypes = stackService.getConfigTypes().keySet(); + Set configTypes = stackService.getConfigTypeAttributes().keySet(); for (String configType: configTypes) { if (!mapServiceConfig.containsKey(configType)) { mapServiceConfig.put(configType, Collections.emptyMap()); @@ -722,23 +587,17 @@ private void parseConfigurations(StackServiceResponse stackService) throws Ambar } } - private void parseStackConfigurations () throws AmbariException { - - Set stackLevelConfigs = controller.getStackLevelConfigurations( - Collections.singleton(new StackLevelConfigurationRequest(name, version, null))); + private void parseStackConfigurations() { + Collection stackLevelConfigs = stackInfo.getProperties(); - for (ReadOnlyConfigurationResponse config : stackLevelConfigs) { + for (PropertyInfo pi : stackLevelConfigs) { + ReadOnlyConfigurationResponse config = pi.convertToResponse(); // TODO get rid of intermediate object ConfigProperty configProperty = new ConfigProperty(config); String type = configProperty.getType(); - Map mapTypeConfig = stackConfigurations.get(type); - if (mapTypeConfig == null) { - mapTypeConfig = new HashMap<>(); - stackConfigurations.put(type, mapTypeConfig); - } - - mapTypeConfig.put(config.getPropertyName(), - configProperty); + stackConfigurations + .computeIfAbsent(type, __ -> new HashMap<>()) + .put(config.getPropertyName(), configProperty); } } @@ -747,8 +606,8 @@ private void parseStackConfigurations () throws AmbariException { * * @param stackServiceResponse the response object associated with this stack service */ - private void parseExcludedConfigurations(StackServiceResponse stackServiceResponse) { - excludedConfigurationTypes.put(stackServiceResponse.getServiceName(), stackServiceResponse.getExcludedConfigTypes()); + private void parseExcludedConfigurations(ServiceInfo stackServiceResponse) { + excludedConfigurationTypes.put(stackServiceResponse.getName(), stackServiceResponse.getExcludedConfigTypes()); } /** diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackDefinition.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackDefinition.java new file mode 100644 index 00000000000..25c4ede30f6 --- /dev/null +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/StackDefinition.java @@ -0,0 +1,278 @@ +package org.apache.ambari.server.controller.internal; + +import java.util.Collection; +import java.util.Map; +import java.util.Set; +import java.util.stream.Stream; + +import org.apache.ambari.server.state.AutoDeployInfo; +import org.apache.ambari.server.state.ComponentInfo; +import org.apache.ambari.server.state.DependencyInfo; +import org.apache.ambari.server.state.PropertyInfo; +import org.apache.ambari.server.state.StackId; +import org.apache.ambari.server.topology.Cardinality; +import org.apache.ambari.server.topology.Configuration; + +/** + * Encapsulates stack information. 
+ */ +// TODO move to topology package +public interface StackDefinition { + + /** + * @return the IDs for the set of stacks that this stack is (possibly) composed of. + */ + Set getStackIds(); + + /** + * @return the IDs of the set of stacks that the given service is defined in + */ + Set getStacksForService(String serviceName); + + /** + * @return the IDs of the set of stacks that the given component is defined in + */ + Set getStacksForComponent(String componentName); + + /** + * @return the names of services defined in the given stack + */ + Set getServices(StackId stackId); + + /** + * Get services contained in the stack. + * + * @return collection of all services for the stack + */ + Collection getServices(); + + /** + * Get components contained in the stack for the specified service. + * + * @param service service name + * + * @return collection of component names for the specified service + */ + Collection getComponents(String service); + + /** + * Get all service components + * + * @return collection of all components for the stack + */ + Collection getComponents(); + + /** + * Get info for the specified component. + * + * @param component component name + * + * @return component information for the requested component + * or null if the component doesn't exist in the stack + */ + ComponentInfo getComponentInfo(String component); + + /** + * Get all configuration types, including excluded types for the specified service. + * + * @param service service name + * + * @return collection of all configuration types for the specified service + */ + Collection getAllConfigurationTypes(String service); + + /** + * Get configuration types for the specified service. + * This doesn't include any service excluded types. + * + * @param service service name + * @return collection of all configuration types for the specified service + */ + Collection getConfigurationTypes(String service); + + /** + * Get the set of excluded configuration types for this service. + * + * @param service service name + * @return Set of names of excluded config types. Will not return null. + */ + Set getExcludedConfigurationTypes(String service); + + /** + * Get config properties for the specified service and configuration type. + * + * @param service service name + * @param type configuration type + * @return map of property names to values for the specified service and configuration type + */ + Map getConfigurationProperties(String service, String type); + + /** + * Get config properties with metadata attributes for the specified service and configuration type. + * + * @param service service name + * @param type configuration type + * @return map of property names to properties for the specified service and configuration type + */ + Map getConfigurationPropertiesWithMetadata(String service, String type); + + /** + * Get all required config properties for the specified service. + * + * @param service service name + * @return collection of all required properties for the given service + */ + Collection getRequiredConfigurationProperties(String service); + + /** + * Get required config properties for the specified service which belong to the specified property type.
+ * + * @param service service name + * @param propertyType property type + * + * @return collection of required properties for the given service and property type + */ + Collection getRequiredConfigurationProperties(String service, PropertyInfo.PropertyType propertyType); + + /** + * @return true if the given property for the specified service and config type is a password-type property + * @see org.apache.ambari.server.state.PropertyInfo.PropertyType#PASSWORD + */ + boolean isPasswordProperty(String service, String type, String propertyName); + + /** + * @return map of stack-level property names to properties for the specified configuration type + */ + Map getStackConfigurationProperties(String type); + + /** + * @return true if the given property for the specified service and config type is a Kerberos principal-type property + * @see org.apache.ambari.server.state.PropertyInfo.PropertyType#KERBEROS_PRINCIPAL + */ + boolean isKerberosPrincipalNameProperty(String service, String type, String propertyName); + + /** + * Get config attributes for the specified service and configuration type. + * + * @param service service name + * @param type configuration type + * + * @return map of attribute names to map of property names to attribute values + * for the specified service and configuration type + */ + Map> getConfigurationAttributes(String service, String type); + + /** + * Get stack-level config attributes for the specified configuration type. + * + * @param type configuration type + * + * @return map of attribute names to map of property names to attribute values + * for the specified configuration type + */ + Map> getStackConfigurationAttributes(String type); + + /** + * Get the service for the specified component. + * + * @param component component name + * + * @return service name that contains the specified component + */ + String getServiceForComponent(String component); + + /** + * Get the names of the services which contain the specified components. + * + * @param components collection of components + * + * @return collection of services which contain the specified components + */ + Collection getServicesForComponents(Collection components); + + /** + * Obtain the service name which corresponds to the specified configuration. + * + * @param config configuration type + * + * @return name of service which corresponds to the specified configuration type + */ + String getServiceForConfigType(String config); + + /** + * @return stream of service names which correspond to the specified configuration type name + */ + Stream getServicesForConfigType(String config); + + /** + * Return the dependencies specified for the given component. + * + * @param component component to get dependency information for + * + * @return collection of dependency information for the specified component + */ + //todo: full dependency graph + Collection getDependenciesForComponent(String component); + + /** + * Get the service, if any, that a component dependency is conditional on. + * + * @param dependency dependency to get conditional service for + * + * @return conditional service for provided component or null if dependency + * is not conditional on a service + */ + String getConditionalServiceForDependency(DependencyInfo dependency); + + /** + * Get the custom "descriptor" that is used to decide whether a component + * is a managed or non-managed dependency. The descriptor is formatted as: + * "config_type/property_name". Currently it is only used for Hive Metastore's + * database.
+ * + * @param component component to get dependency information for + * @return the descriptor of form "config_type/property_name" + * @see org.apache.ambari.server.topology.BlueprintValidatorImpl#isDependencyManaged + */ + String getExternalComponentConfig(String component); + + /** + * Obtain the required cardinality for the specified component. + */ + Cardinality getCardinality(String component); + + /** + * Obtain auto-deploy information for the specified component. + */ + AutoDeployInfo getAutoDeployInfo(String component); + + /** + * @return true if the given component is a master component + */ + boolean isMasterComponent(String component); + + /** + * @return subset of the stack's configuration for the given services + */ + Configuration getConfiguration(Collection services); + + /** + * @return the stack's configuration + */ + Configuration getConfiguration(); + + /** + * Create a stack definition for one or more stacks. + * When given multiple stacks, it returns a composite stack, + * while a single stack is returned as is. + * + * @param stacks the stack(s) to combine + * @return composite or single stack + */ + static StackDefinition of(Set stacks) { + return stacks.size() > 1 + ? new CompositeStack(stacks) + : stacks.stream().findAny().orElse(null); + } + +} diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UnitUpdater.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UnitUpdater.java index 8b7cb67f97e..bb02b44febc 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UnitUpdater.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/UnitUpdater.java @@ -69,18 +69,18 @@ public static class PropertyUnit { private static final String DEFAULT_UNIT = "m"; private final String unit; - public static PropertyUnit of(Stack stack, UnitValidatedProperty property) { + public static PropertyUnit of(StackDefinition stack, UnitValidatedProperty property) { return PropertyUnit.of(stack, property.getServiceName(), property.getConfigType(), property.getPropertyName()); } - public static PropertyUnit of(Stack stack, String serviceName, String configType, String propertyName) { + public static PropertyUnit of(StackDefinition stack, String serviceName, String configType, String propertyName) { return new PropertyUnit( stackUnit(stack, serviceName, configType, propertyName) .map(PropertyUnit::toJvmUnit) .orElse(DEFAULT_UNIT)); } - private static Optional stackUnit(Stack stack, String serviceName, String configType, String propertyName) { + private static Optional stackUnit(StackDefinition stack, String serviceName, String configType, String propertyName) { try { return Optional.ofNullable( stack.getConfigurationPropertiesWithMetadata(serviceName, configType) diff --git a/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java b/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java index 20edcfd3fd6..afa5a80afc8 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/metadata/RoleCommandOrder.java @@ -17,6 +17,8 @@ */ package org.apache.ambari.server.metadata; +import static java.util.stream.Collectors.toSet; + import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -30,8 +32,10 @@ import org.apache.ambari.server.api.services.AmbariMetaInfo; import 
org.apache.ambari.server.stageplanner.RoleGraphNode; import org.apache.ambari.server.state.Cluster; +import org.apache.ambari.server.state.ComponentInfo; import org.apache.ambari.server.state.Service; import org.apache.ambari.server.state.ServiceComponent; +import org.apache.ambari.server.state.ServiceInfo; import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.state.StackInfo; import org.slf4j.Logger; @@ -137,6 +141,16 @@ public void initialize(Cluster cluster, LinkedHashSet sectionKeys) { for (Service service : cluster.getServices().values()) { stackIds.add(service.getDesiredStackId()); } + Set components = cluster.getServices().values().stream() + .flatMap(s -> s.getServiceComponents().values().stream()) + .map(ServiceComponent::getName) + .collect(toSet()); + // FIXME ugly workaround + StackInfo hdp = ambariMetaInfo.getStacks().stream() + .filter(s -> "HDP".equals(s.getName())) + .filter(s -> s.getVersion().startsWith("3.0.0")) + .findAny() + .orElse(null); for (StackId stackId : stackIds) { StackInfo stack = null; @@ -158,6 +172,26 @@ public void initialize(Cluster cluster, LinkedHashSet sectionKeys) { addDependencies(section); } + + // FIXME ugly workaround to make sure hdp-select can be installed in before-INSTALL hook + if (hdp != null && !"HDP".equals(stackId.getStackName())) { + for (ServiceInfo blockedService : stack.getServices()) { + for (ComponentInfo blockedComponent : blockedService.getComponents()) { + if (components.contains(blockedComponent.getName())) { + Role blockedRole = Role.valueOf(blockedComponent.getName()); + for (ServiceInfo blockingService : hdp.getServices()) { + for (ComponentInfo blockingComponent : blockingService.getComponents()) { + if (components.contains(blockingComponent.getName())) { + Role blockerRole = Role.valueOf(blockingComponent.getName()); + addDependency(blockedRole, RoleCommand.INSTALL, blockerRole, RoleCommand.INSTALL, false); + LOG.info("Added FIXME dependency {} -> {}", blockedRole, blockerRole); + } + } + } + } + } + } + } } extendTransitiveDependency(); diff --git a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintMpackInstanceEntity.java b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintMpackInstanceEntity.java index fb9ed7cfe9a..1d61523a37a 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintMpackInstanceEntity.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/orm/entities/BlueprintMpackInstanceEntity.java @@ -52,10 +52,10 @@ public class BlueprintMpackInstanceEntity { @Column(name = "id", nullable = false, updatable = false) private Long id; - @Column(name = "mpack_name") + @Column(name = "mpack_name", nullable = false) private String mpackName; - @Column(name = "mpack_version") + @Column(name = "mpack_version", nullable = false) private String mpackVersion; @Column(name = "mpack_uri") @@ -68,7 +68,7 @@ public class BlueprintMpackInstanceEntity { private Collection configurations = new ArrayList<>(); @ManyToOne - @JoinColumn(name = "mpack_id", referencedColumnName = "id", nullable = true) + @JoinColumn(name = "mpack_id", referencedColumnName = "id") private MpackEntity mpackEntity; @ManyToOne diff --git a/ambari-server/src/main/java/org/apache/ambari/server/stack/NoSuchStackException.java b/ambari-server/src/main/java/org/apache/ambari/server/stack/NoSuchStackException.java index 47000db783b..18f4e5c8ae4 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/stack/NoSuchStackException.java +++ 
b/ambari-server/src/main/java/org/apache/ambari/server/stack/NoSuchStackException.java @@ -18,10 +18,15 @@ package org.apache.ambari.server.stack; +import org.apache.ambari.server.state.StackId; + /** - * Indicates that the requested Stack doesn't esist. + * Indicates that the requested Stack doesn't exist. */ -public class NoSuchStackException extends Exception { +public class NoSuchStackException extends IllegalArgumentException { + public NoSuchStackException(StackId stackId) { + this(stackId.getStackName(), stackId.getStackVersion()); + } public NoSuchStackException(String stackName, String stackVersion) { super(String.format("The requested stack doesn't exist. Name='%s' Version='%s'", stackName, stackVersion)); } diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java index 37840f6dfe3..f4058a1e79d 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/AmbariContext.java @@ -18,6 +18,7 @@ package org.apache.ambari.server.topology; +import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toSet; import java.util.Collection; @@ -67,7 +68,7 @@ import org.apache.ambari.server.controller.internal.ServiceGroupDependencyResourceProvider; import org.apache.ambari.server.controller.internal.ServiceGroupResourceProvider; import org.apache.ambari.server.controller.internal.ServiceResourceProvider; -import org.apache.ambari.server.controller.internal.Stack; +import org.apache.ambari.server.controller.internal.StackDefinition; import org.apache.ambari.server.controller.internal.VersionDefinitionResourceProvider; import org.apache.ambari.server.controller.predicate.EqualsPredicate; import org.apache.ambari.server.controller.spi.ClusterController; @@ -91,12 +92,11 @@ import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.state.configgroup.ConfigGroup; import org.apache.ambari.server.utils.RetryHelper; -import org.apache.commons.lang.StringUtils; +import org.apache.directory.api.util.Strings; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.base.Function; -import com.google.common.collect.Collections2; +import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.common.util.concurrent.Striped; @@ -213,97 +213,114 @@ public Collection getPhysicalTasks(Collection ids) { public void createAmbariResources(ClusterTopology topology, String clusterName, SecurityType securityType, String repoVersionString, Long repoVersionId) { - Stack stack = topology.getBlueprint().getStack(); - StackId stackId = new StackId(stack.getName(), stack.getVersion()); + Map repoVersionByStack = new HashMap<>(); - RepositoryVersionEntity repoVersion = null; - if (StringUtils.isEmpty(repoVersionString) && null == repoVersionId) { - List stackRepoVersions = repositoryVersionDAO.findByStack(stackId); + Set stackIds = topology.getBlueprint().getStackIds(); + for (StackId stackId : stackIds) { + RepositoryVersionEntity repoVersion = null; + if (stackIds.size() == 1) { + repoVersion = findSpecifiedRepo(repoVersionString, repoVersionId, stackId); + } + if (null == repoVersion) { + repoVersion = findRepoForStack(stackId); + } + Preconditions.checkNotNull(repoVersion); + // only use a STANDARD repo when creating a new cluster + if 
(repoVersion.getType() != RepositoryType.STANDARD) { + throw new IllegalArgumentException(String.format( + "Unable to create a cluster using the following repository since it is not a STANDARD type: %s", + repoVersion + )); + } + } - if (stackRepoVersions.isEmpty()) { - // !!! no repos, try to get the version for the stack - VersionDefinitionResourceProvider vdfProvider = getVersionDefinitionResourceProvider(); + StackId stackId = Iterables.getFirst(topology.getBlueprint().getStackIds(), null); + createAmbariClusterResource(clusterName, stackId, securityType); + createAmbariServiceAndComponentResources(topology, clusterName, repoVersionByStack); + } - Map properties = new HashMap<>(); - properties.put(VersionDefinitionResourceProvider.VERSION_DEF_AVAILABLE_DEFINITION, stackId.toString()); + private RepositoryVersionEntity findRepoForStack(StackId stackId) { + RepositoryVersionEntity repoVersion; + List stackRepoVersions = repositoryVersionDAO.findByStack(stackId); + if (stackRepoVersions.isEmpty()) { + // !!! no repos, try to get the version for the stack + VersionDefinitionResourceProvider vdfProvider = getVersionDefinitionResourceProvider(); - Request request = new RequestImpl(Collections.emptySet(), - Collections.singleton(properties), Collections.emptyMap(), null); + Map properties = new HashMap<>(); + properties.put(VersionDefinitionResourceProvider.VERSION_DEF_AVAILABLE_DEFINITION, stackId.toString()); - Long defaultRepoVersionId = null; + Request request = new RequestImpl(Collections.emptySet(), + Collections.singleton(properties), Collections.emptyMap(), null + ); - try { - RequestStatus requestStatus = vdfProvider.createResources(request); - if (!requestStatus.getAssociatedResources().isEmpty()) { - Resource resource = requestStatus.getAssociatedResources().iterator().next(); - defaultRepoVersionId = (Long) resource.getPropertyValue(VersionDefinitionResourceProvider.VERSION_DEF_ID); - } - } catch (Exception e) { - throw new IllegalArgumentException(String.format( - "Failed to create a default repository version definition for stack %s. " - + "This typically is a result of not loading the stack correctly or being able " - + "to load information about released versions. Create a repository version " - + " and try again.", stackId), e); - } + Long defaultRepoVersionId = null; - repoVersion = repositoryVersionDAO.findByPK(defaultRepoVersionId); - // !!! better not! - if (null == repoVersion) { - throw new IllegalArgumentException(String.format( - "Failed to load the default repository version definition for stack %s. " - + "Check for a valid repository version and try again.", stackId)); + try { + RequestStatus requestStatus = vdfProvider.createResources(request); + if (!requestStatus.getAssociatedResources().isEmpty()) { + Resource resource = requestStatus.getAssociatedResources().iterator().next(); + defaultRepoVersionId = (Long) resource.getPropertyValue(VersionDefinitionResourceProvider.VERSION_DEF_ID); } + } catch (Exception e) { + throw new IllegalArgumentException(String.format( + "Failed to create a default repository version definition for stack %s. " + + "This typically is a result of not loading the stack correctly or being able " + + "to load information about released versions. Create a repository version " + + " and try again.", stackId), e); + } - } else if (stackRepoVersions.size() > 1) { + repoVersion = repositoryVersionDAO.findByPK(defaultRepoVersionId); + // !!! better not! 
+ if (null == repoVersion) { + throw new IllegalArgumentException(String.format( + "Failed to load the default repository version definition for stack %s. " + + "Check for a valid repository version and try again.", stackId)); + } - Function function = new Function() { - @Override - public String apply(RepositoryVersionEntity input) { - return input.getVersion(); - } - }; + } else if (stackRepoVersions.size() > 1) { + String versions = stackRepoVersions.stream() + .map(RepositoryVersionEntity::getVersion) + .collect(joining(", ")); - Collection versions = Collections2.transform(stackRepoVersions, function); + throw new IllegalArgumentException(String.format( + "Several repositories were found for %s: %s. Specify the version with '%s'", + stackId, versions, ProvisionClusterRequest.REPO_VERSION_PROPERTY + )); + } else { + repoVersion = stackRepoVersions.get(0); + LOG.info("Found single matching repository version {} for stack {}", repoVersion.getVersion(), stackId); + } + return repoVersion; + } - throw new IllegalArgumentException(String.format("Several repositories were found for %s: %s. Specify the version" - + " with '%s'", stackId, StringUtils.join(versions, ", "), ProvisionClusterRequest.REPO_VERSION_PROPERTY)); - } else { - repoVersion = stackRepoVersions.get(0); - LOG.warn("Cluster is being provisioned using the single matching repository version {}", repoVersion.getVersion()); - } - } else if (null != repoVersionId){ + private RepositoryVersionEntity findSpecifiedRepo(String repoVersionString, Long repoVersionId, StackId stackId) { + RepositoryVersionEntity repoVersion = null; + if (null != repoVersionId) { repoVersion = repositoryVersionDAO.findByPK(repoVersionId); if (null == repoVersion) { throw new IllegalArgumentException(String.format( "Could not identify repository version with repository version id %s for installing services. " + "Specify a valid repository version id with '%s'", - repoVersionId, ProvisionClusterRequest.REPO_VERSION_ID_PROPERTY)); + repoVersionId, ProvisionClusterRequest.REPO_VERSION_ID_PROPERTY + )); } - } else { + } else if (Strings.isNotEmpty(repoVersionString)) { repoVersion = repositoryVersionDAO.findByStackAndVersion(stackId, repoVersionString); if (null == repoVersion) { throw new IllegalArgumentException(String.format( "Could not identify repository version with stack %s and version %s for installing services. 
" + "Specify a valid version with '%s'", - stackId, repoVersionString, ProvisionClusterRequest.REPO_VERSION_PROPERTY)); + stackId, repoVersionString, ProvisionClusterRequest.REPO_VERSION_PROPERTY + )); } } - - // only use a STANDARD repo when creating a new cluster - if (repoVersion.getType() != RepositoryType.STANDARD) { - throw new IllegalArgumentException(String.format( - "Unable to create a cluster using the following repository since it is not a STANDARD type: %s", - repoVersion)); - } - - createAmbariClusterResource(clusterName, stack.getName(), stack.getVersion(), securityType); - createAmbariServiceAndComponentResources(topology, clusterName, stackId, repoVersion.getId()); + return repoVersion; } - public void createAmbariClusterResource(String clusterName, String stackName, String stackVersion, SecurityType securityType) { - String stackInfo = String.format("%s-%s", stackName, stackVersion); + public void createAmbariClusterResource(String clusterName, StackId stackId, SecurityType securityType) { + String stackInfo = stackId.toString(); final ClusterRequest clusterRequest = new ClusterRequest(null, clusterName, null, securityType, stackInfo, null); try { @@ -325,9 +342,7 @@ public Object call() throws Exception { } } - public void createAmbariServiceAndComponentResources(ClusterTopology topology, String clusterName, - StackId stackId, Long repositoryVersionId) { - + public void createAmbariServiceAndComponentResources(ClusterTopology topology, String clusterName, Map repoVersionByStack) { Set serviceGroups = Sets.newHashSet(DEFAULT_SERVICE_GROUP_NAME); Collection services = topology.getBlueprint().getServices(); @@ -347,8 +362,11 @@ public void createAmbariServiceAndComponentResources(ClusterTopology topology, S Set componentRequests = new HashSet<>(); for (String service : services) { String credentialStoreEnabled = topology.getBlueprint().getCredentialStoreEnabled(service); + StackId stackId = Iterables.getOnlyElement(topology.getBlueprint().getStackIdsForService(service)); // FIXME temporarily assume each service is defined in only one mpack + Long repositoryVersionId = repoVersionByStack.get(stackId); serviceRequests.add(new ServiceRequest(clusterName, DEFAULT_SERVICE_GROUP_NAME, service, service, - repositoryVersionId, null, credentialStoreEnabled, null)); + repositoryVersionId, null, credentialStoreEnabled, stackId + )); for (String component : topology.getBlueprint().getComponentNames(service)) { String recoveryEnabled = topology.getBlueprint().getRecoveryEnabled(service, component); @@ -522,11 +540,10 @@ public RequestStatusResponse startHost(String hostName, String clusterName, Coll * installed and started and that the monitoring screen for the cluster should be displayed to the user. 
* * @param clusterName cluster name - * @param stackName stack name - * @param stackVersion stack version + * @param stackId stack ID */ - public void persistInstallStateForUI(String clusterName, String stackName, String stackVersion) { - String stackInfo = String.format("%s-%s", stackName, stackVersion); + public void persistInstallStateForUI(String clusterName, StackId stackId) { + String stackInfo = stackId.toString(); final ClusterRequest clusterRequest = new ClusterRequest(null, clusterName, "INSTALLED", null, stackInfo, null); try { @@ -743,7 +760,7 @@ private boolean addHostToExistingConfigGroups(String hostName, ClusterTopology t */ private void createConfigGroupsAndRegisterHost(ClusterTopology topology, String groupName) throws AmbariException { Map> groupConfigs = new HashMap<>(); - Stack stack = topology.getBlueprint().getStack(); + StackDefinition stack = topology.getBlueprint().getStack(); // get the host-group config with cluster creation template overrides Configuration topologyHostGroupConfig = topology. @@ -756,11 +773,12 @@ private void createConfigGroupsAndRegisterHost(ClusterTopology topology, String // iterate over topo host group configs which were defined in for (Map.Entry> entry : userProvidedGroupProperties.entrySet()) { String type = entry.getKey(); - List services = stack.getServicesForConfigType(type); - String service = services.stream() + String service = stack.getServicesForConfigType(type) .filter(each -> topology.getBlueprint().getServices().contains(each)) .findFirst() - .orElseThrow(() -> new IllegalArgumentException("Specified configuration type is not associated with any service: " + type)); + // TODO check if this is required at all (might be handled by the "orphan" removal) + // TODO move this validation earlier + .orElseThrow(() -> new IllegalArgumentException("Specified configuration type is not associated with any service in the blueprint: " + type)); Config config = configFactory.createReadOnly(type, groupName, entry.getValue(), null); //todo: attributes diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java index 64d2ae34dda..04e004e5c55 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/Blueprint.java @@ -21,9 +21,11 @@ import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.Set; -import org.apache.ambari.server.controller.internal.Stack; +import org.apache.ambari.server.controller.internal.StackDefinition; import org.apache.ambari.server.orm.entities.BlueprintEntity; +import org.apache.ambari.server.state.StackId; /** * Blueprint representation. @@ -39,15 +41,15 @@ public interface Blueprint { /** * Get the hot groups contained in the blueprint. + * * @return map of host group name to host group */ Map getHostGroups(); /** - * Get a hostgroup specified by name. + * Get a host group specified by name. * * @param name name of the host group to get - * * @return the host group with the given name or null */ HostGroup getHostGroup(String name); @@ -124,19 +126,26 @@ public interface Blueprint { /** * Get the stack associated with the blueprint. + * For mpack-based installation this is a composite stack + * that provides a single unified view of all underlying mpacks, + * but does not have any identifier. 
* * @return associated stack */ - @Deprecated - Stack getStack(); + StackDefinition getStack(); /** - * Get the stacks associated with the blueprint. - * - * @return associated stacks + * @return the set of stack (mpack) IDs associated with the blueprint */ - Collection getStacks(); + Set getStackIds(); + /** + * Look up the stacks that define the given service. + * To be used only after checking that services map to known stacks. + * @param service the name of the service as defined in the stack (mpack), e.g. ZOOKEEPER + * @return the IDs of the stacks that define the given service + */ + Set getStackIdsForService(String service); /** * Get the mpacks associated with the blueprint. @@ -167,24 +176,7 @@ public interface Blueprint { SecurityConfiguration getSecurity(); /** - * Validate the blueprint topology. - * - * @throws InvalidTopologyException if the topology is invalid - */ - void validateTopology() throws InvalidTopologyException; - - /** - * Validate that the blueprint contains all of the required properties. - * - * @throws InvalidTopologyException if the blueprint doesn't contain all required properties - */ - void validateRequiredProperties() throws InvalidTopologyException, GPLLicenseNotAcceptedException; - - /** - * - * A config type is valid if there are services related to except cluster-env and global. - * @param configType - * @return - */ boolean isValidConfigType(String configType); @@ -197,14 +189,4 @@ public interface Blueprint { List getRepositorySettings(); - /** - * @return a boolean indicating if all mpack referenced by the blueprints are resolved (installed on the system) - */ - boolean isAllMpacksResolved(); - - /** - * @return a collection of the names unresolved mpacks in {name}-{version} format. - */ - Collection getUnresolvedMpackNames(); - } diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java index 48f41a62d31..4386ec6919d 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintFactory.java @@ -10,8 +10,7 @@ * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distribut - * ed on an "AS IS" BASIS, + * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
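For reviewers, a minimal sketch (not part of this patch) of how the Blueprint / StackDefinition pieces above are meant to be composed. The helper name is hypothetical, the generic type parameters (which the flattened diff does not show) are assumed, and the example service and config-type names are illustrative only.

```java
import java.util.HashSet;
import java.util.Set;

import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.api.services.AmbariMetaInfo;
import org.apache.ambari.server.controller.internal.Stack;
import org.apache.ambari.server.controller.internal.StackDefinition;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.StackInfo;
import org.apache.ambari.server.topology.Blueprint;

class StackDefinitionSketch {
  // Roughly what BlueprintFactory#composeStacks does: build one Stack per mpack-derived
  // stack ID and let StackDefinition.of collapse them into a single unified view.
  static StackDefinition composeForBlueprint(Blueprint blueprint, AmbariMetaInfo metaInfo) throws AmbariException {
    Set<StackDefinition> stacks = new HashSet<>();
    for (StackId id : blueprint.getStackIds()) {
      StackInfo info = metaInfo.getStack(id.getStackName(), id.getStackVersion()); // may throw if the stack is not registered
      stacks.add(new Stack(info));
    }
    // A single stack is returned as-is; multiple stacks are wrapped in a CompositeStack.
    return StackDefinition.of(stacks);
  }
}
```

A caller can then resolve ownership questions across mpacks, for example composite.getStacksForService("ZOOKEEPER") or composite.getServicesForConfigType("zoo.cfg").findFirst(), which is the pattern AmbariContext#createConfigGroupsAndRegisterHost uses above to pick the service that owns a host-group config type.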
@@ -19,6 +18,24 @@ package org.apache.ambari.server.topology; +import static java.util.stream.Collectors.joining; +import static java.util.stream.Collectors.toCollection; +import static java.util.stream.Collectors.toSet; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.BLUEPRINT_NAME_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.COMPONENT_MPACK_INSTANCE_PROPERTY; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.COMPONENT_NAME_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.COMPONENT_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.COMPONENT_PROVISION_ACTION_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.COMPONENT_SERVICE_INSTANCE_PROPERTY; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.CONFIGURATION_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.HOST_GROUP_CARDINALITY_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.HOST_GROUP_NAME_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.HOST_GROUP_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.MPACK_INSTANCES_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.SETTING_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.STACK_NAME_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.STACK_VERSION_PROPERTY_ID; + import java.io.IOException; import java.util.ArrayList; import java.util.Collection; @@ -27,6 +44,9 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.TreeSet; +import java.util.function.Function; +import java.util.stream.Stream; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.ObjectNotFoundException; @@ -35,16 +55,19 @@ import org.apache.ambari.server.controller.RootComponent; import org.apache.ambari.server.controller.internal.ProvisionAction; import org.apache.ambari.server.controller.internal.Stack; -import org.apache.ambari.server.controller.utilities.PropertyHelper; +import org.apache.ambari.server.controller.internal.StackDefinition; import org.apache.ambari.server.orm.dao.BlueprintDAO; import org.apache.ambari.server.orm.entities.BlueprintEntity; import org.apache.ambari.server.stack.NoSuchStackException; +import org.apache.ambari.server.state.StackId; +import org.apache.commons.lang3.tuple.Pair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Joiner; import com.google.inject.Inject; /** @@ -54,38 +77,9 @@ public class BlueprintFactory { private static final Logger LOG = LoggerFactory.getLogger(BlueprintFactory.class); - // Blueprints - protected static final String BLUEPRINT_NAME_PROPERTY_ID = - PropertyHelper.getPropertyId("Blueprints", "blueprint_name"); - protected static final String STACK_NAME_PROPERTY_ID = - PropertyHelper.getPropertyId("Blueprints", "stack_name"); - protected static final String 
STACK_VERSION_PROPERTY_ID = - PropertyHelper.getPropertyId("Blueprints", "stack_version"); - - // Host Groups - protected static final String HOST_GROUP_PROPERTY_ID = "host_groups"; - protected static final String HOST_GROUP_NAME_PROPERTY_ID = "name"; - protected static final String HOST_GROUP_CARDINALITY_PROPERTY_ID = "cardinality"; - - // Host Group Components - protected static final String COMPONENT_PROPERTY_ID ="components"; - protected static final String COMPONENT_NAME_PROPERTY_ID ="name"; - protected static final String COMPONENT_PROVISION_ACTION_PROPERTY_ID = "provision_action"; - - // Configurations - protected static final String CONFIGURATION_PROPERTY_ID = "configurations"; - protected static final String PROPERTIES_PROPERTY_ID = "properties"; - protected static final String PROPERTIES_ATTRIBUTES_PROPERTY_ID = "properties_attributes"; - - protected static final String SETTINGS_PROPERTY_ID = "settings"; - - protected static final String MPACK_INSTANCES_PROPERTY = "mpack_instances"; - protected static final String MPACK_INSTANCE_PROPERTY = "mpack_instance"; - protected static final String SERVICE_INSTANCE_PROPERTY = "service_instance"; - private static BlueprintDAO blueprintDAO; - private ConfigurationFactory configFactory = new ConfigurationFactory(); + private final ConfigurationFactory configFactory = new ConfigurationFactory(); private final StackFactory stackFactory; public BlueprintFactory() { @@ -98,8 +92,14 @@ protected BlueprintFactory(StackFactory stackFactory) { public Blueprint getBlueprint(String blueprintName) throws NoSuchStackException { BlueprintEntity entity = blueprintDAO.findByName(blueprintName); - //todo: just return null? - return entity == null ? null : new BlueprintImpl(entity); + if (entity != null) { + Set stackIds = entity.getMpackInstances().stream() + .map(m -> new StackId(m.getMpackName(), m.getMpackVersion())) + .collect(toSet()); + StackDefinition stack = composeStacks(stackIds); + return new BlueprintImpl(entity, stack, stackIds); + } + return null; } /** @@ -118,42 +118,31 @@ public Blueprint createBlueprint(Map properties, SecurityConfigu throw new IllegalArgumentException("Blueprint name must be provided"); } - Stack stack; Collection mpackInstances = createMpackInstances(properties); if (mpackInstances.isEmpty()) { - stack = createStack(properties); + StackId stackId = getStackId(properties); + mpackInstances = Collections.singleton(new MpackInstance(stackId.getStackName(), stackId.getStackVersion(), null, null, Configuration.createEmpty())); } - else { - stack = mpackInstances.iterator().next().getStack(); - } - + Set stackIds = mpackInstances.stream() + .map(m -> new StackId(m.getMpackName(), m.getMpackVersion())) + .collect(toSet()); + StackDefinition stack = composeStacks(stackIds); Collection hostGroups = processHostGroups(name, stack, properties); Configuration configuration = configFactory.getConfiguration((Collection>) properties.get(CONFIGURATION_PROPERTY_ID)); - Setting setting = SettingFactory.getSetting((Collection>) properties.get(SETTINGS_PROPERTY_ID)); + Setting setting = SettingFactory.getSetting((Collection>) properties.get(SETTING_PROPERTY_ID)); - if (!mpackInstances.isEmpty()) { - return new BlueprintImpl(name, hostGroups, mpackInstances, configuration, securityConfiguration, setting); - } - else { - // Legacy constructor for old blueprints without mpacks - return new BlueprintImpl(name, hostGroups, stack, configuration, securityConfiguration, setting); - } + return new BlueprintImpl(name, hostGroups, stack, stackIds, 
mpackInstances, configuration, securityConfiguration, setting); } private Collection createMpackInstances(Map properties) throws NoSuchStackException { - if (properties.containsKey(MPACK_INSTANCES_PROPERTY)) { + if (properties.containsKey(MPACK_INSTANCES_PROPERTY_ID)) { ObjectMapper mapper = new ObjectMapper(); mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES); try { - String mpackInstancesJson = mapper.writeValueAsString(properties.get(MPACK_INSTANCES_PROPERTY)); - Collection mpacks = mapper.readValue(mpackInstancesJson, new TypeReference>(){}); - for (MpackInstance mpack: mpacks) { - tryResolveStack(mpack); - } - return mpacks; - } - catch (IOException ex) { + String mpackInstancesJson = mapper.writeValueAsString(properties.get(MPACK_INSTANCES_PROPERTY_ID)); + return mapper.readValue(mpackInstancesJson, new TypeReference>(){}); + } catch (IOException ex) { throw new RuntimeException("Unable to parse mpack instances for blueprint: " + String.valueOf(properties.get(BLUEPRINT_NAME_PROPERTY_ID)), ex); } @@ -162,39 +151,64 @@ private Collection createMpackInstances(Map prope } } - protected void tryResolveStack(MpackInstance mpack) { - try { - Stack stack = loadStack(mpack.getMpackName(), mpack.getMpackVersion()); - mpack.setStack(stack); - } - catch (NoSuchStackException ex) { - // This case can be normal if a blueprint had been sent in before the referenced mpack was installed - LOG.warn("Cannot resolve stack for mpack {}-{}. Is mpack installed?", mpack.getMpackName(), mpack.getMpackVersion()); - } - } - - protected Stack createStack(Map properties) throws NoSuchStackException { + private static StackId getStackId(Map properties) throws NoSuchStackException { String stackName = String.valueOf(properties.get(STACK_NAME_PROPERTY_ID)); String stackVersion = String.valueOf(properties.get(STACK_VERSION_PROPERTY_ID)); - return loadStack(stackName, stackVersion); + return new StackId(stackName, stackVersion); + } + + private StackDefinition composeStacks(Set stackIds) { + Set stacks = stackIds.stream() + .map(this::createStack) + .collect(toSet()); + StackDefinition composite = StackDefinition.of(stacks); + + // temporary check + verifyStackDefinitionsAreDisjoint(composite.getServices().stream(), "Service", composite::getStacksForService); + verifyStackDefinitionsAreDisjoint(composite.getComponents().stream(), "Component", composite::getStacksForComponent); + + return composite; } - protected Stack loadStack(String stackName, String stackVersion) throws NoSuchStackException { + /** + * Verify that each item in items is defined by only one stack. + * + * @param items the items to check + * @param type string description of the type of items (eg. 
"Service", or "Component") + * @param lookup a function to find the set of stacks that an item belongs to + * @throws IllegalArgumentException if some items are defined in multiple stacks + */ + static void verifyStackDefinitionsAreDisjoint(Stream items, String type, Function> lookup) { + Set>> definedInMultipleStacks = items + .map(s -> Pair.of(s, lookup.apply(s))) + .filter(p -> p.getRight().size() > 1) + .collect(toCollection(TreeSet::new)); + + if (!definedInMultipleStacks.isEmpty()) { + String msg = definedInMultipleStacks.stream() + .map(p -> String.format("%s %s is defined in multiple stacks: %s", type, p.getLeft(), Joiner.on(", ").join(p.getRight()))) + .collect(joining("\n")); + LOG.error(msg); + throw new IllegalArgumentException(msg); + } + } + + protected Stack createStack(StackId stackId) { try { //todo: don't pass in controller - return stackFactory.createStack(stackName, stackVersion, AmbariServer.getController()); + return stackFactory.createStack(stackId, AmbariServer.getController()); } catch (ObjectNotFoundException e) { - throw new NoSuchStackException(stackName, stackVersion); + throw new NoSuchStackException(stackId); } catch (AmbariException e) { // todo throw new RuntimeException( - String.format("An error occurred parsing the stack information for %s-%s", stackName, stackVersion) , e); + String.format("An error occurred parsing the stack information for %s", stackId) , e); } } //todo: Move logic to HostGroupImpl @SuppressWarnings("unchecked") - private Collection processHostGroups(String bpName, Stack stack, Map properties) { + private Collection processHostGroups(String bpName, StackDefinition stack, Map properties) { Set> hostGroupProps = (HashSet>) properties.get(HOST_GROUP_PROPERTY_ID); @@ -219,14 +233,14 @@ private Collection processHostGroups(String bpName, Stack stack, Map< Configuration configuration = configFactory.getConfiguration(configProps); String cardinality = String.valueOf(hostGroupProperties.get(HOST_GROUP_CARDINALITY_PROPERTY_ID)); - HostGroup group = new HostGroupImpl(hostGroupName, bpName, Collections.singleton(stack), components, configuration, cardinality); + HostGroup group = new HostGroupImpl(hostGroupName, bpName, stack, components, configuration, cardinality); hostGroups.add(group); } return hostGroups; } - private Collection processHostGroupComponents(Stack stack, String groupName, HashSet> componentProps) { + private Collection processHostGroupComponents(StackDefinition stack, String groupName, HashSet> componentProps) { if (componentProps == null || componentProps.isEmpty()) { throw new IllegalArgumentException("Host group '" + groupName + "' must contain at least one component"); } @@ -246,8 +260,8 @@ private Collection processHostGroupComponents(Stack stack, String gro groupName + "' is not valid for the specified stack"); } - String mpackInstance = componentProperties.get(MPACK_INSTANCE_PROPERTY); - String serviceInstance = componentProperties.get(SERVICE_INSTANCE_PROPERTY); + String mpackInstance = componentProperties.get(COMPONENT_MPACK_INSTANCE_PROPERTY); + String serviceInstance = componentProperties.get(COMPONENT_SERVICE_INSTANCE_PROPERTY); //TODO, might want to add some validation here, to only accept value enum types, rwn ProvisionAction provisionAction = componentProperties.containsKey(COMPONENT_PROVISION_ACTION_PROPERTY_ID) ? 
ProvisionAction.valueOf(componentProperties.get(COMPONENT_PROVISION_ACTION_PROPERTY_ID)) : null; @@ -263,12 +277,10 @@ private Collection processHostGroupComponents(Stack stack, String gro * @return collection of component names for the specified stack * @throws IllegalArgumentException if the specified stack doesn't exist */ - private Collection getAllStackComponents(Stack stack) { - Collection allComponents = new HashSet<>(); - for (Collection components: stack.getComponents().values()) { - allComponents.addAll(components); - } - // currently ambari server is no a recognized component + private Collection getAllStackComponents(StackDefinition stack) { + Collection allComponents = new HashSet<>(stack.getComponents()); + + // currently ambari server is not a recognized component allComponents.add(RootComponent.AMBARI_SERVER.name()); return allComponents; @@ -292,7 +304,7 @@ public static void init(BlueprintDAO dao) { * simulate various Stack or error conditions. */ interface StackFactory { - Stack createStack(String stackName, String stackVersion, AmbariManagementController managementController) throws AmbariException; + Stack createStack(StackId stackId, AmbariManagementController managementController) throws AmbariException; } /** @@ -303,8 +315,8 @@ interface StackFactory { */ private static class DefaultStackFactory implements StackFactory { @Override - public Stack createStack(String stackName, String stackVersion, AmbariManagementController managementController) throws AmbariException { - return new Stack(stackName, stackVersion, managementController); + public Stack createStack(StackId stackId, AmbariManagementController managementController) throws AmbariException { + return new Stack(stackId.getStackName(), stackId.getStackVersion(), managementController); } } } diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java index 24e5d3631de..04616a72270 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintImpl.java @@ -29,13 +29,10 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.Set; +import java.util.function.Supplier; -import org.apache.ambari.server.AmbariException; -import org.apache.ambari.server.StackAccessException; -import org.apache.ambari.server.controller.AmbariServer; -import org.apache.ambari.server.controller.internal.Stack; +import org.apache.ambari.server.controller.internal.StackDefinition; import org.apache.ambari.server.orm.entities.BlueprintConfigEntity; import org.apache.ambari.server.orm.entities.BlueprintConfiguration; import org.apache.ambari.server.orm.entities.BlueprintEntity; @@ -47,13 +44,13 @@ import org.apache.ambari.server.orm.entities.HostGroupEntity; import org.apache.ambari.server.stack.NoSuchStackException; import org.apache.ambari.server.state.ConfigHelper; +import org.apache.ambari.server.state.StackId; +import org.apache.ambari.server.utils.JsonUtils; import org.apache.commons.lang.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Preconditions; import com.google.common.base.Splitter; -import com.google.common.base.Supplier; import com.google.gson.Gson; /** @@ -61,81 +58,71 @@ */ public class BlueprintImpl implements Blueprint { - private 
static final Logger LOG = LoggerFactory.getLogger(BlueprintImpl.class); - - private String name; - private Map hostGroups = new HashMap<>(); - private Collection mpacks = new ArrayList<>(); - private Configuration configuration; - private BlueprintValidator validator; - private SecurityConfiguration security; - private Setting setting; - private List repoSettings = new ArrayList<>(); - private boolean allMpacksResolved = false; - - public BlueprintImpl(BlueprintEntity entity) throws NoSuchStackException { - this.name = entity.getBlueprintName(); - if (entity.getSecurityType() != null) { - this.security = new SecurityConfiguration(entity.getSecurityType(), - entity.getSecurityDescriptorReference(), - null); - } - mpacks.addAll(parseMpacks(entity)); + private final String name; + private final Map hostGroups; + private final Collection mpacks; + private final StackDefinition stack; + private final Set stackIds; + private final Configuration configuration; + private final SecurityConfiguration security; + private final Setting setting; + private final List repoSettings; + + public BlueprintImpl(BlueprintEntity entity, StackDefinition stack, Set stackIds) throws NoSuchStackException { + name = entity.getBlueprintName(); + security = entity.getSecurityType() != null + ? new SecurityConfiguration(entity.getSecurityType(), entity.getSecurityDescriptorReference(), null) + : SecurityConfiguration.NONE; + mpacks = parseMpacks(entity); + + this.stack = stack; + this.stackIds = stackIds; // create config first because it is set as a parent on all host-group configs configuration = processConfiguration(entity.getConfigurations()); - parseBlueprintHostGroups(entity); + hostGroups = parseBlueprintHostGroups(entity); // TODO: how to handle multiple stacks correctly? -// configuration.setParentConfiguration(stack.getConfiguration(getServices())); - validator = new BlueprintValidatorImpl(this); - processSetting(entity.getSettings()); - processRepoSettings(); - } - - /** - * Legacy constructor for pre-multi-mpack code. - * @param name blueprint name - * @param groups host groups - * @param stack stack - * @param configuration configuration - * @param security security config - * @param setting setting - */ - @Deprecated - public BlueprintImpl(String name, Collection groups, Stack stack, Configuration configuration, - SecurityConfiguration security, Setting setting) { - this(name, groups, stackToMpacks(stack), configuration, security, setting); - } - - private static Collection stackToMpacks(Stack stack) { - MpackInstance mpack = new MpackInstance(stack.getName(), stack.getVersion(), null, stack, new Configuration()); - return Collections.singleton(mpack); + configuration.setParentConfiguration(stack.getConfiguration(getServices())); + setting = processSetting(entity.getSettings()); + repoSettings = processRepoSettings(); } - public BlueprintImpl(String name, Collection groups, Collection mpacks, - Configuration configuration, SecurityConfiguration security, Setting setting) { + public BlueprintImpl(String name, Collection groups, StackDefinition stack, Set stackIds, Collection mpacks, + Configuration configuration, SecurityConfiguration security, Setting setting) { this.name = name; this.mpacks = mpacks; - this.security = security; + this.stack = stack; + this.stackIds = stackIds; + this.security = security != null ? 
security : SecurityConfiguration.NONE; // caller should set host group configs + hostGroups = new HashMap<>(); for (HostGroup hostGroup : groups) { hostGroups.put(hostGroup.getName(), hostGroup); } // TODO: handle configuration from multiple stacks properly // if the parent isn't set, the stack configuration is set as the parent this.configuration = configuration; -// if (configuration.getParentConfiguration() == null) { -// configuration.setParentConfiguration(stack.getConfiguration(getServices())); -// } - validator = new BlueprintValidatorImpl(this); + if (configuration.getParentConfiguration() == null) { + configuration.setParentConfiguration(stack.getConfiguration(getServices())); + } this.setting = setting; + repoSettings = processRepoSettings(); } public String getName() { return name; } + public Set getStackIds() { + return stackIds; + } + + @Override + public Set getStackIdsForService(String service) { + return stack.getStacksForService(service); + } + public SecurityConfiguration getSecurity() { return security; } @@ -307,14 +294,8 @@ public Collection getMpacks() { return mpacks; } - @Override - public Collection getStacks() { - return mpacks.stream().map(MpackInstance::getStack).filter(s -> null != s).collect(toList()); - } - - @Override - public Stack getStack() { - return getStacks().iterator().next(); + public StackDefinition getStack() { + return stack; } /** @@ -353,25 +334,18 @@ public Collection getHostGroupsForService(String service) { return resultGroups; } - @Override - public void validateTopology() throws InvalidTopologyException { - validator.validateTopology(); - } - public BlueprintEntity toEntity() { BlueprintEntity entity = new BlueprintEntity(); entity.setBlueprintName(name); - if (security != null) { - if (security.getType() != null) { - entity.setSecurityType(security.getType()); - } - if (security.getDescriptorReference() != null) { - entity.setSecurityDescriptorReference(security.getDescriptorReference()); - } + if (security.getType() != null) { + entity.setSecurityType(security.getType()); + } + if (security.getDescriptorReference() != null) { + entity.setSecurityDescriptorReference(security.getDescriptorReference()); } createHostGroupEntities(entity); - Collection configEntities = toConfigEntities(getConfiguration(), () -> new BlueprintConfigEntity()); + Collection configEntities = toConfigEntities(getConfiguration(), BlueprintConfigEntity::new); configEntities.forEach(configEntity -> { configEntity.setBlueprintEntity(entity); configEntity.setBlueprintName(getName()); @@ -390,17 +364,6 @@ private void createMpackInstanceEntities(BlueprintEntity entity) { }); } - /** - * Validate blueprint configuration. 
- * - * @throws InvalidTopologyException if the blueprint configuration is invalid - * @throws GPLLicenseNotAcceptedException ambari was configured to use gpl software, but gpl license is not accepted - */ - @Override - public void validateRequiredProperties() throws InvalidTopologyException, GPLLicenseNotAcceptedException { - validator.validateRequiredProperties(); - } - private Collection parseMpacks(BlueprintEntity blueprintEntity) throws NoSuchStackException { Collection mpackInstances = new ArrayList<>(); for (BlueprintMpackInstanceEntity mpack: blueprintEntity.getMpackInstances()) { @@ -410,7 +373,6 @@ private Collection parseMpacks(BlueprintEntity blueprintEntity) t mpackInstance.setUrl(mpack.getMpackUri()); mpackInstance.setConfiguration(processConfiguration(mpack.getConfigurations())); // TODO: come up with proper mpack -> stack resolution - tryParseStack(mpack.getMpackName(), mpack.getMpackVersion()).ifPresent( stack -> mpackInstance.setStack(stack) ); for(BlueprintServiceEntity serviceEntity: mpack.getServiceInstances()) { ServiceInstance serviceInstance = new ServiceInstance( serviceEntity.getName(), @@ -423,32 +385,10 @@ private Collection parseMpacks(BlueprintEntity blueprintEntity) t return mpackInstances; } - private Stack parseStack(String stackName, String stackVersion) throws NoSuchStackException { - try { - //todo: don't pass in controller - return new Stack(stackName, stackVersion, AmbariServer.getController()); - } catch (StackAccessException e) { - throw new NoSuchStackException(stackName, stackVersion, e); - } catch (AmbariException e) { - //todo: - throw new RuntimeException("An error occurred parsing the stack information.", e); - } - } - - private Optional tryParseStack(String stackName, String stackVersion) { - try { - return Optional.of(parseStack(stackName, stackVersion)); - } - catch (Exception ex) { - LOG.warn("Cannot parse stack {}-{}. Exception: {}/{}", stackName, stackVersion, ex.getClass().getName(), - ex.getMessage()); - return Optional.empty(); - } - } - private Map parseBlueprintHostGroups(BlueprintEntity entity) { + Map hostGroups = new HashMap<>(); for (HostGroupEntity hostGroupEntity : entity.getHostGroups()) { - HostGroupImpl hostGroup = new HostGroupImpl(hostGroupEntity, getName(), getStacks()); + HostGroupImpl hostGroup = new HostGroupImpl(hostGroupEntity, getName(), getStack()); // set the bp configuration as the host group config parent hostGroup.getConfiguration().setParentConfiguration(configuration); hostGroups.put(hostGroupEntity.getName(), hostGroup); @@ -467,13 +407,11 @@ private Configuration processConfiguration(Collection blueprintSetting) { - if (blueprintSetting != null) { - setting = new Setting(parseSetting(blueprintSetting)); - } + private Setting processSetting(Collection blueprintSetting) { + return blueprintSetting != null + ? 
new Setting(parseSetting(blueprintSetting)) + : null; } /** @@ -549,7 +487,7 @@ private void createHostGroupEntities(BlueprintEntity blueprintEntity) { hostGroupEntity.setBlueprintName(getName()); hostGroupEntity.setCardinality(group.getCardinality()); - Collection configEntities = toConfigEntities(group.getConfiguration(), () -> new HostGroupConfigEntity()); + Collection configEntities = toConfigEntities(group.getConfiguration(), HostGroupConfigEntity::new); configEntities.forEach(configEntity -> { configEntity.setBlueprintName(getName()); configEntity.setHostGroupEntity(hostGroupEntity); @@ -641,13 +579,14 @@ static Collection toConfigEntities(Configu * @return the configuration */ static Configuration fromConfigEntities(Collection configEntities) { - Gson jsonSerializer = new Gson(); Configuration configuration = new Configuration(); for (BlueprintConfiguration configEntity: configEntities) { String type = configEntity.getType(); - Map configData = jsonSerializer.fromJson(configEntity.getConfigData(), Map.class); - Map> configAttributes = jsonSerializer.fromJson(configEntity.getConfigAttributes(), Map.class); + Map configData = JsonUtils.fromJson(configEntity.getConfigData(), + new TypeReference>(){}); + Map> configAttributes = JsonUtils.fromJson(configEntity.getConfigAttributes(), + new TypeReference>>(){}); if (null != configData) { configuration.getProperties().put(type, configData); } @@ -689,29 +628,26 @@ public boolean isValidConfigType(String configType) { if (ConfigHelper.CLUSTER_ENV.equals(configType) || "global".equals(configType)) { return true; } - - Collection services = getStacks().stream().map(stack -> stack.getServiceForConfigType(configType)).collect(toList()); - for (String service: services) { - if (getServices().contains(service)) { - return true; - } - } - return false; + String service = getStack().getServiceForConfigType(configType); + return getServices().contains(service); } /** * Parse stack repo info stored in the blueprint_settings table - * @return set of repositories - * */ - private void processRepoSettings(){ - repoSettings = new ArrayList<>(); - if (setting != null){ - Set> settingValue = setting.getSettingValue(Setting.SETTING_NAME_REPOSITORY_SETTINGS); - for (Map setting : settingValue) { - RepositorySetting rs = parseRepositorySetting(setting); - repoSettings.add(rs); - } + */ + private List processRepoSettings() { + if (setting == null) { + return Collections.emptyList(); + } + + Set> repositorySettingsValue = setting.getSettingValue(Setting.SETTING_NAME_REPOSITORY_SETTINGS); + if (repositorySettingsValue == null) { + return Collections.emptyList(); } + + return repositorySettingsValue.stream() + .map(this::parseRepositorySetting) + .collect(toList()); } private RepositorySetting parseRepositorySetting(Map setting){ @@ -727,19 +663,4 @@ public List getRepositorySettings(){ return repoSettings; } - /** - * {@inheritDoc} - */ - @Override - public boolean isAllMpacksResolved() { - return !mpacks.stream().filter(mpack -> mpack.getStack() == null).findAny().isPresent(); - } - - /** - * {@inheritDoc} - */ - public Collection getUnresolvedMpackNames() { - return mpacks.stream().filter(mpack -> mpack.getStack() == null).map(MpackInstance::getMpackNameAndVersion).collect(toList()); - } - } diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java index 0f831685fb5..64e08ad1b5c 100644 --- 
a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidator.java @@ -16,10 +16,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.ambari.server.topology; - /** * Provides blueprint validation. */ @@ -27,16 +25,18 @@ public interface BlueprintValidator { /** * Validate blueprint topology. * + * @param blueprint the blueprint to validate * @throws InvalidTopologyException if the topology is invalid */ - void validateTopology() throws InvalidTopologyException; + void validateTopology(Blueprint blueprint) throws InvalidTopologyException; /** * Validate that required properties are provided. * This doesn't include password properties. * + * @param blueprint the blueprint to validate * @throws InvalidTopologyException if required properties are not set in blueprint - * @throws GPLLicenseNotAcceptedException ambari was configured to use gpl software, but gpl license is not accepted + * @throws GPLLicenseNotAcceptedException if the blueprint requires use of GPL software, but GPL license was not accepted */ - void validateRequiredProperties() throws InvalidTopologyException, GPLLicenseNotAcceptedException; + void validateRequiredProperties(Blueprint blueprint) throws InvalidTopologyException, GPLLicenseNotAcceptedException; } diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java index 1e6213edc35..39cbbfaf360 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/BlueprintValidatorImpl.java @@ -26,56 +26,45 @@ import java.util.Map; import java.util.Set; -import org.apache.ambari.server.StaticallyInject; -import org.apache.ambari.server.controller.internal.Stack; +import org.apache.ambari.server.configuration.Configuration; +import org.apache.ambari.server.controller.internal.StackDefinition; import org.apache.ambari.server.state.AutoDeployInfo; import org.apache.ambari.server.state.DependencyConditionInfo; import org.apache.ambari.server.state.DependencyInfo; import org.apache.ambari.server.utils.SecretReference; -import org.apache.ambari.server.utils.VersionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.base.Joiner; import com.google.inject.Inject; /** * Default blueprint validator. 
*/ -@StaticallyInject public class BlueprintValidatorImpl implements BlueprintValidator { private static final Logger LOGGER = LoggerFactory.getLogger(BlueprintValidatorImpl.class); - private final Blueprint blueprint; - private final Stack stack; public static final String LZO_CODEC_CLASS_PROPERTY_NAME = "io.compression.codec.lzo.class"; public static final String CODEC_CLASSES_PROPERTY_NAME = "io.compression.codecs"; public static final String LZO_CODEC_CLASS = "com.hadoop.compression.lzo.LzoCodec"; - @Inject - private static org.apache.ambari.server.configuration.Configuration configuration; + private final Configuration configuration; - public BlueprintValidatorImpl(Blueprint blueprint) { - this.blueprint = blueprint; - this.stack = blueprint.getStack(); + @Inject + public BlueprintValidatorImpl(Configuration configuration) { + this.configuration = configuration; } @Override - public void validateTopology() throws InvalidTopologyException { + public void validateTopology(Blueprint blueprint) throws InvalidTopologyException { LOGGER.info("Validating topology for blueprint: [{}]", blueprint.getName()); - if (!blueprint.isAllMpacksResolved()) { - LOGGER.warn("The following macks are not resolved: [{}] Skipping topology validation.", - Joiner.on(", ").join(blueprint.getUnresolvedMpackNames())); - return; - } - + StackDefinition stack = blueprint.getStack(); Collection hostGroups = blueprint.getHostGroups().values(); Map>> missingDependencies = new HashMap<>(); for (HostGroup group : hostGroups) { - Map> missingGroupDependencies = validateHostGroup(group); + Map> missingGroupDependencies = validateHostGroup(blueprint, stack, group); if (!missingGroupDependencies.isEmpty()) { missingDependencies.put(group.getName(), missingGroupDependencies); } @@ -89,10 +78,10 @@ public void validateTopology() throws InvalidTopologyException { Cardinality cardinality = stack.getCardinality(component); AutoDeployInfo autoDeploy = stack.getAutoDeployInfo(component); if (cardinality.isAll()) { - cardinalityFailures.addAll(verifyComponentInAllHostGroups(new Component(component), autoDeploy)); + cardinalityFailures.addAll(verifyComponentInAllHostGroups(blueprint, new Component(component), autoDeploy)); } else { cardinalityFailures.addAll(verifyComponentCardinalityCount( - new Component(component), cardinality, autoDeploy)); + stack, blueprint, new Component(component), cardinality, autoDeploy)); } } } @@ -103,7 +92,7 @@ public void validateTopology() throws InvalidTopologyException { } @Override - public void validateRequiredProperties() throws InvalidTopologyException, GPLLicenseNotAcceptedException { + public void validateRequiredProperties(Blueprint blueprint) throws InvalidTopologyException, GPLLicenseNotAcceptedException { // we don't want to include default stack properties so we can't just use hostGroup full properties Map> clusterConfigurations = blueprint.getConfiguration().getProperties(); @@ -161,58 +150,31 @@ public void validateRequiredProperties() throws InvalidTopologyException, GPLLic } } if (ClusterTopologyImpl.isNameNodeHAEnabled(clusterConfigurations) && component.equals("NAMENODE")) { - Map hadoopEnvConfig = clusterConfigurations.get("hadoop-env"); - if(hadoopEnvConfig != null && !hadoopEnvConfig.isEmpty() && hadoopEnvConfig.containsKey("dfs_ha_initial_namenode_active") && hadoopEnvConfig.containsKey("dfs_ha_initial_namenode_standby")) { - ArrayList hostGroupsForComponent = new ArrayList<>(blueprint.getHostGroupsForComponent(component)); - Set givenHostGroups = new HashSet<>(); - 
givenHostGroups.add(hadoopEnvConfig.get("dfs_ha_initial_namenode_active")); - givenHostGroups.add(hadoopEnvConfig.get("dfs_ha_initial_namenode_standby")); - if(givenHostGroups.size() != hostGroupsForComponent.size()) { - throw new IllegalArgumentException("NAMENODE HA host groups mapped incorrectly for properties 'dfs_ha_initial_namenode_active' and 'dfs_ha_initial_namenode_standby'. Expected Host groups are :" + hostGroupsForComponent); - } - if(HostGroup.HOSTGROUP_REGEX.matcher(hadoopEnvConfig.get("dfs_ha_initial_namenode_active")).matches() && HostGroup.HOSTGROUP_REGEX.matcher(hadoopEnvConfig.get("dfs_ha_initial_namenode_standby")).matches()){ - for (HostGroup hostGroupForComponent : hostGroupsForComponent) { - Iterator itr = givenHostGroups.iterator(); - while(itr.hasNext()){ - if(itr.next().contains(hostGroupForComponent.getName())){ - itr.remove(); - } - } - } - if(!givenHostGroups.isEmpty()){ - throw new IllegalArgumentException("NAMENODE HA host groups mapped incorrectly for properties 'dfs_ha_initial_namenode_active' and 'dfs_ha_initial_namenode_standby'. Expected Host groups are :" + hostGroupsForComponent); + Map hadoopEnvConfig = clusterConfigurations.get("hadoop-env"); + if(hadoopEnvConfig != null && !hadoopEnvConfig.isEmpty() && hadoopEnvConfig.containsKey("dfs_ha_initial_namenode_active") && hadoopEnvConfig.containsKey("dfs_ha_initial_namenode_standby")) { + ArrayList hostGroupsForComponent = new ArrayList<>(blueprint.getHostGroupsForComponent(component)); + Set givenHostGroups = new HashSet<>(); + givenHostGroups.add(hadoopEnvConfig.get("dfs_ha_initial_namenode_active")); + givenHostGroups.add(hadoopEnvConfig.get("dfs_ha_initial_namenode_standby")); + if(givenHostGroups.size() != hostGroupsForComponent.size()) { + throw new IllegalArgumentException("NAMENODE HA host groups mapped incorrectly for properties 'dfs_ha_initial_namenode_active' and 'dfs_ha_initial_namenode_standby'. 
Expected Host groups are :" + hostGroupsForComponent); + } + if(HostGroup.HOSTGROUP_REGEX.matcher(hadoopEnvConfig.get("dfs_ha_initial_namenode_active")).matches() && HostGroup.HOSTGROUP_REGEX.matcher(hadoopEnvConfig.get("dfs_ha_initial_namenode_standby")).matches()){ + for (HostGroup hostGroupForComponent : hostGroupsForComponent) { + Iterator itr = givenHostGroups.iterator(); + while(itr.hasNext()){ + if(itr.next().contains(hostGroupForComponent.getName())){ + itr.remove(); + } } - } } - } - - if (blueprint.isAllMpacksResolved()) { - if (component.equals("HIVE_METASTORE")) { - Map hiveEnvConfig = clusterConfigurations.get("hive-env"); - if (hiveEnvConfig != null && !hiveEnvConfig.isEmpty() && hiveEnvConfig.get("hive_database") != null - && hiveEnvConfig.get("hive_database").equals("Existing SQL Anywhere Database") - && VersionUtils.compareVersions(stack.getVersion(), "2.3.0.0") < 0 - && stack.getName().equalsIgnoreCase("HDP")) { - throw new InvalidTopologyException("Incorrect configuration: SQL Anywhere db is available only for stack HDP-2.3+ " + - "and repo version 2.3.2+!"); } - } - if (component.equals("OOZIE_SERVER")) { - Map oozieEnvConfig = clusterConfigurations.get("oozie-env"); - if (oozieEnvConfig != null && !oozieEnvConfig.isEmpty() && oozieEnvConfig.get("oozie_database") != null - && oozieEnvConfig.get("oozie_database").equals("Existing SQL Anywhere Database") - && VersionUtils.compareVersions(stack.getVersion(), "2.3.0.0") < 0 - && stack.getName().equalsIgnoreCase("HDP")) { - throw new InvalidTopologyException("Incorrect configuration: SQL Anywhere db is available only for stack HDP-2.3+ " + - "and repo version 2.3.2+!"); + if(!givenHostGroups.isEmpty()){ + throw new IllegalArgumentException("NAMENODE HA host groups mapped incorrectly for properties 'dfs_ha_initial_namenode_active' and 'dfs_ha_initial_namenode_standby'. Expected Host groups are :" + hostGroupsForComponent); } } } - else { - LOGGER.warn("The following macks are not resolved: [{}] Skipping validation of HIVE_METASTORE and OOZIE_SERVER properties.", - Joiner.on(", ").join(blueprint.getUnresolvedMpackNames())); - } } } } @@ -221,13 +183,14 @@ public void validateRequiredProperties() throws InvalidTopologyException, GPLLic * Verify that a component is included in all host groups. * For components that are auto-install enabled, will add component to topology if needed. 
* + * + * @param blueprint blueprint to validate * @param component component to validate * @param autoDeploy auto-deploy information for component * * @return collection of missing component information */ - private Collection verifyComponentInAllHostGroups(Component component, AutoDeployInfo autoDeploy) { - + private Collection verifyComponentInAllHostGroups(Blueprint blueprint, Component component, AutoDeployInfo autoDeploy) { Collection cardinalityFailures = new HashSet<>(); int actualCount = blueprint.getHostGroupsForComponent(component.getName()).size(); Map hostGroups = blueprint.getHostGroups(); @@ -243,7 +206,7 @@ private Collection verifyComponentInAllHostGroups(Component component, A return cardinalityFailures; } - private Map> validateHostGroup(HostGroup group) { + private Map> validateHostGroup(Blueprint blueprint, StackDefinition stack, HostGroup group) { LOGGER.info("Validating hostgroup: {}", group.getName()); Map> missingDependencies = new HashMap<>(); @@ -288,7 +251,7 @@ private Map> validateHostGroup(HostGroup grou } if (dependencyScope.equals("cluster")) { Collection missingDependencyInfo = verifyComponentCardinalityCount( - new Component(componentName), new Cardinality("1+"), autoDeployInfo); + stack, blueprint, new Component(componentName), new Cardinality("1+"), autoDeployInfo); resolved = missingDependencyInfo.isEmpty(); } else if (dependencyScope.equals("host")) { @@ -315,16 +278,22 @@ private Map> validateHostGroup(HostGroup grou * Verify that a component meets cardinality requirements. For components that are * auto-install enabled, will add component to topology if needed. * + * + * @param stack stack definition + * @param blueprint blueprint to validate * @param component component to validate * @param cardinality required cardinality * @param autoDeploy auto-deploy information for component * * @return collection of missing component information */ - public Collection verifyComponentCardinalityCount(Component component, - Cardinality cardinality, - AutoDeployInfo autoDeploy) { - + private Collection verifyComponentCardinalityCount( + StackDefinition stack, + Blueprint blueprint, + Component component, + Cardinality cardinality, + AutoDeployInfo autoDeploy + ) { Map> configProperties = blueprint.getConfiguration().getProperties(); Collection cardinalityFailures = new HashSet<>(); //todo: don't hard code this HA logic here @@ -362,13 +331,13 @@ public Collection verifyComponentCardinalityCount(Component component, * Determine if a component is managed, meaning that it is running inside of the cluster * topology. Generally, non-managed dependencies will be database components. 
* - * @param stack stack instance + * @param stack stack definition * @param component component to determine if it is managed * @param clusterConfig cluster configuration * * @return true if the specified component managed by the cluster; false otherwise */ - protected boolean isDependencyManaged(Stack stack, String component, Map> clusterConfig) { + protected boolean isDependencyManaged(StackDefinition stack, String component, Map> clusterConfig) { boolean isManaged = true; String externalComponentConfig = stack.getExternalComponentConfig(component); if (externalComponentConfig != null) { diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java index dfe5dd01c55..ee626bce84a 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/ClusterConfigurationRequest.java @@ -36,7 +36,7 @@ import org.apache.ambari.server.controller.internal.BlueprintConfigurationProcessor; import org.apache.ambari.server.controller.internal.ClusterResourceProvider; import org.apache.ambari.server.controller.internal.ConfigurationTopologyException; -import org.apache.ambari.server.controller.internal.Stack; +import org.apache.ambari.server.controller.internal.StackDefinition; import org.apache.ambari.server.serveraction.kerberos.KerberosInvalidConfigurationException; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.SecurityType; @@ -63,7 +63,7 @@ public class ClusterConfigurationRequest { private ClusterTopology clusterTopology; private BlueprintConfigurationProcessor configurationProcessor; private StackAdvisorBlueprintProcessor stackAdvisorBlueprintProcessor; - private Stack stack; + private StackDefinition stack; private boolean configureSecurity = false; public ClusterConfigurationRequest(AmbariContext ambariContext, ClusterTopology topology, boolean setInitial, StackAdvisorBlueprintProcessor stackAdvisorBlueprintProcessor, boolean configureSecurity) { diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/Configurable.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/Configurable.java index b71b12c0bf8..c20d10a4314 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/Configurable.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/Configurable.java @@ -18,8 +18,8 @@ package org.apache.ambari.server.topology; -import static org.apache.ambari.server.topology.BlueprintFactory.PROPERTIES_ATTRIBUTES_PROPERTY_ID; -import static org.apache.ambari.server.topology.BlueprintFactory.PROPERTIES_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.PROPERTIES_ATTRIBUTES_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.PROPERTIES_PROPERTY_ID; import java.util.ArrayList; import java.util.Collection; @@ -42,7 +42,7 @@ public interface Configurable { Configuration getConfiguration(); @JsonProperty("configurations") - default void setConfigs(Collection> configs) { + default void setConfigs(Collection> configs) { Configuration configuration; if (!configs.isEmpty() && configs.iterator().next().keySet().iterator().next().contains("/")) { // Configuration has keys with slashes like "zk.cfg/properties/dataDir" means it is coming through @@ -73,9 +73,9 @@ 
default void setConfigs(Collection> conf } @JsonProperty("configurations") - default Collection>> getConfigs() { + default Collection>> getConfigs() { Configuration configuration = getConfiguration(); - Collection>> configurations = new ArrayList<>(); + Collection>> configurations = new ArrayList<>(); Set allConfigTypes = Sets.union(configuration.getProperties().keySet(), configuration.getAttributes().keySet()); for (String configType: allConfigTypes) { Map> configData = new HashMap<>(); diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/Configuration.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/Configuration.java index 8ffbdae771e..c32b775e421 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/Configuration.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/Configuration.java @@ -43,12 +43,15 @@ public class Configuration { */ private Configuration parentConfiguration; - public Configuration() { properties = new HashMap<>(); attributes = new HashMap<>(); } + public static Configuration createEmpty() { + return new Configuration(new HashMap<>(), new HashMap<>()); + } + /** * Constructor. * @@ -365,4 +368,19 @@ public void removeConfigType(String configType) { parentConfiguration.removeConfigType(configType); } } + + public static Configuration combine(Configuration c1, Configuration c2) { + if (c1 == null || (c1.getProperties().isEmpty() && c1.getAttributes().isEmpty())) { + return c2; + } + if (c2 == null || (c2.getProperties().isEmpty() && c2.getAttributes().isEmpty())) { + return c1; + } + Configuration combined = new Configuration(new HashMap<>(), new HashMap<>()); + combined.getProperties().putAll(c1.getProperties()); + combined.getProperties().putAll(c2.getProperties()); + combined.getAttributes().putAll(c1.getAttributes()); + combined.getAttributes().putAll(c2.getAttributes()); + return combined; + } } diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/ConfigurationFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/ConfigurationFactory.java index 7f9a06f2d2b..7249e3c995b 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/ConfigurationFactory.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/ConfigurationFactory.java @@ -10,8 +10,7 @@ * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distribut - * ed on an "AS IS" BASIS, + * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
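Illustrative note on the new Configuration.combine above: it merges the two property maps with putAll at the config-type level, so when both arguments define the same type (for example core-site), the second argument's map for that type replaces the first's wholesale; properties are not merged one by one. A minimal JDK-only mirror of that behaviour follows; the type and property values are made up.

    import java.util.HashMap;
    import java.util.Map;

    public class CombineSemanticsSketch {
      public static void main(String[] args) {
        // type -> (property -> value), mirroring Configuration#getProperties()
        Map<String, Map<String, String>> first = new HashMap<>();
        Map<String, String> firstCoreSite = new HashMap<>();
        firstCoreSite.put("fs.defaultFS", "hdfs://one:8020");
        firstCoreSite.put("io.file.buffer.size", "4096");
        first.put("core-site", firstCoreSite);

        Map<String, Map<String, String>> second = new HashMap<>();
        Map<String, String> secondCoreSite = new HashMap<>();
        secondCoreSite.put("fs.defaultFS", "hdfs://two:8020");
        second.put("core-site", secondCoreSite);

        // Same order of putAll calls as Configuration.combine: the second map wins per config type.
        Map<String, Map<String, String>> combined = new HashMap<>();
        combined.putAll(first);
        combined.putAll(second);

        System.out.println(combined.get("core-site").get("fs.defaultFS"));                // hdfs://two:8020
        System.out.println(combined.get("core-site").containsKey("io.file.buffer.size")); // false: whole type replaced
      }
    }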
@@ -19,6 +18,9 @@ package org.apache.ambari.server.topology; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.PROPERTIES_ATTRIBUTES_PROPERTY_ID; +import static org.apache.ambari.server.controller.internal.BlueprintResourceProvider.PROPERTIES_PROPERTY_ID; + import java.util.Collection; import java.util.HashMap; import java.util.Map; @@ -59,8 +61,8 @@ private ConfigurationStrategy decidePopulationStrategy(Map confi String propertiesType = keyNameTokens[1]; if (levels == 2) { return new ConfigurationStrategyV1(); - } else if ((levels == 3 && BlueprintFactory.PROPERTIES_PROPERTY_ID.equals(propertiesType)) - || (levels == 4 && BlueprintFactory.PROPERTIES_ATTRIBUTES_PROPERTY_ID.equals(propertiesType))) { + } else if ((levels == 3 && PROPERTIES_PROPERTY_ID.equals(propertiesType)) + || (levels == 4 && PROPERTIES_ATTRIBUTES_PROPERTY_ID.equals(propertiesType))) { return new ConfigurationStrategyV2(); } else { throw new IllegalArgumentException(SCHEMA_IS_NOT_SUPPORTED_MESSAGE); @@ -111,9 +113,9 @@ protected static class ConfigurationStrategyV2 extends ConfigurationStrategy { @Override protected void setConfiguration(Configuration configuration, String[] propertyNameTokens, String propertyValue) { String type = propertyNameTokens[0]; - if (BlueprintFactory.PROPERTIES_PROPERTY_ID.equals(propertyNameTokens[1])) { + if (PROPERTIES_PROPERTY_ID.equals(propertyNameTokens[1])) { configuration.setProperty(type, propertyNameTokens[2], propertyValue); - } else if (BlueprintFactory.PROPERTIES_ATTRIBUTES_PROPERTY_ID.equals(propertyNameTokens[1])) { + } else if (PROPERTIES_ATTRIBUTES_PROPERTY_ID.equals(propertyNameTokens[1])) { configuration.setAttribute(type, propertyNameTokens[3], propertyNameTokens[2], propertyValue); } } diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/HostGroup.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/HostGroup.java index 43ce5b92a93..a329f42053f 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/HostGroup.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/HostGroup.java @@ -22,7 +22,7 @@ import java.util.regex.Pattern; import org.apache.ambari.server.controller.internal.ProvisionAction; -import org.apache.ambari.server.controller.internal.Stack; +import org.apache.ambari.server.controller.internal.StackDefinition; /** * Host Group representation. @@ -109,8 +109,6 @@ public interface HostGroup { /** * Add a component to the host group - * @param component - * @return */ boolean addComponent(Component component); @@ -142,15 +140,7 @@ public interface HostGroup { * * @return associated stack */ - @Deprecated - Stack getStack(); - - /** - * Get the stack associated with the host group. - * - * @return associated stacks - */ - Collection getStacks(); + StackDefinition getStack(); /** * Get the cardinality value that was specified for the host group. 
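Illustrative sketch of the key-depth dispatch in decidePopulationStrategy above, assuming the usual blueprint key names "properties" and "properties_attributes" behind PROPERTIES_PROPERTY_ID and PROPERTIES_ATTRIBUTES_PROPERTY_ID: a two-token key selects the legacy V1 schema, while three- and four-token keys select V2. The keys and class below are hypothetical examples, not Ambari API.

    import java.util.Arrays;
    import java.util.List;

    public class ConfigKeySchemaSketch {
      public static void main(String[] args) {
        List<String> keys = Arrays.asList(
            "hdfs-site/dfs.replication",                              // 2 tokens -> V1 schema
            "hdfs-site/properties/dfs.replication",                   // 3 tokens, "properties" -> V2 schema
            "hdfs-site/properties_attributes/final/dfs.replication"); // 4 tokens, "properties_attributes" -> V2 schema

        for (String key : keys) {
          String[] tokens = key.split("/");
          String strategy;
          if (tokens.length == 2) {
            strategy = "V1";
          } else if ((tokens.length == 3 && "properties".equals(tokens[1]))
              || (tokens.length == 4 && "properties_attributes".equals(tokens[1]))) {
            strategy = "V2";
          } else {
            strategy = "unsupported";
          }
          System.out.println(key + " -> " + strategy);
        }
      }
    }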
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/HostGroupImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/HostGroupImpl.java index 88ad1627ca8..0899d0bf111 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/HostGroupImpl.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/HostGroupImpl.java @@ -22,27 +22,22 @@ import static java.util.stream.Collectors.toList; import static java.util.stream.Collectors.toSet; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.List; +import java.util.LinkedHashSet; import java.util.Map; -import java.util.Optional; import java.util.Set; -import java.util.stream.Collectors; import org.apache.ambari.server.controller.internal.ProvisionAction; -import org.apache.ambari.server.controller.internal.Stack; +import org.apache.ambari.server.controller.internal.StackDefinition; import org.apache.ambari.server.orm.entities.HostGroupComponentEntity; import org.apache.ambari.server.orm.entities.HostGroupConfigEntity; import org.apache.ambari.server.orm.entities.HostGroupEntity; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.collect.Sets; import com.google.gson.Gson; /** @@ -65,7 +60,7 @@ public class HostGroupImpl implements HostGroup { /** * components contained in the host group */ - private final List components = new ArrayList<>(); + private final Set components = new LinkedHashSet<>(); /** * map of service to components for the host group @@ -78,31 +73,29 @@ public class HostGroupImpl implements HostGroup { */ private Configuration configuration = null; - private boolean containsMasterComponent = false; + private boolean containsMasterComponent = false; // FIXME never set - private Map stackMap; + private StackDefinition stack; private String cardinality = "NOT SPECIFIED"; - public HostGroupImpl(HostGroupEntity entity, String blueprintName, Collection stacks) { + public HostGroupImpl(HostGroupEntity entity, String blueprintName, StackDefinition stack) { this.name = entity.getName(); this.cardinality = entity.getCardinality(); this.blueprintName = blueprintName; - this.stackMap = stacks.stream().collect(Collectors.toMap(stack -> stack.getName() + "-" + stack.getVersion(), stack -> stack)); + this.stack = stack; parseComponents(entity); parseConfigurations(entity); } - public HostGroupImpl(String name, String bpName, Collection stacks, Collection components, Configuration configuration, String cardinality) { + public HostGroupImpl(String name, String bpName, StackDefinition stack, Collection components, Configuration configuration, String cardinality) { this.name = name; this.blueprintName = bpName; - this.stackMap = stacks.stream().collect(Collectors.toMap(stack -> stack.getName() + "-" + stack.getVersion(), stack -> stack)); + this.stack = stack; // process each component for (Component component: components) { - if (!addComponent(component)) { - throw new IllegalArgumentException("Ambiguous component or can't determine stack for: " + component); - } + addComponent(component); } this.configuration = configuration; @@ -162,82 +155,25 @@ public Collection getServices() { } /** - * Adds a component to the host group. The component is successfully added if it is not a duplicate or ambiguous (as of - * Ambari 3.1 multiple components of the same type can exist in a hostgroup. 
However, they have to come from different - * management packs or belong to different service instances) + * Adds a component to the host group. * @param component the component to add - * @return a boolean to indicate if addition was successful */ + @Override public boolean addComponent(Component component) { - // Exclude ambiguous component definitions - boolean ambigous = components.stream().filter(c -> { - if (c.getName().equals(component.getName())) { // found another component with the same name - if (c.getMpackInstance() == null || component.getMpackInstance() == null) { - return true; // if either of them has no mpack instance defined it is ambiguous - } - if (c.getMpackInstance().equals(component.getMpackInstance())) { - // both components are in the same mpack, and one of them does not declare a service instance or - // both declare the same service instance --> ambiguous - return c.getServiceInstance() != null && component.getServiceInstance() != null && - c.getServiceInstance().equals(component.getServiceInstance()); - } - else { - return false; // different mpacks --> no ambiguity - } - } - else { - return false; // different name --> no ambiguity + if (components.add(component)) { + containsMasterComponent |= stack.isMasterComponent(component.getName()); + + String service = stack.getServiceForComponent(component.getName()); + if (service != null) { + componentsForService + .computeIfAbsent(service, __ -> new HashSet<>()) + .add(component); } - }).findAny().isPresent(); - if (ambigous) { - return false; - } - addComponent(component, getStackForComponent(component)); - return true; - } - private Optional getStackForComponent(Component component) { - // Look for the stack of this component - if (component.getMpackInstance() == null) { - // Component does not declare its stack. Let's find it. - Collection candidateStacks = - stackMap.values().stream().filter(stack -> stack.getServiceForComponent(component.getName()) != null).collect(toList()); - switch (candidateStacks.size()) { - case 0: - // no stack (no service) for this component - LOG.warn("No stack/service found for component: {}", component); - return Optional.empty(); - case 1: - return Optional.of(candidateStacks.iterator().next()); - default: - LOG.warn("Ambiguous stack resolution for component: {}, stacks: {}", component, candidateStacks); - return Optional.empty(); - } - } - else { - // TODO: refine this logic - Stack stack = stackMap.get(component.getMpackInstance()); - if (null == stack) { - LOG.warn("Component declared an invalid stack: {}", component); - } - return Optional.ofNullable(stack); + return true; } - } - private void addComponent(Component component, Optional stack) { - if (stack.isPresent()) { - String serviceName = stack.get().getServiceForComponent(component.getName()); - if (!componentsForService.containsKey(serviceName)) { - componentsForService.put(serviceName, Sets.newHashSet(component)); - } - else { - componentsForService.get(serviceName).add(component); - } - if (stack.get().isMasterComponent(component.getName())) { - containsMasterComponent = true; - } - } - components.add(component); + return false; } /** @@ -255,7 +191,7 @@ public Collection getComponents(String service) { } /** - * Get the names components for the specified service which are associated with the host group. + * Get the names of components for the specified service which are associated with the host group. 
* * @param service service name * @@ -296,14 +232,8 @@ public boolean containsMasterComponent() { } @Override - public Collection getStacks() { - return stackMap.values(); - } - - @Override - @Deprecated - public Stack getStack() { - return getStacks().iterator().next(); + public StackDefinition getStack() { + return stack; } @Override diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/HostRequest.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/HostRequest.java index 13f89b5fbf1..3a8acc82162 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/HostRequest.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/HostRequest.java @@ -34,7 +34,7 @@ import org.apache.ambari.server.api.predicate.PredicateCompiler; import org.apache.ambari.server.controller.internal.HostResourceProvider; import org.apache.ambari.server.controller.internal.ResourceImpl; -import org.apache.ambari.server.controller.internal.Stack; +import org.apache.ambari.server.controller.internal.StackDefinition; import org.apache.ambari.server.controller.spi.Predicate; import org.apache.ambari.server.controller.spi.Resource; import org.apache.ambari.server.orm.entities.HostRoleCommandEntity; @@ -257,7 +257,7 @@ private void createTasks(boolean skipFailure) { "PENDING HOST ASSIGNMENT : HOSTGROUP=" + getHostgroupName(); AmbariContext context = topology.getAmbariContext(); - Stack stack = hostGroup.getStack(); + StackDefinition stack = hostGroup.getStack(); // Skip INSTALL task in case server component is marked as START_ONLY, or the cluster provision_action is // START_ONLY, unless component is marked with INSTALL_ONLY or INSTALL_AND_START. diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/MpackInstance.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/MpackInstance.java index cde01e0565f..6b57b0020e7 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/MpackInstance.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/MpackInstance.java @@ -122,7 +122,7 @@ public BlueprintMpackInstanceEntity toEntity() { mpackEntity.setMpackName(mpackName); mpackEntity.setMpackVersion(mpackVersion); Collection mpackConfigEntities = - BlueprintImpl.toConfigEntities(configuration, () -> new BlueprintMpackConfigEntity()); + BlueprintImpl.toConfigEntities(configuration, BlueprintMpackConfigEntity::new); mpackConfigEntities.forEach( configEntity -> configEntity.setMpackInstance(mpackEntity) ); mpackEntity.setConfigurations(mpackConfigEntities); @@ -131,7 +131,7 @@ public BlueprintMpackInstanceEntity toEntity() { serviceEntity.setName(serviceInstance.getName()); serviceEntity.setType(serviceInstance.getType()); Collection serviceConfigEntities = - BlueprintImpl.toConfigEntities(serviceInstance.getConfiguration(), () -> new BlueprintServiceConfigEntity()); + BlueprintImpl.toConfigEntities(serviceInstance.getConfiguration(), BlueprintServiceConfigEntity::new); serviceConfigEntities.forEach( configEntity -> configEntity.setService(serviceEntity) ); serviceEntity.setConfigurations(serviceConfigEntities); mpackEntity.getServiceInstances().add(serviceEntity); diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/SecurityConfiguration.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/SecurityConfiguration.java index 4ff55042e8e..d7418ced1d2 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/SecurityConfiguration.java +++ 
b/ambari-server/src/main/java/org/apache/ambari/server/topology/SecurityConfiguration.java @@ -28,6 +28,8 @@ */ public class SecurityConfiguration { + public static final SecurityConfiguration NONE = new SecurityConfiguration(SecurityType.NONE); + /** * Security Type */ diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java index 35add3c203c..51eedd31e23 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/TopologyManager.java @@ -51,7 +51,7 @@ import org.apache.ambari.server.controller.internal.ProvisionClusterRequest; import org.apache.ambari.server.controller.internal.RequestImpl; import org.apache.ambari.server.controller.internal.ScaleClusterRequest; -import org.apache.ambari.server.controller.internal.Stack; +import org.apache.ambari.server.controller.internal.StackDefinition; import org.apache.ambari.server.controller.spi.NoSuchParentResourceException; import org.apache.ambari.server.controller.spi.RequestStatus; import org.apache.ambari.server.controller.spi.Resource; @@ -71,6 +71,7 @@ import org.apache.ambari.server.security.authorization.AuthorizationHelper; import org.apache.ambari.server.state.Host; import org.apache.ambari.server.state.SecurityType; +import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.state.host.HostImpl; import org.apache.ambari.server.state.quicklinksprofile.QuickLinksProfile; import org.apache.ambari.server.topology.tasks.ConfigureClusterTask; @@ -81,6 +82,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import com.google.common.collect.Iterables; import com.google.common.eventbus.Subscribe; import com.google.inject.Singleton; import com.google.inject.persist.Transactional; @@ -275,7 +277,7 @@ public RequestStatusResponse provisionCluster(final ProvisionClusterRequest requ final ClusterTopology topology = new ClusterTopologyImpl(ambariContext, request); final String clusterName = request.getClusterName(); - final Stack stack = topology.getBlueprint().getStack(); // TODO: implement multi-stack + final StackDefinition stack = topology.getBlueprint().getStack(); final String repoVersion = request.getRepositoryVersion(); final Long repoVersionID = request.getRepositoryVersionId(); @@ -347,7 +349,8 @@ public LogicalRequest call() throws Exception { //todo: this should be invoked as part of a generic lifecycle event which could possibly //todo: be tied to cluster state - ambariContext.persistInstallStateForUI(clusterName, stack.getName(), stack.getVersion()); + StackId stackId = Iterables.getFirst(topology.getBlueprint().getStackIds(), null); // FIXME need for stackId in ClusterRequest will be removed + ambariContext.persistInstallStateForUI(clusterName, stackId); clusterProvisionWithBlueprintCreateRequests.put(clusterId, logicalRequest); return getRequestStatus(logicalRequest.getRequestId()); } diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/RequiredPasswordValidator.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/RequiredPasswordValidator.java index 38a5153ffdf..5c9e0eb6555 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/RequiredPasswordValidator.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/RequiredPasswordValidator.java @@ -21,6 +21,7 @@ 
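[Editor's note] The TopologyManager change above keeps legacy single-stack call sites working by taking the first element of the blueprint's stack-id set via Guava's Iterables.getFirst, with a FIXME marking it as temporary. A small sketch of that transitional pattern, with StackId replaced by a plain String for brevity:

import java.util.LinkedHashSet;
import java.util.Set;

import com.google.common.collect.Iterables;

public class SingleStackIdExample {
  public static void main(String[] args) {
    Set<String> stackIds = new LinkedHashSet<>();
    stackIds.add("HDP-3.0");

    // getFirst returns the supplied default (null) for an empty set instead
    // of throwing, which suits a lookup that is expected to go away once the
    // legacy API no longer needs a single stack id.
    String stackId = Iterables.getFirst(stackIds, null);
    System.out.println("stack id handed to the legacy API: " + stackId);
  }
}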
import org.apache.ambari.server.controller.RootComponent; import org.apache.ambari.server.controller.internal.Stack; +import org.apache.ambari.server.controller.internal.StackDefinition; import org.apache.ambari.server.state.PropertyInfo; import org.apache.ambari.server.topology.Blueprint; import org.apache.ambari.server.topology.ClusterTopology; @@ -82,7 +83,7 @@ private Map>> validateRequiredPasswords(C Collection processedServices = new HashSet<>(); Blueprint blueprint = topology.getBlueprint(); - Stack stack = blueprint.getStack(); + StackDefinition stack = blueprint.getStack(); HostGroup hostGroup = blueprint.getHostGroup(hostGroupName); for (String component : hostGroup.getComponentNames()) { diff --git a/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/UnitValidator.java b/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/UnitValidator.java index e75ffa42737..31d5275d220 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/UnitValidator.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/topology/validators/UnitValidator.java @@ -22,7 +22,7 @@ import java.util.Map; import java.util.Set; -import org.apache.ambari.server.controller.internal.Stack; +import org.apache.ambari.server.controller.internal.StackDefinition; import org.apache.ambari.server.controller.internal.UnitUpdater.PropertyUnit; import org.apache.ambari.server.topology.ClusterTopology; import org.apache.ambari.server.topology.HostGroupInfo; @@ -42,33 +42,33 @@ public UnitValidator(Set propertiesToBeValidated) { @Override public void validate(ClusterTopology topology) throws InvalidTopologyException { - Stack stack = topology.getBlueprint().getStack(); + StackDefinition stack = topology.getBlueprint().getStack(); validateConfig(topology.getConfiguration().getFullProperties(), stack); for (HostGroupInfo hostGroup : topology.getHostGroupInfo().values()) { validateConfig(hostGroup.getConfiguration().getFullProperties(), stack); } } - private void validateConfig(Map> configuration, Stack stack) { + private void validateConfig(Map> configuration, StackDefinition stack) { for (Map.Entry> each : configuration.entrySet()) { validateConfigType(each.getKey(), each.getValue(), stack); } } - private void validateConfigType(String configType, Map config, Stack stack) { + private void validateConfigType(String configType, Map config, StackDefinition stack) { for (String propertyName : config.keySet()) { validateProperty(configType, config, propertyName, stack); } } - private void validateProperty(String configType, Map config, String propertyName, Stack stack) { + private void validateProperty(String configType, Map config, String propertyName, StackDefinition stack) { relevantProps.stream() .filter(each -> each.hasTypeAndName(configType, propertyName)) .findFirst() .ifPresent(relevantProperty -> checkUnit(config, stack, relevantProperty)); } - private void checkUnit(Map configToBeValidated, Stack stack, UnitValidatedProperty prop) { + private void checkUnit(Map configToBeValidated, StackDefinition stack, UnitValidatedProperty prop) { PropertyUnit stackUnit = PropertyUnit.of(stack, prop); PropertyValue value = PropertyValue.of(prop.getPropertyName(), configToBeValidated.get(prop.getPropertyName())); if (value.hasAnyUnit() && !value.hasUnit(stackUnit)) { diff --git a/ambari-server/src/main/java/org/apache/ambari/server/utils/JsonUtils.java b/ambari-server/src/main/java/org/apache/ambari/server/utils/JsonUtils.java index 8d5e307cc7d..8c6df293652 
100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/utils/JsonUtils.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/utils/JsonUtils.java @@ -17,8 +17,14 @@ */ package org.apache.ambari.server.utils; +import java.io.IOException; +import java.io.UncheckedIOException; + import org.apache.commons.lang.StringUtils; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; import com.google.gson.JsonParser; import com.google.gson.JsonSyntaxException; @@ -27,8 +33,14 @@ */ public class JsonUtils { + /** + * Used to serialize to/from json. + */ public static JsonParser jsonParser = new JsonParser(); + private static final ObjectMapper JSON_SERIALIZER = new ObjectMapper(); + private static final ObjectWriter JSON_WRITER = JSON_SERIALIZER.writer(); + /** * Checks if an input string is in valid JSON format * @param jsonString input json string to validate @@ -46,4 +58,23 @@ public static boolean isValidJson(String jsonString) { return false; } } + + public static T fromJson(String json, TypeReference valueType) { + if (null == json) { + return null; + } + try { + return JSON_SERIALIZER.reader(valueType).readValue(json); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } + } + + public static String toJson(Object object) { + try { + return JSON_WRITER.writeValueAsString(object); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } + } } diff --git a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql index d0692d0bfcd..2e0c0fd2287 100644 --- a/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql +++ b/ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql @@ -594,7 +594,7 @@ CREATE TABLE blueprint_mpack_instance( blueprint_name VARCHAR(255) NOT NULL, mpack_name VARCHAR(255) NOT NULL, mpack_version VARCHAR(255) NOT NULL, - mpack_uri VARCHAR(255) NOT NULL, + mpack_uri VARCHAR(255), mpack_id BIGINT, CONSTRAINT PK_blueprint_mpack_inst PRIMARY KEY (id), CONSTRAINT FK_mpi_blueprint_name FOREIGN KEY (blueprint_name) REFERENCES blueprint(blueprint_name), diff --git a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql index c45da45e3ae..a177a91d41a 100644 --- a/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql +++ b/ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql @@ -612,7 +612,7 @@ CREATE TABLE blueprint_mpack_instance( blueprint_name VARCHAR(255) NOT NULL, mpack_name VARCHAR(255) NOT NULL, mpack_version VARCHAR(255) NOT NULL, - mpack_uri VARCHAR(255) NOT NULL, + mpack_uri VARCHAR(255), mpack_id BIGINT, CONSTRAINT PK_blueprint_mpack_inst PRIMARY KEY (id), CONSTRAINT FK_mpi_blueprint_name FOREIGN KEY (blueprint_name) REFERENCES blueprint(blueprint_name), diff --git a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql index 04f24ad55c0..fe0c66b0b55 100644 --- a/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql +++ b/ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql @@ -592,7 +592,7 @@ CREATE TABLE blueprint_mpack_instance( blueprint_name VARCHAR2(255) NOT NULL, mpack_name VARCHAR2(255) NOT NULL, mpack_version VARCHAR2(255) NOT NULL, - mpack_uri VARCHAR2(255) NOT NULL, + mpack_uri VARCHAR2(255), mpack_id NUMBER(19), CONSTRAINT PK_blueprint_mpack_inst PRIMARY KEY (id), 
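[Editor's note] The JsonUtils additions above introduce Jackson-based toJson/fromJson helpers that wrap IOException in UncheckedIOException. A possible usage, round-tripping a map so the generic types survive via TypeReference; the sample data and class name JsonUtilsUsage are made up, only toJson/fromJson come from the patch.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.ambari.server.utils.JsonUtils;

import com.fasterxml.jackson.core.type.TypeReference;

public class JsonUtilsUsage {
  public static void main(String[] args) {
    Map<String, List<String>> hostGroups = new HashMap<>();
    hostGroups.put("group1", Arrays.asList("NAMENODE", "ZOOKEEPER_SERVER"));

    // serialize, then read back with the declared generic type
    String json = JsonUtils.toJson(hostGroups);
    Map<String, List<String>> parsed =
        JsonUtils.fromJson(json, new TypeReference<Map<String, List<String>>>() {});

    System.out.println(parsed.equals(hostGroups)); // true
  }
}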
CONSTRAINT FK_mpi_blueprint_name FOREIGN KEY (blueprint_name) REFERENCES blueprint(blueprint_name), diff --git a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql index 5ba57fbb5f1..1848346a3a9 100644 --- a/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql +++ b/ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql @@ -593,7 +593,7 @@ CREATE TABLE blueprint_mpack_instance( blueprint_name VARCHAR(255) NOT NULL, mpack_name VARCHAR(255) NOT NULL, mpack_version VARCHAR(255) NOT NULL, - mpack_uri VARCHAR(255) NOT NULL, + mpack_uri VARCHAR(255), mpack_id BIGINT, CONSTRAINT PK_blueprint_mpack_inst PRIMARY KEY (id), CONSTRAINT FK_mpi_blueprint_name FOREIGN KEY (blueprint_name) REFERENCES blueprint(blueprint_name), diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql index 790926f02ab..128ff4a4ce4 100644 --- a/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql +++ b/ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql @@ -589,7 +589,7 @@ CREATE TABLE blueprint_mpack_instance( blueprint_name VARCHAR(255) NOT NULL, mpack_name VARCHAR(255) NOT NULL, mpack_version VARCHAR(255) NOT NULL, - mpack_uri VARCHAR(255) NOT NULL, + mpack_uri VARCHAR(255), mpack_id NUMERIC(19), CONSTRAINT PK_blueprint_mpack_inst PRIMARY KEY (id), CONSTRAINT FK_mpi_blueprint_name FOREIGN KEY (blueprint_name) REFERENCES blueprint(blueprint_name), diff --git a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql index eead2a04bfe..6ba6a41c558 100644 --- a/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql +++ b/ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql @@ -598,7 +598,7 @@ CREATE TABLE blueprint_mpack_instance( blueprint_name VARCHAR(255) NOT NULL, mpack_name VARCHAR(255) NOT NULL, mpack_version VARCHAR(255) NOT NULL, - mpack_uri VARCHAR(255) NOT NULL, + mpack_uri VARCHAR(255), mpack_id BIGINT, CONSTRAINT PK_blueprint_mpack_inst PRIMARY KEY (id), CONSTRAINT FK_mpi_blueprint_name FOREIGN KEY (blueprint_name) REFERENCES blueprint(blueprint_name), diff --git a/ambari-server/src/main/resources/key_properties.json b/ambari-server/src/main/resources/key_properties.json index 73eb0397a02..6e2463bdef6 100644 --- a/ambari-server/src/main/resources/key_properties.json +++ b/ambari-server/src/main/resources/key_properties.json @@ -123,9 +123,6 @@ "Cluster": "RequestSchedule/cluster_name", "RequestSchedule": "RequestSchedule/id" }, - "Blueprint": { - "Blueprint": "Blueprints/blueprint_name" - }, "Recommendation": { "Recommendation": "Recommendation/id", "Stack": "Versions/stack_name", diff --git a/ambari-server/src/main/resources/properties.json b/ambari-server/src/main/resources/properties.json index aac2140f2a7..4deb33afbf3 100644 --- a/ambari-server/src/main/resources/properties.json +++ b/ambari-server/src/main/resources/properties.json @@ -369,19 +369,6 @@ "RootServiceHostComponents/component_version", "RootServiceHostComponents/properties" ], - "Blueprint":[ - "Blueprints/blueprint_name", - "Blueprints/stack_name", - "Blueprints/stack_version", - "Blueprints/security", - "host_groups", - "host_groups/components", - "host_groups/cardinality", - "configurations", - "validate_topology", - "settings", - "mpack_instances" - ], "Recommendation":[ "Recommendation/id", "Versions/stack_name", diff --git 
a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py index 20992e25384..e050db57ffb 100644 --- a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py +++ b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py @@ -243,7 +243,7 @@ def is_secure_port(port): user_to_gid_dict = collections.defaultdict(lambda:user_group) user_list = json.loads(config['hostLevelParams']['user_list']) -group_list = json.loads(config['hostLevelParams']['group_list']) +group_list = set(json.loads(config['hostLevelParams']['group_list']) + [user_group]) host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False) tez_am_view_acls = config['configurations']['tez-site']["tez.am.view-acls"] diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelperTest.java index cedf3a2da5f..5c83c825c02 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelperTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/api/services/stackadvisor/StackAdvisorHelperTest.java @@ -42,6 +42,8 @@ import org.junit.Test; import org.mockito.Mockito; +import com.google.common.collect.ImmutableSet; + /** * StackAdvisorHelper unit tests. */ @@ -112,8 +114,11 @@ public void testRecommend_returnsCommandResult() throws StackAdvisorException, I StackAdvisorCommand command = mock(StackAdvisorCommand.class); RecommendationResponse expected = mock(RecommendationResponse.class); StackAdvisorRequestType requestType = StackAdvisorRequestType.HOST_GROUPS; - StackAdvisorRequest request = StackAdvisorRequestBuilder.forStack("stackName", "stackVersion") - .ofType(requestType).build(); + StackAdvisorRequest request = StackAdvisorRequestBuilder + .forStack("stackName", "stackVersion") + .forServices(ImmutableSet.of("ZOOKEEPER")) + .ofType(requestType) + .build(); when(command.invoke(request, ServiceInfo.ServiceAdvisorType.PYTHON)).thenReturn(expected); doReturn(command).when(helper).createRecommendationCommand("ZOOKEEPER", request); diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java index dae6fdf2dac..341e889fdad 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BaseBlueprintProcessorTest.java @@ -1,8 +1,5 @@ package org.apache.ambari.server.controller.internal; -import static org.easymock.EasyMock.anyObject; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.isA; import static org.junit.Assert.assertEquals; import java.util.Collection; @@ -11,11 +8,8 @@ import java.util.Map; import java.util.Set; -import org.apache.ambari.server.controller.AmbariManagementController; -import org.apache.ambari.server.controller.StackLevelConfigurationRequest; -import org.apache.ambari.server.controller.StackServiceResponse; import org.apache.ambari.server.state.DependencyInfo; -import org.easymock.EasyMockSupport; +import org.apache.ambari.server.state.StackInfo; import org.junit.Test; /** @@ -42,15 +36,7 @@ public class BaseBlueprintProcessorTest { //todo: 
BaseBluprintProcess no longer exists. @Test public void testStackRegisterConditionalDependencies() throws Exception { - EasyMockSupport mockSupport = new EasyMockSupport(); - AmbariManagementController mockMgmtController = mockSupport.createMock(AmbariManagementController.class); - - // setup mock expectations - expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn( - Collections. emptySet()); - - expect(mockMgmtController.getStackLevelConfigurations((Set) anyObject())).andReturn( - Collections.emptySet()).anyTimes(); + StackInfo stackInfo = new StackInfo(); // test dependencies final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT"); @@ -63,10 +49,8 @@ public void testStackRegisterConditionalDependencies() throws Exception { final DependencyInfo oozieClientDependency = new TestDependencyInfo( "OOZIE/OOZIE_CLIENT"); - mockSupport.replayAll(); - // create stack for testing - Stack testStack = new Stack("HDP", "2.1", mockMgmtController) { + Stack testStack = new Stack(stackInfo) { @Override public Collection getDependenciesForComponent( String component) { @@ -138,22 +122,10 @@ void registerConditionalDependencies() { "OOZIE", testStack.getDependencyConditionalServiceMap().get( oozieClientDependency)); - - mockSupport.verifyAll(); } @Test public void testStackRegisterConditionalDependenciesNoHCAT() throws Exception { - EasyMockSupport mockSupport = new EasyMockSupport(); - AmbariManagementController mockMgmtController = mockSupport.createMock(AmbariManagementController.class); - - // setup mock expectations - expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn( - Collections. emptySet()); - - expect(mockMgmtController.getStackLevelConfigurations((Set) anyObject())).andReturn( - Collections.emptySet()).anyTimes(); - // test dependencies final DependencyInfo yarnClientDependency = new TestDependencyInfo( "YARN/YARN_CLIENT"); @@ -164,10 +136,8 @@ public void testStackRegisterConditionalDependenciesNoHCAT() throws Exception { final DependencyInfo oozieClientDependency = new TestDependencyInfo( "OOZIE/OOZIE_CLIENT"); - mockSupport.replayAll(); - // create stack for testing - Stack testStack = new Stack("HDP", "2.1", mockMgmtController) { + Stack testStack = new Stack(new StackInfo()) { @Override public Collection getDependenciesForComponent( String component) { @@ -236,23 +206,11 @@ void registerConditionalDependencies() { "OOZIE", testStack.getDependencyConditionalServiceMap().get( oozieClientDependency)); - - mockSupport.verifyAll(); } @Test public void testStackRegisterConditionalDependenciesNoYarnClient() throws Exception { - EasyMockSupport mockSupport = new EasyMockSupport(); - AmbariManagementController mockMgmtController = mockSupport.createMock(AmbariManagementController.class); - - // setup mock expectations - expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn( - Collections. 
emptySet()); - - expect(mockMgmtController.getStackLevelConfigurations((Set) anyObject())).andReturn( - Collections.emptySet()).anyTimes(); - // test dependencies final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT"); final DependencyInfo tezClientDependency = new TestDependencyInfo( @@ -262,10 +220,8 @@ public void testStackRegisterConditionalDependenciesNoYarnClient() final DependencyInfo oozieClientDependency = new TestDependencyInfo( "OOZIE/OOZIE_CLIENT"); - mockSupport.replayAll(); - // create stack for testing - Stack testStack = new Stack("HDP", "2.1", mockMgmtController) { + Stack testStack = new Stack(new StackInfo()) { @Override public Collection getDependenciesForComponent( String component) { @@ -332,23 +288,11 @@ void registerConditionalDependencies() { "OOZIE", testStack.getDependencyConditionalServiceMap().get( oozieClientDependency)); - - mockSupport.verifyAll(); } @Test public void testStackRegisterConditionalDependenciesNoTezClient() throws Exception { - EasyMockSupport mockSupport = new EasyMockSupport(); - AmbariManagementController mockMgmtController = mockSupport.createMock(AmbariManagementController.class); - - // setup mock expectations - expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn( - Collections. emptySet()); - - expect(mockMgmtController.getStackLevelConfigurations((Set) anyObject())).andReturn( - Collections.emptySet()).anyTimes(); - // test dependencies final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT"); final DependencyInfo yarnClientDependency = new TestDependencyInfo( @@ -358,10 +302,8 @@ public void testStackRegisterConditionalDependenciesNoTezClient() final DependencyInfo oozieClientDependency = new TestDependencyInfo( "OOZIE/OOZIE_CLIENT"); - mockSupport.replayAll(); - // create stack for testing - Stack testStack = new Stack("HDP", "2.1", mockMgmtController) { + Stack testStack = new Stack(new StackInfo()) { @Override public Collection getDependenciesForComponent( String component) { @@ -430,23 +372,11 @@ void registerConditionalDependencies() { "OOZIE", testStack.getDependencyConditionalServiceMap().get( oozieClientDependency)); - - mockSupport.verifyAll(); } @Test public void testStackRegisterConditionalDependenciesNoMapReduceClient() throws Exception { - EasyMockSupport mockSupport = new EasyMockSupport(); - AmbariManagementController mockMgmtController = mockSupport.createMock(AmbariManagementController.class); - - // setup mock expectations - expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn( - Collections. 
emptySet()); - - expect(mockMgmtController.getStackLevelConfigurations((Set) anyObject())).andReturn( - Collections.emptySet()).anyTimes(); - // test dependencies final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT"); final DependencyInfo yarnClientDependency = new TestDependencyInfo( @@ -456,10 +386,8 @@ public void testStackRegisterConditionalDependenciesNoMapReduceClient() final DependencyInfo oozieClientDependency = new TestDependencyInfo( "OOZIE/OOZIE_CLIENT"); - mockSupport.replayAll(); - // create stack for testing - Stack testStack = new Stack("HDP", "2.1", mockMgmtController) { + Stack testStack = new Stack(new StackInfo()) { @Override public Collection getDependenciesForComponent( String component) { @@ -526,23 +454,11 @@ void registerConditionalDependencies() { "OOZIE", testStack.getDependencyConditionalServiceMap().get( oozieClientDependency)); - - mockSupport.verifyAll(); } @Test public void testStackRegisterConditionalDependenciesNoOozieClient() throws Exception { - EasyMockSupport mockSupport = new EasyMockSupport(); - AmbariManagementController mockMgmtController = mockSupport.createMock(AmbariManagementController.class); - - // setup mock expectations - expect(mockMgmtController.getStackServices(isA(Set.class))).andReturn( - Collections. emptySet()); - - expect(mockMgmtController.getStackLevelConfigurations((Set) anyObject())).andReturn( - Collections.emptySet()).anyTimes(); - // test dependencies final DependencyInfo hCatDependency = new TestDependencyInfo("HIVE/HCAT"); final DependencyInfo yarnClientDependency = new TestDependencyInfo( @@ -552,10 +468,8 @@ public void testStackRegisterConditionalDependenciesNoOozieClient() final DependencyInfo mapReduceTwoClientDependency = new TestDependencyInfo( "YARN/MAPREDUCE2_CLIENT"); - mockSupport.replayAll(); - // create stack for testing - Stack testStack = new Stack("HDP", "2.1", mockMgmtController) { + Stack testStack = new Stack(new StackInfo()) { @Override public Collection getDependenciesForComponent( String component) { @@ -622,231 +536,8 @@ void registerConditionalDependencies() { "MAPREDUCE2", testStack.getDependencyConditionalServiceMap().get( mapReduceTwoClientDependency)); - - mockSupport.verifyAll(); } - - //todo: validate method moved -// @Test -// public void testValidationOverrideForSecondaryNameNodeWithHA() throws Exception { -// EasyMockSupport mockSupport = new EasyMockSupport(); -// -// AmbariManagementController mockController = -// mockSupport.createMock(AmbariManagementController.class); -// -// AmbariMetaInfo mockMetaInfo = -// mockSupport.createMock(AmbariMetaInfo.class); -// -// BaseBlueprintProcessor.stackInfo = mockMetaInfo; -// -// ServiceInfo serviceInfo = new ServiceInfo(); -// serviceInfo.setName("HDFS"); -// -// StackServiceResponse stackServiceResponse = -// new StackServiceResponse(serviceInfo); -// -// ComponentInfo componentInfo = new ComponentInfo(); -// componentInfo.setName("SECONDARY_NAMENODE"); -// // simulate the stack requirements that there -// // always be one SECONDARY_NAMENODE per cluster -// componentInfo.setCardinality("1"); -// -// StackServiceComponentResponse stackComponentResponse = -// new StackServiceComponentResponse(componentInfo); -// -// ComponentInfo componentInfoNameNode = new ComponentInfo(); -// componentInfoNameNode.setName("NAMENODE"); -// componentInfo.setCardinality("1-2"); -// StackServiceComponentResponse stackServiceComponentResponseTwo = -// new StackServiceComponentResponse(componentInfoNameNode); -// -// Set responses = -// new 
HashSet(); -// responses.add(stackComponentResponse); -// responses.add(stackServiceComponentResponseTwo); -// -// expect(mockController.getStackServices(isA(Set.class))).andReturn( -// Collections.singleton(stackServiceResponse)); -// expect(mockController.getStackComponents(isA(Set.class))).andReturn( -// responses); -// expect(mockController.getStackConfigurations(isA(Set.class))).andReturn(Collections.emptySet()); -// expect(mockController.getStackLevelConfigurations(isA(Set.class))).andReturn(Collections.emptySet()); -// -// expect(mockMetaInfo.getComponentDependencies("HDP", "2.0.6", "HDFS", "SECONDARY_NAMENODE")).andReturn(Collections.emptyList()); -// expect(mockMetaInfo.getComponentDependencies("HDP", "2.0.6", "HDFS", "NAMENODE")).andReturn(Collections.emptyList()); -// -// -// mockSupport.replayAll(); -// -// BaseBlueprintProcessor baseBlueprintProcessor = -// new BaseBlueprintProcessor(Collections.emptySet(), Collections.emptyMap(), mockController) { -// @Override -// protected Set getPKPropertyIds() { -// return null; -// } -// -// @Override -// public RequestStatus createResources(Request request) throws SystemException, UnsupportedPropertyException, ResourceAlreadyExistsException, NoSuchParentResourceException { -// return null; -// } -// -// @Override -// public Set getResources(Request request, Predicate predicate) throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException { -// return null; -// } -// -// @Override -// public RequestStatus updateResources(Request request, Predicate predicate) throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException { -// return null; -// } -// -// @Override -// public RequestStatus deleteResources(Predicate predicate) throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException { -// return null; -// } -// }; -// -// HostGroupComponentEntity hostGroupComponentEntity = -// new HostGroupComponentEntity(); -// // don't include the SECONDARY_NAMENODE in this entity -// hostGroupComponentEntity.setName("NAMENODE"); -// -// HostGroupEntity hostGroupEntity = -// new HostGroupEntity(); -// hostGroupEntity.setName("host-group-one"); -// hostGroupEntity.setComponents(Collections.singleton(hostGroupComponentEntity)); -// hostGroupEntity.setConfigurations(Collections.emptyList()); -// -// // setup config entity to simulate the case of NameNode HA being enabled -// BlueprintConfigEntity configEntity = -// new BlueprintConfigEntity(); -// configEntity.setConfigData("{\"dfs.nameservices\":\"mycluster\",\"key4\":\"value4\"}"); -// configEntity.setType("hdfs-site"); -// -// BlueprintEntity testEntity = -// new BlueprintEntity(); -// testEntity.setBlueprintName("test-blueprint"); -// testEntity.setStackName("HDP"); -// testEntity.setStackVersion("2.0.6"); -// testEntity.setHostGroups(Collections.singleton(hostGroupEntity)); -// testEntity.setConfigurations(Collections.singleton(configEntity)); -// -// baseBlueprintProcessor.validateTopology(testEntity); -// -// mockSupport.verifyAll(); -// } - -// @Test -// public void testValidationOverrideForSecondaryNameNodeWithoutHA() throws Exception { -// EasyMockSupport mockSupport = new EasyMockSupport(); -// -// AmbariManagementController mockController = -// mockSupport.createMock(AmbariManagementController.class); -// -// AmbariMetaInfo mockMetaInfo = -// mockSupport.createMock(AmbariMetaInfo.class); -// -// BaseBlueprintProcessor.stackInfo = mockMetaInfo; -// -// 
ServiceInfo serviceInfo = new ServiceInfo(); -// serviceInfo.setName("HDFS"); -// -// StackServiceResponse stackServiceResponse = -// new StackServiceResponse(serviceInfo); -// -// ComponentInfo componentInfo = new ComponentInfo(); -// componentInfo.setName("SECONDARY_NAMENODE"); -// // simulate the stack requirements that there -// // always be one SECONDARY_NAMENODE per cluster -// componentInfo.setCardinality("1"); -// -// StackServiceComponentResponse stackComponentResponse = -// new StackServiceComponentResponse(componentInfo); -// -// ComponentInfo componentInfoNameNode = new ComponentInfo(); -// componentInfoNameNode.setName("NAMENODE"); -// componentInfo.setCardinality("1-2"); -// StackServiceComponentResponse stackServiceComponentResponseTwo = -// new StackServiceComponentResponse(componentInfoNameNode); -// -// Set responses = -// new HashSet(); -// responses.add(stackComponentResponse); -// responses.add(stackServiceComponentResponseTwo); -// -// expect(mockController.getStackServices(isA(Set.class))).andReturn( -// Collections.singleton(stackServiceResponse)); -// expect(mockController.getStackComponents(isA(Set.class))).andReturn( -// responses); -// expect(mockController.getStackConfigurations(isA(Set.class))).andReturn(Collections.emptySet()); -// expect(mockController.getStackLevelConfigurations(isA(Set.class))).andReturn(Collections.emptySet()); -// -// expect(mockMetaInfo.getComponentDependencies("HDP", "2.0.6", "HDFS", "SECONDARY_NAMENODE")).andReturn(Collections.emptyList()); -// expect(mockMetaInfo.getComponentDependencies("HDP", "2.0.6", "HDFS", "NAMENODE")).andReturn(Collections.emptyList()); -// -// -// mockSupport.replayAll(); -// -// BaseBlueprintProcessor baseBlueprintProcessor = -// new BaseBlueprintProcessor(Collections.emptySet(), Collections.emptyMap(), mockController) { -// @Override -// protected Set getPKPropertyIds() { -// return null; -// } -// -// @Override -// public RequestStatus createResources(Request request) throws SystemException, UnsupportedPropertyException, ResourceAlreadyExistsException, NoSuchParentResourceException { -// return null; -// } -// -// @Override -// public Set getResources(Request request, Predicate predicate) throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException { -// return null; -// } -// -// @Override -// public RequestStatus updateResources(Request request, Predicate predicate) throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException { -// return null; -// } -// -// @Override -// public RequestStatus deleteResources(Predicate predicate) throws SystemException, UnsupportedPropertyException, NoSuchResourceException, NoSuchParentResourceException { -// return null; -// } -// }; -// -// HostGroupComponentEntity hostGroupComponentEntity = -// new HostGroupComponentEntity(); -// // don't include the SECONDARY_NAMENODE in this entity -// hostGroupComponentEntity.setName("NAMENODE"); -// -// HostGroupEntity hostGroupEntity = -// new HostGroupEntity(); -// hostGroupEntity.setName("host-group-one"); -// hostGroupEntity.setComponents(Collections.singleton(hostGroupComponentEntity)); -// hostGroupEntity.setConfigurations(Collections.emptyList()); -// -// -// -// BlueprintEntity testEntity = -// new BlueprintEntity(); -// testEntity.setBlueprintName("test-blueprint"); -// testEntity.setStackName("HDP"); -// testEntity.setStackVersion("2.0.6"); -// testEntity.setHostGroups(Collections.singleton(hostGroupEntity)); -// 
testEntity.setConfigurations(Collections.emptyList()); -// -// try { -// baseBlueprintProcessor.validateTopology(testEntity); -// fail("IllegalArgumentException should have been thrown"); -// } catch (IllegalArgumentException expectedException) { -// // expected exception -// } -// -// mockSupport.verifyAll(); -// } - /** * Convenience class for easier setup/initialization of dependencies for unit * testing. diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java index d9d674d096b..23283702dfa 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessorTest.java @@ -104,6 +104,7 @@ public class BlueprintConfigurationProcessorTest extends EasyMockSupport { private final String STACK_NAME = "testStack"; private final String STACK_VERSION = "1"; + private final StackId STACK_ID = new StackId(STACK_NAME, STACK_VERSION); @Rule public EasyMockRule mocks = new EasyMockRule(this); @@ -144,6 +145,7 @@ public class BlueprintConfigurationProcessorTest extends EasyMockSupport { @Before public void init() throws Exception { expect(bp.getStack()).andReturn(stack).anyTimes(); + expect(bp.getStackIds()).andReturn(ImmutableSet.of(STACK_ID)).anyTimes(); expect(bp.getName()).andReturn("test-bp").anyTimes(); expect(stack.getName()).andReturn(STACK_NAME).atLeastOnce(); @@ -8232,7 +8234,7 @@ private ClusterTopology createClusterTopology(Blueprint blueprint, Configuration } //create host group which is set on topology - allHostGroups.put(hostGroup.name, new HostGroupImpl(hostGroup.name, "test-bp", Collections.singleton(stack), + allHostGroups.put(hostGroup.name, new HostGroupImpl(hostGroup.name, "test-bp", stack, componentList, EMPTY_CONFIG, "1")); hostGroupInfo.put(hostGroup.name, groupInfo); diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java index e303731d35d..dece002f9bb 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/BlueprintResourceProviderTest.java @@ -19,6 +19,7 @@ package org.apache.ambari.server.controller.internal; import static org.easymock.EasyMock.anyBoolean; +import static org.easymock.EasyMock.anyObject; import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.createNiceMock; import static org.easymock.EasyMock.createStrictMock; @@ -47,6 +48,7 @@ import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.api.services.AmbariMetaInfo; import org.apache.ambari.server.controller.AmbariManagementController; +import org.apache.ambari.server.controller.ResourceProviderFactory; import org.apache.ambari.server.controller.internal.BlueprintResourceProvider.BlueprintConfigPopulationStrategy; import org.apache.ambari.server.controller.internal.BlueprintResourceProvider.BlueprintConfigPopulationStrategyV1; import org.apache.ambari.server.controller.internal.BlueprintResourceProvider.BlueprintConfigPopulationStrategyV2; @@ -73,20 +75,18 @@ import 
org.apache.ambari.server.state.StackInfo; import org.apache.ambari.server.topology.Blueprint; import org.apache.ambari.server.topology.BlueprintFactory; +import org.apache.ambari.server.topology.BlueprintValidator; import org.apache.ambari.server.topology.InvalidTopologyException; import org.apache.ambari.server.topology.SecurityConfiguration; import org.apache.ambari.server.topology.SecurityConfigurationFactory; import org.apache.ambari.server.topology.Setting; import org.apache.ambari.server.utils.StageUtils; -import org.easymock.EasyMock; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; -import com.google.gson.Gson; - /** * BlueprintResourceProvider unit tests. */ @@ -98,18 +98,21 @@ public class BlueprintResourceProviderTest { @Rule public ExpectedException expectedException = ExpectedException.none(); + private static final ResourceProviderFactory resourceProviderFactory = createNiceMock(ResourceProviderFactory.class); private final static BlueprintDAO dao = createStrictMock(BlueprintDAO.class); private final static BlueprintEntity entity = createStrictMock(BlueprintEntity.class); private final static Blueprint blueprint = createMock(Blueprint.class); + private final static BlueprintValidator blueprintValidator = createMock(BlueprintValidator.class); private final static AmbariMetaInfo metaInfo = createMock(AmbariMetaInfo.class); private final static BlueprintFactory blueprintFactory = createMock(BlueprintFactory.class); private final static SecurityConfigurationFactory securityFactory = createMock(SecurityConfigurationFactory.class); private final static BlueprintResourceProvider provider = createProvider(); - private final static Gson gson = new Gson(); @BeforeClass public static void initClass() { - BlueprintResourceProvider.init(blueprintFactory, dao, securityFactory, metaInfo); + AbstractControllerResourceProvider.init(resourceProviderFactory); + expect(resourceProviderFactory.getBlueprintResourceProvider(anyObject())).andReturn(provider).anyTimes(); + replay(resourceProviderFactory); } private Map>> getSettingProperties() { @@ -118,7 +121,7 @@ private Map>> getSettingProperties() { @Before public void resetGlobalMocks() { - reset(dao, metaInfo, blueprintFactory, securityFactory, blueprint, entity); + reset(dao, metaInfo, blueprintFactory, securityFactory, blueprint, blueprintValidator, entity); } @Test @@ -135,8 +138,8 @@ public void testCreateResources() throws Exception { // set expectations expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once(); expect(securityFactory.createSecurityConfigurationFromRequest(null, true)).andReturn(null).anyTimes(); - blueprint.validateRequiredProperties(); - blueprint.validateTopology(); + blueprintValidator.validateRequiredProperties(blueprint); + blueprintValidator.validateTopology(blueprint); expect(blueprint.getSetting()).andReturn(setting).anyTimes(); expect(setting.getProperties()).andReturn(settingProperties).anyTimes(); expect(blueprint.toEntity()).andReturn(entity); @@ -146,7 +149,7 @@ public void testCreateResources() throws Exception { expect(dao.findByName(BLUEPRINT_NAME)).andReturn(null); dao.create(entity); - replay(dao, entity, metaInfo, blueprintFactory, securityFactory, blueprint, setting, request, managementController); + replay(dao, entity, metaInfo, blueprintFactory, securityFactory, blueprint, blueprintValidator, setting, request, managementController); // end expectations 
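[Editor's note] The BlueprintResourceProviderTest changes around here swap blueprint.validateRequiredProperties()/validateTopology() for calls on a separate BlueprintValidator mock that the provider now receives. The sketch below illustrates that collaborator extraction under simplified, assumed interfaces; it is not the actual provider code.

public class ValidatorExtractionSketch {

  interface Blueprint {
    String getName();
  }

  interface BlueprintValidator {
    void validateRequiredProperties(Blueprint blueprint);
    void validateTopology(Blueprint blueprint);
  }

  static class BlueprintCreator {
    private final BlueprintValidator validator;

    BlueprintCreator(BlueprintValidator validator) {
      this.validator = validator;
    }

    void create(Blueprint blueprint, boolean validateTopology) {
      // the provider delegates instead of calling blueprint.validate*(),
      // so tests can mock validation separately from the blueprint itself
      validator.validateRequiredProperties(blueprint);
      if (validateTopology) {
        validator.validateTopology(blueprint);
      }
      // ... persist the blueprint entity afterwards
    }
  }
}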
ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider( @@ -165,7 +168,7 @@ public void testCreateResources() throws Exception { assertEquals(request, lastEvent.getRequest()); assertNull(lastEvent.getPredicate()); - verify(dao, entity, blueprintFactory, securityFactory, metaInfo, request, managementController); + verify(dao, entity, blueprintFactory, blueprint, blueprintValidator, securityFactory, metaInfo, request, managementController); } @Test() @@ -212,7 +215,7 @@ public void testCreateResources_NoValidation() throws Exception { // set expectations expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once(); - blueprint.validateRequiredProperties(); + blueprintValidator.validateRequiredProperties(blueprint); expect(blueprint.getSetting()).andReturn(setting).anyTimes(); expect(setting.getProperties()).andReturn(settingProperties).anyTimes(); expect(blueprint.toEntity()).andReturn(entity); @@ -222,7 +225,7 @@ public void testCreateResources_NoValidation() throws Exception { expect(dao.findByName(BLUEPRINT_NAME)).andReturn(null); dao.create(entity); - replay(dao, entity, metaInfo, blueprintFactory, blueprint, setting, request, managementController); + replay(dao, entity, metaInfo, blueprintFactory, blueprint, blueprintValidator, setting, request, managementController); // end expectations ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider( @@ -241,7 +244,7 @@ public void testCreateResources_NoValidation() throws Exception { assertEquals(request, lastEvent.getRequest()); assertNull(lastEvent.getPredicate()); - verify(dao, entity, blueprintFactory, metaInfo, request, managementController); + verify(dao, entity, blueprintFactory, blueprint, blueprintValidator, metaInfo, request, managementController); } @Test @@ -253,16 +256,16 @@ public void testCreateResources_TopologyValidationFails() throws Exception { // set expectations expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once(); - blueprint.validateRequiredProperties(); expect(blueprint.getName()).andReturn(BLUEPRINT_NAME).atLeastOnce(); - blueprint.validateTopology(); - expectLastCall().andThrow(new InvalidTopologyException("test")); + blueprintValidator.validateRequiredProperties(blueprint); + blueprintValidator.validateTopology(blueprint); + expectLastCall().andThrow(new InvalidTopologyException("test")).once(); expect(request.getProperties()).andReturn(setProperties); expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties); expect(dao.findByName(BLUEPRINT_NAME)).andReturn(null); - replay(dao, entity, metaInfo, blueprintFactory, blueprint, request); + replay(dao, entity, metaInfo, blueprintFactory, blueprint, blueprintValidator, request); // end expectations ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider( @@ -279,7 +282,7 @@ public void testCreateResources_TopologyValidationFails() throws Exception { // expected } - verify(dao, entity, blueprintFactory, metaInfo, request); + verify(dao, entity, blueprintFactory, blueprint, blueprintValidator, metaInfo, request); } @@ -296,8 +299,8 @@ public void testCreateResources_withConfiguration() throws Exception { // set expectations expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once(); - blueprint.validateRequiredProperties(); - blueprint.validateTopology(); + blueprintValidator.validateRequiredProperties(blueprint); + 
blueprintValidator.validateTopology(blueprint); expect(blueprint.getSetting()).andReturn(setting).anyTimes(); expect(setting.getProperties()).andReturn(settingProperties).anyTimes(); expect(blueprint.toEntity()).andReturn(entity); @@ -307,7 +310,7 @@ public void testCreateResources_withConfiguration() throws Exception { expect(dao.findByName(BLUEPRINT_NAME)).andReturn(null); dao.create(entity); - replay(dao, entity, metaInfo, blueprintFactory, blueprint, setting, request, managementController); + replay(dao, entity, metaInfo, blueprintFactory, blueprint, blueprintValidator, setting, request, managementController); // end expectations ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider( @@ -326,7 +329,7 @@ public void testCreateResources_withConfiguration() throws Exception { assertEquals(request, lastEvent.getRequest()); assertNull(lastEvent.getPredicate()); - verify(dao, entity, blueprintFactory, metaInfo, request, managementController); + verify(dao, entity, blueprintFactory, blueprint, blueprintValidator, metaInfo, request, managementController); } @Test @@ -346,7 +349,7 @@ public void testCreateResource_BlueprintFactoryThrowsException() throws Exceptio expect(request.getProperties()).andReturn(setProperties); expect(request.getRequestInfoProperties()).andReturn(requestInfoProperties); - replay(dao, entity, metaInfo, blueprintFactory, securityFactory, blueprint, request); + replay(dao, entity, metaInfo, blueprintFactory, securityFactory, blueprint, blueprintValidator, request); // end expectations try { @@ -355,7 +358,7 @@ public void testCreateResource_BlueprintFactoryThrowsException() throws Exceptio } catch (IllegalArgumentException e) { // expected } - verify(dao, entity, blueprintFactory, metaInfo, request); + verify(dao, entity, blueprintFactory, blueprint, blueprintValidator, metaInfo, request); } @Test @@ -370,11 +373,11 @@ public void testCreateResources_withSecurityConfiguration() throws Exception { SecurityConfiguration securityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, "testRef", null); // set expectations - expect(securityFactory.createSecurityConfigurationFromRequest(EasyMock.anyObject(), anyBoolean())).andReturn + expect(securityFactory.createSecurityConfigurationFromRequest(anyObject(), anyBoolean())).andReturn (securityConfiguration).once(); expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), securityConfiguration)).andReturn(blueprint).once(); - blueprint.validateRequiredProperties(); - blueprint.validateTopology(); + blueprintValidator.validateRequiredProperties(blueprint); + blueprintValidator.validateTopology(blueprint); expect(blueprint.getSetting()).andReturn(setting).anyTimes(); expect(setting.getProperties()).andReturn(settingProperties).anyTimes(); expect(blueprint.toEntity()).andReturn(entity); @@ -384,7 +387,7 @@ public void testCreateResources_withSecurityConfiguration() throws Exception { expect(dao.findByName(BLUEPRINT_NAME)).andReturn(null); dao.create(entity); - replay(dao, entity, metaInfo, blueprintFactory, securityFactory, blueprint, setting, request, managementController); + replay(dao, entity, metaInfo, blueprintFactory, securityFactory, blueprint, blueprintValidator, setting, request, managementController); // end expectations ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider( @@ -403,7 +406,7 @@ public void testCreateResources_withSecurityConfiguration() throws Exception { assertEquals(request, lastEvent.getRequest()); 
assertNull(lastEvent.getPredicate()); - verify(dao, entity, blueprintFactory, metaInfo, request, managementController); + verify(dao, entity, blueprintFactory, blueprint, blueprintValidator, metaInfo, request, managementController); } @Test @@ -499,8 +502,8 @@ public void testCreateResources_withEmptyConfiguration() throws Exception { // set expectations expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once(); - blueprint.validateRequiredProperties(); - blueprint.validateTopology(); + blueprintValidator.validateRequiredProperties(blueprint); + blueprintValidator.validateTopology(blueprint); expect(blueprint.getSetting()).andReturn(setting).anyTimes(); expect(setting.getProperties()).andReturn(settingProperties).anyTimes(); expect(blueprint.toEntity()).andReturn(entity); @@ -510,7 +513,7 @@ public void testCreateResources_withEmptyConfiguration() throws Exception { expect(dao.findByName(BLUEPRINT_NAME)).andReturn(null); dao.create(entity); - replay(dao, entity, metaInfo, blueprintFactory, blueprint, setting, request, managementController); + replay(dao, entity, metaInfo, blueprintFactory, blueprint, blueprintValidator, setting, request, managementController); // end expectations ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider( @@ -529,7 +532,7 @@ public void testCreateResources_withEmptyConfiguration() throws Exception { assertEquals(request, lastEvent.getRequest()); assertNull(lastEvent.getPredicate()); - verify(dao, entity, blueprintFactory, metaInfo, request, managementController); + verify(dao, entity, blueprintFactory, blueprint, blueprintValidator, metaInfo, request, managementController); } @Test @@ -546,8 +549,8 @@ public void testCreateResources_withSingleConfigurationType() throws Exception { // set expectations expect(blueprintFactory.createBlueprint(setProperties.iterator().next(), null)).andReturn(blueprint).once(); - blueprint.validateRequiredProperties(); - blueprint.validateTopology(); + blueprintValidator.validateRequiredProperties(blueprint); + blueprintValidator.validateTopology(blueprint); expect(blueprint.getSetting()).andReturn(setting).anyTimes(); expect(setting.getProperties()).andReturn(settingProperties).anyTimes(); expect(blueprint.toEntity()).andReturn(entity); @@ -557,7 +560,7 @@ public void testCreateResources_withSingleConfigurationType() throws Exception { expect(dao.findByName(BLUEPRINT_NAME)).andReturn(null); dao.create(entity); - replay(dao, entity, metaInfo, blueprintFactory, blueprint, setting, request, managementController); + replay(dao, entity, metaInfo, blueprintFactory, blueprint, blueprintValidator, setting, request, managementController); // end expectations ResourceProvider provider = AbstractControllerResourceProvider.getResourceProvider( @@ -576,7 +579,7 @@ public void testCreateResources_withSingleConfigurationType() throws Exception { assertEquals(request, lastEvent.getRequest()); assertNull(lastEvent.getPredicate()); - verify(dao, entity, blueprintFactory, metaInfo, request, managementController); + verify(dao, entity, blueprintFactory, blueprint, blueprintValidator, metaInfo, request, managementController); } @Test @@ -795,10 +798,6 @@ private void validateResource(Resource resource, boolean containsConfig) { } - private static BlueprintResourceProvider createProvider() { - return new BlueprintResourceProvider(null); - } - private BlueprintEntity createEntity(Map properties) { BlueprintEntity entity = new BlueprintEntity(); 
entity.setBlueprintName((String) properties.get(BlueprintResourceProvider.BLUEPRINT_NAME_PROPERTY_ID)); @@ -841,7 +840,7 @@ private BlueprintEntity createEntity(Map properties) { Collection> configProperties = (Collection>) properties.get( BlueprintResourceProvider.CONFIGURATION_PROPERTY_ID); - createProvider().createBlueprintConfigEntities(configProperties, entity); + provider.createBlueprintConfigEntities(configProperties, entity); return entity; } @@ -1208,5 +1207,9 @@ public void testPopulateSettingList() throws Exception { assertTrue(setting3value.get(1).containsKey("recovery_enabled")); assertEquals(setting3value.get(1).get("recovery_enabled"), "false"); } + + private static BlueprintResourceProvider createProvider() { + return new BlueprintResourceProvider(blueprintValidator, blueprintFactory, dao, securityFactory, metaInfo, null); + } } diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/CompositeStackTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/CompositeStackTest.java new file mode 100644 index 00000000000..0561bb461b0 --- /dev/null +++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/CompositeStackTest.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.ambari.server.controller.internal; + +import static java.util.Collections.emptySet; +import static java.util.stream.Collectors.toSet; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; + +import org.apache.ambari.server.stack.StackManager; +import org.apache.ambari.server.stack.StackManagerTest; +import org.apache.ambari.server.state.StackId; +import org.junit.BeforeClass; +import org.junit.Test; + +public class CompositeStackTest { + + private static Set elements; + private static StackDefinition composite; + + @BeforeClass + public static void initStack() throws Exception{ + StackManager stackManager = StackManagerTest.createTestStackManager(); + elements = stackManager.getStacksByName().values().stream() + .flatMap(stacks -> stacks.stream().limit(1)) + .filter(Objects::nonNull) + .map(Stack::new) + .collect(toSet()); + composite = StackDefinition.of(elements); + } + + @Test + public void getStackIds() { + assertEquals(elements.size(), composite.getStackIds().size()); + } + + @Test + public void getServices() { + Set services = new HashSet<>(composite.getServices()); + for (Stack stack : elements) { + assertTrue(services.containsAll(stack.getServices())); + } + for (Stack stack : elements) { + services.removeAll(stack.getServices()); + } + assertEquals(emptySet(), services); + } + + @Test + public void getComponents() { + Set components = new HashSet<>(composite.getComponents()); + for (Stack stack : elements) { + assertTrue(components.containsAll(stack.getComponents())); + } + for (Stack stack : elements) { + components.removeAll(stack.getComponents()); + } + assertEquals(emptySet(), components); + } + + @Test + public void getStacksForService() { + Set services = new HashSet<>(composite.getServices()); + for (String service : services) { + Set stackIds = new HashSet<>(); + for (Stack stack : elements) { + if (stack.getServices().contains(service)) { + stackIds.add(stack.getStackId()); + } + } + assertEquals(service, stackIds, composite.getStacksForService(service)); + } + } + + @Test + public void getStacksForComponent() { + Set components = new HashSet<>(composite.getComponents()); + for (String component : components) { + Set stackIds = new HashSet<>(); + for (Stack stack : elements) { + if (stack.getComponents().contains(component)) { + stackIds.add(stack.getStackId()); + } + } + assertEquals(component, stackIds, composite.getStacksForComponent(component)); + } + } + + // TODO add more tests after StackDefinition interface is finalized + +} diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ExportBlueprintRequestTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ExportBlueprintRequestTest.java index e70901aaf22..65d89c11d2f 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ExportBlueprintRequestTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ExportBlueprintRequestTest.java @@ -19,7 +19,6 @@ package org.apache.ambari.server.controller.internal; -import static org.easymock.EasyMock.anyObject; import static org.easymock.EasyMock.anyString; import static org.easymock.EasyMock.createNiceMock; import static org.easymock.EasyMock.expect; @@ -39,14 +38,13 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Set; +import 
org.apache.ambari.server.api.services.AmbariMetaInfo; import org.apache.ambari.server.api.util.TreeNode; import org.apache.ambari.server.api.util.TreeNodeImpl; import org.apache.ambari.server.controller.AmbariManagementController; -import org.apache.ambari.server.controller.StackLevelConfigurationRequest; -import org.apache.ambari.server.controller.StackServiceRequest; import org.apache.ambari.server.controller.spi.Resource; +import org.apache.ambari.server.state.StackInfo; import org.apache.ambari.server.topology.Blueprint; import org.apache.ambari.server.topology.HostGroup; import org.apache.ambari.server.topology.HostGroupInfo; @@ -76,11 +74,14 @@ public void setupTest() throws Exception { f.setAccessible(true); f.set(null, controller); - expect(controller.getStackServices((Set) anyObject())).andReturn( - Collections.emptySet()).anyTimes(); - expect(controller.getStackLevelConfigurations((Set) anyObject())).andReturn( - Collections.emptySet()).anyTimes(); - replay(controller); + AmbariMetaInfo metainfo = createNiceMock(AmbariMetaInfo.class); + expect(controller.getAmbariMetaInfo()).andReturn(metainfo).anyTimes(); + StackInfo stackInfo = createNiceMock(StackInfo.class); + expect(metainfo.getStack("TEST", "1.0")).andReturn(stackInfo); + expect(stackInfo.getServices()).andReturn(Collections.emptySet()).anyTimes(); + expect(stackInfo.getProperties()).andReturn(Collections.emptyList()).anyTimes(); + + replay(controller, metainfo, stackInfo); // This can save precious time mockStatic(InetAddress.class); diff --git a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackTest.java b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackTest.java index 9e61a7082d1..e6794b80b21 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/StackTest.java @@ -18,119 +18,226 @@ package org.apache.ambari.server.controller.internal; -import static org.easymock.EasyMock.capture; +import static java.util.Collections.emptySet; +import static java.util.Collections.singleton; +import static java.util.stream.Collectors.toSet; import static org.easymock.EasyMock.expect; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.powermock.api.easymock.PowerMock.createNiceMock; -import static org.powermock.api.easymock.PowerMock.replay; -import static org.powermock.api.easymock.PowerMock.verifyAll; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Set; +import java.util.stream.Stream; -import org.apache.ambari.server.api.services.AmbariMetaInfo; -import org.apache.ambari.server.controller.AmbariManagementController; -import org.apache.ambari.server.controller.StackConfigurationRequest; -//import org.apache.ambari.server.controller.StackConfigurationResponse; -import org.apache.ambari.server.controller.StackLevelConfigurationRequest; import org.apache.ambari.server.controller.StackLevelConfigurationResponse; -import org.apache.ambari.server.controller.StackServiceComponentRequest; -import org.apache.ambari.server.controller.StackServiceComponentResponse; -import org.apache.ambari.server.controller.StackServiceRequest; -import org.apache.ambari.server.controller.StackServiceResponse; +import 
org.apache.ambari.server.state.ComponentInfo; import org.apache.ambari.server.state.PropertyDependencyInfo; import org.apache.ambari.server.state.PropertyInfo; +import org.apache.ambari.server.state.ServiceInfo; +import org.apache.ambari.server.state.StackId; +import org.apache.ambari.server.state.StackInfo; import org.apache.ambari.server.state.ValueAttributesInfo; import org.apache.ambari.server.topology.Configuration; -import org.easymock.Capture; -import org.easymock.EasyMock; +import org.apache.commons.lang3.tuple.Pair; import org.easymock.EasyMockSupport; +import org.junit.Before; import org.junit.Test; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; /** * Stack unit tests. */ public class StackTest { + private static final StackId STACK_ID = new StackId("stack name", "1.0"); + private static final String STACK_CONFIG_TYPE = "cluster-env"; + private static final String STACK_CONFIG_FILE = STACK_CONFIG_TYPE + ".xml"; + private static final String SERVICE_CONFIG_TYPE = "test-site"; + private static final String SERVICE_CONFIG_FILE = SERVICE_CONFIG_TYPE + ".xml"; + + private StackInfo stackInfo; + private ServiceInfo serviceInfo; + private ComponentInfo componentInfo; + private PropertyInfo serviceLevelProperty; + private PropertyInfo stackLevelProperty; + private PropertyInfo optionalServiceLevelProperty; + private PropertyInfo passwordProperty; + + @Before + public void setUp() { + stackInfo = new StackInfo(); + stackInfo.setName(STACK_ID.getStackName()); + stackInfo.setVersion(STACK_ID.getStackVersion()); + + serviceInfo = new ServiceInfo(); + serviceInfo.setName("some service"); + stackInfo.getServices().add(serviceInfo); + + componentInfo = new ComponentInfo(); + componentInfo.setName("some component"); + serviceInfo.getComponents().add(componentInfo); + + serviceLevelProperty = new PropertyInfo(); + serviceLevelProperty.setName("service_level"); + serviceLevelProperty.setValue("service-level value"); + serviceLevelProperty.setFilename(SERVICE_CONFIG_FILE); + serviceLevelProperty.setRequireInput(true); + serviceLevelProperty.getPropertyTypes().add(PropertyInfo.PropertyType.TEXT); + serviceInfo.getProperties().add(serviceLevelProperty); + + passwordProperty = new PropertyInfo(); + passwordProperty.setName("a_password"); + passwordProperty.setValue("secret"); + passwordProperty.setFilename(SERVICE_CONFIG_FILE); + passwordProperty.setRequireInput(true); + passwordProperty.getPropertyTypes().add(PropertyInfo.PropertyType.PASSWORD); + serviceInfo.getProperties().add(passwordProperty); + + optionalServiceLevelProperty = new PropertyInfo(); + optionalServiceLevelProperty.setName("optional_service_level"); + optionalServiceLevelProperty.setValue("service-level value (optional)"); + optionalServiceLevelProperty.setFilename(SERVICE_CONFIG_FILE); + optionalServiceLevelProperty.setRequireInput(false); + optionalServiceLevelProperty.getPropertyTypes().add(PropertyInfo.PropertyType.USER); + serviceInfo.getProperties().add(optionalServiceLevelProperty); + + stackLevelProperty = new PropertyInfo(); + stackLevelProperty.setName("stack_level"); + stackLevelProperty.setValue("stack-level value"); + stackLevelProperty.setFilename(STACK_CONFIG_FILE); + stackLevelProperty.setRequireInput(true); + stackLevelProperty.getPropertyTypes().add(PropertyInfo.PropertyType.TEXT); + stackInfo.getProperties().add(stackLevelProperty); + } + + @Test + public void stackHasCorrectNameAndVersion() throws Exception { + // GIVEN + Stack stack = new Stack(stackInfo); + + // THEN + 
assertEquals(stackInfo.getName(), stack.getName()); + assertEquals(stackInfo.getVersion(), stack.getVersion()); + } + @Test - public void testTestXmlExtensionStrippedOff() throws Exception { - AmbariManagementController controller = createNiceMock(AmbariManagementController.class); - AmbariMetaInfo metaInfo = createNiceMock(AmbariMetaInfo.class); - Capture> stackServiceRequestCapture = EasyMock.newCapture(); - StackServiceResponse stackServiceResponse = createNiceMock(StackServiceResponse.class); - Capture> stackComponentRequestCapture = EasyMock.newCapture(); - StackServiceComponentResponse stackComponentResponse = createNiceMock(StackServiceComponentResponse.class); - Capture> stackConfigurationRequestCapture = EasyMock.newCapture(); - Capture> stackLevelConfigurationRequestCapture = EasyMock.newCapture(); - StackLevelConfigurationResponse stackConfigurationResponse = EasyMock.createNiceMock(StackLevelConfigurationResponse.class); + public void getServices() throws Exception { + // GIVEN + Stack stack = new Stack(stackInfo); - expect(controller.getStackServices(capture(stackServiceRequestCapture))). - andReturn(Collections.singleton(stackServiceResponse)).anyTimes(); + // WHEN + Collection services = stack.getServices(); - expect(controller.getAmbariMetaInfo()).andReturn(metaInfo).anyTimes(); + // THEN + assertEquals(ImmutableSet.of(serviceInfo.getName()), ImmutableSet.copyOf(services)); + } - expect(stackServiceResponse.getServiceName()).andReturn("service1").anyTimes(); - expect(stackServiceResponse.getExcludedConfigTypes()).andReturn(Collections.emptySet()); - expect(stackServiceResponse.getConfigTypes()).andReturn(Collections.emptyMap()); + @Test + public void getServicesForOwnStackId() throws Exception { + // GIVEN + Stack stack = new Stack(stackInfo); - expect(controller.getStackComponents(capture(stackComponentRequestCapture))). - andReturn(Collections.singleton(stackComponentResponse)).anyTimes(); + // WHEN + Collection services = stack.getServices(stack.getStackId()); + + // THEN + assertEquals(ImmutableSet.of(serviceInfo.getName()), ImmutableSet.copyOf(services)); + } - expect(stackComponentResponse.getComponentName()).andReturn("component1").anyTimes(); - expect(stackComponentResponse.getComponentCategory()).andReturn("test-site.xml").anyTimes(); + @Test + public void getServicesForOtherStackId() throws Exception { + // GIVEN + Stack stack = new Stack(stackInfo); - expect(controller.getStackConfigurations(capture(stackConfigurationRequestCapture))). - andReturn(Collections.singleton(stackConfigurationResponse)).anyTimes(); + // WHEN + Collection services = stack.getServices(new StackId("other stack", "1.0")); - // no stack level configs for this test - expect(controller.getStackLevelConfigurations(capture(stackLevelConfigurationRequestCapture))). 
- andReturn(Collections.emptySet()).anyTimes(); + // THEN + assertEquals(ImmutableSet.of(), ImmutableSet.copyOf(services)); + } - expect(stackConfigurationResponse.getPropertyName()).andReturn("prop1").anyTimes(); - expect(stackConfigurationResponse.getPropertyValue()).andReturn("prop1Val").anyTimes(); - expect(stackConfigurationResponse.getType()).andReturn("test-site.xml").anyTimes(); - expect(stackConfigurationResponse.getPropertyType()).andReturn( - Collections.emptySet()).anyTimes(); - expect(stackConfigurationResponse.getPropertyAttributes()).andReturn(Collections.emptyMap()).anyTimes(); - expect(stackConfigurationResponse.isRequired()).andReturn(true).anyTimes(); + @Test + public void getStacksForServicesInStack() throws Exception { + // GIVEN + Stack stack = new Stack(stackInfo); + + for (String service : stack.getServices()) { + // WHEN + Set stacksForService = stack.getStacksForService(service); + // THEN + assertEquals("Stack for service " + service, ImmutableSet.of(STACK_ID), stacksForService); + } + } - expect(metaInfo.getComponentDependencies("test", "1.0", "service1", "component1")). - andReturn(Collections.emptyList()).anyTimes(); + @Test + public void getStacksForUnknownService() throws Exception { + // GIVEN + Stack stack = new Stack(stackInfo); + // WHEN + Set stacksForService = stack.getStacksForService("unknown service"); - replay(controller, stackServiceResponse, stackComponentResponse, stackConfigurationResponse, metaInfo); + // THEN + assertEquals(ImmutableSet.of(), stacksForService); + } + @Test + public void configTypeOmitsFileExtension() throws Exception { + // GIVEN + Stack stack = new Stack(stackInfo); - Stack stack = new Stack("test", "1.0", controller); - Configuration configuration = stack.getConfiguration(Collections.singleton("service1")); - assertEquals("prop1Val", configuration.getProperties().get("test-site").get("prop1")); + // WHEN + Configuration configuration = stack.getConfiguration(singleton(serviceInfo.getName())); - assertEquals("test-site", stack.getRequiredConfigurationProperties("service1").iterator().next().getType()); + // THEN + assertEquals(serviceLevelProperty.getValue(), configuration.getProperties().get(SERVICE_CONFIG_TYPE).get(serviceLevelProperty.getName())); + } - // assertions - StackServiceRequest stackServiceRequest = stackServiceRequestCapture.getValue().iterator().next(); - assertEquals("test", stackServiceRequest.getStackName()); - assertEquals("1.0", stackServiceRequest.getStackVersion()); + @Test + public void getRequiredPropertiesForService() throws Exception { + // GIVEN + Stack stack = new Stack(stackInfo); + + // WHEN + Collection requiredConfigurationProperties = stack.getRequiredConfigurationProperties(serviceInfo.getName()); + + // THEN + // should include stack-level property + // should exclude optional property + Set> actualRequiredProperties = convertToPropertySet(requiredConfigurationProperties); + Set> expected = ImmutableSet.of( + Pair.of(STACK_CONFIG_TYPE, stackLevelProperty.getName()), + Pair.of(SERVICE_CONFIG_TYPE, passwordProperty.getName()), + Pair.of(SERVICE_CONFIG_TYPE, serviceLevelProperty.getName()) + ); + assertEquals(expected, actualRequiredProperties); + assertEquals(expected.size(), requiredConfigurationProperties.size()); + } - StackServiceComponentRequest stackComponentRequest = stackComponentRequestCapture.getValue().iterator().next(); - assertEquals("service1", stackComponentRequest.getServiceName()); - assertEquals("test", stackComponentRequest.getStackName()); - assertEquals("1.0", 
stackComponentRequest.getStackVersion()); - assertNull(stackComponentRequest.getComponentName()); + @Test + public void getRequiredPropertiesForServiceAndType() throws Exception { + // GIVEN + Stack stack = new Stack(stackInfo); + + // WHEN + Collection requiredConfigurationProperties = stack.getRequiredConfigurationProperties(serviceInfo.getName(), PropertyInfo.PropertyType.TEXT); + + // THEN + Set> actualRequiredProperties = convertToPropertySet(requiredConfigurationProperties); + Set> expected = ImmutableSet.of( + Pair.of(STACK_CONFIG_TYPE, stackLevelProperty.getName()), + Pair.of(SERVICE_CONFIG_TYPE, serviceLevelProperty.getName()) + ); + assertEquals(expected, actualRequiredProperties); + assertEquals(expected.size(), requiredConfigurationProperties.size()); } @Test public void testConfigPropertyReadsInDependencies() throws Exception { + // FIXME get rid of mock EasyMockSupport mockSupport = new EasyMockSupport(); Set setOfDependencyInfo = new HashSet<>(); @@ -158,221 +265,106 @@ public void testConfigPropertyReadsInDependencies() throws Exception { } @Test - public void testGetRequiredProperties_serviceAndPropertyType() throws Exception { - AmbariManagementController controller = createNiceMock(AmbariManagementController.class); - AmbariMetaInfo metaInfo = createNiceMock(AmbariMetaInfo.class); - Capture> stackServiceRequestCapture = EasyMock.newCapture(); - StackServiceResponse stackServiceResponse = createNiceMock(StackServiceResponse.class); - Capture> stackComponentRequestCapture = EasyMock.newCapture(); - StackServiceComponentResponse stackComponentResponse = createNiceMock(StackServiceComponentResponse.class); - Capture> stackConfigurationRequestCapture = EasyMock.newCapture(); - Capture> stackLevelConfigurationRequestCapture = EasyMock.newCapture(); - StackLevelConfigurationResponse stackConfigurationResponse = EasyMock.createNiceMock(StackLevelConfigurationResponse.class); - StackLevelConfigurationResponse stackConfigurationResponse2 = EasyMock.createNiceMock(StackLevelConfigurationResponse.class); - - expect(controller.getStackServices(capture(stackServiceRequestCapture))). - andReturn(Collections.singleton(stackServiceResponse)).anyTimes(); - - expect(controller.getAmbariMetaInfo()).andReturn(metaInfo).anyTimes(); - - expect(stackServiceResponse.getServiceName()).andReturn("service1").anyTimes(); - expect(stackServiceResponse.getExcludedConfigTypes()).andReturn(Collections.emptySet()); - expect(stackServiceResponse.getConfigTypes()).andReturn(Collections.emptyMap()); - - expect(controller.getStackComponents(capture(stackComponentRequestCapture))). - andReturn(Collections.singleton(stackComponentResponse)).anyTimes(); - - expect(stackComponentResponse.getComponentName()).andReturn("component1").anyTimes(); - expect(stackComponentResponse.getComponentCategory()).andReturn("test-site.xml").anyTimes(); - - expect(controller.getStackConfigurations(capture(stackConfigurationRequestCapture))). - andReturn(new HashSet<>(Arrays.asList( - stackConfigurationResponse, stackConfigurationResponse2))).anyTimes(); - - // no stack level configs for this test - expect(controller.getStackLevelConfigurations(capture(stackLevelConfigurationRequestCapture))). 
- andReturn(Collections.emptySet()).anyTimes(); - - expect(stackConfigurationResponse.getPropertyName()).andReturn("prop1").anyTimes(); - expect(stackConfigurationResponse.getPropertyValue()).andReturn(null).anyTimes(); - expect(stackConfigurationResponse.getType()).andReturn("test-site.xml").anyTimes(); - expect(stackConfigurationResponse.getPropertyType()).andReturn( - Collections.singleton(PropertyInfo.PropertyType.PASSWORD)).anyTimes(); - expect(stackConfigurationResponse.getPropertyAttributes()).andReturn(Collections.emptyMap()).anyTimes(); - expect(stackConfigurationResponse.isRequired()).andReturn(true).anyTimes(); - - // not a PASSWORD property type so shouldn't be returned - expect(stackConfigurationResponse2.getPropertyName()).andReturn("prop2").anyTimes(); - expect(stackConfigurationResponse2.getPropertyValue()).andReturn(null).anyTimes(); - expect(stackConfigurationResponse2.getType()).andReturn("test-site.xml").anyTimes(); - expect(stackConfigurationResponse2.getPropertyType()).andReturn( - Collections.singleton(PropertyInfo.PropertyType.USER)).anyTimes(); - expect(stackConfigurationResponse2.getPropertyAttributes()).andReturn(Collections.emptyMap()).anyTimes(); - expect(stackConfigurationResponse2.isRequired()).andReturn(true).anyTimes(); - - expect(metaInfo.getComponentDependencies("test", "1.0", "service1", "component1")). - andReturn(Collections.emptyList()).anyTimes(); - - replay(controller, stackServiceResponse, stackComponentResponse, stackConfigurationResponse, - stackConfigurationResponse2, metaInfo); - - // test - Stack stack = new Stack("test", "1.0", controller); - // get required password properties - Collection requiredPasswordProperties = stack.getRequiredConfigurationProperties( - "service1", PropertyInfo.PropertyType.PASSWORD); - - // assertions - assertEquals(1, requiredPasswordProperties.size()); - Stack.ConfigProperty requiredPasswordConfigProperty = requiredPasswordProperties.iterator().next(); - assertEquals("test-site", requiredPasswordConfigProperty.getType()); - assertEquals("prop1", requiredPasswordConfigProperty.getName()); - assertTrue(requiredPasswordConfigProperty.getPropertyTypes().contains(PropertyInfo.PropertyType.PASSWORD)); - - StackServiceRequest stackServiceRequest = stackServiceRequestCapture.getValue().iterator().next(); - assertEquals("test", stackServiceRequest.getStackName()); - assertEquals("1.0", stackServiceRequest.getStackVersion()); - - StackServiceComponentRequest stackComponentRequest = stackComponentRequestCapture.getValue().iterator().next(); - assertEquals("service1", stackComponentRequest.getServiceName()); - assertEquals("test", stackComponentRequest.getStackName()); - assertEquals("1.0", stackComponentRequest.getStackVersion()); - assertNull(stackComponentRequest.getComponentName()); - } - - // Test that getAllConfigurationTypes returns beside the configuration types that have - // service config properties defined also the empty ones that doesn't have any config - // property defined. 
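The controller-mock required-property test removed above is superseded by getRequiredPropertiesForService and getRequiredPropertiesForServiceAndType earlier in this file. The filtering those tests pin down amounts to the two predicates below; this is a sketch inferred from the test expectations only (the production Stack change is not part of this hunk, and the isRequireInput getter name is an assumption based on the fixture's setRequireInput calls):

  // A property is "required" when it demands user input; the PropertyType overload narrows further.
  private static boolean isRequired(PropertyInfo property) {
    return property.isRequireInput();                  // assumed getter for setRequireInput(...)
  }

  private static boolean isRequired(PropertyInfo property, PropertyInfo.PropertyType type) {
    return property.isRequireInput()
        && property.getPropertyTypes().contains(type); // e.g. TEXT keeps stack_level and service_level only
  }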
- @Test - public void testGetAllConfigurationTypesWithEmptyStackServiceConfigType() throws Exception { - // Given - AmbariManagementController controller = createNiceMock(AmbariManagementController.class); - AmbariMetaInfo metaInfo = createNiceMock(AmbariMetaInfo.class); - StackServiceResponse stackServiceResponse = createNiceMock(StackServiceResponse.class); - StackServiceComponentResponse stackComponentResponse = createNiceMock(StackServiceComponentResponse.class); - StackLevelConfigurationResponse stackConfigurationResponse1 = createNiceMock(StackLevelConfigurationResponse.class); - StackLevelConfigurationResponse stackConfigurationResponse2 = createNiceMock(StackLevelConfigurationResponse.class); + public void getAllConfigurationTypesReturnsExcludedOnesToo() throws Exception { + // GIVEN + serviceInfo.setExcludedConfigTypes(ImmutableSet.of(SERVICE_CONFIG_TYPE)); + Stack stack = new Stack(stackInfo); - String testServiceName = "service1"; - String testEmptyConfigType = "test-empty-config-type"; - String testSiteConfigFile = "test-site.xml"; - String testSiteConfigType = "test-site"; + // WHEN + Collection allConfigurationTypes = stack.getAllConfigurationTypes(serviceInfo.getName()); + // THEN + Set expected = ImmutableSet.of(SERVICE_CONFIG_TYPE, STACK_CONFIG_TYPE); + assertEquals(expected, allConfigurationTypes); + } - expect(controller.getAmbariMetaInfo()).andReturn(metaInfo).anyTimes(); - - expect(controller.getStackServices(EasyMock.anyObject())).andReturn(Collections.singleton(stackServiceResponse)).anyTimes(); - expect(stackServiceResponse.getServiceName()).andReturn(testServiceName).anyTimes(); - expect(stackServiceResponse.getExcludedConfigTypes()).andReturn(Collections.emptySet()); - - // stack components - expect(stackComponentResponse.getComponentName()).andReturn("component1").anyTimes(); - expect(stackComponentResponse.getComponentCategory()).andReturn(testSiteConfigFile).anyTimes(); - expect(controller.getStackComponents(EasyMock.anyObject())).andReturn(Collections.singleton(stackComponentResponse)).anyTimes(); - - // stack configurations + @Test + public void getConfigurationTypesOmitsExcludedOnes() throws Exception { + // GIVEN + serviceInfo.setExcludedConfigTypes(ImmutableSet.of(SERVICE_CONFIG_TYPE)); + Stack stack = new Stack(stackInfo); - // two properties with config type 'test-site' - expect(stackConfigurationResponse1.getPropertyName()).andReturn("prop1").anyTimes(); - expect(stackConfigurationResponse1.getPropertyValue()).andReturn(null).anyTimes(); - expect(stackConfigurationResponse1.getType()).andReturn(testSiteConfigFile).anyTimes(); - expect(stackConfigurationResponse1.getPropertyType()).andReturn(Collections.singleton(PropertyInfo.PropertyType.TEXT)).anyTimes(); - expect(stackConfigurationResponse1.getPropertyAttributes()).andReturn(Collections.emptyMap()).anyTimes(); - expect(stackConfigurationResponse1.isRequired()).andReturn(true).anyTimes(); + // WHEN + Collection allConfigurationTypes = stack.getConfigurationTypes(serviceInfo.getName()); - expect(stackConfigurationResponse2.getPropertyName()).andReturn("prop2").anyTimes(); - expect(stackConfigurationResponse2.getPropertyValue()).andReturn(null).anyTimes(); - expect(stackConfigurationResponse2.getType()).andReturn(testSiteConfigFile).anyTimes(); - expect(stackConfigurationResponse2.getPropertyType()).andReturn(Collections.singleton(PropertyInfo.PropertyType.USER)).anyTimes(); - expect(stackConfigurationResponse2.getPropertyAttributes()).andReturn(Collections.emptyMap()).anyTimes(); - 
expect(stackConfigurationResponse2.isRequired()).andReturn(true).anyTimes(); + // THEN + Set expected = ImmutableSet.of(STACK_CONFIG_TYPE); + assertEquals(expected, allConfigurationTypes); + } - expect(controller.getStackConfigurations(EasyMock.anyObject())).andReturn(Sets.newHashSet(stackConfigurationResponse1, stackConfigurationResponse2)).anyTimes(); + @Test + public void findsServiceForValidConfigType() { + // GIVEN + Stack stack = new Stack(stackInfo); - // empty stack service config type - expect(stackServiceResponse.getConfigTypes()).andReturn(Collections.singletonMap(testEmptyConfigType, Collections.emptyMap())); + // WHEN + String service = stack.getServiceForConfigType(SERVICE_CONFIG_TYPE); - // no stack level configs for this test - expect(controller.getStackLevelConfigurations(EasyMock.anyObject())).andReturn(Collections.emptySet()).anyTimes(); + // THEN + assertEquals(serviceInfo.getName(), service); + } - expect(metaInfo.getComponentDependencies("test", "1.0", "service1", "component1")).andReturn(Collections.emptyList()).anyTimes(); + @Test(expected = IllegalArgumentException.class) // THEN + public void serviceIsNotFoundForExcludedConfigType() { + // GIVEN + serviceInfo.setExcludedConfigTypes(ImmutableSet.of(SERVICE_CONFIG_TYPE)); + Stack stack = new Stack(stackInfo); - replay(controller, stackServiceResponse, stackComponentResponse, stackConfigurationResponse1, stackConfigurationResponse2, metaInfo); + // WHEN + stack.getServiceForConfigType(SERVICE_CONFIG_TYPE); + } + @Test(expected = IllegalArgumentException.class) // THEN + public void serviceIsNotFoundForUnknownConfigType() { + // GIVEN + Stack stack = new Stack(stackInfo); - Stack stack = new Stack("test", "1.0", controller); + // WHEN + stack.getServiceForConfigType("no_such_config_type"); + } - // When - Collection allServiceConfigTypes = stack.getAllConfigurationTypes(testServiceName); + @Test + public void findsAllServicesForValidConfigType() { + // GIVEN + ServiceInfo otherMatchingService = new ServiceInfo(); + otherMatchingService.setName("matches"); + Stack stack = new Stack(stackInfo); + + // WHEN + Stream services = stack.getServicesForConfigType(SERVICE_CONFIG_TYPE); + + // THEN + Set expected = ImmutableSet.of(serviceInfo.getName()); + assertEquals(expected, services.collect(toSet())); + } - // Then + @Test + public void noServiceFoundForExcludedConfigType() { + // GIVEN + serviceInfo.setExcludedConfigTypes(ImmutableSet.of(SERVICE_CONFIG_TYPE)); + Stack stack = new Stack(stackInfo); - assertTrue(allServiceConfigTypes.containsAll(ImmutableSet.of(testSiteConfigType, testEmptyConfigType))); - assertEquals(2, allServiceConfigTypes.size()); + // WHEN + Stream services = stack.getServicesForConfigType(SERVICE_CONFIG_TYPE); - verifyAll(); + // THEN + assertEquals(emptySet(), services.collect(toSet())); } - // Test that getServiceForConfigType skips excluded config types. 
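Taken together, the new excluded-config-type tests fix the contract of the rewritten Stack: an excluded type still shows up in getAllConfigurationTypes, but is filtered from getConfigurationTypes, rejected by getServiceForConfigType, and produces no match from getServicesForConfigType. A compressed illustration, reusing the fixtures and constants of this test class:

  serviceInfo.setExcludedConfigTypes(ImmutableSet.of(SERVICE_CONFIG_TYPE));
  Stack stack = new Stack(stackInfo);

  stack.getAllConfigurationTypes(serviceInfo.getName());  // contains both "test-site" and "cluster-env"
  stack.getConfigurationTypes(serviceInfo.getName());     // contains only "cluster-env"
  stack.getServicesForConfigType(SERVICE_CONFIG_TYPE);    // empty stream
  stack.getServiceForConfigType(SERVICE_CONFIG_TYPE);     // throws IllegalArgumentException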
@Test - public void testGetServiceForConfigTypeWithExcludedConfigs() throws Exception { - // Given - AmbariManagementController controller = createNiceMock(AmbariManagementController.class); - AmbariMetaInfo metaInfo = createNiceMock(AmbariMetaInfo.class); - StackServiceResponse stackServiceResponse = createNiceMock(StackServiceResponse.class); - StackServiceComponentResponse stackComponentResponse = createNiceMock(StackServiceComponentResponse.class); - StackLevelConfigurationResponse stackConfigurationResponse1 = createNiceMock(StackLevelConfigurationResponse.class); - - String testServiceName = "service1"; - String testEmptyConfigType = "test-empty-config-type"; - String testSiteConfigFile = "test-site.xml"; - String testSiteConfigType = "test-site"; - - expect(controller.getAmbariMetaInfo()).andReturn(metaInfo).anyTimes(); - - expect(controller.getStackServices(EasyMock.anyObject())).andReturn(Collections.singleton(stackServiceResponse)).anyTimes(); - expect(stackServiceResponse.getServiceName()).andReturn(testServiceName).anyTimes(); - - // Config type test-site is excluded for the service service1 - expect(stackServiceResponse.getExcludedConfigTypes()).andReturn(Collections.singleton(testSiteConfigType)); - - // stack components - expect(stackComponentResponse.getComponentName()).andReturn("component1").anyTimes(); - expect(stackComponentResponse.getComponentCategory()).andReturn(testSiteConfigFile).anyTimes(); - expect(controller.getStackComponents(EasyMock.anyObject())).andReturn(Collections.singleton(stackComponentResponse)).anyTimes(); - - expect(stackConfigurationResponse1.getPropertyName()).andReturn("prop1").anyTimes(); - expect(stackConfigurationResponse1.getPropertyValue()).andReturn(null).anyTimes(); - expect(stackConfigurationResponse1.getType()).andReturn(testSiteConfigFile).anyTimes(); - expect(stackConfigurationResponse1.getPropertyType()).andReturn(Collections.singleton(PropertyInfo.PropertyType.TEXT)).anyTimes(); - expect(stackConfigurationResponse1.getPropertyAttributes()).andReturn(Collections.emptyMap()).anyTimes(); - expect(stackConfigurationResponse1.isRequired()).andReturn(true).anyTimes(); - - expect(controller.getStackConfigurations(EasyMock.anyObject())).andReturn(Collections.singleton(stackConfigurationResponse1)).anyTimes(); - - // empty stack service config type - expect(stackServiceResponse.getConfigTypes()).andReturn(Collections.singletonMap(testEmptyConfigType, Collections.emptyMap())); - - // no stack level configs for this test - expect(controller.getStackLevelConfigurations(EasyMock.anyObject())).andReturn(Collections.emptySet()).anyTimes(); - expect(metaInfo.getComponentDependencies("test", "1.0", "service1", "component1")).andReturn(Collections.emptyList()).anyTimes(); - - replay(controller, stackServiceResponse, stackComponentResponse, stackConfigurationResponse1, metaInfo); - - Stack stack = new Stack("test", "1.0", controller); - - // When - try { - stack.getServiceForConfigType(testSiteConfigType); - fail("Exception not thrown"); - } catch (IllegalArgumentException e) { - // Expected - } + public void noServiceFoundForUnknownConfigType() { + // GIVEN + Stack stack = new Stack(stackInfo); - // Not excluded config type - assertEquals(testServiceName, stack.getServiceForConfigType(testEmptyConfigType)); + // WHEN + Stream services = stack.getServicesForConfigType("no_such_config_type"); - verifyAll(); + // THEN + assertEquals(emptySet(), services.collect(toSet())); } + private static Set> convertToPropertySet(Collection 
requiredConfigurationProperties) { + return requiredConfigurationProperties + .stream().map(p -> Pair.of(p.getType(), p.getName())).collect(toSet()); + } } diff --git a/ambari-server/src/test/java/org/apache/ambari/server/metadata/RoleCommandOrderTest.java b/ambari-server/src/test/java/org/apache/ambari/server/metadata/RoleCommandOrderTest.java index 5b647e762dc..a5404641920 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/metadata/RoleCommandOrderTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/metadata/RoleCommandOrderTest.java @@ -97,6 +97,7 @@ public void testInitializeAtGLUSTERFSCluster() throws AmbariException { ClusterImpl cluster = createMock(ClusterImpl.class); Service service = createMock(Service.class); expect(service.getDesiredStackId()).andReturn(stackId); + expect(service.getServiceComponents()).andReturn(Collections.emptyMap()); expect(cluster.getClusterId()).andReturn(1L); expect(cluster.getService("GLUSTERFS")).andReturn(service); expect(cluster.getService("HDFS")).andReturn(null); @@ -149,6 +150,7 @@ public void testInitializeAtHDFSCluster() throws AmbariException { expect(cluster.getService("HDFS")).andReturn(hdfsService).atLeastOnce(); expect(cluster.getService("YARN")).andReturn(null).atLeastOnce(); + expect(hdfsService.getServiceComponents()).andReturn(Collections.emptyMap()); expect(hdfsService.getServiceComponent("JOURNALNODE")).andReturn(null); expect(hdfsService.getDesiredStackId()).andReturn(new StackId("HDP", "2.0.6")); // expect(cluster.getCurrentStackVersion()).andReturn(new StackId("HDP", "2.0.6")); @@ -196,6 +198,7 @@ public void testInitializeAtHaHDFSCluster() throws AmbariException { expect(cluster.getService("HDFS")).andReturn(hdfsService).atLeastOnce(); expect(cluster.getService("YARN")).andReturn(null); + expect(hdfsService.getServiceComponents()).andReturn(Collections.emptyMap()); expect(hdfsService.getServiceComponent("JOURNALNODE")).andReturn(journalnodeSC); expect(hdfsService.getDesiredStackId()).andReturn(new StackId("HDP", "2.0.6")); // expect(cluster.getCurrentStackVersion()).andReturn(new StackId("HDP", "2.0.6")); @@ -245,6 +248,7 @@ public void testInitializeAtHaRMCluster() throws AmbariException { expect(cluster.getService("YARN")).andReturn(yarnService).atLeastOnce(); expect(cluster.getService("HDFS")).andReturn(null); + expect(yarnService.getServiceComponents()).andReturn(Collections.emptyMap()); expect(yarnService.getServiceComponent("RESOURCEMANAGER")).andReturn(resourcemanagerSC).anyTimes(); expect(resourcemanagerSC.getServiceComponentHosts()).andReturn(hostComponents).anyTimes(); // expect(cluster.getCurrentStackVersion()).andReturn(new StackId("HDP", "2.0.6")); @@ -301,6 +305,7 @@ public void testMissingRestartDependenciesAdded() throws Exception { expect(cluster.getService("YARN")).andReturn(yarnService).atLeastOnce(); expect(cluster.getService("HDFS")).andReturn(null); + expect(yarnService.getServiceComponents()).andReturn(Collections.emptyMap()); expect(yarnService.getServiceComponent("RESOURCEMANAGER")).andReturn(resourcemanagerSC).anyTimes(); expect(yarnService.getDesiredStackId()).andReturn(new StackId("HDP", "2.0.6")).anyTimes(); expect(resourcemanagerSC.getServiceComponentHosts()).andReturn(hostComponents).anyTimes(); @@ -398,6 +403,7 @@ public void testInitializeDefault() throws IOException { expect(cluster.getService("HDFS")).andReturn(hdfsService).atLeastOnce(); expect(cluster.getService("YARN")).andReturn(null); + 
expect(hdfsService.getServiceComponents()).andReturn(Collections.emptyMap()); expect(hdfsService.getServiceComponent("JOURNALNODE")).andReturn(null); //There is no rco file in this stack, should use default // expect(cluster.getCurrentStackVersion()).andReturn(new StackId("HDP", "2.0.5")); @@ -490,6 +496,7 @@ public void testOverride() throws Exception { expect(cluster.getService("HDFS")).andReturn(hdfsService).atLeastOnce(); expect(cluster.getService("YARN")).andReturn(null).atLeastOnce(); + expect(hdfsService.getServiceComponents()).andReturn(Collections.emptyMap()).anyTimes(); expect(hdfsService.getServiceComponent("JOURNALNODE")).andReturn(null); expect(hdfsService.getDesiredStackId()).andReturn(new StackId("HDP", "2.2.0")).anyTimes(); expect(cluster.getServices()).andReturn(ImmutableMap.builder() diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java index 1bd79e190fa..b56835424eb 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/AmbariContextTest.java @@ -43,6 +43,7 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.stream.Stream; import org.apache.ambari.server.controller.AmbariManagementController; import org.apache.ambari.server.controller.ClusterRequest; @@ -99,11 +100,11 @@ public class AmbariContextTest { private static final long CLUSTER_ID = 1L; private static final String STACK_NAME = "testStack"; private static final String STACK_VERSION = "testVersion"; + private static final StackId STACK_ID = new StackId(STACK_NAME, STACK_VERSION); private static final String HOST_GROUP_1 = "group1"; private static final String HOST_GROUP_2 = "group2"; private static final String HOST1 = "host1"; private static final String HOST2 = "host2"; - StackId stackId = new StackId(STACK_NAME, STACK_VERSION); private static final AmbariContext context = new AmbariContext(); private static final AmbariManagementController controller = createNiceMock(AmbariManagementController.class); @@ -242,9 +243,12 @@ public void setUp() throws Exception { expect(blueprint.getName()).andReturn(BP_NAME).anyTimes(); expect(blueprint.getStack()).andReturn(stack).anyTimes(); + expect(blueprint.getStackIds()).andReturn(Collections.singleton(STACK_ID)).anyTimes(); expect(blueprint.getServices()).andReturn(blueprintServices).anyTimes(); expect(blueprint.getComponentNames("service1")).andReturn(Arrays.asList("s1Component1", "s1Component2")).anyTimes(); expect(blueprint.getComponentNames("service2")).andReturn(Collections.singleton("s2Component1")).anyTimes(); + expect(blueprint.getStackIdsForService("service1")).andReturn(ImmutableSet.of(STACK_ID)).anyTimes(); + expect(blueprint.getStackIdsForService("service2")).andReturn(ImmutableSet.of(STACK_ID)).anyTimes(); expect(blueprint.getConfiguration()).andReturn(bpConfiguration).anyTimes(); expect(blueprint.getCredentialStoreEnabled("service1")).andReturn("true").anyTimes(); @@ -252,7 +256,7 @@ public void setUp() throws Exception { expect(stack.getVersion()).andReturn(STACK_VERSION).anyTimes(); for (Map.Entry entry : configTypeServiceMapping.entrySet()) { - expect(stack.getServicesForConfigType(entry.getKey())).andReturn(singletonList(entry.getValue())).anyTimes(); + expect(stack.getServicesForConfigType(entry.getKey())).andReturn(Stream.of(entry.getValue())).anyTimes(); } 
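One caveat about the Stream-valued stub above: a java.util.stream.Stream can be consumed only once, so combining andReturn(Stream.of(...)) with anyTimes() hands every caller after the first a stream that is already spent. The current tests appear to look each config type up only once, but if that ever changes, an answer that builds a fresh stream per invocation is safer (a sketch, assuming configTypeServiceMapping maps config type to service name as the surrounding code suggests):

  for (Map.Entry<String, String> entry : configTypeServiceMapping.entrySet()) {
    expect(stack.getServicesForConfigType(entry.getKey()))
        .andAnswer(() -> Stream.of(entry.getValue()))   // new Stream on every call
        .anyTimes();
  }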
expect(controller.getClusters()).andReturn(clusters).anyTimes(); diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java index c2bf712ae93..722345fc0fa 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintFactoryTest.java @@ -26,7 +26,6 @@ import static org.easymock.EasyMock.expect; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.powermock.api.easymock.PowerMock.createStrictMock; import static org.powermock.api.easymock.PowerMock.expectNew; @@ -36,11 +35,10 @@ import java.lang.reflect.Field; import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; +import java.util.stream.Stream; import org.apache.ambari.server.ObjectNotFoundException; import org.apache.ambari.server.controller.AmbariManagementController; @@ -51,6 +49,7 @@ import org.apache.ambari.server.orm.entities.BlueprintConfigEntity; import org.apache.ambari.server.orm.entities.BlueprintEntity; import org.apache.ambari.server.stack.NoSuchStackException; +import org.apache.ambari.server.state.StackId; import org.easymock.EasyMockSupport; import org.junit.After; import org.junit.Before; @@ -59,7 +58,6 @@ import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; -import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; @@ -85,19 +83,18 @@ public class BlueprintFactoryTest { public void init() throws Exception { setPrivateField(factory, "blueprintDAO", dao); - Map> componentMap = new HashMap<>(); - Collection components1 = new HashSet<>(); - componentMap.put("test-service1", components1); - components1.add("component1"); - Collection components2 = new HashSet<>(); - componentMap.put("test-service2", components2); - components2.add("component2"); + Set stackIds = ImmutableSet.of(new StackId("stack", "0.1")); + Collection services = ImmutableSet.of("test-service1", "test-service2"); + Collection components = ImmutableSet.of("component1", "component2"); - expect(stack.getComponents()).andReturn(componentMap).anyTimes(); + expect(stack.getServices()).andReturn(services).anyTimes(); + expect(stack.getComponents()).andReturn(components).anyTimes(); expect(stack.isMasterComponent("component1")).andReturn(true).anyTimes(); expect(stack.isMasterComponent("component2")).andReturn(false).anyTimes(); expect(stack.getServiceForComponent("component1")).andReturn("test-service1").anyTimes(); expect(stack.getServiceForComponent("component2")).andReturn("test-service2").anyTimes(); + expect(stack.getStacksForService(anyString())).andReturn(stackIds).anyTimes(); + expect(stack.getStacksForComponent(anyString())).andReturn(stackIds).anyTimes(); } @After @@ -131,13 +128,15 @@ public void testGetMultiInstanceBlueprint() throws Exception { reset(dao); expect(dao.findByName(BLUEPRINT_NAME)).andReturn(expectedBlueprint.toEntity()); Stack hdpStack = createNiceMock(Stack.class); - expect(hdpStack.getName()).andReturn("HDPCORE").anyTimes(); - expect(hdpStack.getVersion()).andReturn("3.0.0.0").anyTimes(); + StackId hdp = new 
StackId("HDPCORE-3.0", "3.0.0.0"); + StackId edw = new StackId("EDW-3.1", "3.1.0.0"); + expect(hdpStack.getName()).andReturn(hdp.getStackName()).anyTimes(); + expect(hdpStack.getVersion()).andReturn(hdp.getStackVersion()).anyTimes(); Stack edwStack = createNiceMock(Stack.class); - expect(edwStack.getName()).andReturn("EDW").anyTimes(); - expect(edwStack.getVersion()).andReturn("3.1.0.0").anyTimes(); - expectNew(Stack.class, eq("HDPCORE-3.0"), anyString(), anyObject(AmbariManagementController.class)).andReturn(hdpStack).anyTimes(); - expectNew(Stack.class, eq("EDW-3.1"), anyString(), anyObject(AmbariManagementController.class)).andReturn(edwStack).anyTimes(); + expect(edwStack.getName()).andReturn(edw.getStackName()).anyTimes(); + expect(edwStack.getVersion()).andReturn(edw.getStackVersion()).anyTimes(); + expectNew(Stack.class, eq(hdp.getStackName()), eq(hdp.getStackVersion()), anyObject(AmbariManagementController.class)).andReturn(hdpStack).anyTimes(); + expectNew(Stack.class, eq(edw.getStackVersion()), eq(edw.getStackVersion()), anyObject(AmbariManagementController.class)).andReturn(edwStack).anyTimes(); replay(Stack.class, hdpStack, edwStack, dao); // test @@ -153,9 +152,8 @@ public void testGetMultiInstanceBlueprint() throws Exception { Set serviceInstanceTypes = hdpCore.getServiceInstances().stream().map(ServiceInstance::getType).collect(toSet()); assertEquals(ImmutableSet.of("ZOOKEEPER"), serviceInstanceTypes); - Set stackNames = - blueprint.getStacks().stream().map(Stack::getName).collect(Collectors.toSet()); - assertEquals(ImmutableSet.of("HDPCORE", "EDW"), stackNames); + Set stackIds = blueprint.getStackIds(); + assertEquals(ImmutableSet.of(hdp, edw), stackIds); assertEquals(1, blueprint.getHostGroups().size()); } @@ -175,7 +173,6 @@ public void testCreateBlueprint() throws Exception { Blueprint blueprint = testFactory.createBlueprint(props, null); assertEquals(BLUEPRINT_NAME, blueprint.getName()); - assertSame(stack, blueprint.getStack()); assertEquals(2, blueprint.getHostGroups().size()); Map hostGroups = blueprint.getHostGroups(); @@ -221,11 +218,11 @@ public void testCreateMultiInstanceBlueprint() throws Exception { } public Blueprint createMultiInstanceBlueprint() throws Exception { - Map> allComponents = ImmutableMap.of( - "HDFS", ImmutableSet.of("NAMENODE", "SECONDARY_NAMENODE"), - "ZOOKEEPER", ImmutableSet.of("ZOOKEEPER_SERVER") - ); + Collection allComponents = ImmutableSet.of("NAMENODE", "SECONDARY_NAMENODE", "ZOOKEEPER_SERVER"); + Collection services = ImmutableSet.of("HDFS", "ZOOKEEPER"); + Set stackIds = ImmutableSet.of(new StackId("HDPCORE-3.0", "3.0.0.0")); reset(stack); + expect(stack.getServices()).andReturn(services).anyTimes(); expect(stack.getComponents()).andReturn(allComponents).anyTimes(); expect(stack.isMasterComponent("NAMENODE")).andReturn(true).anyTimes(); expect(stack.isMasterComponent("ZOOKEEPER_SERVER")).andReturn(true).anyTimes(); @@ -233,6 +230,8 @@ public Blueprint createMultiInstanceBlueprint() throws Exception { expect(stack.getServiceForComponent("NAMENODE")).andReturn("HDFS").anyTimes(); expect(stack.getServiceForComponent("SECONDARY_NAMENODE")).andReturn("HDFS").anyTimes(); expect(stack.getServiceForComponent("ZOOKEEPER_SERVER")).andReturn("ZOOKEEPER").anyTimes(); + expect(stack.getStacksForService(anyString())).andReturn(stackIds).anyTimes(); + expect(stack.getStacksForComponent(anyString())).andReturn(stackIds).anyTimes(); replay(stack, dao, entity, configEntity); @@ -259,13 +258,13 @@ public void testCreateInvalidStack() throws Exception { 
mockSupport.createMock(BlueprintFactory.StackFactory.class); // setup mock to throw exception, to simulate invalid stack request - expect(mockStackFactory.createStack("null", "null", null)).andThrow(new ObjectNotFoundException("Invalid Stack")); + expect(mockStackFactory.createStack(new StackId(), null)).andThrow(new ObjectNotFoundException("Invalid Stack")); mockSupport.replayAll(); BlueprintFactory factoryUnderTest = new BlueprintFactory(mockStackFactory); - factoryUnderTest.createStack(new HashMap<>()); + factoryUnderTest.createStack(new StackId()); mockSupport.verifyAll(); } @@ -321,6 +320,47 @@ public void testCreate_HostGroupWithInvalidComponent() throws Exception { testFactory.createBlueprint(props, null); } + @Test(expected = IllegalArgumentException.class) // THEN + public void verifyDefinitionsDisjointShouldRejectDuplication() { + // GIVEN + final String service1 = "unique service"; + final String service2 = "duplicated service"; + StackId stack1 = new StackId("a_stack", "1.0"); + StackId stack2 = new StackId("another_stack", "0.9"); + Stream stream = ImmutableSet.of(service1, service2).stream(); + + // WHEN + BlueprintFactory.verifyStackDefinitionsAreDisjoint(stream, "Services", service -> { + switch (service) { + case service1: return ImmutableSet.of(stack1); + case service2: return ImmutableSet.of(stack1, stack2); + default: return null; + } + }); + } + + @Test + public void verifyStackDefinitionsAreDisjointShouldAllowDisjointStacks() { + // GIVEN + final String service1 = "unique service"; + final String service2 = "another service"; + StackId stack1 = new StackId("a_stack", "1.0"); + StackId stack2 = new StackId("another_stack", "0.9"); + Stream stream = ImmutableSet.of(service1, service2).stream(); + + // WHEN + BlueprintFactory.verifyStackDefinitionsAreDisjoint(stream, "Services", service -> { + switch (service) { + case service1: return ImmutableSet.of(stack1); + case service2: return ImmutableSet.of(stack2); + default: return null; + } + }); + + // THEN + // no exception expected + } + private class TestBlueprintFactory extends BlueprintFactory { private Stack stack; @@ -329,7 +369,7 @@ public TestBlueprintFactory(Stack stack) { } @Override - protected Stack loadStack(String stackName, String stackVersion) throws NoSuchStackException { + protected Stack createStack(StackId stackId) throws NoSuchStackException { return stack; } } diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java index 057332d4013..362f4e4889f 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintImplTest.java @@ -18,16 +18,16 @@ package org.apache.ambari.server.topology; +import static java.util.Collections.emptySet; import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.createNiceMock; import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.mock; import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.reset; import static org.easymock.EasyMock.verify; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import java.lang.reflect.Field; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -61,7 +61,8 @@ public class BlueprintImplTest { Map> properties = new HashMap<>(); Map hdfsProps = new HashMap<>(); 
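For context on the verifyStackDefinitionsAreDisjoint tests added to BlueprintFactoryTest above: the production method is not part of this hunk, but the behaviour those tests encode is roughly the following (an assumed sketch, not the actual BlueprintFactory code; names and generics are illustrative, and the usual java.util.function / java.util.stream imports are presumed):

  static <T> void verifyStackDefinitionsAreDisjoint(Stream<T> items, String description,
                                                    Function<T, Set<StackId>> stacksOf) {
    // collect every item (service, component, ...) that more than one stack claims to define
    Set<T> definedInMultipleStacks = items
        .filter(item -> stacksOf.apply(item).size() > 1)
        .collect(Collectors.toSet());
    if (!definedInMultipleStacks.isEmpty()) {
      throw new IllegalArgumentException(description + " must be defined by a single stack: " + definedInMultipleStacks);
    }
  }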
Configuration configuration = new Configuration(properties, EMPTY_ATTRIBUTES, EMPTY_CONFIGURATION); - org.apache.ambari.server.configuration.Configuration serverConfig; + private final org.apache.ambari.server.configuration.Configuration serverConfig = createNiceMock(org.apache.ambari.server.configuration.Configuration.class); + private final BlueprintValidator blueprintValidator = new BlueprintValidatorImpl(serverConfig); @Before public void setup() throws NoSuchFieldException, IllegalAccessException { @@ -106,7 +107,7 @@ public void setup() throws NoSuchFieldException, IllegalAccessException { expect(stack.getRequiredConfigurationProperties("HDFS")).andReturn(requiredHDFSProperties).anyTimes(); expect(stack.getRequiredConfigurationProperties("SERVICE2")).andReturn(requiredService2Properties).anyTimes(); - serverConfig = setupConfigurationWithGPLLicense(true); + setupConfigurationWithGPLLicense(true); } @Test @@ -124,8 +125,8 @@ public void testValidateConfigurations__basic_positive() throws Exception { category2Props.put("prop2", "val"); SecurityConfiguration securityConfiguration = new SecurityConfiguration(SecurityType.KERBEROS, "testRef", null); - Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, securityConfiguration, null); - blueprint.validateRequiredProperties(); + Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, emptySet(), emptySet(), configuration, securityConfiguration, null); + blueprintValidator.validateRequiredProperties(blueprint); BlueprintEntity entity = blueprint.toEntity(); verify(stack, group1, group2, serverConfig); @@ -162,8 +163,8 @@ public void testValidateConfigurations__hostGroupConfig() throws Exception { hadoopProps.put("dfs_ha_initial_namenode_active", "%HOSTGROUP:group1%"); hadoopProps.put("dfs_ha_initial_namenode_standby", "%HOSTGROUP:group2%"); replay(stack, group1, group2, serverConfig); - Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null, null); - blueprint.validateRequiredProperties(); + Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, emptySet(), emptySet(), configuration, null, null); + blueprintValidator.validateRequiredProperties(blueprint); BlueprintEntity entity = blueprint.toEntity(); verify(stack, group1, group2, serverConfig); assertTrue(entity.getSecurityType() == SecurityType.NONE); @@ -202,8 +203,8 @@ public void testValidateConfigurations__hostGroupConfigForNameNodeHAPositive() t hadoopProps.put("dfs_ha_initial_namenode_standby", "%HOSTGROUP::group2%"); replay(stack, group1, group2, serverConfig); - Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null, null); - blueprint.validateRequiredProperties(); + Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, emptySet(), emptySet(), configuration, null, null); + blueprintValidator.validateRequiredProperties(blueprint); BlueprintEntity entity = blueprint.toEntity(); verify(stack, group1, group2, serverConfig); @@ -243,8 +244,8 @@ public void testValidateConfigurations__hostGroupConfigForNameNodeHAInCorrectHos hadoopProps.put("dfs_ha_initial_namenode_active", "%HOSTGROUP::group2%"); hadoopProps.put("dfs_ha_initial_namenode_standby", "%HOSTGROUP::group3%"); replay(stack, group1, group2, serverConfig); - Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null, null); - blueprint.validateRequiredProperties(); + Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, emptySet(), emptySet(), configuration, 
null, null); + blueprintValidator.validateRequiredProperties(blueprint); verify(stack, group1, group2, serverConfig); } @Test(expected= IllegalArgumentException.class) @@ -279,8 +280,8 @@ public void testValidateConfigurations__hostGroupConfigForNameNodeHAMappedSameHo hadoopProps.put("dfs_ha_initial_namenode_active", "%HOSTGROUP::group2%"); hadoopProps.put("dfs_ha_initial_namenode_standby", "%HOSTGROUP::group2%"); replay(stack, group1, group2, serverConfig); - Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null, null); - blueprint.validateRequiredProperties(); + Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, emptySet(), emptySet(), configuration, null, null); + blueprintValidator.validateRequiredProperties(blueprint); verify(stack, group1, group2, serverConfig); } @Test(expected = InvalidTopologyException.class) @@ -294,8 +295,8 @@ public void testValidateConfigurations__secretReference() throws InvalidTopology hdfsProps.put("secret", "SECRET:hdfs-site:1:test"); replay(stack, group1, group2, serverConfig); - Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null, null); - blueprint.validateRequiredProperties(); + Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, emptySet(), emptySet(), configuration, null, null); + blueprintValidator.validateRequiredProperties(blueprint); verify(stack, group1, group2, serverConfig); } @@ -308,11 +309,11 @@ public void testValidateConfigurations__gplIsNotAllowedCodecsProperty() throws I }}); Configuration lzoUsageConfiguration = new Configuration(lzoProperties, EMPTY_ATTRIBUTES, EMPTY_CONFIGURATION); - serverConfig = setupConfigurationWithGPLLicense(false); + setupConfigurationWithGPLLicense(false); replay(stack, group1, group2, serverConfig); - Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, lzoUsageConfiguration, null, null); - blueprint.validateRequiredProperties(); + Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, emptySet(), emptySet(), lzoUsageConfiguration, null, null); + blueprintValidator.validateRequiredProperties(blueprint); verify(stack, group1, group2, serverConfig); } @@ -325,11 +326,11 @@ public void testValidateConfigurations__gplIsNotAllowedLZOProperty() throws Inva }}); Configuration lzoUsageConfiguration = new Configuration(lzoProperties, EMPTY_ATTRIBUTES, EMPTY_CONFIGURATION); - serverConfig = setupConfigurationWithGPLLicense(false); + setupConfigurationWithGPLLicense(false); replay(stack, group1, group2, serverConfig); - Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, lzoUsageConfiguration, null, null); - blueprint.validateRequiredProperties(); + Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, emptySet(), emptySet(), lzoUsageConfiguration, null, null); + blueprintValidator.validateRequiredProperties(blueprint); verify(stack, group1, group2, serverConfig); } @@ -346,8 +347,8 @@ public void testValidateConfigurations__gplISAllowed() throws InvalidTopologyExc expect(group2.getConfiguration()).andReturn(EMPTY_CONFIGURATION).atLeastOnce(); replay(stack, group1, group2, serverConfig); - Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, lzoUsageConfiguration, null, null); - blueprint.validateRequiredProperties(); + Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, emptySet(), emptySet(), lzoUsageConfiguration, null, null); + blueprintValidator.validateRequiredProperties(blueprint); verify(stack, group1, group2, serverConfig); } @@ 
-360,7 +361,7 @@ public void testAutoSkipFailureEnabled() { expect(setting.getSettingValue(Setting.SETTING_NAME_DEPLOYMENT_SETTINGS)).andReturn(Collections.singleton(skipFailureSetting)); replay(stack, setting); - Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null, setting); + Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, emptySet(), emptySet(), configuration, null, setting); assertTrue(blueprint.shouldSkipFailure()); verify(stack, setting); @@ -373,21 +374,15 @@ public void testAutoSkipFailureDisabled() { expect(setting.getSettingValue(Setting.SETTING_NAME_DEPLOYMENT_SETTINGS)).andReturn(Collections.singleton(skipFailureSetting)); replay(stack, setting); - Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, configuration, null, setting); + Blueprint blueprint = new BlueprintImpl("test", hostGroups, stack, emptySet(), emptySet(), configuration, null, setting); assertFalse(blueprint.shouldSkipFailure()); verify(stack, setting); } - public static org.apache.ambari.server.configuration.Configuration setupConfigurationWithGPLLicense(boolean isGPLAllowed) - throws NoSuchFieldException, IllegalAccessException { - org.apache.ambari.server.configuration.Configuration serverConfig = - mock(org.apache.ambari.server.configuration.Configuration.class); + private org.apache.ambari.server.configuration.Configuration setupConfigurationWithGPLLicense(boolean isGPLAllowed) { + reset(serverConfig); expect(serverConfig.getGplLicenseAccepted()).andReturn(isGPLAllowed).atLeastOnce(); - - Field field = BlueprintValidatorImpl.class.getDeclaredField("configuration"); - field.setAccessible(true); - field.set(null, serverConfig); return serverConfig; } @@ -501,7 +496,7 @@ public static org.apache.ambari.server.configuration.Configuration setupConfigur // // // set expectations // expect(blueprintFactory.createBlueprint(setProperties.iterator().next())).andReturn(blueprint).once(); -// expect(blueprint.validateRequiredProperties()).andReturn(Collections.>>emptyMap()).once(); +// expect(blueprintValidator.validateRequiredProperties()).andReturn(Collections.>>emptyMap()).once(blueprint); // expect(blueprint.toEntity()).andReturn(entity); // expect(blueprint.getName()).andReturn(BLUEPRINT_NAME).atLeastOnce(); // expect(managementController.getStackServices(capture(stackServiceRequestCapture))).andReturn( diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintValidatorImplTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintValidatorImplTest.java index e88bef3d237..46396877906 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintValidatorImplTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/BlueprintValidatorImplTest.java @@ -38,6 +38,7 @@ import org.apache.ambari.server.state.ComponentInfo; import org.apache.ambari.server.state.DependencyConditionInfo; import org.apache.ambari.server.state.DependencyInfo; +import org.apache.ambari.server.state.StackId; import org.easymock.EasyMock; import org.easymock.EasyMockRule; import org.easymock.Mock; @@ -47,6 +48,8 @@ import org.junit.Rule; import org.junit.Test; +import com.google.common.collect.ImmutableSet; + /** * BlueprintValidatorImpl unit tests. 
*/ @@ -99,6 +102,7 @@ public void setup() { autoDeploy.setEnabled(true); autoDeploy.setCoLocate("service1/component2"); + expect(blueprint.getStackIds()).andReturn(ImmutableSet.of(new StackId("HDP", "2.2"))).anyTimes(); expect(blueprint.getStack()).andReturn(stack).anyTimes(); expect(blueprint.getHostGroups()).andReturn(hostGroups).anyTimes(); expect(blueprint.getServices()).andReturn(services).anyTimes(); @@ -124,7 +128,6 @@ public void setup() { dependenciesConditionInfos1.add(dependencyConditionInfo2); expect(blueprint.getConfiguration()).andReturn(configuration).anyTimes(); - expect(blueprint.isAllMpacksResolved()).andReturn(true); } @After @@ -146,8 +149,8 @@ public void testValidateTopology_basic() throws Exception { expect(blueprint.getHostGroupsForComponent("component2")).andReturn(Arrays.asList(group1, group2)).anyTimes(); replay(blueprint, stack, group1, group2, dependency1); - BlueprintValidator validator = new BlueprintValidatorImpl(blueprint); - validator.validateTopology(); + BlueprintValidator validator = new BlueprintValidatorImpl(null); + validator.validateTopology(blueprint); } @Test(expected = InvalidTopologyException.class) @@ -162,8 +165,8 @@ public void testValidateTopology_basic_negative() throws Exception { expect(blueprint.getHostGroupsForComponent("component2")).andReturn(Arrays.asList(group1, group2)).anyTimes(); replay(blueprint, stack, group1, group2, dependency1); - BlueprintValidator validator = new BlueprintValidatorImpl(blueprint); - validator.validateTopology(); + BlueprintValidator validator = new BlueprintValidatorImpl(null); + validator.validateTopology(blueprint); } @Test @@ -180,8 +183,8 @@ public void testValidateTopology_autoDeploy() throws Exception { expect(group1.addComponent(new Component("component1"))).andReturn(true).once(); replay(blueprint, stack, group1, group2, dependency1); - BlueprintValidator validator = new BlueprintValidatorImpl(blueprint); - validator.validateTopology(); + BlueprintValidator validator = new BlueprintValidatorImpl(null); + validator.validateTopology(blueprint); verify(group1); } @@ -218,66 +221,12 @@ public void testValidateTopology_autoDeploy_hasDependency() throws Exception { replay(blueprint, stack, group1, group2, dependency1, dependencyComponentInfo); - BlueprintValidator validator = new BlueprintValidatorImpl(blueprint); - validator.validateTopology(); + BlueprintValidator validator = new BlueprintValidatorImpl(null); + validator.validateTopology(blueprint); verify(group1); } - @Test(expected=InvalidTopologyException.class) - public void testValidateRequiredProperties_SqlaInHiveStackHdp22() throws Exception { - Map hiveEnvConfig = new HashMap<>(); - hiveEnvConfig.put("hive_database","Existing SQL Anywhere Database"); - configProperties.put("hive-env", hiveEnvConfig); - - group1Components.add("HIVE_METASTORE"); - - services.addAll(Arrays.asList("HIVE")); - - org.apache.ambari.server.configuration.Configuration serverConfig = - BlueprintImplTest.setupConfigurationWithGPLLicense(true); - - Configuration config = new Configuration(new HashMap<>(), new HashMap<>()); - expect(group1.getConfiguration()).andReturn(config).anyTimes(); - - expect(stack.getComponents("HIVE")).andReturn(Collections.singleton("HIVE_METASTORE")).anyTimes(); - expect(stack.getVersion()).andReturn("2.2").once(); - expect(stack.getName()).andReturn("HDP").once(); - - expect(blueprint.getHostGroupsForComponent("HIVE_METASTORE")).andReturn(Collections.singleton(group1)).anyTimes(); - - replay(blueprint, stack, group1, group2, dependency1, 
serverConfig); - BlueprintValidator validator = new BlueprintValidatorImpl(blueprint); - validator.validateRequiredProperties(); - } - - @Test(expected=InvalidTopologyException.class) - public void testValidateRequiredProperties_SqlaInOozieStackHdp22() throws Exception { - Map hiveEnvConfig = new HashMap<>(); - hiveEnvConfig.put("oozie_database","Existing SQL Anywhere Database"); - configProperties.put("oozie-env", hiveEnvConfig); - - group1Components.add("OOZIE_SERVER"); - - services.addAll(Arrays.asList("OOZIE")); - - org.apache.ambari.server.configuration.Configuration serverConfig = - BlueprintImplTest.setupConfigurationWithGPLLicense(true); - - Configuration config = new Configuration(new HashMap<>(), new HashMap<>()); - expect(group1.getConfiguration()).andReturn(config).anyTimes(); - - expect(stack.getComponents("OOZIE")).andReturn(Collections.singleton("OOZIE_SERVER")).anyTimes(); - expect(stack.getVersion()).andReturn("2.2").once(); - expect(stack.getName()).andReturn("HDP").once(); - - expect(blueprint.getHostGroupsForComponent("OOZIE_SERVER")).andReturn(Collections.singleton(group1)).anyTimes(); - - replay(blueprint, stack, group1, group2, dependency1, serverConfig); - BlueprintValidator validator = new BlueprintValidatorImpl(blueprint); - validator.validateRequiredProperties(); - } - @Test public void testShouldDependencyBeExcludedWenRelatedServiceIsNotInBlueprint() throws Exception { // GIVEN @@ -317,8 +266,8 @@ public void testShouldDependencyBeExcludedWenRelatedServiceIsNotInBlueprint() th replay(blueprint, stack, group1, group2, dependency1, dependencyComponentInfo); // WHEN - BlueprintValidator validator = new BlueprintValidatorImpl(blueprint); - validator.validateTopology(); + BlueprintValidator validator = new BlueprintValidatorImpl(null); + validator.validateTopology(blueprint); // THEN verify(group1); @@ -360,8 +309,8 @@ public void testShouldThrowErrorWhenDependentComponentIsNotInBlueprint() throws replay(blueprint, stack, group1, group2, dependency1, dependencyComponentInfo); // WHEN - BlueprintValidator validator = new BlueprintValidatorImpl(blueprint); - validator.validateTopology(); + BlueprintValidator validator = new BlueprintValidatorImpl(null); + validator.validateTopology(blueprint); // THEN verify(group1); @@ -422,8 +371,8 @@ public void testWhenComponentIsConditionallyDependentAndOnlyOneOfTheConditionsIs replay(blueprint, stack, group1, group2, dependency1, dependency2, dependencyComponentInfo,dependencyConditionInfo1,dependencyConditionInfo2); // WHEN - BlueprintValidator validator = new BlueprintValidatorImpl(blueprint); - validator.validateTopology(); + BlueprintValidator validator = new BlueprintValidatorImpl(null); + validator.validateTopology(blueprint); // THEN verify(group1); diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java index 28ce72a167f..c4ba8d8ca6c 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterConfigurationRequestTest.java @@ -64,6 +64,7 @@ import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Maps; /** @@ -107,8 +108,9 @@ public class ClusterConfigurationRequestTest { @Mock(type = MockType.NICE) 
private ConfigHelper configHelper; - private final String STACK_NAME = "testStack"; - private final String STACK_VERSION = "1"; + private static final String STACK_NAME = "testStack"; + private static final String STACK_VERSION = "1"; + private static final StackId STACK_ID = new StackId(STACK_NAME, STACK_VERSION); private final Map> stackProperties = new HashMap<>(); private final Map defaultClusterEnvProperties = new HashMap<>(); @@ -235,6 +237,7 @@ private Capture> testProcessWithKerberos(String blueprintP expect(clusters.getCluster("testCluster")).andReturn(cluster).anyTimes(); expect(blueprint.getStack()).andReturn(stack).anyTimes(); + expect(blueprint.getStackIds()).andReturn(ImmutableSet.of(STACK_ID)).anyTimes(); expect(stack.getName()).andReturn(STACK_NAME).anyTimes(); expect(stack.getVersion()).andReturn(STACK_VERSION).anyTimes(); expect(stack.getServiceForConfigType("testConfigType")).andReturn("KERBEROS").anyTimes(); @@ -278,7 +281,7 @@ private Capture> testProcessWithKerberos(String blueprintP .emptyList()).anyTimes(); expect(configHelper.getDefaultStackProperties( - EasyMock.eq(new StackId(STACK_NAME, STACK_VERSION)))).andReturn(stackProperties).anyTimes(); + EasyMock.eq(STACK_ID))).andReturn(stackProperties).anyTimes(); if (kerberosConfig == null) { kerberosConfig = new HashMap<>(); @@ -327,6 +330,7 @@ public void testProcessClusterConfigRequestDontIncludeKererosConfigs() throws Ex expect(clusters.getCluster("testCluster")).andReturn(cluster).anyTimes(); expect(blueprint.getStack()).andReturn(stack).anyTimes(); + expect(blueprint.getStackIds()).andReturn(ImmutableSet.of(STACK_ID)).anyTimes(); expect(stack.getName()).andReturn(STACK_NAME).anyTimes(); expect(stack.getVersion()).andReturn(STACK_VERSION).anyTimes(); expect(stack.getAllConfigurationTypes(anyString())).andReturn(Collections.singletonList("testConfigType")).anyTimes(); @@ -363,7 +367,7 @@ public void testProcessClusterConfigRequestDontIncludeKererosConfigs() throws Ex .emptyList()).anyTimes(); expect(configHelper.getDefaultStackProperties( - EasyMock.eq(new StackId(STACK_NAME, STACK_VERSION)))).andReturn(stackProperties).anyTimes(); + EasyMock.eq(STACK_ID))).andReturn(stackProperties).anyTimes(); PowerMock.replay(stack, blueprint, topology, controller, clusters, ambariContext, AmbariContext.class, configHelper); @@ -404,7 +408,7 @@ public void testProcessClusterConfigRequestRemoveUnusedConfigTypes() throws Exce expect(ambariContext.getConfigHelper()).andReturn(configHelper).anyTimes(); expect(configHelper.getDefaultStackProperties( - EasyMock.eq(new StackId(STACK_NAME, STACK_VERSION)))).andReturn(stackProperties).anyTimes(); + EasyMock.eq(STACK_ID))).andReturn(stackProperties).anyTimes(); EasyMock.replay(stack, blueprint, topology, ambariContext, configHelper); // WHEN @@ -452,7 +456,7 @@ public void testProcessClusterConfigRequestWithOnlyHostGroupConfigRemoveUnusedCo expect(ambariContext.getConfigHelper()).andReturn(configHelper).anyTimes(); expect(configHelper.getDefaultStackProperties( - EasyMock.eq(new StackId(STACK_NAME, STACK_VERSION)))).andReturn(stackProperties).anyTimes(); + EasyMock.eq(STACK_ID))).andReturn(stackProperties).anyTimes(); EasyMock.replay(stack, blueprint, topology, ambariContext, configHelper); diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java index d0e82f21a56..55db7684f18 100644 --- 
a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterDeployWithStartOnlyTest.java @@ -61,6 +61,7 @@ import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.ComponentInfo; import org.apache.ambari.server.state.SecurityType; +import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.topology.tasks.ConfigureClusterTask; import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory; import org.apache.ambari.server.topology.validators.TopologyValidatorService; @@ -79,6 +80,8 @@ import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; +import com.google.common.collect.ImmutableSet; + @RunWith(PowerMockRunner.class) @PrepareForTest(AmbariServer.class) public class ClusterDeployWithStartOnlyTest extends EasyMockSupport { @@ -87,6 +90,7 @@ public class ClusterDeployWithStartOnlyTest extends EasyMockSupport { private static final String BLUEPRINT_NAME = "test-bp"; private static final String STACK_NAME = "test-stack"; private static final String STACK_VERSION = "test-stack-version"; + private static final StackId STACK_ID = new StackId(STACK_NAME, STACK_VERSION); @Rule public EasyMockRule mocks = new EasyMockRule(this); @@ -189,8 +193,6 @@ public class ClusterDeployWithStartOnlyTest extends EasyMockSupport { private Map> group1ServiceComponents = new HashMap<>(); private Map> group2ServiceComponents = new HashMap<>(); - private Map> serviceComponents = new HashMap<>(); - private String predicate = "Hosts/host_name=foo"; private List topologyValidators = new ArrayList<>(); @@ -231,8 +233,9 @@ public void setup() throws Exception { groupMap.put("group1", group1); groupMap.put("group2", group2); - serviceComponents.put("service1", Arrays.asList("component1", "component3")); - serviceComponents.put("service2", Arrays.asList("component2", "component4")); + Collection components1 = ImmutableSet.of("component1", "component3"); + Collection components2 = ImmutableSet.of("component2", "component4"); + Collection components = ImmutableSet.builder().addAll(components1).addAll(components2).build(); group1ServiceComponents.put("service1", Arrays.asList("component1", "component3")); group1ServiceComponents.put("service2", Collections.singleton("component2")); @@ -254,6 +257,7 @@ public void setup() throws Exception { expect(blueprint.getName()).andReturn(BLUEPRINT_NAME).anyTimes(); expect(blueprint.getServices()).andReturn(Arrays.asList("service1", "service2")).anyTimes(); expect(blueprint.getStack()).andReturn(stack).anyTimes(); + expect(blueprint.getStackIds()).andReturn(ImmutableSet.of(STACK_ID)).anyTimes(); expect(blueprint.isValidConfigType(anyString())).andReturn(true).anyTimes(); expect(blueprint.getRepositorySettings()).andReturn(new ArrayList<>()).anyTimes(); // don't expect toEntity() @@ -277,9 +281,9 @@ public void setup() throws Exception { expect(stack.getCardinality("component2")).andReturn(new Cardinality("1")).anyTimes(); expect(stack.getCardinality("component3")).andReturn(new Cardinality("1+")).anyTimes(); expect(stack.getCardinality("component4")).andReturn(new Cardinality("1+")).anyTimes(); - expect(stack.getComponents()).andReturn(serviceComponents).anyTimes(); - expect(stack.getComponents("service1")).andReturn(serviceComponents.get("service1")).anyTimes(); - 
expect(stack.getComponents("service2")).andReturn(serviceComponents.get("service2")).anyTimes(); + expect(stack.getComponents()).andReturn(components).anyTimes(); + expect(stack.getComponents("service1")).andReturn(components1).anyTimes(); + expect(stack.getComponents("service2")).andReturn(components2).anyTimes(); expect(stack.getConfiguration()).andReturn(stackConfig).anyTimes(); expect(stack.getName()).andReturn(STACK_NAME).anyTimes(); expect(stack.getVersion()).andReturn(STACK_VERSION).anyTimes(); @@ -391,7 +395,7 @@ public void setup() throws Exception { ambariContext.setConfigurationOnCluster(capture(updateClusterConfigRequestCapture)); expectLastCall().times(3); - ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION); + ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_ID); expectLastCall().once(); expect(configureClusterTaskFactory.createConfigureClusterTask(anyObject(), anyObject(), anyObject())).andReturn(configureClusterTask); diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java index c64e2281100..bac0e5b9f56 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartOnComponentLevelTest.java @@ -18,7 +18,6 @@ package org.apache.ambari.server.topology; -import static java.util.stream.Collectors.toList; import static org.apache.ambari.server.controller.internal.ProvisionAction.INSTALL_AND_START; import static org.easymock.EasyMock.anyBoolean; import static org.easymock.EasyMock.anyLong; @@ -63,6 +62,7 @@ import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.ComponentInfo; import org.apache.ambari.server.state.SecurityType; +import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.topology.tasks.ConfigureClusterTask; import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory; import org.apache.ambari.server.topology.validators.TopologyValidatorService; @@ -81,6 +81,8 @@ import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; +import com.google.common.collect.ImmutableSet; + @RunWith(PowerMockRunner.class) @PrepareForTest(AmbariServer.class) public class ClusterInstallWithoutStartOnComponentLevelTest extends EasyMockSupport { @@ -89,6 +91,7 @@ public class ClusterInstallWithoutStartOnComponentLevelTest extends EasyMockSupp private static final String BLUEPRINT_NAME = "test-bp"; private static final String STACK_NAME = "test-stack"; private static final String STACK_VERSION = "test-stack-version"; + private static final StackId STACK_ID = new StackId(STACK_NAME, STACK_VERSION); @Rule public EasyMockRule mocks = new EasyMockRule(this); @@ -186,9 +189,6 @@ public class ClusterInstallWithoutStartOnComponentLevelTest extends EasyMockSupp private Map> group1ServiceComponents = new HashMap<>(); private Map> group2ServiceComponents = new HashMap<>(); - private Map> serviceComponents = new HashMap<>(); - private Map> serviceComponentNames = new HashMap<>(); - private String predicate = "Hosts/host_name=foo"; private List topologyValidators = new ArrayList<>(); @@ -230,12 +230,9 @@ public void setup() throws Exception { groupMap.put("group1", group1); groupMap.put("group2", 
group2); - serviceComponents.put("service1", Arrays.asList(new Component("component1"), new Component("component3"))); - serviceComponents.put("service2", Arrays.asList(new Component("component2"), new Component("component4"))); - - for(Map.Entry> entry: serviceComponents.entrySet()) { - serviceComponentNames.put(entry.getKey(), entry.getValue().stream().map(Component::getName).collect(toList())); - } + Collection components1 = ImmutableSet.of("component1", "component3"); + Collection components2 = ImmutableSet.of("component2", "component4"); + Collection components = ImmutableSet.builder().addAll(components1).addAll(components2).build(); group1ServiceComponents.put("service1", Arrays.asList(new Component("component1"), new Component("component3"))); group1ServiceComponents.put("service2", Collections.singleton(new Component("component2"))); @@ -257,6 +254,7 @@ public void setup() throws Exception { expect(blueprint.getName()).andReturn(BLUEPRINT_NAME).anyTimes(); expect(blueprint.getServices()).andReturn(Arrays.asList("service1", "service2")).anyTimes(); expect(blueprint.getStack()).andReturn(stack).anyTimes(); + expect(blueprint.getStackIds()).andReturn(ImmutableSet.of(STACK_ID)).anyTimes(); expect(blueprint.isValidConfigType(anyString())).andReturn(true).anyTimes(); expect(blueprint.getRepositorySettings()).andReturn(new ArrayList<>()).anyTimes(); // don't expect toEntity() @@ -280,9 +278,9 @@ public void setup() throws Exception { expect(stack.getCardinality("component2")).andReturn(new Cardinality("1")).anyTimes(); expect(stack.getCardinality("component3")).andReturn(new Cardinality("1+")).anyTimes(); expect(stack.getCardinality("component4")).andReturn(new Cardinality("1+")).anyTimes(); - expect(stack.getComponents()).andReturn(serviceComponentNames).anyTimes(); - expect(stack.getComponents("service1")).andReturn(serviceComponentNames.get("service1")).anyTimes(); - expect(stack.getComponents("service2")).andReturn(serviceComponentNames.get("service2")).anyTimes(); + expect(stack.getComponents()).andReturn(components).anyTimes(); + expect(stack.getComponents("service1")).andReturn(components1).anyTimes(); + expect(stack.getComponents("service2")).andReturn(components2).anyTimes(); expect(stack.getConfiguration()).andReturn(stackConfig).anyTimes(); expect(stack.getName()).andReturn(STACK_NAME).anyTimes(); expect(stack.getVersion()).andReturn(STACK_VERSION).anyTimes(); @@ -375,7 +373,7 @@ public void setup() throws Exception { ambariContext.setConfigurationOnCluster(capture(updateClusterConfigRequestCapture)); expectLastCall().times(3); - ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION); + ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_ID); expectLastCall().once(); expect(configureClusterTaskFactory.createConfigureClusterTask(anyObject(), anyObject(), anyObject())).andReturn(configureClusterTask); diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java index 6fa70b437a2..3e11f5e8c64 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/ClusterInstallWithoutStartTest.java @@ -62,6 +62,7 @@ import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.ComponentInfo; import org.apache.ambari.server.state.SecurityType; +import 
org.apache.ambari.server.state.StackId; import org.apache.ambari.server.topology.tasks.ConfigureClusterTask; import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory; import org.apache.ambari.server.topology.validators.TopologyValidatorService; @@ -80,6 +81,8 @@ import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; +import com.google.common.collect.ImmutableSet; + @RunWith(PowerMockRunner.class) @PrepareForTest(AmbariServer.class) public class ClusterInstallWithoutStartTest extends EasyMockSupport { @@ -89,6 +92,7 @@ public class ClusterInstallWithoutStartTest extends EasyMockSupport { private static final String BLUEPRINT_NAME = "test-bp"; private static final String STACK_NAME = "test-stack"; private static final String STACK_VERSION = "test-stack-version"; + private static final StackId STACK_ID = new StackId(STACK_NAME, STACK_VERSION); @Rule public EasyMockRule mocks = new EasyMockRule(this); @@ -187,8 +191,6 @@ public class ClusterInstallWithoutStartTest extends EasyMockSupport { private Map> group1ServiceComponents = new HashMap<>(); private Map> group2ServiceComponents = new HashMap<>(); - private Map> serviceComponents = new HashMap<>(); - private String predicate = "Hosts/host_name=foo"; private List topologyValidators = new ArrayList<>(); @@ -229,8 +231,9 @@ public void setup() throws Exception { groupMap.put("group1", group1); groupMap.put("group2", group2); - serviceComponents.put("service1", Arrays.asList("component1", "component3")); - serviceComponents.put("service2", Arrays.asList("component2", "component4")); + Collection components1 = ImmutableSet.of("component1", "component3"); + Collection components2 = ImmutableSet.of("component2", "component4"); + Collection components = ImmutableSet.builder().addAll(components1).addAll(components2).build(); group1ServiceComponents.put("service1", Arrays.asList("component1", "component3")); group1ServiceComponents.put("service2", Collections.singleton("component2")); @@ -252,6 +255,7 @@ public void setup() throws Exception { expect(blueprint.getName()).andReturn(BLUEPRINT_NAME).anyTimes(); expect(blueprint.getServices()).andReturn(Arrays.asList("service1", "service2")).anyTimes(); expect(blueprint.getStack()).andReturn(stack).anyTimes(); + expect(blueprint.getStackIds()).andReturn(ImmutableSet.of(STACK_ID)).anyTimes(); expect(blueprint.isValidConfigType(anyString())).andReturn(true).anyTimes(); expect(blueprint.getRepositorySettings()).andReturn(new ArrayList<>()).anyTimes(); // don't expect toEntity() @@ -275,9 +279,9 @@ public void setup() throws Exception { expect(stack.getCardinality("component2")).andReturn(new Cardinality("1")).anyTimes(); expect(stack.getCardinality("component3")).andReturn(new Cardinality("1+")).anyTimes(); expect(stack.getCardinality("component4")).andReturn(new Cardinality("1+")).anyTimes(); - expect(stack.getComponents()).andReturn(serviceComponents).anyTimes(); - expect(stack.getComponents("service1")).andReturn(serviceComponents.get("service1")).anyTimes(); - expect(stack.getComponents("service2")).andReturn(serviceComponents.get("service2")).anyTimes(); + expect(stack.getComponents()).andReturn(components).anyTimes(); + expect(stack.getComponents("service1")).andReturn(components1).anyTimes(); + expect(stack.getComponents("service2")).andReturn(components2).anyTimes(); expect(stack.getConfiguration()).andReturn(stackConfig).anyTimes(); expect(stack.getName()).andReturn(STACK_NAME).anyTimes(); 
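For reference, this is the stubbing pattern that each of these setup() methods now shares, written out with the generic parameters that the flattened hunks elide. The type arguments and static imports are assumptions; blueprint and stack are the EasyMock mocks declared in each test class, and STACK_NAME / STACK_VERSION are the test constants shown above.

// imports used by this sketch
import static org.easymock.EasyMock.expect;
import java.util.Collection;
import org.apache.ambari.server.state.StackId;
import com.google.common.collect.ImmutableSet;

StackId stackId = new StackId(STACK_NAME, STACK_VERSION);
Collection<String> components1 = ImmutableSet.of("component1", "component3");
Collection<String> components2 = ImmutableSet.of("component2", "component4");
Collection<String> components =
    ImmutableSet.<String>builder().addAll(components1).addAll(components2).build();

// the blueprint now reports its stacks as a set of StackId values
expect(blueprint.getStackIds()).andReturn(ImmutableSet.of(stackId)).anyTimes();
// and stack components are flat collections instead of per-service lists held in a map
expect(stack.getComponents()).andReturn(components).anyTimes();
expect(stack.getComponents("service1")).andReturn(components1).anyTimes();
expect(stack.getComponents("service2")).andReturn(components2).anyTimes();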
expect(stack.getVersion()).andReturn(STACK_VERSION).anyTimes(); @@ -363,7 +367,7 @@ public void setup() throws Exception { ambariContext.setConfigurationOnCluster(capture(updateClusterConfigRequestCapture)); expectLastCall().times(3); - ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION); + ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_ID); expectLastCall().once(); expect(configureClusterTaskFactory.createConfigureClusterTask(anyObject(), anyObject(), anyObject())).andReturn(configureClusterTask); diff --git a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java index 0daf20fcf7b..7174a5e4f8e 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/topology/TopologyManagerTest.java @@ -65,6 +65,7 @@ import org.apache.ambari.server.security.encryption.CredentialStoreService; import org.apache.ambari.server.stack.NoSuchStackException; import org.apache.ambari.server.state.SecurityType; +import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.state.quicklinksprofile.QuickLinksProfile; import org.apache.ambari.server.topology.tasks.ConfigureClusterTask; import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory; @@ -86,6 +87,8 @@ import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; +import com.google.common.collect.ImmutableSet; + /** * TopologyManager unit tests */ @@ -98,6 +101,7 @@ public class TopologyManagerTest { private static final String BLUEPRINT_NAME = "test-bp"; private static final String STACK_NAME = "test-stack"; private static final String STACK_VERSION = "test-stack-version"; + private static final StackId STACK_ID = new StackId(STACK_NAME, STACK_VERSION); private static final String SAMPLE_QUICKLINKS_PROFILE_1 = "{\"filters\":[{\"visible\":true}],\"services\":[]}"; private static final String SAMPLE_QUICKLINKS_PROFILE_2 = "{\"filters\":[],\"services\":[{\"name\":\"HDFS\",\"components\":[],\"filters\":[{\"visible\":true}]}]}"; @@ -192,8 +196,6 @@ public class TopologyManagerTest { private Map> group1ServiceComponents = new HashMap<>(); private Map> group2ServiceComponents = new HashMap<>(); - private Map> serviceComponents = new HashMap<>(); - private String predicate = "Hosts/host_name=foo"; private List topologyValidators = new ArrayList<>(); @@ -234,8 +236,9 @@ public void setup() throws Exception { groupMap.put("group1", group1); groupMap.put("group2", group2); - serviceComponents.put("service1", Arrays.asList("component1", "component3")); - serviceComponents.put("service2", Arrays.asList("component2", "component4")); + Collection components1 = ImmutableSet.of("component1", "component3"); + Collection components2 = ImmutableSet.of("component2", "component4"); + Collection components = ImmutableSet.builder().addAll(components1).addAll(components2).build(); group1ServiceComponents.put("service1", Arrays.asList("component1", "component3")); group1ServiceComponents.put("service2", Collections.singleton("component2")); @@ -257,6 +260,7 @@ public void setup() throws Exception { expect(blueprint.getName()).andReturn(BLUEPRINT_NAME).anyTimes(); expect(blueprint.getServices()).andReturn(Arrays.asList("service1", "service2")).anyTimes(); expect(blueprint.getStack()).andReturn(stack).anyTimes(); + 
expect(blueprint.getStackIds()).andReturn(ImmutableSet.of(STACK_ID)).anyTimes(); expect(blueprint.getRepositorySettings()).andReturn(new ArrayList<>()).anyTimes(); // don't expect toEntity() @@ -270,9 +274,9 @@ public void setup() throws Exception { expect(stack.getCardinality("component2")).andReturn(new Cardinality("1")).anyTimes(); expect(stack.getCardinality("component3")).andReturn(new Cardinality("1+")).anyTimes(); expect(stack.getCardinality("component4")).andReturn(new Cardinality("1+")).anyTimes(); - expect(stack.getComponents()).andReturn(serviceComponents).anyTimes(); - expect(stack.getComponents("service1")).andReturn(serviceComponents.get("service1")).anyTimes(); - expect(stack.getComponents("service2")).andReturn(serviceComponents.get("service2")).anyTimes(); + expect(stack.getComponents()).andReturn(components).anyTimes(); + expect(stack.getComponents("service1")).andReturn(components1).anyTimes(); + expect(stack.getComponents("service2")).andReturn(components2).anyTimes(); expect(stack.getServiceForConfigType("service1-site")).andReturn("service1").anyTimes(); expect(stack.getServiceForConfigType("service2-site")).andReturn("service2").anyTimes(); expect(stack.getConfiguration()).andReturn(stackConfig).anyTimes(); @@ -340,7 +344,7 @@ public void setup() throws Exception { ambariContext.setConfigurationOnCluster(capture(updateClusterConfigRequestCapture)); expectLastCall().anyTimes(); - ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION); + ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_ID); expectLastCall().anyTimes(); expect(clusterController.ensureResourceProvider(anyObject(Resource.Type.class))).andReturn(resourceProvider);
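Finally, the persistInstallStateForUI expectations above all capture the same API consolidation: a single StackId value object replaces the separate stack name and version strings. A short sketch of the call-site shape being verified, using a local stackId that corresponds to the STACK_ID constant these tests define:

StackId stackId = new StackId("test-stack", "test-stack-version");
stackId.getStackName();     // "test-stack"
stackId.getStackVersion();  // "test-stack-version"

// removed form, shown only for comparison:
//   ambariContext.persistInstallStateForUI(CLUSTER_NAME, STACK_NAME, STACK_VERSION);
// form now expected by the mocks:
ambariContext.persistInstallStateForUI(CLUSTER_NAME, stackId);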