diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 00000000..de1db2b4 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,17 @@ +{ + "name": "Ruby SDK", + + "image": "mcr.microsoft.com/devcontainers/ruby:1-3.3-bullseye", + + "postCreateCommand": "set -e && bundle install && gem install optimizely-sdk && rake build && gem install pkg/* && gem install solargraph", + + "customizations": { + "vscode": { + "extensions": [ + "eamodio.gitlens", + "github.vscode-github-actions", + "castwide.solargraph" + ] + } + } +} diff --git a/.github/ISSUE_TEMPLATE/BUG-REPORT.yml b/.github/ISSUE_TEMPLATE/BUG-REPORT.yml new file mode 100644 index 00000000..d4b638dc --- /dev/null +++ b/.github/ISSUE_TEMPLATE/BUG-REPORT.yml @@ -0,0 +1,94 @@ +name: 🐞 Bug +description: File a bug/issue +title: "[BUG] " +labels: ["bug", "needs-triage"] +body: +- type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists for the bug you encountered. + options: + - label: I have searched the existing issues + required: true +- type: textarea + attributes: + label: SDK Version + description: Version of the SDK in use? + validations: + required: true +- type: textarea + attributes: + label: Current Behavior + description: A concise description of what you're experiencing. + validations: + required: true +- type: textarea + attributes: + label: Expected Behavior + description: A concise description of what you expected to happen. + validations: + required: true +- type: textarea + attributes: + label: Steps To Reproduce + description: Steps to reproduce the behavior. + placeholder: | + 1. In this environment... + 1. With this config... + 1. Run '...' + 1. See error... + validations: + required: true +- type: textarea + attributes: + label: Ruby Version + description: What version of Ruby are you using? 
+ validations: + required: false +- type: textarea + attributes: + label: Rails + description: If you're using Rails, what version? + validations: + required: false +- type: textarea + attributes: + label: Link + description: Link to code demonstrating the problem. + validations: + required: false +- type: textarea + attributes: + label: Logs + description: Logs/stack traces related to the problem (⚠️do not include sensitive information). + validations: + required: false +- type: dropdown + attributes: + label: Severity + description: What is the severity of the problem? + multiple: true + options: + - Blocking development + - Affecting users + - Minor issue + validations: + required: false +- type: textarea + attributes: + label: Workaround/Solution + description: Do you have any workaround or solution in mind for the problem? + validations: + required: false +- type: textarea + attributes: + label: Recent Change + description: Has this issue started happening after an update or experiment change? + validations: + required: false +- type: textarea + attributes: + label: Conflicts + description: Are there other libraries/dependencies potentially in conflict? + validations: + required: false \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/ENHANCEMENT.yml b/.github/ISSUE_TEMPLATE/ENHANCEMENT.yml new file mode 100644 index 00000000..42d8a302 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/ENHANCEMENT.yml @@ -0,0 +1,45 @@ +name: ✨Enhancement +description: Create a new ticket for an Enhancement/Tech-initiative for the benefit of the SDK which would be considered for a minor version update. +title: "[ENHANCEMENT] <title>" +labels: ["enhancement"] +body: + - type: textarea + id: description + attributes: + label: Description + description: Briefly describe the enhancement in a few sentences. + placeholder: Short description... 
+ validations: + required: true + - type: textarea + id: benefits + attributes: + label: Benefits + description: How would the enhancement benefit your product or usage? + placeholder: Benefits... + validations: + required: true + - type: textarea + id: detail + attributes: + label: Detail + description: How would you like the enhancement to work? Please provide as much detail as possible. + placeholder: Detailed description... + validations: + required: false + - type: textarea + id: examples + attributes: + label: Examples + description: Are there any examples of this enhancement in other products/services? If so, please provide links or references. + placeholder: Links/References... + validations: + required: false + - type: textarea + id: risks + attributes: + label: Risks/Downsides + description: Do you think this enhancement could have any potential downsides or risks? + placeholder: Risks/Downsides... + validations: + required: false \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.md b/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.md new file mode 100644 index 00000000..a061f335 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/FEATURE-REQUEST.md @@ -0,0 +1,4 @@ +<!-- + Thanks for filing an issue! Are you requesting a new feature? If so, please share your feedback with us on the following link. +--> +## Feedback requesting a new feature can be shared [here.](https://feedback.optimizely.com/) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000..d28ef3dd --- --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: 💡Feature Requests + url: https://feedback.optimizely.com/ + about: Feedback requesting a new feature can be shared here. 
\ No newline at end of file diff --git a/.github/workflows/integration_test.yml b/.github/workflows/integration_test.yml index 03a424f2..0b8d086e 100644 --- a/.github/workflows/integration_test.yml +++ b/.github/workflows/integration_test.yml @@ -23,15 +23,19 @@ jobs: path: 'home/runner/travisci-tools' ref: 'master' - name: set SDK Branch if PR + env: + HEAD_REF: ${{ github.head_ref }} if: ${{ github.event_name == 'pull_request' }} run: | - echo "SDK_BRANCH=${{ github.head_ref }}" >> $GITHUB_ENV - echo "TRAVIS_BRANCH=${{ github.head_ref }}" >> $GITHUB_ENV + echo "SDK_BRANCH=$HEAD_REF" >> $GITHUB_ENV + echo "TRAVIS_BRANCH=$HEAD_REF" >> $GITHUB_ENV - name: set SDK Branch if not pull request + env: + REF_NAME: ${{ github.ref_name }} if: ${{ github.event_name != 'pull_request' }} run: | - echo "SDK_BRANCH=${{ github.ref_name }}" >> $GITHUB_ENV - echo "TRAVIS_BRANCH=${{ github.ref_name }}" >> $GITHUB_ENV + echo "SDK_BRANCH=$REF_NAME" >> $GITHUB_ENV + echo "TRAVIS_BRANCH=$REF_NAME" >> $GITHUB_ENV - name: Trigger build env: SDK: ruby diff --git a/.github/workflows/ruby.yml b/.github/workflows/ruby.yml index 1e22e74e..971eea92 100644 --- a/.github/workflows/ruby.yml +++ b/.github/workflows/ruby.yml @@ -25,7 +25,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - ruby: [ '2.7.0', '3.0.0', '3.1.0' ] + ruby: [ '3.0.0', '3.1.0', '3.2.0', '3.3.0' ] steps: - uses: actions/checkout@v3 - name: Set up Ruby ${{ matrix.ruby }} @@ -33,6 +33,13 @@ jobs: with: ruby-version: ${{ matrix.ruby }} bundler-cache: true + - name: Install rubocop 1.78.0 for Ruby 3.0.0 + if: matrix.ruby == '3.0.0' + run: | + echo "Installing rubocop 1.78.0 for Ruby 3.0.0" + bundle add rubocop --version 1.78.0 || true + bundle install + bundle exec rubocop -A Gemfile || true - name: Run linting run: | bundle exec rubocop diff --git a/.rubocop.yml b/.rubocop.yml index ea105dd6..1bbc4a4a 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -1,7 +1,7 @@ inherit_from: .rubocop_todo.yml AllCops: - TargetRubyVersion: 2.7 + 
TargetRubyVersion: 3.0 Layout/SpaceInsideHashLiteralBraces: EnforcedStyle: no_space diff --git a/CHANGELOG.md b/CHANGELOG.md index 6c54b808..9e603182 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,106 @@ # Optimizely Ruby SDK Changelog +## 5.2.1 +December 17th, 2025 + +### New Features +- Resolved issues with Holdout impression event handling and notification delivery. ([#382](https://github.com/optimizely/ruby-sdk/pull/382)) + + +## 5.2.0 +November 13th, 2025 + +### New Features +- Added CMAB client implementation to support contextual multi-armed bandit decisioning. ([#364](https://github.com/optimizely/ruby-sdk/pull/364)) +- Implemented CMAB service to manage contextual decision logic. ([#367](https://github.com/optimizely/ruby-sdk/pull/367)) +- Added SDK multi-region support for data hosting. ([#365](https://github.com/optimizely/ruby-sdk/pull/365)) + +### Enhancements +- Added `experiment_id` and `variation_id` to event payloads. ([#361](https://github.com/optimizely/ruby-sdk/pull/361)) +- Updated project config to track CMAB properties. ([#362](https://github.com/optimizely/ruby-sdk/pull/362)) +- Added `remove` method in LRU Cache for CMAB service. ([#366](https://github.com/optimizely/ruby-sdk/pull/366)) +- Implemented Decision Service methods to handle CMAB logic. ([#369](https://github.com/optimizely/ruby-sdk/pull/369)) +- Updated impression events to include CMAB UUID. ([#370](https://github.com/optimizely/ruby-sdk/pull/370)) +- Exposed CMAB prediction endpoint in URL template. ([#378](https://github.com/optimizely/ruby-sdk/pull/378)) + +### Bug Fixes +- Fixed Rubocop failures on Ruby 3.0.0. ([#371](https://github.com/optimizely/ruby-sdk/pull/371)) +- Fixed concurrency issue in CMAB service. ([#375](https://github.com/optimizely/ruby-sdk/pull/375)) +- Minor bugbash updates and stability improvements. 
([#377](https://github.com/optimizely/ruby-sdk/pull/377)) + + +## 5.1.0 +January 10th, 2025 + +Added support for batch processing in DecideAll and DecideForKeys, enabling more efficient handling of multiple decisions in the User Profile Service.([#353](https://github.com/optimizely/ruby-sdk/pull/353)) + +## 5.0.1 +February 8th, 2024 + +The 5.0.1 minor release introduces update of metadata in gemspec. + +## 5.0.0 +January 18th, 2024 + +### New Features + +The 5.0.0 release introduces a new primary feature, [Advanced Audience Targeting]( https://docs.developers.optimizely.com/feature-experimentation/docs/optimizely-data-platform-advanced-audience-targeting) enabled through integration with [Optimizely Data Platform (ODP)](https://docs.developers.optimizely.com/optimizely-data-platform/docs) +([#303](https://github.com/optimizely/ruby-sdk/pull/303), +[#308](https://github.com/optimizely/ruby-sdk/pull/308), +[#310](https://github.com/optimizely/ruby-sdk/pull/310), +[#311](https://github.com/optimizely/ruby-sdk/pull/311), +[#312](https://github.com/optimizely/ruby-sdk/pull/312), +[#314](https://github.com/optimizely/ruby-sdk/pull/314), +[#316](https://github.com/optimizely/ruby-sdk/pull/316)). +You can use ODP, a high-performance [Customer Data Platform (CDP)]( https://www.optimizely.com/optimization-glossary/customer-data-platform/), to easily create complex real-time segments (RTS) using first-party and 50+ third-party data sources out of the box. You can create custom schemas that support the user attributes important for your business, and stitch together user behavior done on different devices to better understand and target your customers for personalized user experiences. ODP can be used as a single source of truth for these segments in any Optimizely or 3rd party tool. + +With ODP accounts integrated into Optimizely projects, you can build audiences using segments pre-defined in ODP. 
The SDK will fetch the segments for given users and make decisions using the segments. For access to ODP audience targeting in your Feature Experimentation account, please contact your Optimizely Customer Success Manager. + +This version includes the following changes: + +* New API added to `OptimizelyUserContext`: + + * `fetch_qualified_segments()`: this API will retrieve user segments from the ODP server. The fetched segments will be used for audience evaluation. The fetched data will be stored in the local cache to avoid repeated network delays. + + * When an `OptimizelyUserContext` is created, the SDK will automatically send an identify request to the ODP server to facilitate observing user activities. + +* New APIs added to `Optimizely::Project`: + + * `send_odp_event()`: customers can build/send arbitrary ODP events that will bind user identifiers and data to user profiles in ODP. + +For details, refer to our documentation pages: + +* [Advanced Audience Targeting](https://docs.developers.optimizely.com/feature-experimentation/docs/optimizely-data-platform-advanced-audience-targeting) + +* [Server SDK Support](https://docs.developers.optimizely.com/feature-experimentation/v1.0/docs/advanced-audience-targeting-for-server-side-sdks) + +* [Initialize Ruby SDK](https://docs.developers.optimizely.com/feature-experimentation/docs/initialize-sdk-ruby) + +* [OptimizelyUserContext Ruby SDK](https://docs.developers.optimizely.com/feature-experimentation/docs/optimizelyusercontext-ruby) + +* [Advanced Audience Targeting segment qualification methods](https://docs.developers.optimizely.com/feature-experimentation/docs/advanced-audience-targeting-segment-qualification-methods-ruby) + +* [Send Optimizely Data Platform data using Advanced Audience Targeting](https://docs.developers.optimizely.com/feature-experimentation/docs/send-odp-data-using-advanced-audience-targeting-ruby) + +### Logging + +* Add warning to polling intervals below 30 seconds 
([#338](https://github.com/optimizely/ruby-sdk/pull/338)) +* Add warning to duplicate experiment keys ([#343](https://github.com/optimizely/ruby-sdk/pull/343)) + +### Enhancements +* Removed polling config manager stop restriction, allowing it to be restarted ([#340](https://github.com/optimizely/ruby-sdk/pull/340)). +* Include object id/key in invalid object errors ([#301](https://github.com/optimizely/ruby-sdk/pull/301)). + +### Breaking Changes + +* Updated required Ruby version from 2.7 -> 3.0 +* `Optimizely::Project` initialization arguments have been changed from positional to keyword ([#342](https://github.com/optimizely/ruby-sdk/pull/342)). +* `ODPManager` in the SDK is enabled by default. Unless an ODP account is integrated into the Optimizely projects, most `ODPManager` functions will be ignored. If needed, `ODPManager` can be disabled when `Optimizely::Project` is instantiated. + +* `ProjectConfigManager` interface now requires a `sdk_key` method ([#323](https://github.com/optimizely/ruby-sdk/pull/323)). +* `HTTPProjectConfigManager` requires either the `sdk_key` parameter or a datafile containing an sdkKey ([#323](https://github.com/optimizely/ruby-sdk/pull/323)). +* `BatchEventProcessor` is now the default `EventProcessor` when `Optimizely::Project` is instantiated ([#325](https://github.com/optimizely/ruby-sdk/pull/325)). + ## 5.0.0-beta April 28th, 2023 diff --git a/LICENSE b/LICENSE index 006d13d5..e2d14477 100644 --- a/LICENSE +++ b/LICENSE @@ -187,7 +187,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2016, Optimizely and contributors + © Optimizely 2016 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
diff --git a/README.md b/README.md index a10ba8d9..be5e0613 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ Refer to the [Ruby SDK's developer documentation](https://docs.developers.optimi ### Requirements -* Ruby 2.7+ +* Ruby 3.0+ ### Install the SDK @@ -41,7 +41,7 @@ You can initialize the Optimizely instance in two ways: directly with a datafile Initialize Optimizely with a datafile. This datafile will be used as ProjectConfig throughout the life of the Optimizely instance. ```ruby - optimizely_instance = Optimizely::Project.new(datafile) + optimizely_instance = Optimizely::Project.new(datafile: datafile) ``` #### Initialization by OptimizelyFactory @@ -78,6 +78,8 @@ You can initialize the Optimizely instance in two ways: directly with a datafile ) ``` +**Note:** The SDK spawns multiple threads when initialized. These threads have infinite loops that are used for fetching the datafile, as well as batching and dispatching events in the background. When using in a web server that spawn multiple child processes, you need to initialize the SDK after those child processes or workers have been spawned. + #### HTTP Config Manager The `HTTPConfigManager` asynchronously polls for datafiles from a specified URL at regular intervals by making HTTP requests. diff --git a/lib/optimizely.rb b/lib/optimizely.rb index 93f4fc3c..3d787c88 100644 --- a/lib/optimizely.rb +++ b/lib/optimizely.rb @@ -42,6 +42,9 @@ require_relative 'optimizely/odp/lru_cache' require_relative 'optimizely/odp/odp_manager' require_relative 'optimizely/helpers/sdk_settings' +require_relative 'optimizely/user_profile_tracker' +require_relative 'optimizely/cmab/cmab_client' +require_relative 'optimizely/cmab/cmab_service' module Optimizely class Project @@ -70,20 +73,21 @@ class Project # @param event_processor_options: Optional hash of options to be passed to the default batch event processor. # @param settings: Optional instance of OptimizelySdkSettings for sdk configuration. 
- def initialize( # rubocop:disable Metrics/ParameterLists - datafile = nil, - event_dispatcher = nil, - logger = nil, - error_handler = nil, - skip_json_validation = false, # rubocop:disable Style/OptionalBooleanParameter - user_profile_service = nil, - sdk_key = nil, - config_manager = nil, - notification_center = nil, - event_processor = nil, - default_decide_options = [], - event_processor_options = {}, - settings = nil + def initialize( + datafile: nil, + event_dispatcher: nil, + logger: nil, + error_handler: nil, + skip_json_validation: false, + user_profile_service: nil, + sdk_key: nil, + config_manager: nil, + notification_center: nil, + event_processor: nil, + default_decide_options: [], + event_processor_options: {}, + settings: nil, + cmab_service: nil ) @logger = logger || NoOpLogger.new @error_handler = error_handler || NoOpErrorHandler.new @@ -130,7 +134,24 @@ def initialize( # rubocop:disable Metrics/ParameterLists setup_odp!(@config_manager.sdk_key) - @decision_service = DecisionService.new(@logger, @user_profile_service) + # Initialize CMAB components if cmab service is nil + if cmab_service.nil? + @cmab_client = DefaultCmabClient.new( + http_client: nil, + retry_config: CmabRetryConfig.new, + logger: @logger + ) + @cmab_cache = LRUCache.new(Optimizely::DefaultCmabCacheOptions::DEFAULT_CMAB_CACHE_SIZE, Optimizely::DefaultCmabCacheOptions::DEFAULT_CMAB_CACHE_TIMEOUT) + @cmab_service = DefaultCmabService.new( + @cmab_cache, + @cmab_client, + @logger + ) + else + @cmab_service = cmab_service + end + + @decision_service = DecisionService.new(@logger, @cmab_service, @user_profile_service) @event_processor = if event_processor.respond_to?(:process) event_processor @@ -172,84 +193,42 @@ def create_user_context(user_id, attributes = nil) OptimizelyUserContext.new(self, user_id, attributes) end - def decide(user_context, key, decide_options = []) - # raising on user context as it is internal and not provided directly by the user. 
- raise if user_context.class != OptimizelyUserContext - - reasons = [] - - # check if SDK is ready - unless is_valid - @logger.log(Logger::ERROR, InvalidProjectConfigError.new('decide').message) - reasons.push(OptimizelyDecisionMessage::SDK_NOT_READY) - return OptimizelyDecision.new(flag_key: key, user_context: user_context, reasons: reasons) - end - - # validate that key is a string - unless key.is_a?(String) - @logger.log(Logger::ERROR, 'Provided key is invalid') - reasons.push(format(OptimizelyDecisionMessage::FLAG_KEY_INVALID, key)) - return OptimizelyDecision.new(flag_key: key, user_context: user_context, reasons: reasons) - end - - # validate that key maps to a feature flag - config = project_config - feature_flag = config.get_feature_flag_from_key(key) - unless feature_flag - @logger.log(Logger::ERROR, "No feature flag was found for key '#{key}'.") - reasons.push(format(OptimizelyDecisionMessage::FLAG_KEY_INVALID, key)) - return OptimizelyDecision.new(flag_key: key, user_context: user_context, reasons: reasons) - end - - # merge decide_options and default_decide_options - if decide_options.is_a? Array - decide_options += @default_decide_options - else - @logger.log(Logger::DEBUG, 'Provided decide options is not an array. Using default decide options.') - decide_options = @default_decide_options - end - + def create_optimizely_decision(user_context, flag_key, decision, reasons, decide_options, config) # Create Optimizely Decision Result. 
user_id = user_context.user_id attributes = user_context.user_attributes variation_key = nil feature_enabled = false rule_key = nil - flag_key = key all_variables = {} decision_event_dispatched = false + feature_flag = config.get_feature_flag_from_key(flag_key) experiment = nil decision_source = Optimizely::DecisionService::DECISION_SOURCES['ROLLOUT'] - context = Optimizely::OptimizelyUserContext::OptimizelyDecisionContext.new(key, nil) - variation, reasons_received = @decision_service.validated_forced_decision(config, context, user_context) - reasons.push(*reasons_received) - - if variation - decision = Optimizely::DecisionService::Decision.new(nil, variation, Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST']) - else - decision, reasons_received = @decision_service.get_variation_for_feature(config, feature_flag, user_context, decide_options) - reasons.push(*reasons_received) - end + experiment_id = nil + variation_id = nil # Send impression event if Decision came from a feature test and decide options doesn't include disableDecisionEvent if decision.is_a?(Optimizely::DecisionService::Decision) experiment = decision.experiment rule_key = experiment ? experiment['key'] : nil - variation = decision['variation'] + experiment_id = experiment ? experiment['id'] : nil + variation = decision.variation variation_key = variation ? variation['key'] : nil + variation_id = variation ? variation['id'] : nil feature_enabled = variation ? 
variation['featureEnabled'] : false decision_source = decision.source end - if !decide_options.include?(OptimizelyDecideOption::DISABLE_DECISION_EVENT) && (decision_source == Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] || config.send_flag_decisions) - send_impression(config, experiment, variation_key || '', flag_key, rule_key || '', feature_enabled, decision_source, user_id, attributes) + if !decide_options.include?(OptimizelyDecideOption::DISABLE_DECISION_EVENT) && (decision_source == Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] || decision_source == Optimizely::DecisionService::DECISION_SOURCES['HOLDOUT'] || config.send_flag_decisions) + send_impression(config, experiment, variation_key || '', flag_key, rule_key || '', feature_enabled, decision_source, user_id, attributes, decision&.cmab_uuid) decision_event_dispatched = true end # Generate all variables map if decide options doesn't include excludeVariables unless decide_options.include? OptimizelyDecideOption::EXCLUDE_VARIABLES feature_flag['variables'].each do |variable| - variable_value = get_feature_variable_for_variation(key, feature_enabled, variation, variable, user_id) + variable_value = get_feature_variable_for_variation(flag_key, feature_enabled, variation, variable, user_id) all_variables[variable['key']] = Helpers::VariableType.cast_value_to_type(variable_value, variable['type'], @logger) end end @@ -260,14 +239,16 @@ def decide(user_context, key, decide_options = []) @notification_center.send_notifications( NotificationCenter::NOTIFICATION_TYPES[:DECISION], Helpers::Constants::DECISION_NOTIFICATION_TYPES['FLAG'], - user_id, (attributes || {}), + user_id, attributes || {}, flag_key: flag_key, enabled: feature_enabled, variables: all_variables, variation_key: variation_key, rule_key: rule_key, reasons: should_include_reasons ? 
reasons : [], - decision_event_dispatched: decision_event_dispatched + decision_event_dispatched: decision_event_dispatched, + experiment_id: experiment_id, + variation_id: variation_id ) OptimizelyDecision.new( @@ -281,6 +262,47 @@ def decide(user_context, key, decide_options = []) ) end + def decide(user_context, key, decide_options = []) + # raising on user context as it is internal and not provided directly by the user. + raise if user_context.class != OptimizelyUserContext + + reasons = [] + + # check if SDK is ready + unless is_valid + @logger.log(Logger::ERROR, InvalidProjectConfigError.new('decide').message) + reasons.push(OptimizelyDecisionMessage::SDK_NOT_READY) + return OptimizelyDecision.new(flag_key: key, user_context: user_context, reasons: reasons) + end + + # validate that key is a string + unless key.is_a?(String) + @logger.log(Logger::ERROR, 'Provided key is invalid') + reasons.push(format(OptimizelyDecisionMessage::FLAG_KEY_INVALID, key)) + return OptimizelyDecision.new(flag_key: key, user_context: user_context, reasons: reasons) + end + + # validate that key maps to a feature flag + config = project_config + feature_flag = config.get_feature_flag_from_key(key) + unless feature_flag + @logger.log(Logger::ERROR, "No feature flag was found for key '#{key}'.") + reasons.push(format(OptimizelyDecisionMessage::FLAG_KEY_INVALID, key)) + return OptimizelyDecision.new(flag_key: key, user_context: user_context, reasons: reasons) + end + + # merge decide_options and default_decide_options + if decide_options.is_a? Array + decide_options += @default_decide_options + else + @logger.log(Logger::DEBUG, 'Provided decide options is not an array. 
Using default decide options.') + decide_options = @default_decide_options + end + + decide_options.delete(OptimizelyDecideOption::ENABLED_FLAGS_ONLY) if decide_options.include?(OptimizelyDecideOption::ENABLED_FLAGS_ONLY) + decide_for_keys(user_context, [key], decide_options, true)[key] + end + def decide_all(user_context, decide_options = []) # raising on user context as it is internal and not provided directly by the user. raise if user_context.class != OptimizelyUserContext @@ -298,7 +320,7 @@ def decide_all(user_context, decide_options = []) decide_for_keys(user_context, keys, decide_options) end - def decide_for_keys(user_context, keys, decide_options = []) + def decide_for_keys(user_context, keys, decide_options = [], ignore_default_options = false) # rubocop:disable Style/OptionalBooleanParameter # raising on user context as it is internal and not provided directly by the user. raise if user_context.class != OptimizelyUserContext @@ -308,13 +330,87 @@ def decide_for_keys(user_context, keys, decide_options = []) return {} end - enabled_flags_only = (!decide_options.nil? && (decide_options.include? OptimizelyDecideOption::ENABLED_FLAGS_ONLY)) || (@default_decide_options.include? OptimizelyDecideOption::ENABLED_FLAGS_ONLY) + # merge decide_options and default_decide_options + unless ignore_default_options + if decide_options.is_a?(Array) + decide_options += @default_decide_options + else + @logger.log(Logger::DEBUG, 'Provided decide options is not an array. Using default decide options.') + decide_options = @default_decide_options + end + end + + # enabled_flags_only = (!decide_options.nil? && (decide_options.include? OptimizelyDecideOption::ENABLED_FLAGS_ONLY)) || (@default_decide_options.include? 
OptimizelyDecideOption::ENABLED_FLAGS_ONLY) decisions = {} + valid_keys = [] + decision_reasons_dict = {} + config = project_config + return decisions unless config + + flags_without_forced_decision = [] + flag_decisions = {} + keys.each do |key| - decision = decide(user_context, key, decide_options) - decisions[key] = decision unless enabled_flags_only && !decision.enabled + # Retrieve the feature flag from the project's feature flag key map + feature_flag = config.feature_flag_key_map[key] + + # If the feature flag is nil, create a default OptimizelyDecision and move to the next key + if feature_flag.nil? + decisions[key] = OptimizelyDecision.new(variation_key: nil, enabled: false, variables: nil, rule_key: nil, flag_key: key, user_context: user_context, reasons: []) + next + end + valid_keys.push(key) + decision_reasons = [] + decision_reasons_dict[key] = decision_reasons + + config = project_config + context = Optimizely::OptimizelyUserContext::OptimizelyDecisionContext.new(key, nil) + variation, reasons_received = @decision_service.validated_forced_decision(config, context, user_context) + decision_reasons_dict[key].push(*reasons_received) + if variation + decision = Optimizely::DecisionService::Decision.new(nil, variation, Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST']) + flag_decisions[key] = decision + else + flags_without_forced_decision.push(feature_flag) + end + end + decision_list = @decision_service.get_variations_for_feature_list(config, flags_without_forced_decision, user_context, decide_options) + + flags_without_forced_decision.each_with_index do |flag, i| + decision = decision_list[i].decision + reasons = decision_list[i].reasons + error = decision_list[i].error + flag_key = flag['key'] + # store error decision against key and remove key from valid keys + if error + optimizely_decision = OptimizelyDecision.new_error_decision(flag_key, user_context, reasons) + decisions[flag_key] = optimizely_decision + valid_keys.delete(flag_key) if 
valid_keys.include?(flag_key) + next + end + flag_decisions[flag_key] = decision + decision_reasons_dict[flag_key] ||= [] + decision_reasons_dict[flag_key].push(*reasons) end + valid_keys.each do |key| + flag_decision = flag_decisions[key] + decision_reasons = decision_reasons_dict[key] + optimizely_decision = create_optimizely_decision( + user_context, + key, + flag_decision, + decision_reasons, + decide_options, + config + ) + + enabled_flags_only_missing = !decide_options.include?(OptimizelyDecideOption::ENABLED_FLAGS_ONLY) + is_enabled = optimizely_decision.enabled + + decisions[key] = optimizely_decision if enabled_flags_only_missing || is_enabled + end + decisions end @@ -531,14 +627,14 @@ def is_feature_enabled(feature_flag_key, user_id, attributes = nil) end user_context = OptimizelyUserContext.new(self, user_id, attributes, identify: false) - decision, = @decision_service.get_variation_for_feature(config, feature_flag, user_context) - + decision_result = @decision_service.get_variation_for_feature(config, feature_flag, user_context) + decision = decision_result.decision feature_enabled = false source_string = Optimizely::DecisionService::DECISION_SOURCES['ROLLOUT'] if decision.is_a?(Optimizely::DecisionService::Decision) variation = decision['variation'] feature_enabled = variation['featureEnabled'] - if decision.source == Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] + if decision.source == Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] || decision.source == Optimizely::DecisionService::DECISION_SOURCES['HOLDOUT'] source_string = Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] source_info = { experiment_key: decision.experiment['key'], @@ -564,7 +660,7 @@ def is_feature_enabled(feature_flag_key, user_id, attributes = nil) @notification_center.send_notifications( NotificationCenter::NOTIFICATION_TYPES[:DECISION], Helpers::Constants::DECISION_NOTIFICATION_TYPES['FEATURE'], - user_id, (attributes || {}), + 
user_id, attributes || {}, feature_key: feature_flag_key, feature_enabled: feature_enabled, source: source_string, @@ -771,7 +867,8 @@ def get_all_feature_variables(feature_flag_key, user_id, attributes = nil) end user_context = OptimizelyUserContext.new(self, user_id, attributes, identify: false) - decision, = @decision_service.get_variation_for_feature(config, feature_flag, user_context) + decision_result = @decision_service.get_variation_for_feature(config, feature_flag, user_context) + decision = decision_result.decision variation = decision ? decision['variation'] : nil feature_enabled = variation ? variation['featureEnabled'] : false all_variables = {} @@ -792,7 +889,7 @@ def get_all_feature_variables(feature_flag_key, user_id, attributes = nil) @notification_center.send_notifications( NotificationCenter::NOTIFICATION_TYPES[:DECISION], - Helpers::Constants::DECISION_NOTIFICATION_TYPES['ALL_FEATURE_VARIABLES'], user_id, (attributes || {}), + Helpers::Constants::DECISION_NOTIFICATION_TYPES['ALL_FEATURE_VARIABLES'], user_id, attributes || {}, feature_key: feature_flag_key, feature_enabled: feature_enabled, source: source_string, @@ -889,7 +986,7 @@ def get_optimizely_config if @config_manager.respond_to?(:optimizely_config) @config_manager.optimizely_config else - OptimizelyConfig.new(project_config).config + OptimizelyConfig.new(project_config, @logger).config end end @@ -959,7 +1056,11 @@ def get_variation_with_config(experiment_key, user_id, attributes, config) return nil unless user_inputs_valid?(attributes) user_context = OptimizelyUserContext.new(self, user_id, attributes, identify: false) - variation_id, = @decision_service.get_variation(config, experiment_id, user_context) + user_profile_tracker = UserProfileTracker.new(user_id, @user_profile_service, @logger) + user_profile_tracker.load_user_profile + variation_result = @decision_service.get_variation(config, experiment_id, user_context, user_profile_tracker) + variation_id = 
variation_result.variation_id + user_profile_tracker.save_user_profile variation = config.get_variation_from_id(experiment_key, variation_id) unless variation_id.nil? variation_key = variation['key'] if variation decision_notification_type = if config.feature_experiment?(experiment_id) @@ -969,7 +1070,7 @@ def get_variation_with_config(experiment_key, user_id, attributes, config) end @notification_center.send_notifications( NotificationCenter::NOTIFICATION_TYPES[:DECISION], - decision_notification_type, user_id, (attributes || {}), + decision_notification_type, user_id, attributes || {}, experiment_key: experiment_key, variation_key: variation_key ) @@ -1026,7 +1127,8 @@ def get_feature_variable_for_type(feature_flag_key, variable_key, variable_type, end user_context = OptimizelyUserContext.new(self, user_id, attributes, identify: false) - decision, = @decision_service.get_variation_for_feature(config, feature_flag, user_context) + decision_result = @decision_service.get_variation_for_feature(config, feature_flag, user_context) + decision = decision_result.decision variation = decision ? decision['variation'] : nil feature_enabled = variation ? 
variation['featureEnabled'] : false @@ -1044,7 +1146,7 @@ def get_feature_variable_for_type(feature_flag_key, variable_key, variable_type, @notification_center.send_notifications( NotificationCenter::NOTIFICATION_TYPES[:DECISION], - Helpers::Constants::DECISION_NOTIFICATION_TYPES['FEATURE_VARIABLE'], user_id, (attributes || {}), + Helpers::Constants::DECISION_NOTIFICATION_TYPES['FEATURE_VARIABLE'], user_id, attributes || {}, feature_key: feature_flag_key, feature_enabled: feature_enabled, source: source_string, @@ -1144,7 +1246,7 @@ def validate_instantiation_options raise InvalidInputError, 'event_dispatcher' end - def send_impression(config, experiment, variation_key, flag_key, rule_key, enabled, rule_type, user_id, attributes = nil) + def send_impression(config, experiment, variation_key, flag_key, rule_key, enabled, rule_type, user_id, attributes = nil, cmab_uuid = nil) if experiment.nil? experiment = { 'id' => '', @@ -1176,6 +1278,7 @@ def send_impression(config, experiment, variation_key, flag_key, rule_key, enabl variation_key: variation_key, enabled: enabled } + metadata[:cmab_uuid] = cmab_uuid unless cmab_uuid.nil? user_event = UserEventFactory.create_impression_event(config, experiment, variation_id, metadata, user_id, attributes) @event_processor.process(user_event) diff --git a/lib/optimizely/audience.rb b/lib/optimizely/audience.rb index 4c57261a..77e5d179 100644 --- a/lib/optimizely/audience.rb +++ b/lib/optimizely/audience.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2016-2017, 2019-2020, Optimizely and contributors +# Copyright 2016-2017, 2019-2020, 2023, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -49,7 +49,7 @@ def user_meets_audience_conditions?(config, experiment, user_context, logger, lo logger.log(Logger::DEBUG, message) # Return true if there are no audiences - if audience_conditions.empty? 
+ if audience_conditions.nil? || audience_conditions.empty? message = format(logs_hash['AUDIENCE_EVALUATION_RESULT_COMBINED'], logging_key, 'TRUE') logger.log(Logger::INFO, message) decide_reasons.push(message) @@ -59,7 +59,7 @@ def user_meets_audience_conditions?(config, experiment, user_context, logger, lo user_condition_evaluator = UserConditionEvaluator.new(user_context, logger) evaluate_user_conditions = lambda do |condition| - return user_condition_evaluator.evaluate(condition) + user_condition_evaluator.evaluate(condition) end evaluate_audience = lambda do |audience_id| @@ -72,6 +72,20 @@ def user_meets_audience_conditions?(config, experiment, user_context, logger, lo decide_reasons.push(message) audience_conditions = JSON.parse(audience_conditions) if audience_conditions.is_a?(String) + # Convert all symbol keys to string keys in the parsed conditions + stringify_keys = lambda do |obj| + case obj + when Hash + obj.transform_keys(&:to_s).transform_values { |v| stringify_keys.call(v) } + when Array + obj.map { |item| stringify_keys.call(item) } + else + obj + end + end + + audience_conditions = stringify_keys.call(audience_conditions) + result = ConditionTreeEvaluator.evaluate(audience_conditions, evaluate_user_conditions) result_str = result.nil? ? 'UNKNOWN' : result.to_s.upcase message = format(logs_hash['AUDIENCE_EVALUATION_RESULT'], audience_id, result_str) diff --git a/lib/optimizely/bucketer.rb b/lib/optimizely/bucketer.rb index ba502833..4943d38c 100644 --- a/lib/optimizely/bucketer.rb +++ b/lib/optimizely/bucketer.rb @@ -44,6 +44,31 @@ def bucket(project_config, experiment, bucketing_id, user_id) # user_id - String ID for user. # # Returns variation in which visitor with ID user_id has been placed. Nil if no variation. + + if experiment.nil? || experiment['key'].to_s.strip.empty? + message = 'Invalid entity key provided for bucketing. Returning nil.' 
+ @logger.log(Logger::DEBUG, message) + return nil, [] + end + + variation_id, decide_reasons = bucket_to_entity_id(project_config, experiment, bucketing_id, user_id) + if variation_id && variation_id != '' + experiment_id = experiment['id'] + variation = project_config.get_variation_from_id_by_experiment_id(experiment_id, variation_id) + return variation, decide_reasons + end + + # Handle the case when the traffic range is empty due to sticky bucketing + if variation_id == '' + message = 'Bucketed into an empty traffic range. Returning nil.' + @logger.log(Logger::DEBUG, message) + decide_reasons.push(message) + end + + [nil, decide_reasons] + end + + def bucket_to_entity_id(project_config, experiment, bucketing_id, user_id) return nil, [] if experiment.nil? decide_reasons = [] @@ -84,22 +109,18 @@ def bucket(project_config, experiment, bucketing_id, user_id) end traffic_allocations = experiment['trafficAllocation'] + if experiment['cmab'] + traffic_allocations = [ + { + 'entityId' => '$', + 'endOfRange' => experiment['cmab']['trafficAllocation'] + } + ] + end variation_id, find_bucket_reasons = find_bucket(bucketing_id, user_id, experiment_id, traffic_allocations) decide_reasons.push(*find_bucket_reasons) - if variation_id && variation_id != '' - variation = project_config.get_variation_from_id_by_experiment_id(experiment_id, variation_id) - return variation, decide_reasons - end - - # Handle the case when the traffic range is empty due to sticky bucketing - if variation_id == '' - message = 'Bucketed into an empty traffic range. Returning nil.' 
- @logger.log(Logger::DEBUG, message) - decide_reasons.push(message) - end - - [nil, decide_reasons] + [variation_id, decide_reasons] end def find_bucket(bucketing_id, user_id, parent_id, traffic_allocations) @@ -110,8 +131,8 @@ def find_bucket(bucketing_id, user_id, parent_id, traffic_allocations) # parent_id - String entity ID to use for bucketing ID # traffic_allocations - Array of traffic allocations # - # Returns and array of two values where first value is the entity ID corresponding to the provided bucket value - # or nil if no match is found. The second value contains the array of reasons stating how the deicision was taken + # Returns an array of two values where first value is the entity ID corresponding to the provided bucket value + # or nil if no match is found. The second value contains the array of reasons stating how the decision was taken decide_reasons = [] bucketing_key = format(BUCKETING_ID_TEMPLATE, bucketing_id: bucketing_id, entity_id: parent_id) bucket_value = generate_bucket_value(bucketing_key) diff --git a/lib/optimizely/cmab/cmab_client.rb b/lib/optimizely/cmab/cmab_client.rb new file mode 100644 index 00000000..229eb11a --- /dev/null +++ b/lib/optimizely/cmab/cmab_client.rb @@ -0,0 +1,230 @@ +# frozen_string_literal: true + +# +# Copyright 2025 Optimizely and contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +require 'optimizely/helpers/http_utils' +require 'optimizely/helpers/constants' + +module Optimizely + # Default constants for CMAB requests + DEFAULT_MAX_RETRIES = 1 + DEFAULT_INITIAL_BACKOFF = 0.1 # in seconds (100 ms) + DEFAULT_MAX_BACKOFF = 10 # in seconds + DEFAULT_BACKOFF_MULTIPLIER = 2.0 + MAX_WAIT_TIME = 10 + + class CmabRetryConfig + # Configuration for retrying CMAB requests. + # Contains parameters for maximum retries, backoff intervals, and multipliers. + attr_reader :max_retries, :initial_backoff, :max_backoff, :backoff_multiplier + + def initialize(max_retries: DEFAULT_MAX_RETRIES, initial_backoff: DEFAULT_INITIAL_BACKOFF, max_backoff: DEFAULT_MAX_BACKOFF, backoff_multiplier: DEFAULT_BACKOFF_MULTIPLIER) + @max_retries = max_retries + @initial_backoff = initial_backoff + @max_backoff = max_backoff + @backoff_multiplier = backoff_multiplier + end + end + + class DefaultCmabClient + # Client for interacting with the CMAB service. + # Provides methods to fetch decisions with optional retry logic. + + def initialize(http_client: nil, retry_config: nil, logger: nil, prediction_endpoint: nil) + # Initialize the CMAB client. + # Args: + # http_client: HTTP client for making requests. + # retry_config: Configuration for retry settings. + # logger: Logger for logging errors and info. + # prediction_endpoint: Custom prediction endpoint URL template. + # Use #{rule_id} as placeholder for rule_id. + @http_client = http_client || DefaultHttpClient.new + @retry_config = retry_config || CmabRetryConfig.new + @logger = logger || NoOpLogger.new + @prediction_endpoint = if prediction_endpoint.to_s.strip.empty? + 'https://prediction.cmab.optimizely.com/predict/%s' + else + prediction_endpoint + end + end + + def fetch_decision(rule_id, user_id, attributes, cmab_uuid, timeout: MAX_WAIT_TIME) + # Fetches a decision from the CMAB service. + # Args: + # rule_id: The rule ID for the experiment. + # user_id: The user ID for the request. 
+ # attributes: User attributes for the request. + # cmab_uuid: Unique identifier for the CMAB request. + # timeout: Maximum wait time for the request to respond in seconds. (default is 10 seconds). + # Returns: + # The variation ID. + url = format(@prediction_endpoint, rule_id) + cmab_attributes = attributes.map { |key, value| {'id' => key.to_s, 'value' => value, 'type' => 'custom_attribute'} } + + request_body = { + instances: [{ + visitorId: user_id, + experimentId: rule_id, + attributes: cmab_attributes, + cmabUUID: cmab_uuid + }] + } + + if @retry_config && @retry_config.max_retries.to_i.positive? + _do_fetch_with_retry(url, request_body, @retry_config, timeout) + else + _do_fetch(url, request_body, timeout) + end + end + + def _do_fetch(url, request_body, timeout) + # Perform a single fetch request to the CMAB prediction service. + + # Args: + # url: The endpoint URL. + # request_body: The request payload. + # timeout: Maximum wait time for the request to respond in seconds. + # Returns: + # The variation ID from the response. 
+ + headers = {'Content-Type' => 'application/json'} + begin + response = @http_client.post(url, json: request_body, headers: headers, timeout: timeout.to_i) + rescue StandardError => e + error_message = Optimizely::Helpers::Constants::CMAB_FETCH_FAILED % e.message + @logger.log(Logger::ERROR, error_message) + raise CmabFetchError, error_message + end + + unless (200..299).include?(response.status_code) + error_message = Optimizely::Helpers::Constants::CMAB_FETCH_FAILED % response.status_code + @logger.log(Logger::ERROR, error_message) + raise CmabFetchError, error_message + end + + begin + body = response.json + rescue JSON::ParserError, Optimizely::CmabInvalidResponseError + error_message = Optimizely::Helpers::Constants::INVALID_CMAB_FETCH_RESPONSE + @logger.log(Logger::ERROR, error_message) + raise CmabInvalidResponseError, error_message + end + + unless validate_response(body) + error_message = Optimizely::Helpers::Constants::INVALID_CMAB_FETCH_RESPONSE + @logger.log(Logger::ERROR, error_message) + raise CmabInvalidResponseError, error_message + end + + body['predictions'][0]['variation_id'] + end + + def validate_response(body) + # Validate the response structure from the CMAB service. + # Args: + # body: The JSON response body to validate. + # Returns: + # true if valid, false otherwise. + + body.is_a?(Hash) && + body.key?('predictions') && + body['predictions'].is_a?(Array) && + !body['predictions'].empty? && + body['predictions'][0].is_a?(Hash) && + body['predictions'][0].key?('variation_id') + end + + def _do_fetch_with_retry(url, request_body, retry_config, timeout) + # Perform a fetch request with retry logic. + # Args: + # url: The endpoint URL. + # request_body: The request payload. + # retry_config: Configuration for retry settings. + # timeout: Maximum wait time for the request to respond in seconds. + # Returns: + # The variation ID from the response. 
+ + backoff = retry_config.initial_backoff + + (0..retry_config.max_retries).each do |attempt| + variation_id = _do_fetch(url, request_body, timeout) + return variation_id + rescue StandardError => e + if attempt < retry_config.max_retries + @logger.log(Logger::INFO, "Retrying CMAB request (attempt #{attempt + 1}) after #{backoff} seconds...") + Kernel.sleep(backoff) + + backoff = [ + backoff * retry_config.backoff_multiplier, + retry_config.max_backoff + ].min + else + @logger.log(Logger::ERROR, "Max retries exceeded for CMAB request: #{e.message}") + raise Optimizely::CmabFetchError, "CMAB decision fetch failed (#{e.message})." + end + end + end + end + + class DefaultHttpClient + # Default HTTP client for making requests. + # Uses Optimizely::Helpers::HttpUtils to make requests. + + def post(url, json: nil, headers: {}, timeout: nil) + # Makes a POST request to the specified URL with JSON body and headers. + # Args: + # url: The endpoint URL. + # json: The JSON payload to send in the request body. + # headers: Additional headers for the request. + # timeout: Maximum wait time for the request to respond in seconds. + # Returns: + # The response object. + + response = Optimizely::Helpers::HttpUtils.make_request(url, :post, json.to_json, headers, timeout) + + HttpResponseAdapter.new(response) + end + + class HttpResponseAdapter + # Adapter for HTTP response to provide a consistent interface. + # Args: + # response: The raw HTTP response object. + + def initialize(response) + @response = response + end + + def status_code + @response.code.to_i + end + + def json + JSON.parse(@response.body) + rescue JSON::ParserError + raise Optimizely::CmabInvalidResponseError, Optimizely::Helpers::Constants::INVALID_CMAB_FETCH_RESPONSE + end + + def body + @response.body + end + end + end + + class NoOpLogger + # A no-operation logger that does nothing. 
+ def log(_level, _message); end + end +end diff --git a/lib/optimizely/cmab/cmab_service.rb b/lib/optimizely/cmab/cmab_service.rb new file mode 100644 index 00000000..9254b7d3 --- /dev/null +++ b/lib/optimizely/cmab/cmab_service.rb @@ -0,0 +1,218 @@ +# frozen_string_literal: true + +# +# Copyright 2025 Optimizely and contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +require 'optimizely/odp/lru_cache' +require 'optimizely/decide/optimizely_decide_option' +require 'optimizely/logger' +require 'digest' +require 'json' +require 'securerandom' +require 'murmurhash3' + +module Optimizely + CmabDecision = Struct.new(:variation_id, :cmab_uuid, keyword_init: true) + CmabCacheValue = Struct.new(:attributes_hash, :variation_id, :cmab_uuid, keyword_init: true) + + class DefaultCmabCacheOptions + # CMAB Constants + DEFAULT_CMAB_CACHE_TIMEOUT = (30 * 60) # in seconds + DEFAULT_CMAB_CACHE_SIZE = 10_000 + end + + # Default CMAB service implementation + class DefaultCmabService + # Initializes a new instance of the CmabService. + # + # @param cmab_cache [LRUCache] The cache object used for storing CMAB data. Must be an instance of LRUCache. + # @param cmab_client [DefaultCmabClient] The client used to interact with the CMAB service. Must be an instance of DefaultCmabClient. + # @param logger [Logger, nil] Optional logger for logging messages. Defaults to nil. + # + # @raise [ArgumentError] If cmab_cache is not an instance of LRUCache. 
+ # @raise [ArgumentError] If cmab_client is not an instance of DefaultCmabClient. + + NUM_LOCK_STRIPES = 1000 + + def initialize(cmab_cache, cmab_client, logger = nil) + @cmab_cache = cmab_cache + @cmab_client = cmab_client + @logger = logger || NoOpLogger.new + @locks = Array.new(NUM_LOCK_STRIPES) { Mutex.new } + end + + def get_decision(project_config, user_context, rule_id, options) + lock_index = get_lock_index(user_context.user_id, rule_id) + + @locks[lock_index].synchronize do + get_decision_impl(project_config, user_context, rule_id, options) + end + end + + private + + def get_lock_index(user_id, rule_id) + # Create a hash of user_id + rule_id for consistent lock selection + hash_input = "#{user_id}#{rule_id}" + hash_value = MurmurHash3::V32.str_hash(hash_input, 1) & 0xFFFFFFFF # Convert to unsigned 32-bit equivalent + hash_value % NUM_LOCK_STRIPES + end + + def get_decision_impl(project_config, user_context, rule_id, options) + # Retrieves a decision for a given user and rule, utilizing a cache for efficiency. + # + # This method filters user attributes, checks for various cache-related options, + # and either fetches a fresh decision or returns a cached one if appropriate. + # It supports options to ignore the cache, reset the cache, or invalidate a specific user's cache entry. + # + # @param project_config [Object] The project configuration object. + # @param user_context [Object] The user context containing user_id and attributes. + # @param rule_id [String] The identifier for the decision rule. + # @param options [Array<Symbol>, nil] Optional flags to control cache behavior. Supported options: + # - OptimizelyDecideOption::IGNORE_CMAB_CACHE: Bypass cache and fetch a new decision. + # - OptimizelyDecideOption::RESET_CMAB_CACHE: Reset the entire cache. + # - OptimizelyDecideOption::INVALIDATE_USER_CMAB_CACHE: Invalidate cache for the specific user and rule. + # + # @return [CmabDecision] The decision object containing variation_id and cmab_uuid. 
+ + filtered_attributes = filter_attributes(project_config, user_context, rule_id) + reasons = [] + + if options&.include?(Decide::OptimizelyDecideOption::IGNORE_CMAB_CACHE) + reason = "Ignoring CMAB cache for user '#{user_context.user_id}' and rule '#{rule_id}'" + @logger.log(Logger::DEBUG, reason) + reasons << reason + cmab_decision = fetch_decision(rule_id, user_context.user_id, filtered_attributes) + return [cmab_decision, reasons] + end + + if options&.include?(Decide::OptimizelyDecideOption::RESET_CMAB_CACHE) + reason = "Resetting CMAB cache for user '#{user_context.user_id}' and rule '#{rule_id}'" + @logger.log(Logger::DEBUG, reason) + reasons << reason + @cmab_cache.reset + end + + cache_key = get_cache_key(user_context.user_id, rule_id) + + if options&.include?(Decide::OptimizelyDecideOption::INVALIDATE_USER_CMAB_CACHE) + reason = "Invalidating CMAB cache for user '#{user_context.user_id}' and rule '#{rule_id}'" + @logger.log(Logger::DEBUG, reason) + reasons << reason + @cmab_cache.remove(cache_key) + end + + cached_value = @cmab_cache.lookup(cache_key) + attributes_hash = hash_attributes(filtered_attributes) + + if cached_value + if cached_value.attributes_hash == attributes_hash + reason = "CMAB cache hit for user '#{user_context.user_id}' and rule '#{rule_id}'" + @logger.log(Logger::DEBUG, reason) + reasons << reason + return [CmabDecision.new(variation_id: cached_value.variation_id, cmab_uuid: cached_value.cmab_uuid), reasons] + else + reason = "CMAB cache attributes mismatch for user '#{user_context.user_id}' and rule '#{rule_id}', fetching new decision." 
+ @logger.log(Logger::DEBUG, reason) + reasons << reason + @cmab_cache.remove(cache_key) + end + else + reason = "CMAB cache miss for user '#{user_context.user_id}' and rule '#{rule_id}'" + @logger.log(Logger::DEBUG, reason) + reasons << reason + end + + cmab_decision = fetch_decision(rule_id, user_context.user_id, filtered_attributes) + reason = "CMAB decision is #{cmab_decision.to_h}" + @logger.log(Logger::DEBUG, reason) + reasons << reason + + @cmab_cache.save(cache_key, + CmabCacheValue.new( + attributes_hash: attributes_hash, + variation_id: cmab_decision.variation_id, + cmab_uuid: cmab_decision.cmab_uuid + )) + [cmab_decision, reasons] + end + + def fetch_decision(rule_id, user_id, attributes) + # Fetches a decision for a given rule and user, along with user attributes. + # + # Generates a unique UUID for the decision request, then delegates to the CMAB client + # to fetch the variation ID. Returns a CmabDecision object containing the variation ID + # and the generated UUID. + # + # @param rule_id [String] The identifier for the rule to evaluate. + # @param user_id [String] The identifier for the user. + # @param attributes [Hash] A hash of user attributes to be used in decision making. + # @return [CmabDecision] The decision object containing the variation ID and UUID. + cmab_uuid = SecureRandom.uuid + variation_id = @cmab_client.fetch_decision(rule_id, user_id, attributes, cmab_uuid) + CmabDecision.new(variation_id: variation_id, cmab_uuid: cmab_uuid) + end + + def filter_attributes(project_config, user_context, rule_id) + # Filters the user attributes based on the CMAB attribute IDs defined in the experiment. + # + # @param project_config [Object] The project configuration object containing experiment and attribute mappings. + # @param user_context [Object] The user context object containing user attributes. + # @param rule_id [String] The ID of the experiment (rule) to filter attributes for. 
+ # @return [Hash] A hash of filtered user attributes whose keys match the CMAB attribute IDs for the given experiment. + user_attributes = user_context.user_attributes + filtered_user_attributes = {} + + experiment = project_config.experiment_id_map[rule_id] + return filtered_user_attributes if experiment.nil? || experiment['cmab'].nil? + + cmab_attribute_ids = experiment['cmab']['attributeIds'] + cmab_attribute_ids.each do |attribute_id| + attribute = project_config.attribute_id_map[attribute_id] + next unless attribute + + attribute_key = attribute['key'] + filtered_user_attributes[attribute_key] = user_attributes[attribute_key] if user_attributes.key?(attribute_key) + end + + filtered_user_attributes + end + + def get_cache_key(user_id, rule_id) + # Generates a cache key string based on the provided user ID and rule ID. + # + # The cache key is constructed in the format: "<user_id_length>-<user_id>-<rule_id>", + # where <user_id_length> is the length of the user_id string. + # + # @param user_id [String] The unique identifier for the user. + # @param rule_id [String] The unique identifier for the rule. + # @return [String] The generated cache key. + "#{user_id.length}-#{user_id}-#{rule_id}" + end + + def hash_attributes(attributes) + # Generates an MD5 hash for a given attributes hash. + # + # The method sorts the attributes by key, serializes them to a JSON string, + # and then computes the MD5 hash of the resulting string. This ensures that + # the hash is consistent regardless of the original key order in the input hash. + # + # @param attributes [Hash] The attributes to be hashed. + # @return [String] The MD5 hash of the sorted and serialized attributes. 
+ sorted_attrs = JSON.generate(attributes.sort.to_h) + Digest::MD5.hexdigest(sorted_attrs) + end + end +end diff --git a/lib/optimizely/config/datafile_project_config.rb b/lib/optimizely/config/datafile_project_config.rb index d8d78975..cfb67f24 100644 --- a/lib/optimizely/config/datafile_project_config.rb +++ b/lib/optimizely/config/datafile_project_config.rb @@ -27,12 +27,14 @@ class DatafileProjectConfig < ProjectConfig attr_reader :datafile, :account_id, :attributes, :audiences, :typed_audiences, :events, :experiments, :feature_flags, :groups, :project_id, :bot_filtering, :revision, :sdk_key, :environment_key, :rollouts, :version, :send_flag_decisions, - :attribute_key_map, :audience_id_map, :event_key_map, :experiment_feature_map, + :attribute_key_map, :attribute_id_to_key_map, :attribute_id_map, + :audience_id_map, :event_key_map, :experiment_feature_map, :experiment_id_map, :experiment_key_map, :feature_flag_key_map, :feature_variable_key_map, :group_id_map, :rollout_id_map, :rollout_experiment_id_map, :variation_id_map, :variation_id_to_variable_usage_map, :variation_key_map, :variation_id_map_by_experiment_id, :variation_key_map_by_experiment_id, :flag_variation_map, :integration_key_map, :integrations, - :public_key_for_odp, :host_for_odp, :all_segments + :public_key_for_odp, :host_for_odp, :all_segments, :region, :holdouts, :holdout_id_map, + :global_holdouts, :included_holdouts, :excluded_holdouts, :flag_holdouts_map # Boolean - denotes if Optimizely should remove the last block of visitors' IP address before storing event data attr_reader :anonymize_ip @@ -68,6 +70,11 @@ def initialize(datafile, logger, error_handler) @rollouts = config.fetch('rollouts', []) @send_flag_decisions = config.fetch('sendFlagDecisions', false) @integrations = config.fetch('integrations', []) + @region = config.fetch('region', 'US') + @holdouts = config.fetch('holdouts', []) + + # Default to US region if not specified + @region = 'US' if @region.nil? || @region.empty? 
# Json type is represented in datafile as a subtype of string for the sake of backwards compatibility. # Converting it to a first-class json type while creating Project Config @@ -82,6 +89,11 @@ def initialize(datafile, logger, error_handler) # Utility maps for quick lookup @attribute_key_map = generate_key_map(@attributes, 'key') + @attribute_id_map = generate_key_map(@attributes, 'id') + @attribute_id_to_key_map = {} + @attributes.each do |attribute| + @attribute_id_to_key_map[attribute['id']] = attribute['key'] + end @event_key_map = generate_key_map(@events, 'key') @group_id_map = generate_key_map(@groups, 'id') @group_id_map.each do |key, group| @@ -102,6 +114,45 @@ def initialize(datafile, logger, error_handler) @variation_id_to_variable_usage_map = {} @variation_id_to_experiment_map = {} @flag_variation_map = {} + @holdout_id_map = {} + @global_holdouts = [] + @included_holdouts = {} + @excluded_holdouts = {} + @flag_holdouts_map = {} + + @holdouts.each do |holdout| + next unless holdout['status'] == 'Running' + + # Ensure holdout has layerId field (holdouts don't have campaigns) + holdout['layerId'] ||= '' + + @holdout_id_map[holdout['id']] = holdout + + included_flags = holdout['includedFlags'] || [] + excluded_flags = holdout['excludedFlags'] || [] + + case [included_flags.empty?, excluded_flags.empty?] 
+ when [true, true] + # No included or excluded flags - this is a global holdout + @global_holdouts << holdout + + when [false, true], [false, false] + # Has included flags - add to included_holdouts map + included_flags.each do |flag_id| + @included_holdouts[flag_id] ||= [] + @included_holdouts[flag_id] << holdout + end + + when [true, false] + # No included flags but has excluded flags - global with exclusions + @global_holdouts << holdout + + excluded_flags.each do |flag_id| + @excluded_holdouts[flag_id] ||= [] + @excluded_holdouts[flag_id] << holdout + end + end + end @experiment_id_map.each_value do |exp| # Excludes experiments from rollouts @@ -155,6 +206,31 @@ def initialize(datafile, logger, error_handler) @experiment_feature_map[experiment_id] = [feature_flag['id']] end end + + # Adding Holdout variations in variation id and key maps + return unless @holdouts && !@holdouts.empty? + + @holdouts.each do |holdout| + next unless holdout['status'] == 'Running' + + holdout_key = holdout['key'] + holdout_id = holdout['id'] + + @variation_key_map[holdout_key] = {} + @variation_id_map[holdout_key] = {} + @variation_id_map_by_experiment_id[holdout_id] = {} + @variation_key_map_by_experiment_id[holdout_id] = {} + + variations = holdout['variations'] + next unless variations && !variations.empty? + + variations.each do |variation| + @variation_key_map[holdout_key][variation['key']] = variation + @variation_id_map[holdout_key][variation['id']] = variation + @variation_key_map_by_experiment_id[holdout_id][variation['key']] = variation + @variation_id_map_by_experiment_id[holdout_id][variation['id']] = variation + end + end end def get_rules_for_flag(feature_flag) @@ -223,8 +299,9 @@ def get_experiment_from_key(experiment_key) experiment = @experiment_key_map[experiment_key] return experiment if experiment - @logger.log Logger::ERROR, "Experiment key '#{experiment_key}' is not in datafile." 
- @error_handler.handle_error InvalidExperimentError + invalid_experiment_error = InvalidExperimentError.new(experiment_key: experiment_key) + @logger.log Logger::ERROR, invalid_experiment_error.message + @error_handler.handle_error invalid_experiment_error nil end @@ -238,8 +315,9 @@ def get_experiment_from_id(experiment_id) experiment = @experiment_id_map[experiment_id] return experiment if experiment - @logger.log Logger::ERROR, "Experiment id '#{experiment_id}' is not in datafile." - @error_handler.handle_error InvalidExperimentError + invalid_experiment_error = InvalidExperimentError.new(experiment_id: experiment_id) + @logger.log Logger::ERROR, invalid_experiment_error.message + @error_handler.handle_error invalid_experiment_error nil end @@ -253,8 +331,9 @@ def get_experiment_key(experiment_id) experiment = @experiment_id_map[experiment_id] return experiment['key'] unless experiment.nil? - @logger.log Logger::ERROR, "Experiment id '#{experiment_id}' is not in datafile." - @error_handler.handle_error InvalidExperimentError + invalid_experiment_error = InvalidExperimentError.new(experiment_id: experiment_id) + @logger.log Logger::ERROR, invalid_experiment_error.message + @error_handler.handle_error invalid_experiment_error nil end @@ -268,8 +347,9 @@ def get_event_from_key(event_key) event = @event_key_map[event_key] return event if event - @logger.log Logger::ERROR, "Event '#{event_key}' is not in datafile." - @error_handler.handle_error InvalidEventError + invalid_event_error = InvalidEventError.new(event_key) + @logger.log Logger::ERROR, invalid_event_error.message + @error_handler.handle_error invalid_event_error nil end @@ -283,8 +363,9 @@ def get_audience_from_id(audience_id) audience = @audience_id_map[audience_id] return audience if audience - @logger.log Logger::ERROR, "Audience '#{audience_id}' is not in datafile." 
- @error_handler.handle_error InvalidAudienceError + invalid_audience_error = InvalidAudienceError.new(audience_id) + @logger.log Logger::ERROR, invalid_audience_error.message + @error_handler.handle_error invalid_audience_error nil end @@ -308,13 +389,15 @@ def get_variation_from_id(experiment_key, variation_id) variation = variation_id_map[variation_id] return variation if variation - @logger.log Logger::ERROR, "Variation id '#{variation_id}' is not in datafile." - @error_handler.handle_error InvalidVariationError + invalid_variation_error = InvalidVariationError.new(variation_id: variation_id) + @logger.log Logger::ERROR, invalid_variation_error.message + @error_handler.handle_error invalid_variation_error return nil end - @logger.log Logger::ERROR, "Experiment key '#{experiment_key}' is not in datafile." - @error_handler.handle_error InvalidExperimentError + invalid_experiment_error = InvalidExperimentError.new(experiment_key: experiment_key) + @logger.log Logger::ERROR, invalid_experiment_error.message + @error_handler.handle_error invalid_experiment_error nil end @@ -331,13 +414,15 @@ def get_variation_from_id_by_experiment_id(experiment_id, variation_id) variation = variation_id_map_by_experiment_id[variation_id] return variation if variation - @logger.log Logger::ERROR, "Variation id '#{variation_id}' is not in datafile." - @error_handler.handle_error InvalidVariationError + invalid_variation_error = InvalidVariationError.new(variation_id: variation_id) + @logger.log Logger::ERROR, invalid_variation_error.message + @error_handler.handle_error invalid_variation_error return nil end - @logger.log Logger::ERROR, "Experiment id '#{experiment_id}' is not in datafile." 
- @error_handler.handle_error InvalidExperimentError + invalid_experiment_error = InvalidExperimentError.new(experiment_id: experiment_id) + @logger.log Logger::ERROR, invalid_experiment_error.message + @error_handler.handle_error invalid_experiment_error nil end @@ -354,13 +439,15 @@ def get_variation_id_from_key_by_experiment_id(experiment_id, variation_key) variation = variation_key_map[variation_key] return variation['id'] if variation - @logger.log Logger::ERROR, "Variation key '#{variation_key}' is not in datafile." - @error_handler.handle_error InvalidVariationError + invalid_variation_error = InvalidVariationError.new(variation_key: variation_key) + @logger.log Logger::ERROR, invalid_variation_error.message + @error_handler.handle_error invalid_variation_error return nil end - @logger.log Logger::ERROR, "Experiment id '#{experiment_id}' is not in datafile." - @error_handler.handle_error InvalidExperimentError + invalid_experiment_error = InvalidExperimentError.new(experiment_id: experiment_id) + @logger.log Logger::ERROR, invalid_experiment_error.message + @error_handler.handle_error invalid_experiment_error nil end @@ -377,13 +464,15 @@ def get_variation_id_from_key(experiment_key, variation_key) variation = variation_key_map[variation_key] return variation['id'] if variation - @logger.log Logger::ERROR, "Variation key '#{variation_key}' is not in datafile." - @error_handler.handle_error InvalidVariationError + invalid_variation_error = InvalidVariationError.new(variation_key: variation_key) + @logger.log Logger::ERROR, invalid_variation_error.message + @error_handler.handle_error invalid_variation_error return nil end - @logger.log Logger::ERROR, "Experiment key '#{experiment_key}' is not in datafile." 
- @error_handler.handle_error InvalidExperimentError + invalid_experiment_error = InvalidExperimentError.new(experiment_key: experiment_key) + @logger.log Logger::ERROR, invalid_experiment_error.message + @error_handler.handle_error invalid_experiment_error nil end @@ -397,8 +486,9 @@ def get_whitelisted_variations(experiment_id) experiment = @experiment_id_map[experiment_id] return experiment['forcedVariations'] if experiment - @logger.log Logger::ERROR, "Experiment ID '#{experiment_id}' is not in datafile." - @error_handler.handle_error InvalidExperimentError + invalid_experiment_error = InvalidExperimentError.new(experiment_id: experiment_id) + @logger.log Logger::ERROR, invalid_experiment_error.message + @error_handler.handle_error invalid_experiment_error end def get_attribute_id(attribute_key) @@ -420,8 +510,43 @@ def get_attribute_id(attribute_key) end return attribute_key if has_reserved_prefix - @logger.log Logger::ERROR, "Attribute key '#{attribute_key}' is not in datafile." - @error_handler.handle_error InvalidAttributeError + invalid_attribute_error = InvalidAttributeError.new(attribute_key) + @logger.log Logger::ERROR, invalid_attribute_error.message + @error_handler.handle_error invalid_attribute_error + nil + end + + def get_attribute_by_key(attribute_key) + # Get attribute for the provided attribute key. + # + # Args: + # Attribute key for which attribute is to be fetched. + # + # Returns: + # Attribute corresponding to the provided attribute key. + attribute = @attribute_key_map[attribute_key] + return attribute if attribute + + invalid_attribute_error = InvalidAttributeError.new(attribute_key) + @logger.log Logger::ERROR, invalid_attribute_error.message + @error_handler.handle_error invalid_attribute_error + nil + end + + def get_attribute_key_by_id(attribute_id) + # Get attribute key for the provided attribute ID. + # + # Args: + # Attribute ID for which attribute is to be fetched. 
+ # + # Returns: + # Attribute key corresponding to the provided attribute ID. + attribute = @attribute_id_to_key_map[attribute_id] + return attribute if attribute + + invalid_attribute_error = InvalidAttributeError.new(attribute_id) + @logger.log Logger::ERROR, invalid_attribute_error.message + @error_handler.handle_error invalid_attribute_error nil end @@ -439,8 +564,9 @@ def variation_id_exists?(experiment_id, variation_id) variation = variation_id_map[variation_id] return true if variation - @logger.log Logger::ERROR, "Variation ID '#{variation_id}' is not in datafile." - @error_handler.handle_error InvalidVariationError + invalid_variation_error = InvalidVariationError.new(variation_id: variation_id) + @logger.log Logger::ERROR, invalid_variation_error.message + @error_handler.handle_error invalid_variation_error end false @@ -508,6 +634,60 @@ def rollout_experiment?(experiment_id) @rollout_experiment_id_map.key?(experiment_id) end + def get_holdouts_for_flag(flag_id) + # Helper method to get holdouts from an applied feature flag + # + # flag_id - (REQUIRED) ID of the feature flag + # This parameter is required and should not be null/nil + # + # Returns the holdouts that apply for a specific flag + + return [] if @holdouts.nil? || @holdouts.empty? + + # Check cache first (before validation, so we cache the validation result too) + return @flag_holdouts_map[flag_id] if @flag_holdouts_map.key?(flag_id) + + # Validate that the flag exists in the datafile + flag_exists = @feature_flags.any? { |flag| flag['id'] == flag_id } + unless flag_exists + # Cache the empty result for non-existent flags + @flag_holdouts_map[flag_id] = [] + return [] + end + + # Prioritize global holdouts first + excluded = @excluded_holdouts[flag_id] || [] + + active_holdouts = if excluded.any? 
+ @global_holdouts.reject { |holdout| excluded.include?(holdout) } + else + @global_holdouts.dup + end + + # Append included holdouts + included = @included_holdouts[flag_id] || [] + active_holdouts.concat(included) + + # Cache the result + @flag_holdouts_map[flag_id] = active_holdouts + + @flag_holdouts_map[flag_id] || [] + end + + def get_holdout(holdout_id) + # Helper method to get holdout from holdout ID + # + # holdout_id - ID of the holdout + # + # Returns the holdout + + holdout = @holdout_id_map[holdout_id] + return holdout if holdout + + @logger.log Logger::ERROR, "Holdout with ID '#{holdout_id}' not found." + nil + end + private def generate_feature_variation_map(feature_flags) diff --git a/lib/optimizely/config_manager/http_project_config_manager.rb b/lib/optimizely/config_manager/http_project_config_manager.rb index 0da73c1f..48f09014 100644 --- a/lib/optimizely/config_manager/http_project_config_manager.rb +++ b/lib/optimizely/config_manager/http_project_config_manager.rb @@ -102,11 +102,6 @@ def ready? end def start! - if @stopped - @logger.log(Logger::WARN, 'Not starting. Already stopped.') - return - end - @async_scheduler.start! @stopped = false end @@ -146,7 +141,7 @@ def config end def optimizely_config - @optimizely_config = OptimizelyConfig.new(@config).config if @optimizely_config.nil? + @optimizely_config = OptimizelyConfig.new(@config, @logger).config if @optimizely_config.nil? @optimizely_config end @@ -268,6 +263,13 @@ def polling_interval(polling_interval) return end + if polling_interval < 30 + @logger.log( + Logger::WARN, + 'Polling intervals below 30 seconds are not recommended.' + ) + end + @polling_interval = polling_interval end @@ -324,7 +326,7 @@ def get_datafile_url(sdk_key, url, url_template) unless url url_template ||= @access_token.nil? ? 
Helpers::Constants::CONFIG_MANAGER['DATAFILE_URL_TEMPLATE'] : Helpers::Constants::CONFIG_MANAGER['AUTHENTICATED_DATAFILE_URL_TEMPLATE'] begin - return (url_template % sdk_key) + return url_template % sdk_key rescue error_msg = "Invalid url_template #{url_template} provided." @logger.log(Logger::ERROR, error_msg) diff --git a/lib/optimizely/config_manager/static_project_config_manager.rb b/lib/optimizely/config_manager/static_project_config_manager.rb index 38829ce4..200126f8 100644 --- a/lib/optimizely/config_manager/static_project_config_manager.rb +++ b/lib/optimizely/config_manager/static_project_config_manager.rb @@ -41,12 +41,13 @@ def initialize(datafile, logger, error_handler, skip_json_validation) error_handler, skip_json_validation ) + @logger = logger @sdk_key = @config&.sdk_key @optimizely_config = nil end def optimizely_config - @optimizely_config = OptimizelyConfig.new(@config).config if @optimizely_config.nil? + @optimizely_config = OptimizelyConfig.new(@config, @logger).config if @optimizely_config.nil? 
@optimizely_config end diff --git a/lib/optimizely/decide/optimizely_decide_option.rb b/lib/optimizely/decide/optimizely_decide_option.rb index f89dcd51..1b6781c2 100644 --- a/lib/optimizely/decide/optimizely_decide_option.rb +++ b/lib/optimizely/decide/optimizely_decide_option.rb @@ -23,6 +23,9 @@ module OptimizelyDecideOption IGNORE_USER_PROFILE_SERVICE = 'IGNORE_USER_PROFILE_SERVICE' INCLUDE_REASONS = 'INCLUDE_REASONS' EXCLUDE_VARIABLES = 'EXCLUDE_VARIABLES' + IGNORE_CMAB_CACHE = 'IGNORE_CMAB_CACHE' + RESET_CMAB_CACHE = 'RESET_CMAB_CACHE' + INVALIDATE_USER_CMAB_CACHE = 'INVALIDATE_USER_CMAB_CACHE' end end end diff --git a/lib/optimizely/decide/optimizely_decision.rb b/lib/optimizely/decide/optimizely_decision.rb index 06b109b3..ea1964d3 100644 --- a/lib/optimizely/decide/optimizely_decision.rb +++ b/lib/optimizely/decide/optimizely_decision.rb @@ -55,6 +55,25 @@ def as_json def to_json(*args) as_json.to_json(*args) end + + # Create a new OptimizelyDecision representing an error state. + # + # @param key [String] The flag key + # @param user [OptimizelyUserContext] The user context + # @param reasons [Array<String>] List of reasons explaining the error + # + # @return [OptimizelyDecision] OptimizelyDecision with error state values + def self.new_error_decision(key, user, reasons = []) + new( + variation_key: nil, + enabled: false, + variables: {}, + rule_key: nil, + flag_key: key, + user_context: user, + reasons: reasons + ) + end end end end diff --git a/lib/optimizely/decision_service.rb b/lib/optimizely/decision_service.rb index 3dbbf1d0..17a97358 100644 --- a/lib/optimizely/decision_service.rb +++ b/lib/optimizely/decision_service.rb @@ -29,7 +29,8 @@ class DecisionService # 3. Check whitelisting # 4. Check user profile service for past bucketing decisions (sticky bucketing) # 5. Check audience targeting - # 6. Use Murmurhash3 to bucket the user + # 6. Check cmab service + # 7. 
Use Murmurhash3 to bucket the user attr_reader :bucketer @@ -37,32 +38,39 @@ class DecisionService # This contains all the forced variations set by the user by calling setForcedVariation. attr_reader :forced_variation_map - Decision = Struct.new(:experiment, :variation, :source) + Decision = Struct.new(:experiment, :variation, :source, :cmab_uuid) + CmabDecisionResult = Struct.new(:error, :result, :reasons) + VariationResult = Struct.new(:cmab_uuid, :error, :reasons, :variation_id) + DecisionResult = Struct.new(:decision, :error, :reasons) DECISION_SOURCES = { 'EXPERIMENT' => 'experiment', 'FEATURE_TEST' => 'feature-test', - 'ROLLOUT' => 'rollout' + 'ROLLOUT' => 'rollout', + 'HOLDOUT' => 'holdout' }.freeze - def initialize(logger, user_profile_service = nil) + def initialize(logger, cmab_service, user_profile_service = nil) @logger = logger @user_profile_service = user_profile_service @bucketer = Bucketer.new(logger) @forced_variation_map = {} + @cmab_service = cmab_service end - def get_variation(project_config, experiment_id, user_context, decide_options = []) + def get_variation(project_config, experiment_id, user_context, user_profile_tracker = nil, decide_options = [], reasons = []) # Determines variation into which user will be bucketed. # # project_config - project_config - Instance of ProjectConfig # experiment_id - Experiment for which visitor variation needs to be determined # user_context - Optimizely user context instance + # user_profile_tracker: Tracker for reading and updating user profile of the user. + # reasons: Decision reasons. 
# - # Returns variation ID where visitor will be bucketed - # (nil if experiment is inactive or user does not meet audience conditions) - + # Returns VariationResult struct + user_profile_tracker = UserProfileTracker.new(user_context.user_id, @user_profile_service, @logger) unless user_profile_tracker.is_a?(Optimizely::UserProfileTracker) decide_reasons = [] + decide_reasons.push(*reasons) user_id = user_context.user_id attributes = user_context.user_attributes # By default, the bucketing ID should be the user ID @@ -70,38 +78,36 @@ def get_variation(project_config, experiment_id, user_context, decide_options = decide_reasons.push(*bucketing_id_reasons) # Check to make sure experiment is active experiment = project_config.get_experiment_from_id(experiment_id) - return nil, decide_reasons if experiment.nil? + return VariationResult.new(nil, false, decide_reasons, nil) if experiment.nil? experiment_key = experiment['key'] unless project_config.experiment_running?(experiment) message = "Experiment '#{experiment_key}' is not running." 
@logger.log(Logger::INFO, message) decide_reasons.push(message) - return nil, decide_reasons + return VariationResult.new(nil, false, decide_reasons, nil) end # Check if a forced variation is set for the user forced_variation, reasons_received = get_forced_variation(project_config, experiment['key'], user_id) decide_reasons.push(*reasons_received) - return forced_variation['id'], decide_reasons if forced_variation + return VariationResult.new(nil, false, decide_reasons, forced_variation['id']) if forced_variation # Check if user is in a white-listed variation whitelisted_variation_id, reasons_received = get_whitelisted_variation_id(project_config, experiment_id, user_id) decide_reasons.push(*reasons_received) - return whitelisted_variation_id, decide_reasons if whitelisted_variation_id + return VariationResult.new(nil, false, decide_reasons, whitelisted_variation_id) if whitelisted_variation_id should_ignore_user_profile_service = decide_options.include? Optimizely::Decide::OptimizelyDecideOption::IGNORE_USER_PROFILE_SERVICE # Check for saved bucketing decisions if decide_options do not include ignoreUserProfileService - unless should_ignore_user_profile_service - user_profile, reasons_received = get_user_profile(user_id) - decide_reasons.push(*reasons_received) - saved_variation_id, reasons_received = get_saved_variation_id(project_config, experiment_id, user_profile) + unless should_ignore_user_profile_service && user_profile_tracker + saved_variation_id, reasons_received = get_saved_variation_id(project_config, experiment_id, user_profile_tracker.user_profile) decide_reasons.push(*reasons_received) if saved_variation_id message = "Returning previously activated variation ID #{saved_variation_id} of experiment '#{experiment_key}' for user '#{user_id}' from user profile." 
@logger.log(Logger::INFO, message) decide_reasons.push(message) - return saved_variation_id, decide_reasons + return VariationResult.new(nil, false, decide_reasons, saved_variation_id) end end @@ -112,27 +118,47 @@ def get_variation(project_config, experiment_id, user_context, decide_options = message = "User '#{user_id}' does not meet the conditions to be in experiment '#{experiment_key}'." @logger.log(Logger::INFO, message) decide_reasons.push(message) - return nil, decide_reasons + return VariationResult.new(nil, false, decide_reasons, nil) end - # Bucket normally - variation, bucket_reasons = @bucketer.bucket(project_config, experiment, bucketing_id, user_id) - decide_reasons.push(*bucket_reasons) - variation_id = variation ? variation['id'] : nil + # Check if this is a CMAB experiment + # If so, handle CMAB-specific traffic allocation and decision logic. + # Otherwise, proceed with standard bucketing logic for non-CMAB experiments. + if experiment.key?('cmab') + cmab_decision_result = get_decision_for_cmab_experiment(project_config, experiment, user_context, bucketing_id, decide_options) + decide_reasons.push(*cmab_decision_result.reasons) + if cmab_decision_result.error + # CMAB decision failed, return error + return VariationResult.new(nil, true, decide_reasons, nil) + end - message = '' - if variation_id - variation_key = variation['key'] - message = "User '#{user_id}' is in variation '#{variation_key}' of experiment '#{experiment_id}'." + @logger.log(Logger::DEBUG, "Skipping user profile service for CMAB experiment '#{experiment_key}'. CMAB decisions are dynamic and not stored for sticky bucketing.") + should_ignore_user_profile_service = true + cmab_decision = cmab_decision_result.result + variation_id = cmab_decision&.variation_id + cmab_uuid = cmab_decision&.cmab_uuid + variation = variation_id ? project_config.get_variation_from_id_by_experiment_id(experiment_id, variation_id) : nil else - message = "User '#{user_id}' is in no variation." 
+ # Bucket normally + variation, bucket_reasons = @bucketer.bucket(project_config, experiment, bucketing_id, user_id) + decide_reasons.push(*bucket_reasons) + variation_id = variation ? variation['id'] : nil + cmab_uuid = nil end + + variation_key = variation['key'] if variation + message = if variation_id + "User '#{user_id}' is in variation '#{variation_key}' of experiment '#{experiment_id}'." + else + "User '#{user_id}' is in no variation." + end + @logger.log(Logger::INFO, message) - decide_reasons.push(message) + decide_reasons.push(message) if message # Persist bucketing decision - save_user_profile(user_profile, experiment_id, variation_id) unless should_ignore_user_profile_service - [variation_id, decide_reasons] + user_profile_tracker.update_user_profile(experiment_id, variation_id) unless should_ignore_user_profile_service && user_profile_tracker + VariationResult.new(cmab_uuid, false, decide_reasons, variation_id) end def get_variation_for_feature(project_config, feature_flag, user_context, decide_options = []) @@ -142,30 +168,171 @@ def get_variation_for_feature(project_config, feature_flag, user_context, decide # feature_flag - The feature flag the user wants to access # user_context - Optimizely user context instance # - # Returns Decision struct (nil if the user is not bucketed into any of the experiments on the feature) + # Returns DecisionResult struct. + holdouts = project_config.get_holdouts_for_flag(feature_flag['id']) + + if holdouts && !holdouts.empty? + # Has holdouts - use get_decision_for_flag which checks holdouts first + get_decision_for_flag(feature_flag, user_context, project_config, decide_options) + else + get_variations_for_feature_list(project_config, [feature_flag], user_context, decide_options).first + end + end + + def get_decision_for_flag(feature_flag, user_context, project_config, decide_options = [], user_profile_tracker = nil, decide_reasons = nil) + # Get the decision for a single feature flag. 
+ # Processes holdouts, experiments, and rollouts in that order. + # + # feature_flag - The feature flag to get a decision for + # user_context - The user context + # project_config - The project config + # decide_options - Array of decide options + # user_profile_tracker - The user profile tracker + # decide_reasons - Array of decision reasons to merge + # + # Returns a DecisionResult for the feature flag + + reasons = decide_reasons ? decide_reasons.dup : [] + user_id = user_context.user_id + + # Check holdouts + holdouts = project_config.get_holdouts_for_flag(feature_flag['id']) + + holdouts.each do |holdout| + holdout_decision = get_variation_for_holdout(holdout, user_context, project_config) + reasons.push(*holdout_decision.reasons) + + next unless holdout_decision.decision + + message = "The user '#{user_id}' is bucketed into holdout '#{holdout['key']}' for feature flag '#{feature_flag['key']}'." + @logger.log(Logger::INFO, message) + reasons.push(message) + return DecisionResult.new(holdout_decision.decision, false, reasons) + end + + # Check if the feature flag has an experiment and the user is bucketed into that experiment + experiment_decision = get_variation_for_feature_experiment(project_config, feature_flag, user_context, user_profile_tracker, decide_options) + reasons.push(*experiment_decision.reasons) + + return DecisionResult.new(experiment_decision.decision, experiment_decision.error, reasons) if experiment_decision.decision + + # If there's an error (e.g., CMAB error), return immediately without falling back to rollout + return DecisionResult.new(nil, experiment_decision.error, reasons) if experiment_decision.error + + # Check if the feature flag has a rollout and the user is bucketed into that rollout + rollout_decision = get_variation_for_feature_rollout(project_config, feature_flag, user_context) + reasons.push(*rollout_decision.reasons) + + if rollout_decision.decision + # Check if this was a forced decision (last reason contains "forced 
decision map") + is_forced_decision = reasons.last&.include?('forced decision map') + + unless is_forced_decision + # Only add the "bucketed into rollout" message for normal bucketing + message = "The user '#{user_id}' is bucketed into a rollout for feature flag '#{feature_flag['key']}'." + @logger.log(Logger::INFO, message) + reasons.push(message) + end + + DecisionResult.new(rollout_decision.decision, rollout_decision.error, reasons) + else + message = "The user '#{user_id}' is not bucketed into a rollout for feature flag '#{feature_flag['key']}'." + @logger.log(Logger::INFO, message) + DecisionResult.new(nil, false, reasons) + end + end + + def get_variation_for_holdout(holdout, user_context, project_config) + # Get the variation for holdout + # + # holdout - The holdout configuration + # user_context - The user context + # project_config - The project config + # + # Returns a DecisionResult for the holdout decide_reasons = [] + user_id = user_context.user_id + attributes = user_context.user_attributes - # check if the feature is being experiment on and whether the user is bucketed into the experiment - decision, reasons_received = get_variation_for_feature_experiment(project_config, feature_flag, user_context, decide_options) - decide_reasons.push(*reasons_received) - return decision, decide_reasons unless decision.nil? + if holdout.nil? || holdout['status'].nil? || holdout['status'] != 'Running' + key = holdout && holdout['key'] ? holdout['key'] : 'unknown' + message = "Holdout '#{key}' is not running." 
+ @logger.log(Logger::INFO, message) + decide_reasons.push(message) + return DecisionResult.new(nil, false, decide_reasons) + end + + bucketing_id, bucketing_id_reasons = get_bucketing_id(user_id, attributes) + decide_reasons.push(*bucketing_id_reasons) - decision, reasons_received = get_variation_for_feature_rollout(project_config, feature_flag, user_context) + # Check audience conditions + user_meets_audience_conditions, reasons_received = Audience.user_meets_audience_conditions?(project_config, holdout, user_context, @logger) decide_reasons.push(*reasons_received) - [decision, decide_reasons] + unless user_meets_audience_conditions + message = "User '#{user_id}' does not meet the conditions for holdout '#{holdout['key']}'." + @logger.log(Logger::DEBUG, message) + decide_reasons.push(message) + return DecisionResult.new(nil, false, decide_reasons) + end + + # Bucket user into holdout variation + variation, bucket_reasons = @bucketer.bucket(project_config, holdout, bucketing_id, user_id) + decide_reasons.push(*bucket_reasons) + + if variation && !variation['key'].nil? && !variation['key'].empty? + message = "The user '#{user_id}' is bucketed into variation '#{variation['key']}' of holdout '#{holdout['key']}'." + @logger.log(Logger::INFO, message) + decide_reasons.push(message) + + holdout_decision = Decision.new(holdout, variation, DECISION_SOURCES['HOLDOUT'], nil) + DecisionResult.new(holdout_decision, false, decide_reasons) + else + message = "The user '#{user_id}' is not bucketed into holdout '#{holdout['key']}'." + @logger.log(Logger::DEBUG, message) + decide_reasons.push(message) + DecisionResult.new(nil, false, decide_reasons) + end end - def get_variation_for_feature_experiment(project_config, feature_flag, user_context, decide_options = []) + def get_variations_for_feature_list(project_config, feature_flags, user_context, decide_options = []) + # Returns the list of experiment/variation the user is bucketed in for the given list of features. 
+ # + # Args: + # project_config: Instance of ProjectConfig. + # feature_flags: Array of features for which we are determining if it is enabled or not for the given user. + # user_context: User context for user. + # decide_options: Decide options. + # + # Returns: + # Array of DecisionResult struct. + ignore_ups = decide_options.include? Optimizely::Decide::OptimizelyDecideOption::IGNORE_USER_PROFILE_SERVICE + user_profile_tracker = nil + unless ignore_ups && @user_profile_service + user_id = user_context.user_id + user_profile_tracker = UserProfileTracker.new(user_id, @user_profile_service, @logger) + user_profile_tracker.load_user_profile + end + + decisions = [] + feature_flags.each do |feature_flag| + decision = get_decision_for_flag(feature_flag, user_context, project_config, decide_options, user_profile_tracker) + decisions << decision + end + user_profile_tracker&.save_user_profile + decisions + end + + def get_variation_for_feature_experiment(project_config, feature_flag, user_context, user_profile_tracker, decide_options = []) # Gets the variation the user is bucketed into for the feature flag's experiment. # # project_config - project_config - Instance of ProjectConfig # feature_flag - The feature flag the user wants to access # user_context - Optimizely user context instance # - # Returns Decision struct (nil if the user is not bucketed into any of the experiments on the feature) - # or nil if the user is not bucketed into any of the experiments on the feature + # Returns a DecisionResult containing the decision (or nil if not bucketed), + # an error flag, and an array of decision reasons. decide_reasons = [] user_id = user_context.user_id feature_flag_key = feature_flag['key'] @@ -173,7 +340,7 @@ def get_variation_for_feature_experiment(project_config, feature_flag, user_cont message = "The feature flag '#{feature_flag_key}' is not used in any experiments." 
@logger.log(Logger::DEBUG, message) decide_reasons.push(message) - return nil, decide_reasons + return DecisionResult.new(nil, false, decide_reasons) end # Evaluate each experiment and return the first bucketed experiment variation @@ -183,26 +350,34 @@ def get_variation_for_feature_experiment(project_config, feature_flag, user_cont message = "Feature flag experiment with ID '#{experiment_id}' is not in the datafile." @logger.log(Logger::DEBUG, message) decide_reasons.push(message) - return nil, decide_reasons + return DecisionResult.new(nil, false, decide_reasons) end experiment_id = experiment['id'] - variation_id, reasons_received = get_variation_from_experiment_rule(project_config, feature_flag_key, experiment, user_context, decide_options) + variation_result = get_variation_from_experiment_rule(project_config, feature_flag_key, experiment, user_context, user_profile_tracker, decide_options) + error = variation_result.error + reasons_received = variation_result.reasons + variation_id = variation_result.variation_id + cmab_uuid = variation_result.cmab_uuid decide_reasons.push(*reasons_received) + # If there's an error, return immediately instead of falling back to next experiment + return DecisionResult.new(nil, error, decide_reasons) if error + next unless variation_id variation = project_config.get_variation_from_id_by_experiment_id(experiment_id, variation_id) variation = project_config.get_variation_from_flag(feature_flag['key'], variation_id, 'id') if variation.nil? - return Decision.new(experiment, variation, DECISION_SOURCES['FEATURE_TEST']), decide_reasons + decision = Decision.new(experiment, variation, DECISION_SOURCES['FEATURE_TEST'], cmab_uuid) + return DecisionResult.new(decision, error, decide_reasons) end message = "The user '#{user_id}' is not bucketed into any of the experiments on the feature '#{feature_flag_key}'." 
@logger.log(Logger::INFO, message) decide_reasons.push(message) - [nil, decide_reasons] + DecisionResult.new(nil, false, decide_reasons) end def get_variation_for_feature_rollout(project_config, feature_flag, user_context) @@ -213,7 +388,8 @@ def get_variation_for_feature_rollout(project_config, feature_flag, user_context # feature_flag - The feature flag the user wants to access # user_context - Optimizely user context instance # - # Returns the Decision struct or nil if not bucketed into any of the targeting rules + # Returns a DecisionResult containing the decision (or nil if not bucketed), + # an error flag, and an array of decision reasons. decide_reasons = [] rollout_id = feature_flag['rolloutId'] @@ -222,7 +398,7 @@ def get_variation_for_feature_rollout(project_config, feature_flag, user_context message = "Feature flag '#{feature_flag_key}' is not used in a rollout." @logger.log(Logger::DEBUG, message) decide_reasons.push(message) - return nil, decide_reasons + return DecisionResult.new(nil, false, decide_reasons) end rollout = project_config.get_rollout_from_id(rollout_id) @@ -230,10 +406,10 @@ def get_variation_for_feature_rollout(project_config, feature_flag, user_context message = "Rollout with ID '#{rollout_id}' is not in the datafile '#{feature_flag['key']}'" @logger.log(Logger::DEBUG, message) decide_reasons.push(message) - return nil, decide_reasons + return DecisionResult.new(nil, false, decide_reasons) end - return nil, decide_reasons if rollout['experiments'].empty? + return DecisionResult.new(nil, false, decide_reasons) if rollout['experiments'].empty? 
index = 0 rollout_rules = rollout['experiments'] @@ -242,17 +418,17 @@ def get_variation_for_feature_rollout(project_config, feature_flag, user_context decide_reasons.push(*reasons_received) if variation rule = rollout_rules[index] - feature_decision = Decision.new(rule, variation, DECISION_SOURCES['ROLLOUT']) - return [feature_decision, decide_reasons] + feature_decision = Decision.new(rule, variation, DECISION_SOURCES['ROLLOUT'], nil) + return DecisionResult.new(feature_decision, false, decide_reasons) end index = skip_to_everyone_else ? (rollout_rules.length - 1) : (index + 1) end - [nil, decide_reasons] + DecisionResult.new(nil, false, decide_reasons) end - def get_variation_from_experiment_rule(project_config, flag_key, rule, user, options = []) + def get_variation_from_experiment_rule(project_config, flag_key, rule, user, user_profile_tracker, options = []) # Determine which variation the user is in for a given rollout. # Returns the variation from experiment rules. # @@ -267,13 +443,11 @@ def get_variation_from_experiment_rule(project_config, flag_key, rule, user, opt context = Optimizely::OptimizelyUserContext::OptimizelyDecisionContext.new(flag_key, rule['key']) variation, forced_reasons = validated_forced_decision(project_config, context, user) reasons.push(*forced_reasons) + return VariationResult.new(nil, false, reasons, variation['id']) if variation - return [variation['id'], reasons] if variation - - variation_id, response_reasons = get_variation(project_config, rule['id'], user, options) - reasons.push(*response_reasons) - - [variation_id, reasons] + variation_result = get_variation(project_config, rule['id'], user, user_profile_tracker, options) + variation_result.reasons = reasons + variation_result.reasons + variation_result end def get_variation_from_delivery_rule(project_config, flag_key, rules, rule_index, user_context) @@ -441,6 +615,51 @@ def validated_forced_decision(project_config, context, user_context) private + def 
get_decision_for_cmab_experiment(project_config, experiment, user_context, bucketing_id, decide_options = []) + # Determines the CMAB (Contextual Multi-Armed Bandit) decision for a given experiment and user context. + # + # This method first checks if the user is bucketed into the CMAB experiment based on traffic allocation. + # If the user is not bucketed, it returns a CmabDecisionResult indicating exclusion. + # If the user is bucketed, it attempts to fetch a CMAB decision using the CMAB service. + # In case of errors during CMAB decision retrieval, it logs the error and returns a result indicating failure. + # + # @param project_config [ProjectConfig] The current project configuration. + # @param experiment [Hash] The experiment configuration hash. + # @param user_context [OptimizelyUserContext] The user context object containing user information. + # @param bucketing_id [String] The bucketing ID used for traffic allocation. + # @param decide_options [Array] Optional array of decision options. + # + # @return [CmabDecisionResult] The result of the CMAB decision process, including decision error status, decision data, and reasons. + decide_reasons = [] + user_id = user_context.user_id + + # Check if user is in CMAB traffic allocation + bucketed_entity_id, bucket_reasons = @bucketer.bucket_to_entity_id( + project_config, experiment, bucketing_id, user_id + ) + decide_reasons.push(*bucket_reasons) + unless bucketed_entity_id + message = "User \"#{user_context.user_id}\" not in CMAB experiment \"#{experiment['key']}\" due to traffic allocation." 
+ @logger.log(Logger::INFO, message) + decide_reasons.push(message) + return CmabDecisionResult.new(false, nil, decide_reasons) + end + + # User is in CMAB allocation, proceed to CMAB decision + begin + cmab_decision, reasons = @cmab_service.get_decision( + project_config, user_context, experiment['id'], decide_options + ) + decide_reasons.push(*reasons) + CmabDecisionResult.new(false, cmab_decision, decide_reasons) + rescue StandardError => e + error_message = "Failed to fetch CMAB data for experiment #{experiment['key']}." + decide_reasons.push(error_message) + @logger&.log(Logger::ERROR, "#{error_message} #{e}") + CmabDecisionResult.new(true, nil, decide_reasons) + end + end + def get_whitelisted_variation_id(project_config, experiment_id, user_id) # Determine if a user is whitelisted into a variation for the given experiment and return the ID of that variation # diff --git a/lib/optimizely/event/batch_event_processor.rb b/lib/optimizely/event/batch_event_processor.rb index 52ec0533..428c4abf 100644 --- a/lib/optimizely/event/batch_event_processor.rb +++ b/lib/optimizely/event/batch_event_processor.rb @@ -172,20 +172,35 @@ def flush_queue! return if @current_batch.empty? log_event = Optimizely::EventFactory.create_log_event(@current_batch, @logger) - begin - @logger.log( - Logger::INFO, - 'Flushing Queue.' - ) - - @event_dispatcher.dispatch_event(log_event) - @notification_center&.send_notifications( - NotificationCenter::NOTIFICATION_TYPES[:LOG_EVENT], - log_event - ) - rescue StandardError => e - @logger.log(Logger::ERROR, "Error dispatching event: #{log_event} #{e.message}.") + @logger.log( + Logger::INFO, + 'Flushing Queue.' 
+ ) + + retry_count = 0 + max_retries = Optimizely::Helpers::Constants::EVENT_DISPATCH_CONFIG[:MAX_RETRIES] + + while retry_count < max_retries + begin + @event_dispatcher.dispatch_event(log_event) + @notification_center&.send_notifications( + NotificationCenter::NOTIFICATION_TYPES[:LOG_EVENT], + log_event + ) + # Success - break out of retry loop + break + rescue StandardError => e + @logger.log(Logger::ERROR, "Error dispatching event: #{log_event} #{e.message}.") + retry_count += 1 + + if retry_count < max_retries + delay = calculate_retry_interval(retry_count - 1) + @logger.log(Logger::DEBUG, "Retrying event dispatch (attempt #{retry_count + 1} of #{max_retries}) after #{delay}s") + sleep(delay) + end + end end + @current_batch = [] end @@ -231,5 +246,16 @@ def positive_number?(value) # false otherwise. Helpers::Validator.finite_number?(value) && value.positive? end + + # Calculate exponential backoff interval: 200ms, 400ms, 800ms, ... capped at 1s + # + # @param retry_count - Zero-based retry count + # @return [Float] - Delay in seconds + def calculate_retry_interval(retry_count) + initial_interval = Helpers::Constants::EVENT_DISPATCH_CONFIG[:INITIAL_RETRY_INTERVAL] + max_interval = Helpers::Constants::EVENT_DISPATCH_CONFIG[:MAX_RETRY_INTERVAL] + interval = initial_interval * (2**retry_count) + [interval, max_interval].min + end end end diff --git a/lib/optimizely/event/entity/event_context.rb b/lib/optimizely/event/entity/event_context.rb index 65f8f18e..f26b30a2 100644 --- a/lib/optimizely/event/entity/event_context.rb +++ b/lib/optimizely/event/entity/event_context.rb @@ -26,7 +26,8 @@ def initialize( anonymize_ip:, revision:, client_name:, - client_version: + client_version:, + region: ) @account_id = account_id @project_id = project_id @@ -34,6 +35,7 @@ def initialize( @revision = revision @client_name = client_name @client_version = client_version + @region = region end def as_json @@ -43,7 +45,8 @@ def as_json anonymize_ip: @anonymize_ip, revision: 
@revision, client_name: @client_name, - client_version: @client_version + client_version: @client_version, + region: @region } end end diff --git a/lib/optimizely/event/event_factory.rb b/lib/optimizely/event/event_factory.rb index d8d5062e..b1afa103 100644 --- a/lib/optimizely/event/event_factory.rb +++ b/lib/optimizely/event/event_factory.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2019-2020, 2022, Optimizely and contributors +# Copyright 2019-2020, 2022-2023, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -28,7 +28,10 @@ class EventFactory # EventFactory builds LogEvent objects from a given user_event. class << self CUSTOM_ATTRIBUTE_FEATURE_TYPE = 'custom' - ENDPOINT = 'https://logx.optimizely.com/v1/events' + ENDPOINTS = { + US: 'https://logx.optimizely.com/v1/events', + EU: 'https://eu.logx.optimizely.com/v1/events' + }.freeze POST_HEADERS = {'Content-Type' => 'application/json'}.freeze ACTIVATE_EVENT_KEY = 'campaign_activated' @@ -67,12 +70,15 @@ def create_log_event(user_events, logger) builder.with_visitors(visitors) event_batch = builder.build - Event.new(:post, ENDPOINT, event_batch.as_json, POST_HEADERS) + + endpoint = ENDPOINTS[user_context[:region].to_s.upcase.to_sym] || ENDPOINTS[:US] + + Event.new(:post, endpoint, event_batch.as_json, POST_HEADERS) end def build_attribute_list(user_attributes, project_config) visitor_attributes = [] - user_attributes&.keys&.each do |attribute_key| + user_attributes&.each_key do |attribute_key| # Omit attribute values that are not supported by the log endpoint. 
attribute_value = user_attributes[attribute_key] next unless Helpers::Validator.attribute_valid?(attribute_key, attribute_value) diff --git a/lib/optimizely/event/user_event_factory.rb b/lib/optimizely/event/user_event_factory.rb index f7852341..872a70b8 100644 --- a/lib/optimizely/event/user_event_factory.rb +++ b/lib/optimizely/event/user_event_factory.rb @@ -33,6 +33,7 @@ def self.create_impression_event(project_config, experiment, variation_id, metad # # Returns Event encapsulating the impression event. event_context = Optimizely::EventContext.new( + region: project_config.region, account_id: project_config.account_id, project_id: project_config.project_id, anonymize_ip: project_config.anonymize_ip, @@ -67,6 +68,7 @@ def self.create_conversion_event(project_config, event, user_id, user_attributes # Returns Event encapsulating the conversion event. event_context = Optimizely::EventContext.new( + region: project_config.region, account_id: project_config.account_id, project_id: project_config.project_id, anonymize_ip: project_config.anonymize_ip, diff --git a/lib/optimizely/event_builder.rb b/lib/optimizely/event_builder.rb index 9b87413e..a5ee82a9 100644 --- a/lib/optimizely/event_builder.rb +++ b/lib/optimizely/event_builder.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2016-2019, 2022, Optimizely and contributors +# Copyright 2016-2019, 2022-2023, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -62,23 +62,23 @@ def get_common_params(project_config, user_id, attributes) visitor_attributes = [] - attributes&.keys&.each do |attribute_key| + attributes&.each_key do |attribute_key| # Omit attribute values that are not supported by the log endpoint. 
attribute_value = attributes[attribute_key] - if Helpers::Validator.attribute_valid?(attribute_key, attribute_value) - attribute_id = project_config.get_attribute_id attribute_key - if attribute_id - visitor_attributes.push( - entity_id: attribute_id, - key: attribute_key, - type: CUSTOM_ATTRIBUTE_FEATURE_TYPE, - value: attribute_value - ) - end - end + next unless Helpers::Validator.attribute_valid?(attribute_key, attribute_value) + + attribute_id = project_config.get_attribute_id attribute_key + next unless attribute_id + + visitor_attributes.push( + entity_id: attribute_id, + key: attribute_key, + type: CUSTOM_ATTRIBUTE_FEATURE_TYPE, + value: attribute_value + ) end # Append Bot Filtering Attribute - if project_config.bot_filtering == true || project_config.bot_filtering == false + if [true, false].include?(project_config.bot_filtering) visitor_attributes.push( entity_id: Optimizely::Helpers::Constants::CONTROL_ATTRIBUTES['BOT_FILTERING'], key: Optimizely::Helpers::Constants::CONTROL_ATTRIBUTES['BOT_FILTERING'], @@ -101,13 +101,17 @@ def get_common_params(project_config, user_id, attributes) revision: project_config.revision, client_name: CLIENT_ENGINE, enrich_decisions: true, - client_version: VERSION + client_version: VERSION, + region: project_config.region || 'US' } end end class EventBuilder < BaseEventBuilder - ENDPOINT = 'https://logx.optimizely.com/v1/events' + ENDPOINTS = { + US: 'https://logx.optimizely.com/v1/events', + EU: 'https://eu.logx.optimizely.com/v1/events' + }.freeze POST_HEADERS = {'Content-Type' => 'application/json'}.freeze ACTIVATE_EVENT_KEY = 'campaign_activated' @@ -122,11 +126,14 @@ def create_impression_event(project_config, experiment, variation_id, user_id, a # # Returns +Event+ encapsulating the impression event. 
+ region = project_config.region || 'US' event_params = get_common_params(project_config, user_id, attributes) impression_params = get_impression_params(project_config, experiment, variation_id) event_params[:visitors][0][:snapshots].push(impression_params) - Event.new(:post, ENDPOINT, event_params, POST_HEADERS) + endpoint = ENDPOINTS[region.to_s.upcase.to_sym] + + Event.new(:post, endpoint, event_params, POST_HEADERS) end def create_conversion_event(project_config, event, user_id, attributes, event_tags) @@ -140,11 +147,14 @@ def create_conversion_event(project_config, event, user_id, attributes, event_ta # # Returns +Event+ encapsulating the conversion event. + region = project_config.region || 'US' event_params = get_common_params(project_config, user_id, attributes) conversion_params = get_conversion_params(event, event_tags) event_params[:visitors][0][:snapshots] = [conversion_params] - Event.new(:post, ENDPOINT, event_params, POST_HEADERS) + endpoint = ENDPOINTS[region.to_s.upcase.to_sym] + + Event.new(:post, endpoint, event_params, POST_HEADERS) end private diff --git a/lib/optimizely/exceptions.rb b/lib/optimizely/exceptions.rb index 50ef62c0..073433af 100644 --- a/lib/optimizely/exceptions.rb +++ b/lib/optimizely/exceptions.rb @@ -42,16 +42,28 @@ def initialize(msg = 'SDK key not provided/cannot be found in the datafile.') class InvalidAudienceError < Error # Raised when an invalid audience is provided - def initialize(msg = 'Provided audience is not in datafile.') - super + attr_reader :audience_id + + def initialize(audience_id) + raise ArgumentError, 'audience_id must be provided' if audience_id.nil? 
+ + super("Audience id '#{audience_id}' is not in datafile.") + + @audience_id = audience_id end end class InvalidAttributeError < Error # Raised when an invalid attribute is provided - def initialize(msg = 'Provided attribute is not in datafile.') - super + attr_reader :attribute_key + + def initialize(attribute_key) + raise ArgumentError, 'attribute_key must be provided' if attribute_key.nil? + + super("Attribute key '#{attribute_key}' is not in datafile.") + + @attribute_key = attribute_key end end @@ -74,24 +86,56 @@ def initialize(msg = 'Event tags provided are in an invalid format.') class InvalidExperimentError < Error # Raised when an invalid experiment key is provided - def initialize(msg = 'Provided experiment is not in datafile.') - super + attr_reader :experiment_id, :experiment_key + + def initialize(experiment_id: nil, experiment_key: nil) + raise ArgumentError, 'Either experiment_id or experiment_key must be provided.' if experiment_id.nil? && experiment_key.nil? + raise ArgumentError, 'Cannot provide both experiment_id and experiment_key.' if !experiment_id.nil? && !experiment_key.nil? + + if experiment_id.nil? + @experiment_key = experiment_key + identifier = "key '#{@experiment_key}'" + else + @experiment_id = experiment_id + identifier = "id '#{@experiment_id}'" + end + + super("Experiment #{identifier} is not in datafile.") end end class InvalidEventError < Error # Raised when an invalid event key is provided - def initialize(msg = 'Provided event is not in datafile.') - super + attr_reader :event_key + + def initialize(event_key) + raise ArgumentError, 'event_key must be provided.' if event_key.nil? 
+ + super("Event key '#{event_key}' is not in datafile.") + + @event_key = event_key end end class InvalidVariationError < Error # Raised when an invalid variation key or ID is provided - def initialize(msg = 'Provided variation is not in datafile.') - super + attr_reader :variation_id, :variation_key + + def initialize(variation_id: nil, variation_key: nil) + raise ArgumentError, 'Either variation_id or variation_key must be provided.' if variation_id.nil? && variation_key.nil? + raise ArgumentError, 'Cannot provide both variation_id and variation_key.' if !variation_id.nil? && !variation_key.nil? + + if variation_id.nil? + identifier = "key '#{variation_key}'" + @variation_key = variation_key + else + identifier = "id '#{variation_id}'" + @variation_id = variation_id + end + + super("Variation #{identifier} is not in datafile.") end end @@ -146,4 +190,28 @@ def initialize(msg = 'Provided semantic version is invalid.') super end end + + class CmabError < Error + # Base exception for CMAB errors + + def initialize(msg = 'CMAB error occurred.') + super + end + end + + class CmabFetchError < CmabError + # Exception raised when CMAB fetch fails + + def initialize(msg = 'CMAB decision fetch failed with status:') + super + end + end + + class CmabInvalidResponseError < CmabError + # Exception raised when CMAB fetch returns an invalid response + + def initialize(msg = 'Invalid CMAB fetch response') + super + end + end end diff --git a/lib/optimizely/helpers/constants.rb b/lib/optimizely/helpers/constants.rb index 02b815ae..4334f56d 100644 --- a/lib/optimizely/helpers/constants.rb +++ b/lib/optimizely/helpers/constants.rb @@ -201,6 +201,12 @@ module Constants }, 'forcedVariations' => { 'type' => 'object' + }, + 'cmab' => { + 'type' => 'object' + }, + 'holdouts' => { + 'type' => 'array' } }, 'required' => %w[ @@ -303,6 +309,43 @@ module Constants }, 'required' => %w[key] } + }, + 'cmab' => { + 'type' => 'object', + 'properties' => { + 'attributeIds' => { + 'type' => 
'array', + 'items' => {'type' => 'string'} + }, + 'trafficAllocation' => { + 'type' => 'integer' + } + } + }, + 'holdouts' => { + 'type' => 'array', + 'items' => { + 'type' => 'object', + 'properties' => { + 'id' => { + 'type' => 'string' + }, + 'key' => { + 'type' => 'string' + }, + 'status' => { + 'type' => 'string' + }, + 'includedFlags' => { + 'type' => 'array', + 'items' => {'type' => 'string'} + }, + 'excludedFlags' => { + 'type' => 'array', + 'items' => {'type' => 'string'} + } + } + } } }, 'required' => %w[ @@ -416,7 +459,10 @@ module Constants }.freeze EVENT_DISPATCH_CONFIG = { - REQUEST_TIMEOUT: 10 + REQUEST_TIMEOUT: 10, + MAX_RETRIES: 3, + INITIAL_RETRY_INTERVAL: 0.2, # 200ms in seconds + MAX_RETRY_INTERVAL: 1.0 # 1 second }.freeze ODP_GRAPHQL_API_CONFIG = { @@ -447,13 +493,18 @@ module Constants DEFAULT_QUEUE_CAPACITY: 10_000, DEFAULT_BATCH_SIZE: 10, DEFAULT_FLUSH_INTERVAL_SECONDS: 1, - DEFAULT_RETRY_COUNT: 3 + DEFAULT_RETRY_COUNT: 3, + INITIAL_RETRY_INTERVAL: 0.2, # 200ms in seconds + MAX_RETRY_INTERVAL: 1.0 # 1 second }.freeze HTTP_HEADERS = { 'IF_MODIFIED_SINCE' => 'If-Modified-Since', 'LAST_MODIFIED' => 'Last-Modified' }.freeze + + CMAB_FETCH_FAILED = 'CMAB decision fetch failed (%s).' + INVALID_CMAB_FETCH_RESPONSE = 'Invalid CMAB fetch response' end end end diff --git a/lib/optimizely/helpers/sdk_settings.rb b/lib/optimizely/helpers/sdk_settings.rb index 3ca2dc72..6ccc82f8 100644 --- a/lib/optimizely/helpers/sdk_settings.rb +++ b/lib/optimizely/helpers/sdk_settings.rb @@ -22,7 +22,7 @@ module Optimizely module Helpers class OptimizelySdkSettings attr_accessor :odp_disabled, :segments_cache_size, :segments_cache_timeout_in_secs, :odp_segments_cache, :odp_segment_manager, - :odp_event_manager, :fetch_segments_timeout, :odp_event_timeout, :odp_flush_interval + :odp_event_manager, :fetch_segments_timeout, :odp_event_timeout, :odp_flush_interval, :cmab_prediction_endpoint # Contains configuration used for Optimizely Project initialization. 
# @@ -35,6 +35,7 @@ class OptimizelySdkSettings # @param odp_segment_request_timeout - Time to wait in seconds for fetch_qualified_segments (optional. default = 10). # @param odp_event_request_timeout - Time to wait in seconds for send_odp_events (optional. default = 10). # @param odp_event_flush_interval - Time to wait in seconds for odp events to accumulate before sending (optional. default = 1). + # @param cmab_prediction_endpoint - Custom CMAB prediction endpoint URL template (optional). Use %s as placeholder for rule_id. Defaults to production endpoint if not provided. def initialize( disable_odp: false, segments_cache_size: Constants::ODP_SEGMENTS_CACHE_CONFIG[:DEFAULT_CAPACITY], @@ -44,7 +45,8 @@ def initialize( odp_event_manager: nil, odp_segment_request_timeout: nil, odp_event_request_timeout: nil, - odp_event_flush_interval: nil + odp_event_flush_interval: nil, + cmab_prediction_endpoint: nil ) @odp_disabled = disable_odp @segments_cache_size = segments_cache_size @@ -55,6 +57,7 @@ def initialize( @fetch_segments_timeout = odp_segment_request_timeout @odp_event_timeout = odp_event_request_timeout @odp_flush_interval = odp_event_flush_interval + @cmab_prediction_endpoint = cmab_prediction_endpoint end end end diff --git a/lib/optimizely/helpers/validator.rb b/lib/optimizely/helpers/validator.rb index 3ae2350a..d3baa447 100644 --- a/lib/optimizely/helpers/validator.rb +++ b/lib/optimizely/helpers/validator.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2016-2019, 2022, Optimizely and contributors +# Copyright 2016-2019, 2022-2023, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -190,14 +190,13 @@ def segments_cache_valid?(segments_cache) # segments_cache - custom cache to be validated. # # Returns boolean depending on whether cache has required methods. 
- ( - segments_cache.respond_to?(:reset) && + + segments_cache.respond_to?(:reset) && segments_cache.method(:reset)&.parameters&.empty? && segments_cache.respond_to?(:lookup) && segments_cache.method(:lookup)&.parameters&.length&.positive? && segments_cache.respond_to?(:save) && segments_cache.method(:save)&.parameters&.length&.positive? - ) end def segment_manager_valid?(segment_manager) @@ -206,13 +205,12 @@ def segment_manager_valid?(segment_manager) # segment_manager - custom manager to be validated. # # Returns boolean depending on whether manager has required methods. - ( - segment_manager.respond_to?(:odp_config) && + + segment_manager.respond_to?(:odp_config) && segment_manager.respond_to?(:reset) && segment_manager.method(:reset)&.parameters&.empty? && segment_manager.respond_to?(:fetch_qualified_segments) && (segment_manager.method(:fetch_qualified_segments)&.parameters&.length || 0) >= 3 - ) end def event_manager_valid?(event_manager) diff --git a/lib/optimizely/odp/lru_cache.rb b/lib/optimizely/odp/lru_cache.rb index 8ce61549..6d4c9af3 100644 --- a/lib/optimizely/odp/lru_cache.rb +++ b/lib/optimizely/odp/lru_cache.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2022, Optimizely and contributors +# Copyright 2022-2025, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -91,6 +91,19 @@ def peek(key) @cache_mutex.synchronize { @map[key]&.value } end + + # Remove the element associated with the provided key from the cache + # + # @param key - The key to remove + + def remove(key) + return if @capacity <= 0 + + @cache_mutex.synchronize do + @map.delete(key) + end + nil + end end class CacheElement diff --git a/lib/optimizely/odp/odp_event_manager.rb b/lib/optimizely/odp/odp_event_manager.rb index fc9084a1..ef199d91 100644 --- a/lib/optimizely/odp/odp_event_manager.rb +++ b/lib/optimizely/odp/odp_event_manager.rb @@ -239,7 +239,12 @@ def flush_batch! end break unless should_retry - @logger.log(Logger::DEBUG, 'Error dispatching ODP events, scheduled to retry.') if i < @retry_count + if i < @retry_count - 1 + # Exponential backoff: 200ms, 400ms, 800ms, ... capped at 1s + delay = calculate_retry_interval(i) + @logger.log(Logger::DEBUG, "Error dispatching ODP events, retrying (attempt #{i + 2} of #{@retry_count}) after #{delay}s") + sleep(delay) + end i += 1 end @@ -282,5 +287,16 @@ def process_config_update @api_key = @odp_config&.api_key @api_host = @odp_config&.api_host end + + # Calculate exponential backoff interval: 200ms, 400ms, 800ms, ... 
capped at 1s + # + # @param retry_count - Zero-based retry count + # @return [Float] - Delay in seconds + def calculate_retry_interval(retry_count) + initial_interval = Helpers::Constants::ODP_EVENT_MANAGER[:INITIAL_RETRY_INTERVAL] + max_interval = Helpers::Constants::ODP_EVENT_MANAGER[:MAX_RETRY_INTERVAL] + interval = initial_interval * (2**retry_count) + [interval, max_interval].min + end end end diff --git a/lib/optimizely/optimizely_config.rb b/lib/optimizely/optimizely_config.rb index 1ffbcd94..32a637ad 100644 --- a/lib/optimizely/optimizely_config.rb +++ b/lib/optimizely/optimizely_config.rb @@ -19,8 +19,9 @@ module Optimizely require 'json' class OptimizelyConfig include Optimizely::ConditionTreeEvaluator - def initialize(project_config) + def initialize(project_config, logger = nil) @project_config = project_config + @logger = logger || NoOpLogger.new @rollouts = @project_config.rollouts @audiences = [] audience_id_lookup_dict = {} @@ -91,6 +92,7 @@ def audiences_map def experiments_map experiments_id_map.values.reduce({}) do |experiments_key_map, experiment| + @logger.log(Logger::WARN, "Duplicate experiment keys found in datafile: #{experiment['key']}") if experiments_key_map.key? experiment['key'] experiments_key_map.update(experiment['key'] => experiment) end end diff --git a/lib/optimizely/optimizely_factory.rb b/lib/optimizely/optimizely_factory.rb index b6734872..dbf1410b 100644 --- a/lib/optimizely/optimizely_factory.rb +++ b/lib/optimizely/optimizely_factory.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2019, 2022, Optimizely and contributors +# Copyright 2019, 2022-2023, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -22,6 +22,8 @@ require 'optimizely/event/batch_event_processor' require 'optimizely/logger' require 'optimizely/notification_center' +require 'optimizely/cmab/cmab_client' +require 'optimizely/cmab/cmab_service' module Optimizely class OptimizelyFactory @@ -83,6 +85,46 @@ def self.blocking_timeout(blocking_timeout) @blocking_timeout = blocking_timeout end + # Convenience method for setting CMAB cache size. + # @param cache_size Integer - Maximum number of items in CMAB cache. + # @param logger - Optional LoggerInterface Provides a log method to log messages. + def self.cmab_cache_size(cache_size, logger = NoOpLogger.new) + unless cache_size.is_a?(Integer) && cache_size.positive? + logger.log( + Logger::ERROR, + "CMAB cache size is invalid, setting to default size #{Optimizely::DefaultCmabCacheOptions::DEFAULT_CMAB_CACHE_SIZE}." + ) + return + end + @cmab_cache_size = cache_size + end + + # Convenience method for setting CMAB cache TTL. + # @param cache_ttl Numeric - Time in seconds for cache entries to live. + # @param logger - Optional LoggerInterface Provides a log method to log messages. + def self.cmab_cache_ttl(cache_ttl, logger = NoOpLogger.new) + unless cache_ttl.is_a?(Numeric) && cache_ttl.positive? + logger.log( + Logger::ERROR, + "CMAB cache TTL is invalid, setting to default TTL #{Optimizely::DefaultCmabCacheOptions::DEFAULT_CMAB_CACHE_TIMEOUT}." + ) + return + end + @cmab_cache_ttl = cache_ttl + end + + # Convenience method for setting custom CMAB cache. + # @param custom_cache - Cache implementation responding to lookup, save, remove, and reset methods. + def self.cmab_custom_cache(custom_cache) + @cmab_custom_cache = custom_cache + end + + # Convenience method for setting custom CMAB prediction endpoint. + # @param prediction_endpoint String - Custom URL template for CMAB prediction API. Use %s as placeholder for rule_id. 
+ def self.cmab_prediction_endpoint(prediction_endpoint) + @cmab_prediction_endpoint = prediction_endpoint + end + # Returns a new optimizely instance. # # @params sdk_key - Required String uniquely identifying the fallback datafile corresponding to project. @@ -103,7 +145,7 @@ def self.default_instance(sdk_key, datafile = nil) ) Optimizely::Project.new( - datafile, nil, logger, error_handler, nil, nil, sdk_key, config_manager, notification_center + datafile: datafile, logger: logger, error_handler: error_handler, sdk_key: sdk_key, config_manager: config_manager, notification_center: notification_center ) end @@ -111,7 +153,7 @@ def self.default_instance(sdk_key, datafile = nil) # # @param config_manager - Required ConfigManagerInterface Responds to 'config' method. def self.default_instance_with_config_manager(config_manager) - Optimizely::Project.new(nil, nil, nil, nil, nil, nil, nil, config_manager) + Optimizely::Project.new(config_manager: config_manager) end # Returns a new optimizely instance. @@ -142,7 +184,6 @@ def self.custom_instance( # rubocop:disable Metrics/ParameterLists notification_center = nil, settings = nil ) - error_handler ||= NoOpErrorHandler.new logger ||= NoOpLogger.new notification_center = notification_center.is_a?(Optimizely::NotificationCenter) ? 
notification_center : NotificationCenter.new(logger, error_handler) @@ -166,20 +207,31 @@ def self.custom_instance( # rubocop:disable Metrics/ParameterLists notification_center: notification_center ) + # Initialize CMAB components + cmab_prediction_endpoint = nil + cmab_prediction_endpoint = settings.cmab_prediction_endpoint if settings&.cmab_prediction_endpoint + cmab_prediction_endpoint ||= @cmab_prediction_endpoint + + cmab_client = DefaultCmabClient.new(logger: logger, prediction_endpoint: cmab_prediction_endpoint) + cmab_cache = @cmab_custom_cache || LRUCache.new( + @cmab_cache_size || Optimizely::DefaultCmabCacheOptions::DEFAULT_CMAB_CACHE_SIZE, + @cmab_cache_ttl || Optimizely::DefaultCmabCacheOptions::DEFAULT_CMAB_CACHE_TIMEOUT + ) + cmab_service = DefaultCmabService.new(cmab_cache, cmab_client, logger) + Optimizely::Project.new( - datafile, - event_dispatcher, - logger, - error_handler, - skip_json_validation, - user_profile_service, - sdk_key, - config_manager, - notification_center, - event_processor, - [], - {}, - settings + datafile: datafile, + event_dispatcher: event_dispatcher, + logger: logger, + error_handler: error_handler, + skip_json_validation: skip_json_validation, + user_profile_service: user_profile_service, + sdk_key: sdk_key, + config_manager: config_manager, + notification_center: notification_center, + event_processor: event_processor, + settings: settings, + cmab_service: cmab_service ) end end diff --git a/lib/optimizely/project_config.rb b/lib/optimizely/project_config.rb index b0d43aa3..b5094b62 100644 --- a/lib/optimizely/project_config.rb +++ b/lib/optimizely/project_config.rb @@ -62,6 +62,8 @@ def host_for_odp; end def all_segments; end + def region; end + def experiment_running?(experiment); end def get_experiment_from_key(experiment_key); end @@ -86,6 +88,10 @@ def get_whitelisted_variations(experiment_id); end def get_attribute_id(attribute_key); end + def get_attribute_by_key(attribute_key); end + + def 
get_attribute_key_by_id(attribute_id); end + def variation_id_exists?(experiment_id, variation_id); end def get_feature_flag_from_key(feature_flag_key); end diff --git a/lib/optimizely/user_profile_tracker.rb b/lib/optimizely/user_profile_tracker.rb new file mode 100644 index 00000000..082576b0 --- /dev/null +++ b/lib/optimizely/user_profile_tracker.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +require_relative 'logger' + +module Optimizely + class UserProfileTracker + attr_reader :user_profile + + def initialize(user_id, user_profile_service = nil, logger = nil) + @user_id = user_id + @user_profile_service = user_profile_service + @logger = logger || NoOpLogger.new + @profile_updated = false + @user_profile = { + user_id: user_id, + experiment_bucket_map: {} + } + end + + def load_user_profile(reasons = [], error_handler = nil) + return if reasons.nil? + + begin + @user_profile = @user_profile_service.lookup(@user_id) if @user_profile_service + if @user_profile.nil? + @user_profile = { + user_id: @user_id, + experiment_bucket_map: {} + } + end + rescue => e + message = "Error while looking up user profile for user ID '#{@user_id}': #{e}." 
+ reasons << message + @logger.log(Logger::ERROR, message) + error_handler&.handle_error(e) + end + end + + def update_user_profile(experiment_id, variation_id) + user_id = @user_profile[:user_id] + begin + @user_profile[:experiment_bucket_map][experiment_id] = { + variation_id: variation_id + } + @profile_updated = true + @logger.log(Logger::INFO, "Updated variation ID #{variation_id} of experiment ID #{experiment_id} for user '#{user_id}'.") + rescue => e + @logger.log(Logger::ERROR, "Error while updating user profile for user ID '#{user_id}': #{e}.") + end + end + + def save_user_profile(error_handler = nil) + return unless @profile_updated && @user_profile_service + + begin + @user_profile_service.save(@user_profile) + @logger.log(Logger::INFO, "Saved user profile for user '#{@user_profile[:user_id]}'.") + rescue => e + @logger.log(Logger::ERROR, "Failed to save user profile for user '#{@user_profile[:user_id]}': #{e}.") + error_handler&.handle_error(e) + end + end + end +end diff --git a/lib/optimizely/version.rb b/lib/optimizely/version.rb index 43d4f749..af2eb8ff 100644 --- a/lib/optimizely/version.rb +++ b/lib/optimizely/version.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2016-2023, Optimizely and contributors +# Copyright 2016-2024, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -17,5 +17,5 @@ # module Optimizely CLIENT_ENGINE = 'ruby-sdk' - VERSION = '5.0.0-beta' + VERSION = '5.2.1' end diff --git a/optimizely-sdk.gemspec b/optimizely-sdk.gemspec index c1c5b881..2a3c87c5 100644 --- a/optimizely-sdk.gemspec +++ b/optimizely-sdk.gemspec @@ -3,19 +3,23 @@ require_relative 'lib/optimizely/version' Gem::Specification.new do |spec| - spec.name = 'optimizely-sdk' - spec.version = Optimizely::VERSION - spec.authors = ['Optimizely'] - spec.email = ['developers@optimizely.com'] - spec.required_ruby_version = '>= 2.7' + spec.name = 'optimizely-sdk' + spec.version = Optimizely::VERSION + spec.authors = ['Optimizely'] + spec.email = ['developers@optimizely.com'] + spec.required_ruby_version = '>= 3.0' - spec.summary = "Ruby SDK for Optimizely's testing framework" - spec.description = 'A Ruby SDK for use with Optimizely Feature Experimentation, Optimizely Full Stack (legacy), and Optimizely Rollouts' - spec.homepage = 'https://www.optimizely.com/' - spec.license = 'Apache-2.0' + spec.summary = "Ruby SDK for Optimizely's testing framework" + spec.description = 'A Ruby SDK for use with Optimizely Feature Experimentation, Optimizely Full Stack (legacy), and Optimizely Rollouts' + spec.homepage = 'https://github.com/optimizely/ruby-sdk' + spec.license = 'Apache-2.0' + spec.metadata = { + 'source_code_uri' => 'https://github.com/optimizely/ruby-sdk', + 'changelog_uri' => 'https://github.com/optimizely/ruby-sdk/blob/master/CHANGELOG.md' + } - spec.files = Dir['lib/**/*', 'LICENSE'] - spec.require_paths = ['lib'] + spec.files = Dir['lib/**/*', 'LICENSE'] + spec.require_paths = ['lib'] spec.add_development_dependency 'bundler' spec.add_development_dependency 'coveralls_reborn' @@ -24,6 +28,6 @@ Gem::Specification.new do |spec| spec.add_development_dependency 'rubocop' spec.add_development_dependency 'webmock' - spec.add_runtime_dependency 'json-schema', '~> 2.6' + spec.add_runtime_dependency 'json-schema', '>= 2.6' spec.add_runtime_dependency 'murmurhash3', 
'~> 0.1' end diff --git a/spec/audience_spec.rb b/spec/audience_spec.rb index 73560aff..eb997f0e 100644 --- a/spec/audience_spec.rb +++ b/spec/audience_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2016-2017, 2019-2020, 2022, Optimizely and contributors +# Copyright 2016-2017, 2019-2020, 2022-2023, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,7 +25,7 @@ let(:config) { Optimizely::DatafileProjectConfig.new(config_body_JSON, spy_logger, error_handler) } let(:typed_audience_config) { Optimizely::DatafileProjectConfig.new(config_typed_audience_JSON, spy_logger, error_handler) } let(:integration_config) { Optimizely::DatafileProjectConfig.new(config_integration_JSON, spy_logger, error_handler) } - let(:project_instance) { Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler) } + let(:project_instance) { Optimizely::Project.new(datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler) } let(:user_context) { project_instance.create_user_context('some-user', {}) } after(:example) { project_instance.close } @@ -47,7 +47,7 @@ user_meets_audience_conditions, reasons = Optimizely::Audience.user_meets_audience_conditions?(config, experiment, user_context, spy_logger) expect(user_meets_audience_conditions).to be true - expect(reasons).to eq(["Audiences for experiment 'test_experiment' collectively evaluated to TRUE."]) + expect(reasons).to eq(["Audiences for experiment 'test_experiment' collectively evaluated to TRUE."]) # Audience Ids is Empty and Audience Conditions is nil experiment = config.experiment_key_map['test_experiment'] diff --git a/spec/bucketing_holdout_spec.rb b/spec/bucketing_holdout_spec.rb new file mode 100644 index 00000000..4ddf1f67 --- /dev/null +++ b/spec/bucketing_holdout_spec.rb @@ -0,0 +1,318 @@ +# frozen_string_literal: true + +# +# Copyright 2025 Optimizely and 
contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +require 'spec_helper' +require 'optimizely/bucketer' +require 'optimizely/error_handler' +require 'optimizely/logger' + +# Helper class for testing with controlled bucket values +class TestBucketer < Optimizely::Bucketer + def initialize(logger) + super(logger) + @bucket_values = [] + @bucket_index = 0 + end + + def bucket_values(values) + @bucket_values = values + @bucket_index = 0 + end + + def generate_bucket_value(bucketing_id) + return super(bucketing_id) if @bucket_values.empty? 
+ + value = @bucket_values[@bucket_index] + @bucket_index = (@bucket_index + 1) % @bucket_values.length + value + end +end + +describe 'Optimizely::Bucketer - Holdout Tests' do + let(:config_body) { OptimizelySpec::CONFIG_BODY_WITH_HOLDOUTS } + let(:config_body_JSON) { OptimizelySpec::CONFIG_BODY_WITH_HOLDOUTS_JSON } + let(:error_handler) { Optimizely::NoOpErrorHandler.new } + let(:spy_logger) { spy('logger') } + let(:test_user_id) { 'test_user_id' } + let(:test_bucketing_id) { 'test_bucketing_id' } + let(:config) do + Optimizely::DatafileProjectConfig.new( + OptimizelySpec::CONFIG_BODY_WITH_HOLDOUTS_JSON, + spy_logger, + error_handler + ) + end + let(:test_bucketer) { TestBucketer.new(spy_logger) } + + before do + # Verify that the config contains holdouts + expect(config.holdouts).not_to be_nil + expect(config.holdouts.length).to be > 0 + end + + describe '#bucket with holdouts' do + it 'should bucket user within valid traffic allocation range' do + holdout = config.get_holdout('holdout_1') + expect(holdout).not_to be_nil + + # Set bucket value to be within first variation's traffic allocation (0-5000 range) + test_bucketer.bucket_values([2500]) + + variation, _reasons = test_bucketer.bucket(config, holdout, test_bucketing_id, test_user_id) + + expect(variation).not_to be_nil + expect(variation['id']).to eq('var_1') + expect(variation['key']).to eq('control') + + # Verify logging + expect(spy_logger).to have_received(:log).with( + Logger::DEBUG, + "Assigned bucket 2500 to user '#{test_user_id}' with bucketing ID: '#{test_bucketing_id}'." 
+ ) + end + + it 'should return nil when user is outside traffic allocation range' do + holdout = config.get_holdout('holdout_1') + expect(holdout).not_to be_nil + + # Modify traffic allocation to be smaller by creating a modified holdout + modified_holdout = OptimizelySpec.deep_clone(holdout) + modified_holdout['trafficAllocation'] = [ + { + 'entityId' => 'var_1', + 'endOfRange' => 1000 + } + ] + + # Set bucket value outside traffic allocation range + test_bucketer.bucket_values([1500]) + + variation, _reasons = test_bucketer.bucket(config, modified_holdout, test_bucketing_id, test_user_id) + + expect(variation).to be_nil + + # Verify user was assigned bucket value but no variation was found + expect(spy_logger).to have_received(:log).with( + Logger::DEBUG, + "Assigned bucket 1500 to user '#{test_user_id}' with bucketing ID: '#{test_bucketing_id}'." + ) + end + + it 'should return nil when holdout has no traffic allocation' do + holdout = config.get_holdout('holdout_1') + expect(holdout).not_to be_nil + + # Clear traffic allocation + modified_holdout = OptimizelySpec.deep_clone(holdout) + modified_holdout['trafficAllocation'] = [] + + test_bucketer.bucket_values([5000]) + + variation, _reasons = test_bucketer.bucket(config, modified_holdout, test_bucketing_id, test_user_id) + + expect(variation).to be_nil + + # Verify bucket was assigned + expect(spy_logger).to have_received(:log).with( + Logger::DEBUG, + "Assigned bucket 5000 to user '#{test_user_id}' with bucketing ID: '#{test_bucketing_id}'." 
+ ) + end + + it 'should return nil when traffic allocation points to invalid variation ID' do + holdout = config.get_holdout('holdout_1') + expect(holdout).not_to be_nil + + # Set traffic allocation to point to non-existent variation + modified_holdout = OptimizelySpec.deep_clone(holdout) + modified_holdout['trafficAllocation'] = [ + { + 'entityId' => 'invalid_variation_id', + 'endOfRange' => 10_000 + } + ] + + test_bucketer.bucket_values([5000]) + + variation, _reasons = test_bucketer.bucket(config, modified_holdout, test_bucketing_id, test_user_id) + + expect(variation).to be_nil + + # Verify bucket was assigned + expect(spy_logger).to have_received(:log).with( + Logger::DEBUG, + "Assigned bucket 5000 to user '#{test_user_id}' with bucketing ID: '#{test_bucketing_id}'." + ) + end + + it 'should return nil when holdout has no variations' do + holdout = config.get_holdout('holdout_empty_1') + expect(holdout).not_to be_nil + expect(holdout['variations']&.length || 0).to eq(0) + + test_bucketer.bucket_values([5000]) + + variation, _reasons = test_bucketer.bucket(config, holdout, test_bucketing_id, test_user_id) + + expect(variation).to be_nil + + # Verify bucket was assigned + expect(spy_logger).to have_received(:log).with( + Logger::DEBUG, + "Assigned bucket 5000 to user '#{test_user_id}' with bucketing ID: '#{test_bucketing_id}'." 
+ ) + end + + it 'should return nil when holdout has empty key' do + holdout = config.get_holdout('holdout_1') + expect(holdout).not_to be_nil + + # Clear holdout key + modified_holdout = OptimizelySpec.deep_clone(holdout) + modified_holdout['key'] = '' + + test_bucketer.bucket_values([5000]) + + variation, _reasons = test_bucketer.bucket(config, modified_holdout, test_bucketing_id, test_user_id) + + # Should return nil for invalid experiment key + expect(variation).to be_nil + end + + it 'should return nil when holdout has null key' do + holdout = config.get_holdout('holdout_1') + expect(holdout).not_to be_nil + + # Set holdout key to nil + modified_holdout = OptimizelySpec.deep_clone(holdout) + modified_holdout['key'] = nil + + test_bucketer.bucket_values([5000]) + + variation, _reasons = test_bucketer.bucket(config, modified_holdout, test_bucketing_id, test_user_id) + + # Should return nil for null experiment key + expect(variation).to be_nil + end + + it 'should bucket user into first variation with multiple variations' do + holdout = config.get_holdout('holdout_1') + expect(holdout).not_to be_nil + + # Verify holdout has multiple variations + expect(holdout['variations'].length).to be >= 2 + + # Test user buckets into first variation + test_bucketer.bucket_values([2500]) + variation, _reasons = test_bucketer.bucket(config, holdout, test_bucketing_id, test_user_id) + + expect(variation).not_to be_nil + expect(variation['id']).to eq('var_1') + expect(variation['key']).to eq('control') + end + + it 'should bucket user into second variation with multiple variations' do + holdout = config.get_holdout('holdout_1') + expect(holdout).not_to be_nil + + # Verify holdout has multiple variations + expect(holdout['variations'].length).to be >= 2 + expect(holdout['variations'][0]['id']).to eq('var_1') + expect(holdout['variations'][1]['id']).to eq('var_2') + + # Test user buckets into second variation (bucket value 7500 should be in 5000-10000 range) + 
test_bucketer.bucket_values([7500]) + variation, _reasons = test_bucketer.bucket(config, holdout, test_bucketing_id, test_user_id) + + expect(variation).not_to be_nil + expect(variation['id']).to eq('var_2') + expect(variation['key']).to eq('treatment') + end + + it 'should handle edge case boundary values correctly' do + holdout = config.get_holdout('holdout_1') + expect(holdout).not_to be_nil + + # Modify traffic allocation to be 5000 (50%) + modified_holdout = OptimizelySpec.deep_clone(holdout) + modified_holdout['trafficAllocation'] = [ + { + 'entityId' => 'var_1', + 'endOfRange' => 5000 + } + ] + + # Test exact boundary value (should be included) + test_bucketer.bucket_values([4999]) + variation, _reasons = test_bucketer.bucket(config, modified_holdout, test_bucketing_id, test_user_id) + + expect(variation).not_to be_nil + expect(variation['id']).to eq('var_1') + + # Test value just outside boundary (should not be included) + test_bucketer.bucket_values([5000]) + variation, _reasons = test_bucketer.bucket(config, modified_holdout, test_bucketing_id, test_user_id) + + expect(variation).to be_nil + end + + it 'should produce consistent results with same inputs' do + holdout = config.get_holdout('holdout_1') + expect(holdout).not_to be_nil + + # Create a real bucketer (not test bucketer) for consistent hashing + real_bucketer = Optimizely::Bucketer.new(spy_logger) + variation1, _reasons1 = real_bucketer.bucket(config, holdout, test_bucketing_id, test_user_id) + variation2, _reasons2 = real_bucketer.bucket(config, holdout, test_bucketing_id, test_user_id) + + # Results should be identical + if variation1 + expect(variation2).not_to be_nil + expect(variation1['id']).to eq(variation2['id']) + expect(variation1['key']).to eq(variation2['key']) + else + expect(variation2).to be_nil + end + end + + it 'should handle different bucketing IDs without exceptions' do + holdout = config.get_holdout('holdout_1') + expect(holdout).not_to be_nil + + # Create a real bucketer 
(not test bucketer) for real hashing behavior + real_bucketer = Optimizely::Bucketer.new(spy_logger) + + # These calls should not raise exceptions + expect do + real_bucketer.bucket(config, holdout, 'bucketingId1', test_user_id) + real_bucketer.bucket(config, holdout, 'bucketingId2', test_user_id) + end.not_to raise_error + end + + it 'should populate decision reasons properly' do + holdout = config.get_holdout('holdout_1') + expect(holdout).not_to be_nil + + test_bucketer.bucket_values([5000]) + _variation, reasons = test_bucketer.bucket(config, holdout, test_bucketing_id, test_user_id) + + expect(reasons).not_to be_nil + # Decision reasons should be populated from the bucketing process + # The exact content depends on whether the user was bucketed or not + end + end +end diff --git a/spec/cmab/cmab_client_spec.rb b/spec/cmab/cmab_client_spec.rb new file mode 100644 index 00000000..ae348fab --- /dev/null +++ b/spec/cmab/cmab_client_spec.rb @@ -0,0 +1,248 @@ +# frozen_string_literal: true + +# +# Copyright 2025 Optimizely and contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +require 'spec_helper' +require 'optimizely/logger' +require 'optimizely/cmab/cmab_client' +require 'webmock/rspec' + +describe Optimizely::DefaultCmabClient do + let(:spy_logger) { spy('logger') } + let(:retry_config) { Optimizely::CmabRetryConfig.new(max_retries: 3, initial_backoff: 0.01, max_backoff: 1, backoff_multiplier: 2) } + let(:rule_id) { 'test_rule' } + let(:user_id) { 'user123' } + let(:attributes) { {'attr1': 'value1', 'attr2': 'value2'} } + let(:cmab_uuid) { 'uuid-1234' } + let(:expected_url) { "https://prediction.cmab.optimizely.com/predict/#{rule_id}" } + let(:expected_body_for_webmock) do + { + instances: [{ + visitorId: user_id, + experimentId: rule_id, + attributes: [ + {'id' => 'attr1', 'value' => 'value1', 'type' => 'custom_attribute'}, + {'id' => 'attr2', 'value' => 'value2', 'type' => 'custom_attribute'} + ], + cmabUUID: cmab_uuid + }] + }.to_json + end + let(:expected_headers) { {'Content-Type' => 'application/json'} } + + before do + allow(Kernel).to receive(:sleep) + WebMock.disable_net_connect! + end + + after do + RSpec::Mocks.space.proxy_for(spy_logger).reset + WebMock.reset! + WebMock.allow_net_connect! 
+ end + + context 'when client is configured without retries' do + let(:client) { described_class.new(http_client: nil, retry_config: Optimizely::CmabRetryConfig.new(max_retries: 0), logger: spy_logger) } + + it 'should return the variation id on success' do + WebMock.stub_request(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers) + .to_return(status: 200, body: {'predictions' => [{'variation_id' => 'abc123'}]}.to_json, headers: {'Content-Type' => 'application/json'}) + + result = client.fetch_decision(rule_id, user_id, attributes, cmab_uuid) + + expect(result).to eq('abc123') + expect(WebMock).to have_requested(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers).once + expect(Kernel).not_to have_received(:sleep) + end + + it 'should raise error on http client exception' do + WebMock.stub_request(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers) + .to_raise(StandardError.new('Connection error')) + + expect do + client.fetch_decision(rule_id, user_id, attributes, cmab_uuid) + end.to raise_error(Optimizely::CmabFetchError, /Connection error/) + + expect(WebMock).to have_requested(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers).once + expect(spy_logger).to have_received(:log).with(Logger::ERROR, a_string_including('Connection error')) + expect(Kernel).not_to have_received(:sleep) + end + + it 'should raise error on non success status' do + WebMock.stub_request(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers) + .to_return(status: 500) + + expect do + client.fetch_decision(rule_id, user_id, attributes, cmab_uuid) + end.to raise_error(Optimizely::CmabFetchError, /500/) + + expect(WebMock).to have_requested(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers).once + expect(spy_logger).to have_received(:log).with(Logger::ERROR, 
a_string_including('500')) + expect(Kernel).not_to have_received(:sleep) + end + + it 'should raise error on invalid json response' do + WebMock.stub_request(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers) + .to_return(status: 200, body: 'this is not json', headers: {'Content-Type' => 'text/plain'}) + + expect do + client.fetch_decision(rule_id, user_id, attributes, cmab_uuid) + end.to raise_error(Optimizely::CmabInvalidResponseError, /Invalid CMAB fetch response/) + + expect(WebMock).to have_requested(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers).once + expect(spy_logger).to have_received(:log).with(Logger::ERROR, a_string_including('Invalid CMAB fetch response')) + expect(Kernel).not_to have_received(:sleep) + end + + it 'should raise error on invalid response structure' do + WebMock.stub_request(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers) + .to_return(status: 200, body: {'no_predictions' => []}.to_json, headers: {'Content-Type' => 'application/json'}) + + expect do + client.fetch_decision(rule_id, user_id, attributes, cmab_uuid) + end.to raise_error(Optimizely::CmabInvalidResponseError, /Invalid CMAB fetch response/) + + expect(WebMock).to have_requested(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers).once + expect(spy_logger).to have_received(:log).with(Logger::ERROR, a_string_including('Invalid CMAB fetch response')) + expect(Kernel).not_to have_received(:sleep) + end + end + + context 'when client is configured with retries' do + let(:client_with_retry) { described_class.new(http_client: nil, retry_config: retry_config, logger: spy_logger) } + + it 'should return the variation id on first try' do + WebMock.stub_request(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers) + .to_return(status: 200, body: {'predictions' => [{'variation_id' => 
'abc123'}]}.to_json, headers: {'Content-Type' => 'application/json'}) + + result = client_with_retry.fetch_decision(rule_id, user_id, attributes, cmab_uuid) + + expect(result).to eq('abc123') + expect(WebMock).to have_requested(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers).once + expect(Kernel).not_to have_received(:sleep) + end + + it 'should return the variation id on third try' do + WebMock.stub_request(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers) + .to_return({status: 500}, + {status: 500}, + {status: 200, body: {'predictions' => [{'variation_id' => 'xyz456'}]}.to_json, headers: {'Content-Type' => 'application/json'}}) + + result = client_with_retry.fetch_decision(rule_id, user_id, attributes, cmab_uuid) + + expect(result).to eq('xyz456') + expect(WebMock).to have_requested(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers).times(3) + + expect(spy_logger).to have_received(:log).with(Logger::INFO, 'Retrying CMAB request (attempt 1) after 0.01 seconds...').once + expect(spy_logger).to have_received(:log).with(Logger::INFO, 'Retrying CMAB request (attempt 2) after 0.02 seconds...').once + expect(spy_logger).not_to have_received(:log).with(Logger::INFO, a_string_including('Retrying CMAB request (attempt 3)')) + + expect(Kernel).to have_received(:sleep).with(0.01).once + expect(Kernel).to have_received(:sleep).with(0.02).once + expect(Kernel).not_to have_received(:sleep).with(0.04) + end + + it 'should exhaust all retry attempts' do + WebMock.stub_request(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers) + .to_return({status: 500}, + {status: 500}, + {status: 500}, + {status: 500}) + + expect do + client_with_retry.fetch_decision(rule_id, user_id, attributes, cmab_uuid) + end.to raise_error(Optimizely::CmabFetchError) + + expect(WebMock).to have_requested(:post, expected_url) + .with(body: 
expected_body_for_webmock, headers: expected_headers).times(4) + + expect(spy_logger).to have_received(:log).with(Logger::INFO, 'Retrying CMAB request (attempt 1) after 0.01 seconds...').once + expect(spy_logger).to have_received(:log).with(Logger::INFO, 'Retrying CMAB request (attempt 2) after 0.02 seconds...').once + expect(spy_logger).to have_received(:log).with(Logger::INFO, 'Retrying CMAB request (attempt 3) after 0.04 seconds...').once + + expect(Kernel).to have_received(:sleep).with(0.01).once + expect(Kernel).to have_received(:sleep).with(0.02).once + expect(Kernel).to have_received(:sleep).with(0.04).once + + expect(spy_logger).to have_received(:log).with(Logger::ERROR, a_string_including('Max retries exceeded for CMAB request')) + end + end + + context 'when custom prediction endpoint is configured' do + let(:custom_endpoint) { 'https://custom.endpoint.com/predict/%s' } + let(:custom_url) { 'https://custom.endpoint.com/predict/test_rule' } + let(:client_with_custom_endpoint) { described_class.new(http_client: nil, retry_config: Optimizely::CmabRetryConfig.new(max_retries: 0), logger: spy_logger, prediction_endpoint: custom_endpoint) } + + it 'should use the custom prediction endpoint' do + WebMock.stub_request(:post, custom_url) + .with(body: expected_body_for_webmock, headers: expected_headers) + .to_return(status: 200, body: {'predictions' => [{'variation_id' => 'custom123'}]}.to_json, headers: {'Content-Type' => 'application/json'}) + + result = client_with_custom_endpoint.fetch_decision(rule_id, user_id, attributes, cmab_uuid) + + expect(result).to eq('custom123') + expect(WebMock).to have_requested(:post, custom_url) + .with(body: expected_body_for_webmock, headers: expected_headers).once + end + end + + context 'when no prediction endpoint is provided' do + let(:client_with_default) { described_class.new(http_client: nil, retry_config: Optimizely::CmabRetryConfig.new(max_retries: 0), logger: spy_logger, prediction_endpoint: nil) } + + it 'should use 
the default prediction endpoint' do + WebMock.stub_request(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers) + .to_return(status: 200, body: {'predictions' => [{'variation_id' => 'default123'}]}.to_json, headers: {'Content-Type' => 'application/json'}) + + result = client_with_default.fetch_decision(rule_id, user_id, attributes, cmab_uuid) + + expect(result).to eq('default123') + expect(WebMock).to have_requested(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers).once + end + end + + context 'when empty string prediction endpoint is provided' do + let(:client_with_empty_endpoint) { described_class.new(http_client: nil, retry_config: Optimizely::CmabRetryConfig.new(max_retries: 0), logger: spy_logger, prediction_endpoint: '') } + + it 'should fall back to the default prediction endpoint' do + WebMock.stub_request(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers) + .to_return(status: 200, body: {'predictions' => [{'variation_id' => 'fallback123'}]}.to_json, headers: {'Content-Type' => 'application/json'}) + + result = client_with_empty_endpoint.fetch_decision(rule_id, user_id, attributes, cmab_uuid) + + expect(result).to eq('fallback123') + expect(WebMock).to have_requested(:post, expected_url) + .with(body: expected_body_for_webmock, headers: expected_headers).once + end + end +end diff --git a/spec/cmab/cmab_service_spec.rb b/spec/cmab/cmab_service_spec.rb new file mode 100644 index 00000000..b4b40086 --- /dev/null +++ b/spec/cmab/cmab_service_spec.rb @@ -0,0 +1,281 @@ +# frozen_string_literal: true + +require 'spec_helper' +require 'optimizely/cmab/cmab_service' +require 'optimizely/odp/lru_cache' +require 'optimizely/cmab/cmab_client' +require 'optimizely/decide/optimizely_decide_option' +require 'optimizely/logger' + +describe Optimizely::DefaultCmabService do + let(:mock_cmab_cache) { instance_double(Optimizely::LRUCache) } + 
let(:mock_cmab_client) { instance_double(Optimizely::DefaultCmabClient) } + let(:mock_logger) { Optimizely::NoOpLogger.new } + let(:cmab_service) { described_class.new(mock_cmab_cache, mock_cmab_client, mock_logger) } + + let(:mock_project_config) { double('project_config') } + let(:mock_user_context) { double('user_context') } + let(:user_id) { 'user123' } + let(:rule_id) { 'exp1' } + let(:user_attributes) { {'age' => 25, 'location' => 'USA'} } + + let(:mock_experiment) { {'cmab' => {'attributeIds' => %w[66 77]}} } + let(:mock_attr1) { {'key' => 'age'} } + let(:mock_attr2) { {'key' => 'location'} } + + before do + allow(mock_user_context).to receive(:user_id).and_return(user_id) + allow(mock_user_context).to receive(:user_attributes).and_return(user_attributes) + + allow(mock_project_config).to receive(:experiment_id_map).and_return({rule_id => mock_experiment}) + allow(mock_project_config).to receive(:attribute_id_map).and_return({ + '66' => mock_attr1, + '77' => mock_attr2 + }) + end + + describe '#get_decision' do + it 'returns decision from cache when valid' do + expected_key = cmab_service.send(:get_cache_key, user_id, rule_id) + expected_attributes = {'age' => 25, 'location' => 'USA'} + expected_hash = cmab_service.send(:hash_attributes, expected_attributes) + + cached_value = Optimizely::CmabCacheValue.new( + attributes_hash: expected_hash, + variation_id: 'varA', + cmab_uuid: 'uuid-123' + ) + + allow(mock_cmab_cache).to receive(:lookup).with(expected_key).and_return(cached_value) + + decision, reasons = cmab_service.get_decision(mock_project_config, mock_user_context, rule_id, []) + + expect(mock_cmab_cache).to have_received(:lookup).with(expected_key) + expect(decision.variation_id).to eq('varA') + expect(decision.cmab_uuid).to eq('uuid-123') + expect(reasons).to include(match(/CMAB cache hit for user '#{user_id}' and rule '#{rule_id}'/)) + end + + it 'ignores cache when option given' do + allow(mock_cmab_client).to 
receive(:fetch_decision).and_return('varB') + expected_attributes = {'age' => 25, 'location' => 'USA'} + + decision, reasons = cmab_service.get_decision( + mock_project_config, + mock_user_context, + rule_id, + [Optimizely::Decide::OptimizelyDecideOption::IGNORE_CMAB_CACHE] + ) + + expect(decision.variation_id).to eq('varB') + expect(decision.cmab_uuid).to be_a(String) + expect(mock_cmab_client).to have_received(:fetch_decision).with( + rule_id, + user_id, + expected_attributes, + decision.cmab_uuid + ) + expect(reasons).to include(match(/Ignoring CMAB cache for user '#{user_id}' and rule '#{rule_id}'/)) + end + + it 'invalidates user cache when option given' do + allow(mock_cmab_client).to receive(:fetch_decision).and_return('varC') + allow(mock_cmab_cache).to receive(:lookup).and_return(nil) + allow(mock_cmab_cache).to receive(:remove) + allow(mock_cmab_cache).to receive(:save) + + cmab_service.get_decision( + mock_project_config, + mock_user_context, + rule_id, + [Optimizely::Decide::OptimizelyDecideOption::INVALIDATE_USER_CMAB_CACHE] + ) + + key = cmab_service.send(:get_cache_key, user_id, rule_id) + expect(mock_cmab_cache).to have_received(:remove).with(key) + end + + it 'resets cache when option given' do + allow(mock_cmab_client).to receive(:fetch_decision).and_return('varD') + allow(mock_cmab_cache).to receive(:reset) + allow(mock_cmab_cache).to receive(:lookup).and_return(nil) + allow(mock_cmab_cache).to receive(:save) + + decision, reasons = cmab_service.get_decision( + mock_project_config, + mock_user_context, + rule_id, + [Optimizely::Decide::OptimizelyDecideOption::RESET_CMAB_CACHE] + ) + + expect(mock_cmab_cache).to have_received(:reset) + expect(decision.variation_id).to eq('varD') + expect(decision.cmab_uuid).to be_a(String) + expect(reasons).to include(match(/Resetting CMAB cache for user '#{user_id}' and rule '#{rule_id}'/)) + end + + it 'fetches new decision when hash changes' do + old_cached_value = Optimizely::CmabCacheValue.new( + 
attributes_hash: 'old_hash', + variation_id: 'varA', + cmab_uuid: 'uuid-123' + ) + + allow(mock_cmab_cache).to receive(:lookup).and_return(old_cached_value) + allow(mock_cmab_cache).to receive(:remove) + allow(mock_cmab_cache).to receive(:save) + allow(mock_cmab_client).to receive(:fetch_decision).and_return('varE') + + expected_attributes = {'age' => 25, 'location' => 'USA'} + cmab_service.send(:hash_attributes, expected_attributes) + expected_key = cmab_service.send(:get_cache_key, user_id, rule_id) + + decision, reasons = cmab_service.get_decision(mock_project_config, mock_user_context, rule_id, []) + + expect(mock_cmab_cache).to have_received(:remove).with(expected_key) + expect(mock_cmab_cache).to have_received(:save).with( + expected_key, + an_instance_of(Optimizely::CmabCacheValue) + ) + expect(decision.variation_id).to eq('varE') + expect(mock_cmab_client).to have_received(:fetch_decision).with( + rule_id, + user_id, + expected_attributes, + decision.cmab_uuid + ) + expect(reasons).to include(match(/CMAB cache attributes mismatch for user '#{user_id}' and rule '#{rule_id}', fetching new decision./)) + end + + it 'only passes cmab attributes to client' do + allow(mock_user_context).to receive(:user_attributes).and_return({ + 'age' => 25, + 'location' => 'USA', + 'extra_attr' => 'value', + 'another_extra' => 123 + }) + allow(mock_cmab_client).to receive(:fetch_decision).and_return('varF') + + decision, reasons = cmab_service.get_decision( + mock_project_config, + mock_user_context, + rule_id, + [Optimizely::Decide::OptimizelyDecideOption::IGNORE_CMAB_CACHE] + ) + + # Verify only age and location are passed + expect(mock_cmab_client).to have_received(:fetch_decision).with( + rule_id, + user_id, + {'age' => 25, 'location' => 'USA'}, + decision.cmab_uuid + ) + expect(reasons).to include(match(/Ignoring CMAB cache for user '#{user_id}' and rule '#{rule_id}'/)) + end + end + + describe '#filter_attributes' do + it 'returns correct subset of attributes' do + 
filtered = cmab_service.send(:filter_attributes, mock_project_config, mock_user_context, rule_id) + + expect(filtered['age']).to eq(25) + expect(filtered['location']).to eq('USA') + end + + it 'returns empty hash when no cmab config' do + allow(mock_project_config).to receive(:experiment_id_map).and_return({rule_id => {'cmab' => nil}}) + + filtered = cmab_service.send(:filter_attributes, mock_project_config, mock_user_context, rule_id) + + expect(filtered).to eq({}) + end + + it 'returns empty hash when experiment not found' do + allow(mock_project_config).to receive(:experiment_id_map).and_return({}) + + filtered = cmab_service.send(:filter_attributes, mock_project_config, mock_user_context, rule_id) + + expect(filtered).to eq({}) + end + end + + describe '#hash_attributes' do + it 'produces stable output regardless of key order' do + attrs1 = {'b' => 2, 'a' => 1} + attrs2 = {'a' => 1, 'b' => 2} + + hash1 = cmab_service.send(:hash_attributes, attrs1) + hash2 = cmab_service.send(:hash_attributes, attrs2) + + expect(hash1).to eq(hash2) + end + end + + describe '#get_cache_key' do + it 'generates correct cache key format' do + key = cmab_service.send(:get_cache_key, 'user123', 'exp1') + + expect(key).to eq('7-user123-exp1') + end + end + + describe '#fetch_decision' do + it 'generates uuid and calls client' do + allow(mock_cmab_client).to receive(:fetch_decision).and_return('varX') + attributes = {'age' => 25} + + decision = cmab_service.send(:fetch_decision, rule_id, user_id, attributes) + + expect(decision.variation_id).to eq('varX') + expect(decision.cmab_uuid).to match(/\A[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}\z/) + expect(mock_cmab_client).to have_received(:fetch_decision).with( + rule_id, + user_id, + attributes, + decision.cmab_uuid + ) + end + end + + describe 'lock striping behavior' do + describe '#get_lock_index' do + it 'returns consistent lock index for same user/rule combination' do + user_id = 'test_user' + rule_id = 'test_rule' + 
+ # Get lock index multiple times + index1 = cmab_service.send(:get_lock_index, user_id, rule_id) + index2 = cmab_service.send(:get_lock_index, user_id, rule_id) + index3 = cmab_service.send(:get_lock_index, user_id, rule_id) + + # All should be the same + expect(index1).to eq(index2), 'Same user/rule should always use same lock' + expect(index2).to eq(index3), 'Same user/rule should always use same lock' + end + + it 'distributes different user/rule combinations across multiple locks' do + test_cases = [ + %w[user1 rule1], + %w[user2 rule1], + %w[user1 rule2], + %w[user3 rule3], + %w[user4 rule4] + ] + + lock_indices = Set.new + test_cases.each do |user_id, rule_id| + index = cmab_service.send(:get_lock_index, user_id, rule_id) + + # Verify index is within expected range + expect(index).to be >= 0, 'Lock index should be non-negative' + expect(index).to be < Optimizely::DefaultCmabService::NUM_LOCK_STRIPES, 'Lock index should be less than NUM_LOCK_STRIPES' + + lock_indices.add(index) + end + + # We should have multiple different lock indices (though not necessarily all unique due to hash collisions) + expect(lock_indices.size).to be > 1, 'Different user/rule combinations should generally use different locks' + end + end + end +end diff --git a/spec/condition_tree_evaluator_spec.rb b/spec/condition_tree_evaluator_spec.rb index 68e99844..28dda143 100644 --- a/spec/condition_tree_evaluator_spec.rb +++ b/spec/condition_tree_evaluator_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2019, Optimizely and contributors +# Copyright 2019, 2023, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -27,19 +27,19 @@ describe 'evaluate' do it 'should return true for a leaf condition when the leaf condition evaluator returns true' do - leaf_callback = ->(_condition) { return true } + leaf_callback = ->(_condition) { true } expect(Optimizely::ConditionTreeEvaluator.evaluate(@browser_condition, leaf_callback)).to be true end it 'should return false for a leaf condition when the leaf condition evaluator returns false' do - leaf_callback = ->(_condition) { return false } + leaf_callback = ->(_condition) { false } expect(Optimizely::ConditionTreeEvaluator.evaluate(@browser_condition, leaf_callback)).to be false end end describe 'and evaluation' do it 'should return true when ALL conditions evaluate to true' do - leaf_callback = ->(_condition) { return true } + leaf_callback = ->(_condition) { true } expect(Optimizely::ConditionTreeEvaluator.evaluate(['and', @browser_condition, @device_condition], leaf_callback)).to be true end @@ -51,7 +51,7 @@ describe 'nil handling' do it 'should return nil when all operands evaluate to nil' do - leaf_callback = ->(_condition) { return nil } + leaf_callback = ->(_condition) { nil } expect(Optimizely::ConditionTreeEvaluator.evaluate(['and', @browser_condition, @device_condition], leaf_callback)).to eq(nil) end @@ -83,7 +83,7 @@ describe 'or evaluation' do it 'should return false if all conditions evaluate to false' do - leaf_callback = ->(_condition) { return false } + leaf_callback = ->(_condition) { false } expect(Optimizely::ConditionTreeEvaluator.evaluate(['or', @browser_condition, @device_condition], leaf_callback)).to be false end @@ -95,7 +95,7 @@ describe 'nil handling' do it 'should return nil when all operands evaluate to nil' do - leaf_callback = ->(_condition) { return nil } + leaf_callback = ->(_condition) { nil } expect(Optimizely::ConditionTreeEvaluator.evaluate(['or', @browser_condition, @device_condition], leaf_callback)).to eq(nil) end @@ -127,34 +127,34 @@ describe 'not evaluation' do it 'should return true if 
the condition evaluates to false' do - leaf_callback = ->(_condition) { return false } + leaf_callback = ->(_condition) { false } expect(Optimizely::ConditionTreeEvaluator.evaluate(['not', @browser_condition], leaf_callback)).to be true end it 'should return false if the condition evaluates to true' do - leaf_callback = ->(_condition) { return true } + leaf_callback = ->(_condition) { true } expect(Optimizely::ConditionTreeEvaluator.evaluate(['not', @browser_condition], leaf_callback)).to be false end it 'should return the result of negating the first condition, and ignore any additional conditions' do - leaf_callback = ->(id) { return id == '1' } + leaf_callback = ->(id) { id == '1' } expect(Optimizely::ConditionTreeEvaluator.evaluate(%w[not 1 2 1], leaf_callback)).to be false - leaf_callback2 = ->(id) { return id == '2' } + leaf_callback2 = ->(id) { id == '2' } expect(Optimizely::ConditionTreeEvaluator.evaluate(%w[not 1 2 1], leaf_callback2)).to be true - leaf_callback3 = ->(id) { return id == '1' ? nil : id == '3' } + leaf_callback3 = ->(id) { id == '1' ? 
nil : id == '3' } expect(Optimizely::ConditionTreeEvaluator.evaluate(%w[not 1 2 3], leaf_callback3)).to eq(nil) end describe 'nil handling' do it 'should return nil when operand evaluates to nil' do - leaf_callback = ->(_condition) { return nil } + leaf_callback = ->(_condition) { nil } expect(Optimizely::ConditionTreeEvaluator.evaluate(['not', @browser_condition, @device_condition], leaf_callback)).to eq(nil) end it 'should return nil when there are no operands' do - leaf_callback = ->(_condition) { return nil } + leaf_callback = ->(_condition) { nil } expect(Optimizely::ConditionTreeEvaluator.evaluate(['not'], leaf_callback)).to eq(nil) end end @@ -166,7 +166,7 @@ allow(leaf_callback).to receive(:call).and_return(true, false) expect(Optimizely::ConditionTreeEvaluator.evaluate([@browser_condition, @device_condition], leaf_callback)).to be true - leaf_callback = ->(_condition) { return false } + leaf_callback = ->(_condition) { false } allow(leaf_callback).to receive(:call).and_return(false, true) expect(Optimizely::ConditionTreeEvaluator.evaluate([@browser_condition, @device_condition], leaf_callback)).to be true end diff --git a/spec/config/datafile_project_config_spec.rb b/spec/config/datafile_project_config_spec.rb index 3cf2bd31..ea47fe6f 100644 --- a/spec/config/datafile_project_config_spec.rb +++ b/spec/config/datafile_project_config_spec.rb @@ -57,6 +57,7 @@ expect(project_config.sdk_key).to eq(config_body['sdkKey']) expect(project_config.environment_key).to eq(config_body['environmentKey']) expect(project_config.send_flag_decisions).to eq(config_body['sendFlagDecisions']) + expect(project_config.region).to eq(config_body['region']) expected_attribute_key_map = { 'browser_type' => config_body['attributes'][0], @@ -756,6 +757,23 @@ expect(project_config.rollout_experiment_id_map).to eq(expected_rollout_experiment_id_map) end + it 'should use US region when no region is specified in datafile' do + project_config = 
Optimizely::DatafileProjectConfig.new(config_body_JSON, logger, error_handler) + expect(project_config.region).to eq('US') + end + + it 'should parse region specified in datafile correctly' do + project_config_us = Optimizely::DatafileProjectConfig.new(config_body_JSON, logger, error_handler) + expect(project_config_us.region).to eq('US') + + config_body_eu = config_body.dup + config_body_eu['region'] = 'EU' + config_body_json = JSON.dump(config_body_eu) + project_config_eu = Optimizely::DatafileProjectConfig.new(config_body_json, logger, error_handler) + + expect(project_config_eu.region).to eq('EU') + end + it 'should initialize properties correctly upon creating project with typed audience dict' do project_config = Optimizely::DatafileProjectConfig.new(JSON.dump(OptimizelySpec::CONFIG_DICT_WITH_TYPED_AUDIENCES), logger, error_handler) config_body = OptimizelySpec::CONFIG_DICT_WITH_TYPED_AUDIENCES @@ -837,14 +855,14 @@ describe 'get_event_from_key' do it 'should log a message when provided event key is invalid' do config.get_event_from_key('invalid_key') - expect(spy_logger).to have_received(:log).with(Logger::ERROR, "Event 'invalid_key' is not in datafile.") + expect(spy_logger).to have_received(:log).with(Logger::ERROR, "Event key 'invalid_key' is not in datafile.") end end describe 'get_audience_from_id' do it 'should log a message when provided audience ID is invalid' do config.get_audience_from_id('invalid_id') - expect(spy_logger).to have_received(:log).with(Logger::ERROR, "Audience 'invalid_id' is not in datafile.") + expect(spy_logger).to have_received(:log).with(Logger::ERROR, "Audience id 'invalid_id' is not in datafile.") end end @@ -948,7 +966,7 @@ it 'should log a message when there is no experiment key map for the experiment' do config.get_whitelisted_variations('invalid_key') expect(spy_logger).to have_received(:log).with(Logger::ERROR, - "Experiment ID 'invalid_key' is not in datafile.") + "Experiment id 'invalid_key' is not in datafile.") end end 
@@ -1078,6 +1096,67 @@ end end + describe '#test_cmab_field_population' do + it 'Should return CMAB details' do + config_dict = Marshal.load(Marshal.dump(OptimizelySpec::VALID_CONFIG_BODY)) + config_dict['experiments'][0]['cmab'] = {'attributeIds' => %w[808797688 808797689], 'trafficAllocation' => 4000} + config_dict['experiments'][0]['trafficAllocation'] = [] + + config_json = JSON.dump(config_dict) + project_config = Optimizely::DatafileProjectConfig.new(config_json, logger, error_handler) + + experiment = project_config.get_experiment_from_key('test_experiment') + expect(experiment['cmab']).to eq({'attributeIds' => %w[808797688 808797689], 'trafficAllocation' => 4000}) + + experiment2 = project_config.get_experiment_from_key('test_experiment_with_audience') + expect(experiment2['cmab']).to eq(nil) + end + it 'should return nil if cmab field is missing' do + config_dict = Marshal.load(Marshal.dump(OptimizelySpec::VALID_CONFIG_BODY)) + config_dict['experiments'][0].delete('cmab') + config_json = JSON.dump(config_dict) + project_config = Optimizely::DatafileProjectConfig.new(config_json, logger, error_handler) + experiment = project_config.get_experiment_from_key('test_experiment') + expect(experiment['cmab']).to eq(nil) + end + + it 'should handle empty cmab object' do + config_dict = Marshal.load(Marshal.dump(OptimizelySpec::VALID_CONFIG_BODY)) + config_dict['experiments'][0]['cmab'] = {} + config_json = JSON.dump(config_dict) + project_config = Optimizely::DatafileProjectConfig.new(config_json, logger, error_handler) + experiment = project_config.get_experiment_from_key('test_experiment') + expect(experiment['cmab']).to eq({}) + end + + it 'should handle cmab with only attributeIds' do + config_dict = Marshal.load(Marshal.dump(OptimizelySpec::VALID_CONFIG_BODY)) + config_dict['experiments'][0]['cmab'] = {'attributeIds' => %w[808797688]} + config_json = JSON.dump(config_dict) + project_config = Optimizely::DatafileProjectConfig.new(config_json, logger, 
error_handler) + experiment = project_config.get_experiment_from_key('test_experiment') + expect(experiment['cmab']).to eq({'attributeIds' => %w[808797688]}) + end + + it 'should handle cmab with only trafficAllocation' do + config_dict = Marshal.load(Marshal.dump(OptimizelySpec::VALID_CONFIG_BODY)) + config_dict['experiments'][0]['cmab'] = {'trafficAllocation' => 1234} + config_json = JSON.dump(config_dict) + project_config = Optimizely::DatafileProjectConfig.new(config_json, logger, error_handler) + experiment = project_config.get_experiment_from_key('test_experiment') + expect(experiment['cmab']).to eq({'trafficAllocation' => 1234}) + end + + it 'should not affect other experiments when cmab is set' do + config_dict = Marshal.load(Marshal.dump(OptimizelySpec::VALID_CONFIG_BODY)) + config_dict['experiments'][0]['cmab'] = {'attributeIds' => %w[808797688 808797689], 'trafficAllocation' => 4000} + config_json = JSON.dump(config_dict) + project_config = Optimizely::DatafileProjectConfig.new(config_json, logger, error_handler) + experiment2 = project_config.get_experiment_from_key('test_experiment_with_audience') + expect(experiment2['cmab']).to eq(nil) + end + end + describe '#feature_experiment' do let(:config) { Optimizely::DatafileProjectConfig.new(config_body_JSON, logger, error_handler) } @@ -1155,4 +1234,571 @@ expect(config.send(:generate_feature_variation_map, config.feature_flags)).to eq(expected_feature_variation_map) end end + + describe '#get_holdouts_for_flag' do + let(:config_with_holdouts) do + Optimizely::DatafileProjectConfig.new( + OptimizelySpec::CONFIG_BODY_WITH_HOLDOUTS_JSON, + logger, + error_handler + ) + end + + it 'should return empty array for non-existent flag' do + holdouts = config_with_holdouts.get_holdouts_for_flag('non_existent_flag') + expect(holdouts).to eq([]) + end + + it 'should return global holdouts that do not exclude the flag' do + multi_variate_feature_id = '155559' + holdouts = 
config_with_holdouts.get_holdouts_for_flag(multi_variate_feature_id) + expect(holdouts.length).to eq(3) + + global_holdout = holdouts.find { |h| h['key'] == 'global_holdout' } + expect(global_holdout).not_to be_nil + expect(global_holdout['id']).to eq('holdout_1') + + specific_holdout = holdouts.find { |h| h['key'] == 'specific_holdout' } + expect(specific_holdout).not_to be_nil + expect(specific_holdout['id']).to eq('holdout_2') + end + + it 'should not return global holdouts that exclude the flag' do + boolean_feature_id = '155554' + holdouts = config_with_holdouts.get_holdouts_for_flag(boolean_feature_id) + expect(holdouts.length).to eq(1) + + global_holdout = holdouts.find { |h| h['key'] == 'global_holdout' } + expect(global_holdout).to be_nil + end + + it 'should cache results for subsequent calls' do + multi_variate_feature_id = '155559' + holdouts1 = config_with_holdouts.get_holdouts_for_flag(multi_variate_feature_id) + holdouts2 = config_with_holdouts.get_holdouts_for_flag(multi_variate_feature_id) + expect(holdouts1).to equal(holdouts2) + expect(holdouts1.length).to eq(3) + end + + it 'should return only global holdouts for flags not specifically targeted' do + string_feature_id = '155557' + holdouts = config_with_holdouts.get_holdouts_for_flag(string_feature_id) + + # Should only include global holdout (not excluded and no specific targeting) + expect(holdouts.length).to eq(2) + expect(holdouts.first['key']).to eq('global_holdout') + end + end + + describe '#get_holdout' do + let(:config_with_holdouts) do + Optimizely::DatafileProjectConfig.new( + OptimizelySpec::CONFIG_BODY_WITH_HOLDOUTS_JSON, + logger, + error_handler + ) + end + + it 'should return holdout when valid ID is provided' do + holdout = config_with_holdouts.get_holdout('holdout_1') + expect(holdout).not_to be_nil + expect(holdout['id']).to eq('holdout_1') + expect(holdout['key']).to eq('global_holdout') + expect(holdout['status']).to eq('Running') + end + + it 'should return holdout 
regardless of status when valid ID is provided' do + holdout = config_with_holdouts.get_holdout('holdout_2') + expect(holdout).not_to be_nil + expect(holdout['id']).to eq('holdout_2') + expect(holdout['key']).to eq('specific_holdout') + expect(holdout['status']).to eq('Running') + end + + it 'should return nil for non-existent holdout ID' do + holdout = config_with_holdouts.get_holdout('non_existent_holdout') + expect(holdout).to be_nil + end + end + + describe '#get_holdout with logging' do + let(:spy_logger) { spy('logger') } + let(:config_with_holdouts) do + config_body_with_holdouts = config_body.dup + config_body_with_holdouts['holdouts'] = [ + { + 'id' => 'holdout_1', + 'key' => 'test_holdout', + 'status' => 'Running', + 'includedFlags' => [], + 'excludedFlags' => [] + } + ] + config_json = JSON.dump(config_body_with_holdouts) + Optimizely::DatafileProjectConfig.new(config_json, spy_logger, error_handler) + end + + it 'should log error when holdout is not found' do + result = config_with_holdouts.get_holdout('invalid_holdout_id') + + expect(result).to be_nil + expect(spy_logger).to have_received(:log).with( + Logger::ERROR, + "Holdout with ID 'invalid_holdout_id' not found." 
+ ) + end + + it 'should not log when holdout is found' do + result = config_with_holdouts.get_holdout('holdout_1') + + expect(result).not_to be_nil + expect(spy_logger).not_to have_received(:log).with( + Logger::ERROR, + anything + ) + end + end + + describe 'holdout initialization' do + let(:config_with_complex_holdouts) do + config_body_with_holdouts = config_body.dup + + # Use the correct feature flag IDs from the debug output + boolean_feature_id = '155554' + multi_variate_feature_id = '155559' + empty_feature_id = '155564' + string_feature_id = '155557' + + config_body_with_holdouts['holdouts'] = [ + { + 'id' => 'global_holdout', + 'key' => 'global', + 'status' => 'Running', + 'includedFlags' => [], + 'excludedFlags' => [boolean_feature_id, string_feature_id] + }, + { + 'id' => 'specific_holdout', + 'key' => 'specific', + 'status' => 'Running', + 'includedFlags' => [multi_variate_feature_id, empty_feature_id], + 'excludedFlags' => [] + }, + { + 'id' => 'inactive_holdout', + 'key' => 'inactive', + 'status' => 'Inactive', + 'includedFlags' => [boolean_feature_id], + 'excludedFlags' => [] + } + ] + config_json = JSON.dump(config_body_with_holdouts) + Optimizely::DatafileProjectConfig.new(config_json, logger, error_handler) + end + + it 'should properly categorize holdouts during initialization' do + expect(config_with_complex_holdouts.holdout_id_map.keys).to contain_exactly('global_holdout', 'specific_holdout') + expect(config_with_complex_holdouts.global_holdouts.map { |h| h['id'] }).to contain_exactly('global_holdout') + + # Use the correct feature flag IDs + boolean_feature_id = '155554' + multi_variate_feature_id = '155559' + empty_feature_id = '155564' + string_feature_id = '155557' + + expect(config_with_complex_holdouts.included_holdouts[multi_variate_feature_id]).not_to be_nil + expect(config_with_complex_holdouts.included_holdouts[multi_variate_feature_id]).not_to be_empty + expect(config_with_complex_holdouts.included_holdouts[empty_feature_id]).not_to 
be_nil + expect(config_with_complex_holdouts.included_holdouts[empty_feature_id]).not_to be_empty + expect(config_with_complex_holdouts.included_holdouts[boolean_feature_id]).to be_nil + + expect(config_with_complex_holdouts.excluded_holdouts[boolean_feature_id]).not_to be_nil + expect(config_with_complex_holdouts.excluded_holdouts[boolean_feature_id]).not_to be_empty + expect(config_with_complex_holdouts.excluded_holdouts[string_feature_id]).not_to be_nil + expect(config_with_complex_holdouts.excluded_holdouts[string_feature_id]).not_to be_empty + end + + it 'should only process running holdouts during initialization' do + expect(config_with_complex_holdouts.holdout_id_map['inactive_holdout']).to be_nil + expect(config_with_complex_holdouts.global_holdouts.find { |h| h['id'] == 'inactive_holdout' }).to be_nil + + boolean_feature_id = '155554' + included_for_boolean = config_with_complex_holdouts.included_holdouts[boolean_feature_id] + expect(included_for_boolean).to be_nil + end + end + + describe 'Holdout Decision Functionality' do + let(:config_with_holdouts) do + Optimizely::DatafileProjectConfig.new( + OptimizelySpec::CONFIG_BODY_WITH_HOLDOUTS_JSON, + logger, + error_handler + ) + end + + describe '#decide with global holdout' do + it 'should return valid decision for global holdout' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + # Verify holdouts are loaded + expect(config_with_holdouts.holdouts).not_to be_nil + expect(config_with_holdouts.holdouts.length).to be > 0 + end + + it 'should handle decision with global holdout configuration' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + expect(feature_flag['id']).not_to be_empty + end + end + + describe '#decide with included flags holdout' do + it 'should return valid decision for included flags' do + feature_flag = 
config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + # Check if there's a holdout that includes this flag + included_holdout = config_with_holdouts.holdouts.find do |h| + h['includedFlags']&.include?(feature_flag['id']) + end + + if included_holdout + expect(included_holdout['key']).not_to be_empty + expect(included_holdout['status']).to eq('Running') + end + end + + it 'should properly filter holdouts based on includedFlags' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + holdouts_for_flag = config_with_holdouts.get_holdouts_for_flag(feature_flag['id']) + expect(holdouts_for_flag).to be_an(Array) + end + end + + describe '#decide with excluded flags holdout' do + it 'should not return excluded holdout for excluded flag' do + # boolean_feature is excluded by holdout_excluded_1 + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + + if feature_flag + holdouts_for_flag = config_with_holdouts.get_holdouts_for_flag(feature_flag['id']) + + # Should not include holdouts that exclude this flag + excluded_holdout = holdouts_for_flag.find { |h| h['key'] == 'excluded_holdout' } + expect(excluded_holdout).to be_nil + end + end + + it 'should return holdouts for non-excluded flag' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + holdouts_for_flag = config_with_holdouts.get_holdouts_for_flag(feature_flag['id']) + expect(holdouts_for_flag).to be_an(Array) + end + end + + describe '#decide with multiple holdouts' do + it 'should handle multiple holdouts for different flags' do + flag_keys = %w[boolean_feature multi_variate_feature string_single_variable_feature empty_feature] + + flag_keys.each do |flag_key| + feature_flag = config_with_holdouts.feature_flag_key_map[flag_key] + next unless feature_flag + + holdouts = 
config_with_holdouts.get_holdouts_for_flag(flag_key) + expect(holdouts).to be_an(Array) + + # Each holdout should have proper structure + holdouts.each do |holdout| + expect(holdout).to have_key('id') + expect(holdout).to have_key('key') + expect(holdout).to have_key('status') + end + end + end + + it 'should properly cache holdout lookups' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + holdouts_1 = config_with_holdouts.get_holdouts_for_flag(feature_flag['id']) + holdouts_2 = config_with_holdouts.get_holdouts_for_flag(feature_flag['id']) + + expect(holdouts_1).to equal(holdouts_2) + end + end + + describe '#decide with inactive holdout' do + it 'should not include inactive holdouts in decision process' do + # Find a holdout and verify status handling + holdout = config_with_holdouts.holdouts.first + + if holdout + # Temporarily modify status to test behavior + original_status = holdout['status'] + holdout['status'] = 'Paused' + + # Recreate config to process the modified holdout + modified_config_body = OptimizelySpec::CONFIG_BODY_WITH_HOLDOUTS.dup + modified_config_body['holdouts'] = config_with_holdouts.holdouts.map(&:dup) + modified_config_body['holdouts'].first['status'] = 'Paused' + + modified_config = Optimizely::DatafileProjectConfig.new( + JSON.dump(modified_config_body), + logger, + error_handler + ) + + # Should not be in active holdouts map + expect(modified_config.holdout_id_map[holdout['id']]).to be_nil + + # Restore original status + holdout['status'] = original_status + end + end + + it 'should only process running holdouts' do + running_holdouts = config_with_holdouts.holdouts.select { |h| h['status'] == 'Running' } + + running_holdouts.each do |holdout| + expect(config_with_holdouts.holdout_id_map[holdout['id']]).not_to be_nil + end + end + end + + describe '#decide with empty user id' do + it 'should handle empty user id without error' do + feature_flag = 
config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + # Empty user ID should be valid for bucketing + # This test verifies the config structure supports this + expect(feature_flag['key']).to eq('boolean_feature') + end + end + + describe '#holdout priority evaluation' do + it 'should evaluate global holdouts for flags without specific targeting' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + global_holdouts = config_with_holdouts.holdouts.select do |h| + h['includedFlags'].nil? || h['includedFlags'].empty? + end + + included_holdouts = config_with_holdouts.holdouts.select do |h| + h['includedFlags']&.include?(feature_flag['id']) + end + + # Should have either global or included holdouts + expect(global_holdouts.length + included_holdouts.length).to be >= 0 + end + + it 'should handle mixed holdout configurations' do + # Verify the config has properly categorized holdouts + expect(config_with_holdouts.global_holdouts).to be_a(Array) + expect(config_with_holdouts.included_holdouts).to be_a(Hash) + expect(config_with_holdouts.excluded_holdouts).to be_a(Hash) + end + end + end + + describe 'Holdout Decision Reasons' do + let(:config_with_holdouts) do + Optimizely::DatafileProjectConfig.new( + OptimizelySpec::CONFIG_BODY_WITH_HOLDOUTS_JSON, + logger, + error_handler + ) + end + + describe 'decision reasons structure' do + it 'should support decision reasons for holdout decisions' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + # Verify the feature flag has proper structure for decision reasons + expect(feature_flag).to have_key('id') + expect(feature_flag).to have_key('key') + end + + it 'should include holdout information in config' do + expect(config_with_holdouts.holdouts).not_to be_empty + + config_with_holdouts.holdouts.each do |holdout| + expect(holdout).to 
have_key('id') + expect(holdout).to have_key('key') + expect(holdout).to have_key('status') + end + end + end + + describe 'holdout bucketing messages' do + it 'should have holdout configuration for bucketing decisions' do + holdout = config_with_holdouts.holdouts.first + + if holdout + expect(holdout['status']).to eq('Running').or eq('Inactive') + expect(holdout).to have_key('id') + expect(holdout).to have_key('key') + end + end + + it 'should support audience evaluation for holdouts' do + holdout = config_with_holdouts.holdouts.first + + if holdout + # Holdouts may or may not have audiences - both are valid + expect(holdout).to have_key('id') + expect(holdout).to have_key('key') + expect(holdout).to have_key('status') + end + end + end + + describe 'holdout status messages' do + it 'should differentiate between running and non-running holdouts' do + running_holdouts = config_with_holdouts.holdouts.select { |h| h['status'] == 'Running' } + non_running_holdouts = config_with_holdouts.holdouts.reject { |h| h['status'] == 'Running' } + + # Only running holdouts should be in the holdout_id_map + running_holdouts.each do |holdout| + expect(config_with_holdouts.holdout_id_map[holdout['id']]).not_to be_nil + end + + non_running_holdouts.each do |holdout| + expect(config_with_holdouts.holdout_id_map[holdout['id']]).to be_nil + end + end + end + + describe 'audience condition evaluation' do + it 'should support audience conditions in holdouts' do + holdout = config_with_holdouts.holdouts.first + + if holdout + expect(holdout).to have_key('id') + expect(holdout).to have_key('key') + expect(holdout.key?('audienceIds') || holdout.key?('audiences')).to be true + end + end + + it 'should handle holdouts with empty audience conditions' do + # Empty audience conditions should evaluate to TRUE (match everyone) + holdouts_with_empty_audiences = config_with_holdouts.holdouts.select do |h| + !h.key?('audiences') || h['audiences'].nil? || h['audiences'].empty? 
+ end + + # These holdouts should match all users + holdouts_with_empty_audiences.each do |holdout| + expect(holdout['status']).to eq('Running').or eq('Inactive') + end + end + end + + describe 'holdout evaluation reasoning' do + it 'should provide holdout configuration for evaluation' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + holdouts_for_flag = config_with_holdouts.get_holdouts_for_flag(feature_flag['id']) + + holdouts_for_flag.each do |holdout| + # Each holdout should have necessary info for decision reasoning + expect(holdout['id']).not_to be_empty + expect(holdout['key']).not_to be_empty + expect(holdout['status']).to eq('Running') + end + end + + it 'should support relevant holdout decision information' do + holdout = config_with_holdouts.holdouts.first + + if holdout + # Verify holdout has all necessary fields for decision reasoning + expect(holdout).to have_key('id') + expect(holdout).to have_key('key') + expect(holdout).to have_key('status') + end + end + end + end + + describe 'Holdout Edge Cases' do + let(:config_with_holdouts) do + config_body_with_holdouts = config_body.dup + config_body_with_holdouts['holdouts'] = [ + { + 'id' => 'holdout_1', + 'key' => 'test_holdout', + 'status' => 'Running', + 'audiences' => [], + 'includedFlags' => [], + 'excludedFlags' => [] + }, + { + 'id' => 'holdout_2', + 'key' => 'paused_holdout', + 'status' => 'Paused', + 'audiences' => [], + 'includedFlags' => [], + 'excludedFlags' => [] + } + ] + config_json = JSON.dump(config_body_with_holdouts) + Optimizely::DatafileProjectConfig.new(config_json, logger, error_handler) + end + + it 'should handle datafile without holdouts' do + config_without_holdouts = Optimizely::DatafileProjectConfig.new( + config_body_JSON, + logger, + error_handler + ) + + feature_flag = config_without_holdouts.feature_flag_key_map['boolean_feature'] + holdouts_for_flag = 
config_without_holdouts.get_holdouts_for_flag(feature_flag['id']) + expect(holdouts_for_flag).to eq([]) + end + + it 'should handle holdouts with nil included/excluded flags' do + config_body_with_nil = config_body.dup + config_body_with_nil['holdouts'] = [ + { + 'id' => 'holdout_nil', + 'key' => 'nil_holdout', + 'status' => 'Running', + 'audiences' => [], + 'includedFlags' => nil, + 'excludedFlags' => nil + } + ] + config_json = JSON.dump(config_body_with_nil) + config = Optimizely::DatafileProjectConfig.new(config_json, logger, error_handler) + + # Should treat as global holdout + expect(config.global_holdouts.find { |h| h['id'] == 'holdout_nil' }).not_to be_nil + end + + it 'should only include running holdouts in maps' do + running_count = config_with_holdouts.holdout_id_map.length + total_count = config_with_holdouts.holdouts.length + + # Only running holdouts should be in the map + expect(running_count).to be < total_count + expect(config_with_holdouts.holdout_id_map['holdout_1']).not_to be_nil + expect(config_with_holdouts.holdout_id_map['holdout_2']).to be_nil + end + + it 'should handle mixed status holdouts correctly' do + running_holdouts = config_with_holdouts.holdouts.select { |h| h['status'] == 'Running' } + + running_holdouts.each do |holdout| + expect(config_with_holdouts.get_holdout(holdout['id'])).not_to be_nil + end + end + end end diff --git a/spec/decision_service_holdout_spec.rb b/spec/decision_service_holdout_spec.rb new file mode 100644 index 00000000..85191ce1 --- /dev/null +++ b/spec/decision_service_holdout_spec.rb @@ -0,0 +1,779 @@ +# frozen_string_literal: true + +# +# Copyright 2017-2020, 2023, Optimizely and contributors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +require 'spec_helper' +require 'optimizely/decision_service' +require 'optimizely/error_handler' +require 'optimizely/logger' + +describe Optimizely::DecisionService do + let(:config_body) { OptimizelySpec::VALID_CONFIG_BODY } + let(:config_body_JSON) { OptimizelySpec::VALID_CONFIG_BODY_JSON } + let(:error_handler) { Optimizely::NoOpErrorHandler.new } + let(:spy_logger) { spy('logger') } + let(:spy_user_profile_service) { spy('user_profile_service') } + let(:spy_cmab_service) { spy('cmab_service') } + let(:config) { Optimizely::DatafileProjectConfig.new(config_body_JSON, spy_logger, error_handler) } + let(:decision_service) { Optimizely::DecisionService.new(spy_logger, spy_cmab_service, spy_user_profile_service) } + let(:project_instance) { Optimizely::Project.new(datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler) } + let(:user_context) { project_instance.create_user_context('some-user', {}) } + after(:example) { project_instance.close } + + describe 'Holdout Decision Service Tests' do + let(:config_with_holdouts) do + Optimizely::DatafileProjectConfig.new( + OptimizelySpec::CONFIG_BODY_WITH_HOLDOUTS_JSON, + spy_logger, + error_handler + ) + end + + let(:project_with_holdouts) do + Optimizely::Project.new( + datafile: OptimizelySpec::CONFIG_BODY_WITH_HOLDOUTS_JSON, + logger: spy_logger, + error_handler: error_handler + ) + end + + let(:decision_service_with_holdouts) do + Optimizely::DecisionService.new(spy_logger, spy_cmab_service, spy_user_profile_service) + end + + after(:example) do + project_with_holdouts&.close + end + + 
describe '#get_variations_for_feature_list with holdouts' do + describe 'when holdout is active and user is bucketed' do + it 'should return holdout decision with variation' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + holdout = config_with_holdouts.holdouts.first + expect(holdout).not_to be_nil + + user_context = project_with_holdouts.create_user_context('testUserId', {}) + + result = decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context, + {} + ) + + expect(result).not_to be_nil + expect(result).to be_an(Array) + expect(result.length).to be > 0 + + # Check if any decision is from holdout source + _holdout_decision = result.find do |decision_result| + decision_result.decision&.source == Optimizely::DecisionService::DECISION_SOURCES['HOLDOUT'] + end + + # With real bucketer, we can't guarantee holdout bucketing + # but we can verify the result structure is valid + result.each do |decision_result| + expect(decision_result).to respond_to(:decision) + expect(decision_result).to respond_to(:reasons) + end + end + end + + describe 'when holdout is inactive' do + it 'should not bucket users and log appropriate message' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + # Find the most specific holdout for this flag (prefer explicitly included over global) + applicable_holdout = config_with_holdouts.holdouts.find do |holdout| + # First preference: holdout that explicitly includes this flag + holdout['includedFlags']&.include?(feature_flag['id']) + end + + # If no explicit holdout found, fall back to global holdouts + if applicable_holdout.nil? + applicable_holdout = config_with_holdouts.holdouts.find do |holdout| + # Global holdout (empty/nil includedFlags) that doesn't exclude this flag + (holdout['includedFlags'].nil? || holdout['includedFlags'].empty?) 
&& + !holdout['excludedFlags']&.include?(feature_flag['id']) + end + end + + expect(applicable_holdout).not_to be_nil, 'No applicable holdout found for boolean_feature' + + # Mock holdout as inactive + original_status = applicable_holdout['status'] + applicable_holdout['status'] = 'Paused' + + user_context = project_with_holdouts.create_user_context('testUserId', {}) + + # Use get_variation_for_holdout directly to test holdout evaluation + result = decision_service_with_holdouts.get_variation_for_holdout( + applicable_holdout, + user_context, + config_with_holdouts + ) + + # Assert that result is not nil and has expected structure + expect(result).not_to be_nil + expect(result).to respond_to(:decision) + expect(result).to respond_to(:reasons) + expect(result.decision).to be_nil + + # Verify log message for inactive holdout + expect(spy_logger).to have_received(:log).with( + Logger::INFO, + a_string_matching(/Holdout.*is not running/i) + ) + + # Restore original status + applicable_holdout['status'] = original_status + end + end + + describe 'when user is not bucketed into holdout' do + it 'should execute successfully with valid result structure' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + holdout = config_with_holdouts.holdouts.first + expect(holdout).not_to be_nil + + user_context = project_with_holdouts.create_user_context('testUserId', {}) + + result = decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context, + {} + ) + + # With real bucketer, we can't guarantee specific bucketing results + # but we can verify the method executes successfully + expect(result).not_to be_nil + expect(result).to be_an(Array) + end + end + + describe 'with user attributes for audience targeting' do + it 'should evaluate holdout with user attributes' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + 
expect(feature_flag).not_to be_nil + + holdout = config_with_holdouts.holdouts.first + expect(holdout).not_to be_nil + + user_attributes = { + 'browser' => 'chrome', + 'location' => 'us' + } + + user_context = project_with_holdouts.create_user_context('testUserId', user_attributes) + + result = decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context, + user_attributes + ) + + expect(result).not_to be_nil + expect(result).to be_an(Array) + + # With real bucketer, we can't guarantee specific variations + # but can verify execution completes successfully + end + end + + describe 'with multiple holdouts' do + it 'should handle multiple holdouts for a single feature flag' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + user_context = project_with_holdouts.create_user_context('testUserId', {}) + + result = decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context, + {} + ) + + expect(result).not_to be_nil + expect(result).to be_an(Array) + + # With real bucketer, we can't guarantee specific bucketing results + # but we can verify the method executes successfully + end + end + + describe 'with empty user ID' do + it 'should allow holdout bucketing with empty user ID' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + # Empty user ID should still be valid for bucketing + user_context = project_with_holdouts.create_user_context('', {}) + + result = decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context, + {} + ) + + expect(result).not_to be_nil + + # Empty user ID should not log error about invalid user ID + expect(spy_logger).not_to have_received(:log).with( + Logger::ERROR, + a_string_matching(/User ID.*(?:null|empty)/) + ) + end + end + + 
describe 'with decision reasons' do + it 'should populate decision reasons for holdouts' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + holdout = config_with_holdouts.holdouts.first + expect(holdout).not_to be_nil + + user_context = project_with_holdouts.create_user_context('testUserId', {}) + + result = decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context, + {} + ) + + expect(result).not_to be_nil + + # With real bucketer, we expect proper decision reasons to be generated + # Find any decision with reasons + decision_with_reasons = result.find do |decision_result| + decision_result.reasons && !decision_result.reasons.empty? + end + + expect(decision_with_reasons.reasons).not_to be_empty if decision_with_reasons + end + end + end + + describe '#get_variation_for_feature with holdouts' do + describe 'when user is bucketed into holdout' do + it 'should return holdout decision before checking experiments or rollouts' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + user_context = project_with_holdouts.create_user_context('testUserId', {}) + + # The get_variation_for_feature method should check holdouts first + decision_result = decision_service_with_holdouts.get_variation_for_feature( + config_with_holdouts, + feature_flag, + user_context + ) + + expect(decision_result).not_to be_nil + + # Decision should be valid (from holdout, experiment, or rollout) + if decision_result.decision + expect(decision_result.decision).to respond_to(:experiment) + expect(decision_result.decision).to respond_to(:variation) + expect(decision_result.decision).to respond_to(:source) + end + end + end + + describe 'when holdout returns no decision' do + it 'should fall through to experiment and rollout evaluation' do + feature_flag = 
config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + # Use a user ID that won't be bucketed into holdout + user_context = project_with_holdouts.create_user_context('non_holdout_user', {}) + + decision_result = decision_service_with_holdouts.get_variation_for_feature( + config_with_holdouts, + feature_flag, + user_context + ) + + # Should still get a valid decision result (even if decision is nil) + expect(decision_result).not_to be_nil + expect(decision_result).to respond_to(:decision) + expect(decision_result).to respond_to(:reasons) + end + end + + describe 'with decision options' do + it 'should respect decision options when evaluating holdouts' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + user_context = project_with_holdouts.create_user_context('testUserId', {}) + + # Test with INCLUDE_REASONS option + decision_result = decision_service_with_holdouts.get_variation_for_feature( + config_with_holdouts, + feature_flag, + user_context, + [Optimizely::Decide::OptimizelyDecideOption::INCLUDE_REASONS] + ) + + expect(decision_result).not_to be_nil + expect(decision_result.reasons).to be_an(Array) + end + end + end + + describe 'holdout priority and evaluation order' do + it 'should evaluate holdouts before experiments' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + user_context = project_with_holdouts.create_user_context('testUserId', {}) + + # Mock the get_variation_for_feature_experiment to track if it's called + allow(decision_service_with_holdouts).to receive(:get_variation_for_feature_experiment) + .and_call_original + + decision_result = decision_service_with_holdouts.get_variation_for_feature( + config_with_holdouts, + feature_flag, + user_context + ) + + expect(decision_result).not_to be_nil + + expect(decision_service_with_holdouts).not_to 
have_received(:get_variation_for_feature_experiment) if decision_result.decision && decision_result.decision.source == Optimizely::DecisionService::DECISION_SOURCES['HOLDOUT'] + end + + it 'should evaluate global holdouts for all flags' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + # Get global holdouts + global_holdouts = config_with_holdouts.holdouts.select do |h| + h['includedFlags'].nil? || h['includedFlags'].empty? + end + + unless global_holdouts.empty? + user_context = project_with_holdouts.create_user_context('testUserId', {}) + + result = decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context, + {} + ) + + expect(result).not_to be_nil + expect(result).to be_an(Array) + end + end + + it 'should respect included and excluded flags configuration' do + # Test that flags in excludedFlags are not affected by that holdout + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + + if feature_flag + # Get holdouts for this flag + holdouts_for_flag = config_with_holdouts.get_holdouts_for_flag(feature_flag['id']) + + # Should not include holdouts that exclude this flag + excluded_holdout = holdouts_for_flag.find { |h| h['key'] == 'excluded_holdout' } + expect(excluded_holdout).to be_nil + end + end + end + + describe 'holdout logging and error handling' do + it 'should log when holdout evaluation starts' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + user_context = project_with_holdouts.create_user_context('testUserId', {}) + + decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context, + {} + ) + + # Verify that appropriate log messages are generated + # (specific messages depend on implementation) + expect(spy_logger).to have_received(:log).at_least(:once) + end + + it 
'should handle missing holdout configuration gracefully' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + # Temporarily remove holdouts + original_holdouts = config_with_holdouts.instance_variable_get(:@holdouts) + config_with_holdouts.instance_variable_set(:@holdouts, []) + + user_context = project_with_holdouts.create_user_context('testUserId', {}) + + result = decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context, + {} + ) + + expect(result).not_to be_nil + + # Restore original holdouts + config_with_holdouts.instance_variable_set(:@holdouts, original_holdouts) + end + + it 'should handle invalid holdout data gracefully' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + user_context = project_with_holdouts.create_user_context('testUserId', {}) + + # The method should handle invalid holdout data without crashing + result = decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context, + {} + ) + + expect(result).not_to be_nil + expect(result).to be_an(Array) + end + end + + describe 'holdout bucketing behavior' do + it 'should use consistent bucketing for the same user' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + user_id = 'consistent_user' + user_context1 = project_with_holdouts.create_user_context(user_id, {}) + user_context2 = project_with_holdouts.create_user_context(user_id, {}) + + result1 = decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context1, + {} + ) + + result2 = decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context2, + {} + ) + + # Same user should get consistent results + 
expect(result1).not_to be_nil + expect(result2).not_to be_nil + + if !result1.empty? && !result2.empty? + # Compare the first decision from each result + decision1 = result1[0].decision + decision2 = result2[0].decision + + # If both have decisions, they should match + if decision1 && decision2 + expect(decision1.variation&.fetch('id', nil)).to eq(decision2.variation&.fetch('id', nil)) + expect(decision1.source).to eq(decision2.source) + end + end + end + + it 'should use bucketing ID when provided' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + user_attributes = { + Optimizely::Helpers::Constants::CONTROL_ATTRIBUTES['BUCKETING_ID'] => 'custom_bucketing_id' + } + + user_context = project_with_holdouts.create_user_context('testUserId', user_attributes) + + result = decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context, + user_attributes + ) + + expect(result).not_to be_nil + expect(result).to be_an(Array) + + # Bucketing should work with custom bucketing ID + end + + it 'should handle different traffic allocations' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + # Test with multiple users to see varying bucketing results + users = %w[user1 user2 user3 user4 user5] + results = [] + + users.each do |user_id| + user_context = project_with_holdouts.create_user_context(user_id, {}) + result = decision_service_with_holdouts.get_variations_for_feature_list( + config_with_holdouts, + [feature_flag], + user_context, + {} + ) + results << result + end + + # All results should be valid + results.each do |result| + expect(result).not_to be_nil + expect(result).to be_an(Array) + end + end + end + + describe 'holdout integration with feature experiments' do + it 'should check holdouts before feature experiments' do + feature_flag = 
config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + user_context = project_with_holdouts.create_user_context('testUserId', {}) + + # Mock feature experiment method to track calls + allow(decision_service_with_holdouts).to receive(:get_variation_for_feature_experiment) + .and_call_original + + decision_result = decision_service_with_holdouts.get_variation_for_feature( + config_with_holdouts, + feature_flag, + user_context + ) + + expect(decision_result).not_to be_nil + + # Holdout evaluation happens in get_variations_for_feature_list + # which is called before experiment evaluation + end + + it 'should fall back to experiments if no holdout decision' do + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + user_context = project_with_holdouts.create_user_context('non_holdout_user_123', {}) + + decision_result = decision_service_with_holdouts.get_variation_for_feature( + config_with_holdouts, + feature_flag, + user_context + ) + + # Should return a valid decision result + expect(decision_result).not_to be_nil + expect(decision_result).to respond_to(:decision) + expect(decision_result).to respond_to(:reasons) + end + end + end + + describe 'Holdout Impression Events' do + let(:spy_event_processor) { spy('event_processor') } + let(:config_with_holdouts) do + Optimizely::DatafileProjectConfig.new( + OptimizelySpec::CONFIG_BODY_WITH_HOLDOUTS_JSON, + spy_logger, + error_handler + ) + end + + let(:optimizely_with_mocked_events) do + Optimizely::Project.new( + datafile: OptimizelySpec::CONFIG_BODY_WITH_HOLDOUTS_JSON, + logger: spy_logger, + error_handler: error_handler, + event_processor: spy_event_processor + ) + end + + after(:example) do + optimizely_with_mocked_events&.close + end + + describe '#decide with holdout impression events' do + it 'should send impression event for holdout decision' do + # Use a specific user ID that will be bucketed into a 
holdout + # This is deterministic based on the bucketing algorithm + test_user_id = 'user_bucketed_into_holdout' + + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil, "Feature flag 'boolean_feature' should exist" + + user_attributes = {} + + allow(spy_event_processor).to receive(:process) + + user_context = optimizely_with_mocked_events.create_user_context(test_user_id, user_attributes) + decision = user_context.decide(feature_flag['key']) + + expect(decision).not_to be_nil, 'Decision should not be nil' + + actual_holdout = config_with_holdouts.holdouts&.find { |h| h['key'] == decision.rule_key } + + # Only continue if this is a holdout decision + if actual_holdout + expect(decision.flag_key).to eq(feature_flag['key']) + + holdout_variation = actual_holdout['variations'].find { |v| v['key'] == decision.variation_key } + + expect(holdout_variation).not_to be_nil, "Variation '#{decision.variation_key}' should be from the chosen holdout '#{actual_holdout['key']}'" + + expect(decision.enabled).to eq(holdout_variation['featureEnabled']), "Enabled flag should match holdout variation's featureEnabled value" + + expect(spy_event_processor).to have_received(:process) + .with(instance_of(Optimizely::ImpressionEvent)) + .at_least(:once) + + # Verify impression event contains correct holdout details + expect(spy_event_processor).to have_received(:process).with( + having_attributes( + user_id: test_user_id + ) + ).at_least(:once) + end + end + + it 'should not send impression event when DISABLE_DECISION_EVENT option is used' do + test_user_id = 'user_bucketed_into_holdout' + + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + expect(feature_flag).not_to be_nil + + user_attributes = {} + + allow(spy_event_processor).to receive(:process) + + user_context = optimizely_with_mocked_events.create_user_context(test_user_id, user_attributes) + decision = user_context.decide( + 
feature_flag['key'], + [Optimizely::Decide::OptimizelyDecideOption::DISABLE_DECISION_EVENT] + ) + + expect(decision).not_to be_nil, 'Decision should not be nil' + + chosen_holdout = config_with_holdouts.holdouts&.find { |h| h['key'] == decision.rule_key } + + if chosen_holdout + expect(decision.flag_key).to eq(feature_flag['key']) + + # Verify no impression event was sent + expect(spy_event_processor).not_to have_received(:process) + .with(instance_of(Optimizely::ImpressionEvent)) + end + end + end + + describe '#decide with holdout notification content' do + it 'should send correct notification content for holdout decision' do + captured_notifications = [] + + notification_callback = lambda do |_notification_type, _user_id, _user_attributes, decision_info| + captured_notifications << decision_info.dup + end + + optimizely_with_mocked_events.notification_center.add_notification_listener( + Optimizely::NotificationCenter::NOTIFICATION_TYPES[:DECISION], + notification_callback + ) + + test_user_id = 'holdout_test_user' + feature_flag = config_with_holdouts.feature_flag_key_map['boolean_feature'] + holdout = config_with_holdouts.holdouts.first + expect(holdout).not_to be_nil, 'Should have at least one holdout configured' + + holdout_variation = holdout['variations'].first + expect(holdout_variation).not_to be_nil, 'Holdout should have at least one variation' + + # Mock the decision service to return a holdout decision + holdout_decision = Optimizely::DecisionService::Decision.new( + holdout, + holdout_variation, + Optimizely::DecisionService::DECISION_SOURCES['HOLDOUT'] + ) + + holdout_decision_result = Optimizely::DecisionService::DecisionResult.new( + holdout_decision, + false, + [] + ) + + # Mock get_variations_for_feature_list instead of get_variation_for_feature + allow(optimizely_with_mocked_events.decision_service).to receive(:get_variations_for_feature_list) + .and_return([holdout_decision_result]) + + user_context = 
optimizely_with_mocked_events.create_user_context(test_user_id, {}) + decision = user_context.decide(feature_flag['key']) + + expect(decision).not_to be_nil, 'Decision should not be nil' + expect(captured_notifications.length).to eq(1), 'Should have captured exactly one decision notification' + + notification = captured_notifications.first + rule_key = notification[:rule_key] + + expect(rule_key).to eq(holdout['key']), 'RuleKey should match holdout key' + + # Verify holdout notification structure + expect(notification).to have_key(:flag_key), 'Holdout notification should contain flag_key' + expect(notification).to have_key(:enabled), 'Holdout notification should contain enabled flag' + expect(notification).to have_key(:variation_key), 'Holdout notification should contain variation_key' + expect(notification).to have_key(:experiment_id), 'Holdout notification should contain experiment_id' + expect(notification).to have_key(:variation_id), 'Holdout notification should contain variation_id' + + flag_key = notification[:flag_key] + expect(flag_key).to eq('boolean_feature'), 'FlagKey should match the requested flag' + + experiment_id = notification[:experiment_id] + expect(experiment_id).to eq(holdout['id']), 'ExperimentId in notification should match holdout ID' + + variation_id = notification[:variation_id] + expect(variation_id).to eq(holdout_variation['id']), 'VariationId should match holdout variation ID' + + variation_key = notification[:variation_key] + expect(variation_key).to eq(holdout_variation['key']), 'VariationKey in notification should match holdout variation key' + + enabled = notification[:enabled] + expect(enabled).not_to be_nil, 'Enabled flag should be present in notification' + expect(enabled).to eq(holdout_variation['featureEnabled']), "Enabled flag should match holdout variation's featureEnabled value" + + expect(config_with_holdouts.feature_flag_key_map).to have_key(flag_key), "FlagKey '#{flag_key}' should exist in config" + + 
expect(notification).to have_key(:variables), 'Notification should contain variables' + expect(notification).to have_key(:reasons), 'Notification should contain reasons' + expect(notification).to have_key(:decision_event_dispatched), 'Notification should contain decision_event_dispatched' + end + end + end +end diff --git a/spec/decision_service_spec.rb b/spec/decision_service_spec.rb index 7646c032..eb70a9c9 100644 --- a/spec/decision_service_spec.rb +++ b/spec/decision_service_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2017-2020, Optimizely and contributors +# Copyright 2017-2020, 2023, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -26,9 +26,10 @@ let(:error_handler) { Optimizely::NoOpErrorHandler.new } let(:spy_logger) { spy('logger') } let(:spy_user_profile_service) { spy('user_profile_service') } + let(:spy_cmab_service) { spy('cmab_service') } let(:config) { Optimizely::DatafileProjectConfig.new(config_body_JSON, spy_logger, error_handler) } - let(:decision_service) { Optimizely::DecisionService.new(spy_logger, spy_user_profile_service) } - let(:project_instance) { Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler) } + let(:decision_service) { Optimizely::DecisionService.new(spy_logger, spy_cmab_service, spy_user_profile_service) } + let(:project_instance) { Optimizely::Project.new(datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler) } let(:user_context) { project_instance.create_user_context('some-user', {}) } after(:example) { project_instance.close } @@ -46,9 +47,9 @@ it 'should return the correct variation ID for a given user for whom a variation has been forced' do decision_service.set_forced_variation(config, 'test_experiment', 'test_user', 'variation') user_context = project_instance.create_user_context('test_user') - variation_received, reasons = 
decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111129') - expect(reasons).to eq(["Variation 'variation' is mapped to experiment '111127' and user 'test_user' in the forced variation map"]) + variation_result = decision_service.get_variation(config, '111127', user_context) + expect(variation_result.variation_id).to eq('111129') + expect(variation_result.reasons).to eq(["Variation 'variation' is mapped to experiment '111127' and user 'test_user' in the forced variation map"]) # Setting forced variation should short circuit whitelist check, bucketing and audience evaluation expect(decision_service).not_to have_received(:get_whitelisted_variation_id) expect(decision_service.bucketer).not_to have_received(:bucket) @@ -62,9 +63,9 @@ } decision_service.set_forced_variation(config, 'test_experiment_with_audience', 'test_user', 'control_with_audience') user_context = project_instance.create_user_context('test_user', user_attributes) - variation_received, reasons = decision_service.get_variation(config, '122227', user_context) - expect(variation_received).to eq('122228') - expect(reasons).to eq(["Variation 'control_with_audience' is mapped to experiment '122227' and user 'test_user' in the forced variation map"]) + variation_result = decision_service.get_variation(config, '122227', user_context) + expect(variation_result.variation_id).to eq('122228') + expect(variation_result.reasons).to eq(["Variation 'control_with_audience' is mapped to experiment '122227' and user 'test_user' in the forced variation map"]) # Setting forced variation should short circuit whitelist check, bucketing and audience evaluation expect(decision_service).not_to have_received(:get_whitelisted_variation_id) expect(decision_service.bucketer).not_to have_received(:bucket) @@ -73,13 +74,14 @@ it 'should return the correct variation ID for a given user ID and key of a running experiment' do user_context = 
project_instance.create_user_context('test_user') - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111128') + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id) + variation_result = decision_service.get_variation(config, '111127', user_context, user_profile_tracker) + expect(variation_result.variation_id).to eq('111128') - expect(reasons).to eq([ - "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", - "User 'test_user' is in variation 'control' of experiment '111127'." - ]) + expect(variation_result.reasons).to eq([ + "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", + "User 'test_user' is in variation 'control' of experiment '111127'." + ]) expect(spy_logger).to have_received(:log) .once.with(Logger::INFO, "User 'test_user' is in variation 'control' of experiment '111127'.") @@ -90,12 +92,13 @@ it 'should return nil when user ID is not bucketed' do allow(decision_service.bucketer).to receive(:bucket).and_return(nil) user_context = project_instance.create_user_context('test_user') - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq(nil) - expect(reasons).to eq([ - "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", - "User 'test_user' is in no variation." - ]) + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id) + variation_result = decision_service.get_variation(config, '111127', user_context, user_profile_tracker) + expect(variation_result.variation_id).to eq(nil) + expect(variation_result.reasons).to eq([ + "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", + "User 'test_user' is in no variation." 
+ ]) expect(spy_logger).to have_received(:log) .once.with(Logger::INFO, "User 'test_user' is in no variation.") @@ -103,20 +106,20 @@ it 'should return correct variation ID if user ID is in whitelisted Variations and variation is valid' do user_context = project_instance.create_user_context('forced_user1') - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111128') - expect(reasons).to eq([ - "User 'forced_user1' is whitelisted into variation 'control' of experiment '111127'." - ]) + variation_result = decision_service.get_variation(config, '111127', user_context) + expect(variation_result.variation_id).to eq('111128') + expect(variation_result.reasons).to eq([ + "User 'forced_user1' is whitelisted into variation 'control' of experiment '111127'." + ]) expect(spy_logger).to have_received(:log) .once.with(Logger::INFO, "User 'forced_user1' is whitelisted into variation 'control' of experiment '111127'.") user_context = project_instance.create_user_context('forced_user2') - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111129') - expect(reasons).to eq([ - "User 'forced_user2' is whitelisted into variation 'variation' of experiment '111127'." - ]) + variation_result = decision_service.get_variation(config, '111127', user_context) + expect(variation_result.variation_id).to eq('111129') + expect(variation_result.reasons).to eq([ + "User 'forced_user2' is whitelisted into variation 'variation' of experiment '111127'." 
+ ]) expect(spy_logger).to have_received(:log) .once.with(Logger::INFO, "User 'forced_user2' is whitelisted into variation 'variation' of experiment '111127'.") @@ -133,20 +136,20 @@ } user_context = project_instance.create_user_context('forced_user1', user_attributes) - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111128') - expect(reasons).to eq([ - "User 'forced_user1' is whitelisted into variation 'control' of experiment '111127'." - ]) + variation_result = decision_service.get_variation(config, '111127', user_context) + expect(variation_result.variation_id).to eq('111128') + expect(variation_result.reasons).to eq([ + "User 'forced_user1' is whitelisted into variation 'control' of experiment '111127'." + ]) expect(spy_logger).to have_received(:log) .once.with(Logger::INFO, "User 'forced_user1' is whitelisted into variation 'control' of experiment '111127'.") user_context = project_instance.create_user_context('forced_user2', user_attributes) - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111129') - expect(reasons).to eq([ - "User 'forced_user2' is whitelisted into variation 'variation' of experiment '111127'." - ]) + variation_result = decision_service.get_variation(config, '111127', user_context) + expect(variation_result.variation_id).to eq('111129') + expect(variation_result.reasons).to eq([ + "User 'forced_user2' is whitelisted into variation 'variation' of experiment '111127'." 
+ ]) expect(spy_logger).to have_received(:log) .once.with(Logger::INFO, "User 'forced_user2' is whitelisted into variation 'variation' of experiment '111127'.") @@ -159,11 +162,11 @@ it 'should return the correct variation ID for a user in a whitelisted variation (even when audience conditions do not match)' do user_attributes = {'browser_type' => 'wrong_browser'} user_context = project_instance.create_user_context('forced_audience_user', user_attributes) - variation_received, reasons = decision_service.get_variation(config, '122227', user_context) - expect(variation_received).to eq('122229') - expect(reasons).to eq([ - "User 'forced_audience_user' is whitelisted into variation 'variation_with_audience' of experiment '122227'." - ]) + variation_result = decision_service.get_variation(config, '122227', user_context) + expect(variation_result.variation_id).to eq('122229') + expect(variation_result.reasons).to eq([ + "User 'forced_audience_user' is whitelisted into variation 'variation_with_audience' of experiment '122227'." 
+ ]) expect(spy_logger).to have_received(:log) .once.with( Logger::INFO, @@ -178,9 +181,9 @@ it 'should return nil if the experiment key is invalid' do user_context = project_instance.create_user_context('test_user', {}) - variation_received, reasons = decision_service.get_variation(config, 'totally_invalid_experiment', user_context) - expect(variation_received).to eq(nil) - expect(reasons).to eq([]) + variation_result = decision_service.get_variation(config, 'totally_invalid_experiment', user_context) + expect(variation_result.variation_id).to eq(nil) + expect(variation_result.reasons).to eq([]) expect(spy_logger).to have_received(:log) .once.with(Logger::ERROR, "Experiment id 'totally_invalid_experiment' is not in datafile.") @@ -189,14 +192,15 @@ it 'should return nil if the user does not meet the audience conditions for a given experiment' do user_attributes = {'browser_type' => 'chrome'} user_context = project_instance.create_user_context('test_user', user_attributes) - variation_received, reasons = decision_service.get_variation(config, '122227', user_context) - expect(variation_received).to eq(nil) - expect(reasons).to eq([ - "Starting to evaluate audience '11154' with conditions: [\"and\", [\"or\", [\"or\", {\"name\": \"browser_type\", \"type\": \"custom_attribute\", \"value\": \"firefox\"}]]].", - "Audience '11154' evaluated to FALSE.", - "Audiences for experiment 'test_experiment_with_audience' collectively evaluated to FALSE.", - "User 'test_user' does not meet the conditions to be in experiment 'test_experiment_with_audience'." 
- ]) + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id) + variation_result = decision_service.get_variation(config, '122227', user_context, user_profile_tracker) + expect(variation_result.variation_id).to eq(nil) + expect(variation_result.reasons).to eq([ + "Starting to evaluate audience '11154' with conditions: [\"and\", [\"or\", [\"or\", {\"name\": \"browser_type\", \"type\": \"custom_attribute\", \"value\": \"firefox\"}]]].", + "Audience '11154' evaluated to FALSE.", + "Audiences for experiment 'test_experiment_with_audience' collectively evaluated to FALSE.", + "User 'test_user' does not meet the conditions to be in experiment 'test_experiment_with_audience'." + ]) expect(spy_logger).to have_received(:log) .once.with(Logger::INFO, "User 'test_user' does not meet the conditions to be in experiment 'test_experiment_with_audience'.") @@ -208,9 +212,9 @@ it 'should return nil if the given experiment is not running' do user_context = project_instance.create_user_context('test_user') - variation_received, reasons = decision_service.get_variation(config, '100027', user_context) - expect(variation_received).to eq(nil) - expect(reasons).to eq(["Experiment 'test_experiment_not_started' is not running."]) + variation_result = decision_service.get_variation(config, '100027', user_context) + expect(variation_result.variation_id).to eq(nil) + expect(variation_result.reasons).to eq(["Experiment 'test_experiment_not_started' is not running."]) expect(spy_logger).to have_received(:log) .once.with(Logger::INFO, "Experiment 'test_experiment_not_started' is not running.") @@ -224,11 +228,11 @@ it 'should respect forced variations within mutually exclusive grouped experiments' do user_context = project_instance.create_user_context('forced_group_user1') - variation_received, reasons = decision_service.get_variation(config, '133332', user_context) - expect(variation_received).to eq('130004') - expect(reasons).to eq([ - "User 'forced_group_user1' is 
whitelisted into variation 'g1_e2_v2' of experiment '133332'." - ]) + variation_result = decision_service.get_variation(config, '133332', user_context) + expect(variation_result.variation_id).to eq('130004') + expect(variation_result.reasons).to eq([ + "User 'forced_group_user1' is whitelisted into variation 'g1_e2_v2' of experiment '133332'." + ]) expect(spy_logger).to have_received(:log) .once.with(Logger::INFO, "User 'forced_group_user1' is whitelisted into variation 'g1_e2_v2' of experiment '133332'.") @@ -240,13 +244,14 @@ it 'should bucket normally if user is whitelisted into a forced variation that is not in the datafile' do user_context = project_instance.create_user_context('forced_user_with_invalid_variation') - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111128') - expect(reasons).to eq([ - "User 'forced_user_with_invalid_variation' is whitelisted into variation 'invalid_variation', which is not in the datafile.", - "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", - "User 'forced_user_with_invalid_variation' is in variation 'control' of experiment '111127'." - ]) + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id) + variation_result = decision_service.get_variation(config, '111127', user_context, user_profile_tracker) + expect(variation_result.variation_id).to eq('111128') + expect(variation_result.reasons).to eq([ + "User 'forced_user_with_invalid_variation' is whitelisted into variation 'invalid_variation', which is not in the datafile.", + "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", + "User 'forced_user_with_invalid_variation' is in variation 'control' of experiment '111127'." 
+ ]) expect(spy_logger).to have_received(:log) .once.with( Logger::INFO, @@ -259,65 +264,25 @@ end describe 'when a UserProfile service is provided' do - it 'should look up the UserProfile, bucket normally, and save the result if no saved profile is found' do - expected_user_profile = { - user_id: 'test_user', - experiment_bucket_map: { - '111127' => { - variation_id: '111128' - } - } - } - expect(spy_user_profile_service).to receive(:lookup).once.and_return(nil) - - user_context = project_instance.create_user_context('test_user') - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111128') - expect(reasons).to eq([ - "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", - "User 'test_user' is in variation 'control' of experiment '111127'." - ]) - - # bucketing should have occurred - expect(decision_service.bucketer).to have_received(:bucket).once - # bucketing decision should have been saved - expect(spy_user_profile_service).to have_received(:save).once.with(expected_user_profile) - expect(spy_logger).to have_received(:log).once - .with(Logger::INFO, "Saved variation ID 111128 of experiment ID 111127 for user 'test_user'.") - end - - it 'should look up the UserProfile, bucket normally (using Bucketing ID attribute), and save the result if no saved profile is found' do - expected_user_profile = { - user_id: 'test_user', - experiment_bucket_map: { - '111127' => { - variation_id: '111129' - } - } - } + it 'bucket normally (using Bucketing ID attribute)' do user_attributes = { 'browser_type' => 'firefox', Optimizely::Helpers::Constants::CONTROL_ATTRIBUTES['BUCKETING_ID'] => 'pid' } - expect(spy_user_profile_service).to receive(:lookup).once.and_return(nil) - user_context = project_instance.create_user_context('test_user', user_attributes) - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to 
eq('111129') - expect(reasons).to eq([ - "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", - "User 'test_user' is in variation 'variation' of experiment '111127'." - ]) + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id, spy_user_profile_service, spy_logger) + variation_result = decision_service.get_variation(config, '111127', user_context, user_profile_tracker) + expect(variation_result.variation_id).to eq('111129') + expect(variation_result.reasons).to eq([ + "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", + "User 'test_user' is in variation 'variation' of experiment '111127'." + ]) # bucketing should have occurred expect(decision_service.bucketer).to have_received(:bucket).once - # bucketing decision should have been saved - expect(spy_user_profile_service).to have_received(:save).once.with(expected_user_profile) - expect(spy_logger).to have_received(:log).once - .with(Logger::INFO, "Saved variation ID 111129 of experiment ID 111127 for user 'test_user'.") end - it 'should look up the user profile and skip normal bucketing if a profile with a saved decision is found' do + it 'skip normal bucketing if a profile with a saved decision is found' do saved_user_profile = { user_id: 'test_user', experiment_bucket_map: { @@ -330,11 +295,13 @@ .with('test_user').once.and_return(saved_user_profile) user_context = project_instance.create_user_context('test_user') - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111129') - expect(reasons).to eq([ - "Returning previously activated variation ID 111129 of experiment 'test_experiment' for user 'test_user' from user profile." 
- ]) + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id, spy_user_profile_service, spy_logger) + user_profile_tracker.load_user_profile + variation_result = decision_service.get_variation(config, '111127', user_context, user_profile_tracker) + expect(variation_result.variation_id).to eq('111129') + expect(variation_result.reasons).to eq([ + "Returning previously activated variation ID 111129 of experiment 'test_experiment' for user 'test_user' from user profile." + ]) expect(spy_logger).to have_received(:log).once .with(Logger::INFO, "Returning previously activated variation ID 111129 of experiment 'test_experiment' for user 'test_user' from user profile.") @@ -346,7 +313,7 @@ expect(spy_user_profile_service).not_to have_received(:save) end - it 'should look up the user profile and bucket normally if a profile without a saved decision is found' do + it 'bucket normally if a profile without a saved decision is found' do saved_user_profile = { user_id: 'test_user', experiment_bucket_map: { @@ -360,29 +327,17 @@ .once.with('test_user').and_return(saved_user_profile) user_context = project_instance.create_user_context('test_user') - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111128') - expect(reasons).to eq([ - "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", - "User 'test_user' is in variation 'control' of experiment '111127'." 
- ]) + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id, spy_user_profile_service, spy_logger) + user_profile_tracker.load_user_profile + variation_result = decision_service.get_variation(config, '111127', user_context, user_profile_tracker) + expect(variation_result.variation_id).to eq('111128') + expect(variation_result.reasons).to eq([ + "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", + "User 'test_user' is in variation 'control' of experiment '111127'." + ]) # bucketing should have occurred expect(decision_service.bucketer).to have_received(:bucket).once - - # user profile should have been updated with bucketing decision - expected_user_profile = { - user_id: 'test_user', - experiment_bucket_map: { - '111127' => { - variation_id: '111128' - }, - '122227' => { - variation_id: '122228' - } - } - } - expect(spy_user_profile_service).to have_received(:save).once.with(expected_user_profile) end it 'should bucket normally if the user profile contains a variation ID not in the datafile' do @@ -399,40 +354,33 @@ .once.with('test_user').and_return(saved_user_profile) user_context = project_instance.create_user_context('test_user') - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111128') - expect(reasons).to eq([ - "User 'test_user' was previously bucketed into variation ID '111111' for experiment '111127', but no matching variation was found. Re-bucketing user.", - "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", - "User 'test_user' is in variation 'control' of experiment '111127'." 
- ]) + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id, spy_user_profile_service, spy_logger) + user_profile_tracker.load_user_profile + variation_result = decision_service.get_variation(config, '111127', user_context, user_profile_tracker) + expect(variation_result.variation_id).to eq('111128') + expect(variation_result.reasons).to eq([ + "User 'test_user' was previously bucketed into variation ID '111111' for experiment '111127', but no matching variation was found. Re-bucketing user.", + "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", + "User 'test_user' is in variation 'control' of experiment '111127'." + ]) # bucketing should have occurred expect(decision_service.bucketer).to have_received(:bucket).once - - # user profile should have been updated with bucketing decision - expected_user_profile = { - user_id: 'test_user', - experiment_bucket_map: { - '111127' => { - variation_id: '111128' - } - } - } - expect(spy_user_profile_service).to have_received(:save).with(expected_user_profile) end - it 'should bucket normally if the user profile service throws an error during lookup' do + it 'should bucket normally if the user profile tracker throws an error during lookup' do expect(spy_user_profile_service).to receive(:lookup).once.with('test_user').and_throw(:LookupError) user_context = project_instance.create_user_context('test_user') - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111128') - expect(reasons).to eq([ - "Error while looking up user profile for user ID 'test_user': uncaught throw :LookupError.", - "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", - "User 'test_user' is in variation 'control' of experiment '111127'." 
- ]) + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id, spy_user_profile_service, spy_logger) + user_profile_tracker.load_user_profile + variation_result = decision_service.get_variation(config, '111127', user_context, user_profile_tracker) + user_profile_tracker.save_user_profile + expect(variation_result.variation_id).to eq('111128') + expect(variation_result.reasons).to eq([ + "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", + "User 'test_user' is in variation 'control' of experiment '111127'." + ]) expect(spy_logger).to have_received(:log).once .with(Logger::ERROR, "Error while looking up user profile for user ID 'test_user': uncaught throw :LookupError.") @@ -440,56 +388,23 @@ expect(decision_service.bucketer).to have_received(:bucket).once end - it 'should log an error if the user profile service throws an error during save' do - expect(spy_user_profile_service).to receive(:save).once.and_throw(:SaveError) - - user_context = project_instance.create_user_context('test_user') - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111128') - expect(reasons).to eq([ - "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", - "User 'test_user' is in variation 'control' of experiment '111127'." 
- ]) - - expect(spy_logger).to have_received(:log).once - .with(Logger::ERROR, "Error while saving user profile for user ID 'test_user': uncaught throw :SaveError.") - end - describe 'IGNORE_USER_PROFILE_SERVICE decide option' do it 'should ignore user profile service if this option is set' do allow(spy_user_profile_service).to receive(:lookup) .with('test_user').once.and_return(nil) user_context = project_instance.create_user_context('test_user', nil) - variation_received, reasons = decision_service.get_variation(config, '111127', user_context, [Optimizely::Decide::OptimizelyDecideOption::IGNORE_USER_PROFILE_SERVICE]) - expect(variation_received).to eq('111128') - expect(reasons).to eq([ - "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", - "User 'test_user' is in variation 'control' of experiment '111127'." - ]) - - expect(decision_service.bucketer).to have_received(:bucket) - expect(Optimizely::Audience).to have_received(:user_meets_audience_conditions?) - expect(spy_user_profile_service).not_to have_received(:lookup) - expect(spy_user_profile_service).not_to have_received(:save) - end - - it 'should not ignore user profile service if this option is not set' do - allow(spy_user_profile_service).to receive(:lookup) - .with('test_user').once.and_return(nil) - - user_context = project_instance.create_user_context('test_user') - variation_received, reasons = decision_service.get_variation(config, '111127', user_context) - expect(variation_received).to eq('111128') - expect(reasons).to eq([ - "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", - "User 'test_user' is in variation 'control' of experiment '111127'." 
- ]) + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id, spy_user_profile_service, spy_logger) + user_profile_tracker.load_user_profile + variation_result = decision_service.get_variation(config, '111127', user_context, user_profile_tracker, [Optimizely::Decide::OptimizelyDecideOption::IGNORE_USER_PROFILE_SERVICE]) + expect(variation_result.variation_id).to eq('111128') + expect(variation_result.reasons).to eq([ + "Audiences for experiment 'test_experiment' collectively evaluated to TRUE.", + "User 'test_user' is in variation 'control' of experiment '111127'." + ]) expect(decision_service.bucketer).to have_received(:bucket) expect(Optimizely::Audience).to have_received(:user_meets_audience_conditions?) - expect(spy_user_profile_service).to have_received(:lookup) - expect(spy_user_profile_service).to have_received(:save) end end end @@ -497,15 +412,15 @@ describe '#get_variation_for_feature_experiment' do config_body_json = OptimizelySpec::VALID_CONFIG_BODY_JSON - project_instance = Optimizely::Project.new(config_body_json, nil, nil, nil) + project_instance = Optimizely::Project.new(datafile: config_body_json) user_context = project_instance.create_user_context('user_1', {}) - + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id) describe 'when the feature flag\'s experiment ids array is empty' do it 'should return nil and log a message' do feature_flag = config.feature_flag_key_map['empty_feature'] - variation_received, reasons = decision_service.get_variation_for_feature_experiment(config, feature_flag, user_context) - expect(variation_received).to eq(nil) - expect(reasons).to eq(["The feature flag 'empty_feature' is not used in any experiments."]) + decision_result = decision_service.get_variation_for_feature_experiment(config, feature_flag, user_context, user_profile_tracker) + expect(decision_result.decision).to eq(nil) + expect(decision_result.reasons).to eq(["The feature flag 'empty_feature' is not 
used in any experiments."]) expect(spy_logger).to have_received(:log).once .with(Logger::DEBUG, "The feature flag 'empty_feature' is not used in any experiments.") @@ -517,30 +432,31 @@ feature_flag = config.feature_flag_key_map['boolean_feature'].dup # any string that is not an experiment id in the data file feature_flag['experimentIds'] = ['1333333337'] - variation_received, reasons = decision_service.get_variation_for_feature_experiment(config, feature_flag, user_context) - expect(variation_received).to eq(nil) - expect(reasons).to eq(["Feature flag experiment with ID '1333333337' is not in the datafile."]) + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id) + decision_result = decision_service.get_variation_for_feature_experiment(config, feature_flag, user_context, user_profile_tracker) + expect(decision_result.decision).to eq(nil) + expect(decision_result.reasons).to eq(["Feature flag experiment with ID '1333333337' is not in the datafile."]) expect(spy_logger).to have_received(:log).once .with(Logger::DEBUG, "Feature flag experiment with ID '1333333337' is not in the datafile.") end end describe 'when the feature flag is associated with a non-mutex experiment' do + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id) describe 'and the user is not bucketed into the feature flag\'s experiments' do before(:each) do multivariate_experiment = config.experiment_key_map['test_experiment_multivariate'] - # make sure the user is not bucketed into the feature experiment allow(decision_service).to receive(:get_variation) - .with(config, multivariate_experiment['id'], user_context, []) - .and_return([nil, nil]) + .with(config, multivariate_experiment['id'], user_context, user_profile_tracker, []) + .and_return(Optimizely::DecisionService::VariationResult.new(nil, false, [], nil)) end it 'should return nil and log a message' do feature_flag = config.feature_flag_key_map['multi_variate_feature'] - variation_received, 
reasons = decision_service.get_variation_for_feature_experiment(config, feature_flag, user_context, []) - expect(variation_received).to eq(nil) - expect(reasons).to eq(["The user 'user_1' is not bucketed into any of the experiments on the feature 'multi_variate_feature'."]) + decision_result = decision_service.get_variation_for_feature_experiment(config, feature_flag, user_context, user_profile_tracker, []) + expect(decision_result.decision).to eq(nil) + expect(decision_result.reasons).to eq(["The user 'user_1' is not bucketed into any of the experiments on the feature 'multi_variate_feature'."]) expect(spy_logger).to have_received(:log).once .with(Logger::INFO, "The user 'user_1' is not bucketed into any of the experiments on the feature 'multi_variate_feature'.") @@ -550,7 +466,7 @@ describe 'and the user is bucketed into a variation for the experiment on the feature flag' do before(:each) do # mock and return the first variation of the `test_experiment_multivariate` experiment, which is attached to the `multi_variate_feature` - allow(decision_service).to receive(:get_variation).and_return('122231') + allow(decision_service).to receive(:get_variation).and_return(Optimizely::DecisionService::VariationResult.new(nil, false, [], '122231')) end it 'should return the variation' do @@ -560,10 +476,15 @@ config.variation_id_map['test_experiment_multivariate']['122231'], Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] ) - - variation_received, reasons = decision_service.get_variation_for_feature_experiment(config, feature_flag, user_context) - expect(variation_received).to eq(expected_decision) - expect(reasons).to eq([]) + expected_decision_result = Optimizely::DecisionService::DecisionResult.new( + expected_decision, + false, + [] + ) + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id) + decision_result = decision_service.get_variation_for_feature_experiment(config, feature_flag, user_context, user_profile_tracker) + 
expect(decision_result).to eq(expected_decision_result) + expect(decision_result.reasons).to eq([]) end end end @@ -581,34 +502,36 @@ Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] ) allow(decision_service).to receive(:get_variation) - .and_return(variation['id']) + .and_return(Optimizely::DecisionService::VariationResult.new(nil, false, [], variation['id'])) end it 'should return the variation the user is bucketed into' do feature_flag = config.feature_flag_key_map['mutex_group_feature'] - variation_received, reasons = decision_service.get_variation_for_feature_experiment(config, feature_flag, user_context) - expect(variation_received).to eq(expected_decision) - expect(reasons).to eq([]) + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id) + decision_result = decision_service.get_variation_for_feature_experiment(config, feature_flag, user_context, user_profile_tracker) + expect(decision_result.decision).to eq(expected_decision) + expect(decision_result.reasons).to eq([]) end end describe 'and the user is not bucketed into any of the mutex experiments' do + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id) before(:each) do mutex_exp = config.experiment_key_map['group1_exp1'] mutex_exp2 = config.experiment_key_map['group1_exp2'] allow(decision_service).to receive(:get_variation) - .with(config, mutex_exp['id'], user_context, []) - .and_return([nil, nil]) + .with(config, mutex_exp['id'], user_context, user_profile_tracker, []) + .and_return(Optimizely::DecisionService::VariationResult.new(nil, false, [], nil)) allow(decision_service).to receive(:get_variation) - .with(config, mutex_exp2['id'], user_context, []) - .and_return([nil, nil]) + .with(config, mutex_exp2['id'], user_context, user_profile_tracker, []) + .and_return(Optimizely::DecisionService::VariationResult.new(nil, false, [], nil)) end it 'should return nil and log a message' do feature_flag = 
config.feature_flag_key_map['mutex_group_feature'] - variation_received, reasons = decision_service.get_variation_for_feature_experiment(config, feature_flag, user_context) - expect(variation_received).to eq(nil) - expect(reasons).to eq(["The user 'user_1' is not bucketed into any of the experiments on the feature 'mutex_group_feature'."]) + decision_result = decision_service.get_variation_for_feature_experiment(config, feature_flag, user_context, user_profile_tracker) + expect(decision_result.decision).to eq(nil) + expect(decision_result.reasons).to eq(["The user 'user_1' is not bucketed into any of the experiments on the feature 'mutex_group_feature'."]) expect(spy_logger).to have_received(:log).once .with(Logger::INFO, "The user 'user_1' is not bucketed into any of the experiments on the feature 'mutex_group_feature'.") @@ -619,16 +542,16 @@ describe '#get_variation_for_feature_rollout' do config_body_json = OptimizelySpec::VALID_CONFIG_BODY_JSON - project_instance = Optimizely::Project.new(config_body_json, nil, nil, nil) + project_instance = Optimizely::Project.new(datafile: config_body_json) user_context = project_instance.create_user_context('user_1', {}) user_id = 'user_1' describe 'when the feature flag is not associated with a rollout' do it 'should log a message and return nil' do feature_flag = config.feature_flag_key_map['boolean_feature'] - variation_received, reasons = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) - expect(variation_received).to eq(nil) - expect(reasons).to eq(["Feature flag '#{feature_flag['key']}' is not used in a rollout."]) + decision_result = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) + expect(decision_result.decision).to eq(nil) + expect(decision_result.reasons).to eq(["Feature flag '#{feature_flag['key']}' is not used in a rollout."]) expect(spy_logger).to have_received(:log).once .with(Logger::DEBUG, "Feature flag '#{feature_flag['key']}' is 
not used in a rollout.") end @@ -638,9 +561,9 @@ it 'should log a message and return nil' do feature_flag = config.feature_flag_key_map['boolean_feature'].dup feature_flag['rolloutId'] = 'invalid_rollout_id' - variation_received, reasons = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) - expect(variation_received).to eq(nil) - expect(reasons).to eq(["Rollout with ID 'invalid_rollout_id' is not in the datafile 'boolean_feature'"]) + decision_result = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) + expect(decision_result.decision).to eq(nil) + expect(decision_result.reasons).to eq(["Rollout with ID 'invalid_rollout_id' is not in the datafile 'boolean_feature'"]) expect(spy_logger).to have_received(:log).once .with(Logger::ERROR, "Rollout with ID 'invalid_rollout_id' is not in the datafile.") @@ -653,9 +576,9 @@ experimentless_rollout['experiments'] = [] allow(config).to receive(:get_rollout_from_id).and_return(experimentless_rollout) feature_flag = config.feature_flag_key_map['boolean_single_variable_feature'] - variation_received, reasons = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) - expect(variation_received).to eq(nil) - expect(reasons).to eq([]) + decision_result = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) + expect(decision_result.decision).to eq(nil) + expect(decision_result.reasons).to eq([]) end end @@ -670,10 +593,10 @@ allow(decision_service.bucketer).to receive(:bucket) .with(config, rollout_experiment, user_id, user_id) .and_return(variation) - variation_received, reasons = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) - expect(variation_received).to eq(expected_decision) - expect(reasons).to eq(["User 'user_1' meets the audience conditions for targeting rule '1'.", - "User 'user_1' is in the traffic group of targeting rule '1'."]) + 
decision_result = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) + expect(decision_result.decision).to eq(expected_decision) + expect(decision_result.reasons).to eq(["User 'user_1' meets the audience conditions for targeting rule '1'.", + "User 'user_1' is in the traffic group of targeting rule '1'."]) end end @@ -692,13 +615,13 @@ .with(config, everyone_else_experiment, user_id, user_id) .and_return(nil) - variation_received, reasons = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) - expect(variation_received).to eq(nil) - expect(reasons).to eq([ - "User 'user_1' meets the audience conditions for targeting rule '1'.", - "User 'user_1' is not in the traffic group for targeting rule '1'.", - "User 'user_1' meets the audience conditions for targeting rule 'Everyone Else'." - ]) + decision_result = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) + expect(decision_result.decision).to eq(nil) + expect(decision_result.reasons).to eq([ + "User 'user_1' meets the audience conditions for targeting rule '1'.", + "User 'user_1' is not in the traffic group for targeting rule '1'.", + "User 'user_1' meets the audience conditions for targeting rule 'Everyone Else'." 
+ ]) # make sure we only checked the audience for the first rule expect(Optimizely::Audience).to have_received(:user_meets_audience_conditions?).once @@ -723,14 +646,14 @@ .with(config, everyone_else_experiment, user_id, user_id) .and_return(variation) - variation_received, reasons = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) - expect(variation_received).to eq(expected_decision) - expect(reasons).to eq([ - "User 'user_1' meets the audience conditions for targeting rule '1'.", - "User 'user_1' is not in the traffic group for targeting rule '1'.", - "User 'user_1' meets the audience conditions for targeting rule 'Everyone Else'.", - "User 'user_1' is in the traffic group of targeting rule 'Everyone Else'." - ]) + decision_result = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) + expect(decision_result.decision).to eq(expected_decision) + expect(decision_result.reasons).to eq([ + "User 'user_1' meets the audience conditions for targeting rule '1'.", + "User 'user_1' is not in the traffic group for targeting rule '1'.", + "User 'user_1' meets the audience conditions for targeting rule 'Everyone Else'.", + "User 'user_1' is in the traffic group of targeting rule 'Everyone Else'." 
+ ]) # make sure we only checked the audience for the first rule expect(Optimizely::Audience).to have_received(:user_meets_audience_conditions?).once @@ -758,14 +681,14 @@ .with(config, everyone_else_experiment, user_id, user_id) .and_return(variation) - variation_received, reasons = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) - expect(variation_received).to eq(expected_decision) - expect(reasons).to eq([ - "User 'user_1' does not meet the conditions for targeting rule '1'.", - "User 'user_1' does not meet the conditions for targeting rule '2'.", - "User 'user_1' meets the audience conditions for targeting rule 'Everyone Else'.", - "User 'user_1' is in the traffic group of targeting rule 'Everyone Else'." - ]) + decision_result = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) + expect(decision_result.decision).to eq(expected_decision) + expect(decision_result.reasons).to eq([ + "User 'user_1' does not meet the conditions for targeting rule '1'.", + "User 'user_1' does not meet the conditions for targeting rule '2'.", + "User 'user_1' meets the audience conditions for targeting rule 'Everyone Else'.", + "User 'user_1' is in the traffic group of targeting rule 'Everyone Else'." 
+ ]) # verify we tried to bucket in all targeting rules and the everyone else rule expect(Optimizely::Audience).to have_received(:user_meets_audience_conditions?).exactly(3).times @@ -788,13 +711,13 @@ expect(decision_service.bucketer).not_to receive(:bucket) .with(config, everyone_else_experiment, user_id, user_id) - variation_received, reasons = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) - expect(variation_received).to eq(nil) - expect(reasons).to eq([ - "User 'user_1' does not meet the conditions for targeting rule '1'.", - "User 'user_1' does not meet the conditions for targeting rule '2'.", - "User 'user_1' does not meet the conditions for targeting rule 'Everyone Else'." - ]) + decision_result = decision_service.get_variation_for_feature_rollout(config, feature_flag, user_context) + expect(decision_result.decision).to eq(nil) + expect(decision_result.reasons).to eq([ + "User 'user_1' does not meet the conditions for targeting rule '1'.", + "User 'user_1' does not meet the conditions for targeting rule '2'.", + "User 'user_1' does not meet the conditions for targeting rule 'Everyone Else'." 
+ ]) # verify we tried to bucket in all targeting rules and the everyone else rule expect(Optimizely::Audience).to have_received(:user_meets_audience_conditions?).once @@ -816,7 +739,7 @@ describe '#get_variation_for_feature' do config_body_json = OptimizelySpec::VALID_CONFIG_BODY_JSON - project_instance = Optimizely::Project.new(config_body_json, nil, nil, nil) + project_instance = Optimizely::Project.new(datafile: config_body_json) user_context = project_instance.create_user_context('user_1', {}) describe 'when the user is bucketed into the feature experiment' do @@ -828,11 +751,11 @@ 'experiment' => expected_experiment, 'variation' => expected_variation } - allow(decision_service).to receive(:get_variation_for_feature_experiment).and_return([expected_decision, nil]) + allow(decision_service).to receive(:get_variation_for_feature_experiment).and_return(Optimizely::DecisionService::DecisionResult.new(expected_decision, false, [])) - decision_received, reasons = decision_service.get_variation_for_feature(config, feature_flag, user_context) - expect(decision_received).to eq(expected_decision) - expect(reasons).to eq([]) + decision_result = decision_service.get_variation_for_feature(config, feature_flag, user_context) + expect(decision_result.decision).to eq(expected_decision) + expect(decision_result.reasons).to eq([]) end end @@ -847,24 +770,24 @@ variation, Optimizely::DecisionService::DECISION_SOURCES['ROLLOUT'] ) - allow(decision_service).to receive(:get_variation_for_feature_experiment).and_return([nil, nil]) - allow(decision_service).to receive(:get_variation_for_feature_rollout).and_return([expected_decision, nil]) + allow(decision_service).to receive(:get_variation_for_feature_experiment).and_return(Optimizely::DecisionService::DecisionResult.new(nil, false, [])) + allow(decision_service).to receive(:get_variation_for_feature_rollout).and_return(Optimizely::DecisionService::DecisionResult.new(expected_decision, false, [])) - decision_received, reasons = 
decision_service.get_variation_for_feature(config, feature_flag, user_context) - expect(decision_received).to eq(expected_decision) - expect(reasons).to eq([]) + decision_result = decision_service.get_variation_for_feature(config, feature_flag, user_context) + expect(decision_result.decision).to eq(expected_decision) + expect(decision_result.reasons).to eq(["The user 'user_1' is bucketed into a rollout for feature flag 'string_single_variable_feature'."]) end end describe 'and the user is not bucketed into the feature rollout' do it 'should log a message and return nil' do feature_flag = config.feature_flag_key_map['string_single_variable_feature'] - allow(decision_service).to receive(:get_variation_for_feature_experiment).and_return([nil, nil]) - allow(decision_service).to receive(:get_variation_for_feature_rollout).and_return([nil, nil]) + allow(decision_service).to receive(:get_variation_for_feature_experiment).and_return(Optimizely::DecisionService::DecisionResult.new(nil, false, [])) + allow(decision_service).to receive(:get_variation_for_feature_rollout).and_return(Optimizely::DecisionService::DecisionResult.new(nil, false, [])) - decision_received, reasons = decision_service.get_variation_for_feature(config, feature_flag, user_context) - expect(decision_received).to eq(nil) - expect(reasons).to eq([]) + decision_result = decision_service.get_variation_for_feature(config, feature_flag, user_context) + expect(decision_result.decision).to eq(nil) + expect(decision_result.reasons).to eq([]) end end end @@ -1014,4 +937,330 @@ expect(reasons).to eq(["Variation 'control' is mapped to experiment '111127' and user 'test_user_2' in the forced variation map"]) end end + describe 'CMAB experiments' do + describe 'when user is in traffic allocation' do + it 'should return correct variation and CMAB UUID from CMAB service' do + # Create a CMAB experiment configuration + cmab_experiment = { + 'id' => '111150', + 'key' => 'cmab_experiment', + 'status' => 'Running', + 
'layerId' => '111150', + 'audienceIds' => [], + 'forcedVariations' => {}, + 'variations' => [ + {'id' => '111151', 'key' => 'variation_1'}, + {'id' => '111152', 'key' => 'variation_2'} + ], + 'trafficAllocation' => [ + {'entityId' => '111151', 'endOfRange' => 5000}, + {'entityId' => '111152', 'endOfRange' => 10_000} + ], + 'cmab' => {'trafficAllocation' => 5000} + } + user_context = project_instance.create_user_context('test_user', {}) + + # Mock experiment lookup to return our CMAB experiment + allow(config).to receive(:get_experiment_from_id).with('111150').and_return(cmab_experiment) + allow(config).to receive(:experiment_running?).with(cmab_experiment).and_return(true) + + # Mock audience evaluation to pass + allow(Optimizely::Audience).to receive(:user_meets_audience_conditions?).and_return([true, []]) + + # Mock bucketer to return a valid entity ID (user is in traffic allocation) + allow(decision_service.bucketer).to receive(:bucket_to_entity_id) + .with(config, cmab_experiment, 'test_user', 'test_user') + .and_return(['$', []]) + + # Mock CMAB service to return a decision + allow(spy_cmab_service).to receive(:get_decision) + .with(config, user_context, '111150', []) + .and_return(Optimizely::CmabDecision.new(variation_id: '111151', cmab_uuid: 'test-cmab-uuid-123')) + + # Mock variation lookup + allow(config).to receive(:get_variation_from_id_by_experiment_id) + .with('111150', '111151') + .and_return({'id' => '111151', 'key' => 'variation_1'}) + + variation_result = decision_service.get_variation(config, '111150', user_context) + + expect(variation_result.variation_id).to eq('111151') + expect(variation_result.cmab_uuid).to eq('test-cmab-uuid-123') + expect(variation_result.error).to eq(false) + expect(variation_result.reasons).to include( + "User 'test_user' is in variation 'variation_1' of experiment '111150'." 
+ ) + + # Verify CMAB service was called + expect(spy_cmab_service).to have_received(:get_decision).once + end + end + + describe 'when user is not in traffic allocation' do + it 'should return nil variation and log traffic allocation message' do + cmab_experiment = { + 'id' => '111150', + 'key' => 'cmab_experiment', + 'status' => 'Running', + 'layerId' => '111150', + 'audienceIds' => [], + 'forcedVariations' => {}, + 'variations' => [ + {'id' => '111151', 'key' => 'variation_1'} + ], + 'trafficAllocation' => [ + {'entityId' => '111151', 'endOfRange' => 10_000} + ], + 'cmab' => {'trafficAllocation' => 1000} + } + user_context = project_instance.create_user_context('test_user', {}) + + # Mock experiment lookup to return our CMAB experiment + allow(config).to receive(:get_experiment_from_id).with('111150').and_return(cmab_experiment) + allow(config).to receive(:experiment_running?).with(cmab_experiment).and_return(true) + + # Mock audience evaluation to pass + allow(Optimizely::Audience).to receive(:user_meets_audience_conditions?).and_return([true, []]) + + variation_result = decision_service.get_variation(config, '111150', user_context) + + expect(variation_result.variation_id).to eq(nil) + expect(variation_result.cmab_uuid).to eq(nil) + expect(variation_result.error).to eq(false) + expect(variation_result.reasons).to include( + 'User "test_user" not in CMAB experiment "cmab_experiment" due to traffic allocation.' 
+ ) + + # Verify CMAB service was not called since user is not in traffic allocation + expect(spy_cmab_service).not_to have_received(:get_decision) + end + end + + describe 'when CMAB service returns an error' do + it 'should return nil variation and include error in reasons' do + cmab_experiment = { + 'id' => '111150', + 'key' => 'cmab_experiment', + 'status' => 'Running', + 'layerId' => '111150', + 'audienceIds' => [], + 'forcedVariations' => {}, + 'variations' => [ + {'id' => '111151', 'key' => 'variation_1'} + ], + 'trafficAllocation' => [ + {'entityId' => '111151', 'endOfRange' => 10_000} + ], + 'cmab' => {'trafficAllocation' => 5000} + } + user_context = project_instance.create_user_context('test_user', {}) + + # Mock experiment lookup to return our CMAB experiment + allow(config).to receive(:get_experiment_from_id).with('111150').and_return(cmab_experiment) + allow(config).to receive(:experiment_running?).with(cmab_experiment).and_return(true) + + # Mock audience evaluation to pass + allow(Optimizely::Audience).to receive(:user_meets_audience_conditions?).and_return([true, []]) + + # Mock bucketer to return a valid entity ID (user is in traffic allocation) + allow(decision_service.bucketer).to receive(:bucket_to_entity_id) + .with(config, cmab_experiment, 'test_user', 'test_user') + .and_return(['$', []]) + + # Mock CMAB service to return an error + allow(spy_cmab_service).to receive(:get_decision) + .with(config, user_context, '111150', []) + .and_raise(StandardError.new('CMAB service error')) + + variation_result = decision_service.get_variation(config, '111150', user_context) + + expect(variation_result.variation_id).to be_nil + expect(variation_result.cmab_uuid).to be_nil + expect(variation_result.error).to eq(true) + expect(variation_result.reasons).to include( + 'Failed to fetch CMAB data for experiment cmab_experiment.' 
+ ) + + # Verify CMAB service was called but errored + expect(spy_cmab_service).to have_received(:get_decision).once + end + end + + describe 'when user has forced variation' do + it 'should return forced variation and skip CMAB service call' do + # Use a real experiment from the datafile and modify it to be a CMAB experiment + real_experiment = config.get_experiment_from_key('test_experiment') + cmab_experiment = real_experiment.dup + cmab_experiment['cmab'] = {'trafficAllocation' => 5000} + + user_context = project_instance.create_user_context('test_user', {}) + + # Set up forced variation first (using real experiment that exists in datafile) + decision_service.set_forced_variation(config, 'test_experiment', 'test_user', 'variation') + + # Mock the experiment to be a CMAB experiment after setting forced variation + allow(config).to receive(:get_experiment_from_id).with('111127').and_return(cmab_experiment) + allow(config).to receive(:experiment_running?).with(cmab_experiment).and_return(true) + + # Add spy for bucket_to_entity_id method + allow(decision_service.bucketer).to receive(:bucket_to_entity_id).and_call_original + + variation_result = decision_service.get_variation(config, '111127', user_context) + + expect(variation_result.variation_id).to eq('111129') + expect(variation_result.cmab_uuid).to be_nil + expect(variation_result.error).to eq(false) + expect(variation_result.reasons).to include( + "Variation 'variation' is mapped to experiment '111127' and user 'test_user' in the forced variation map" + ) + + # Verify CMAB service was not called since user has forced variation + expect(spy_cmab_service).not_to have_received(:get_decision) + # Verify bucketer was not called since forced variations short-circuit bucketing + expect(decision_service.bucketer).not_to have_received(:bucket_to_entity_id) + end + end + + describe 'when user has whitelisted variation' do + it 'should return whitelisted variation and skip CMAB service call' do + # Create a CMAB 
experiment with whitelisted users + cmab_experiment = { + 'id' => '111150', + 'key' => 'cmab_experiment', + 'status' => 'Running', + 'layerId' => '111150', + 'audienceIds' => [], + 'forcedVariations' => { + 'whitelisted_user' => '111151' # User is whitelisted to variation_1 + }, + 'variations' => [ + {'id' => '111151', 'key' => 'variation_1'}, + {'id' => '111152', 'key' => 'variation_2'} + ], + 'trafficAllocation' => [ + {'entityId' => '111151', 'endOfRange' => 5000}, + {'entityId' => '111152', 'endOfRange' => 10_000} + ], + 'cmab' => {'trafficAllocation' => 5000} + } + user_context = project_instance.create_user_context('whitelisted_user', {}) + + # Mock experiment lookup to return our CMAB experiment + allow(config).to receive(:get_experiment_from_id).with('111150').and_return(cmab_experiment) + allow(config).to receive(:experiment_running?).with(cmab_experiment).and_return(true) + + # Mock the get_whitelisted_variation_id method directly + allow(decision_service).to receive(:get_whitelisted_variation_id) + .with(config, '111150', 'whitelisted_user') + .and_return(['111151', "User 'whitelisted_user' is whitelisted into variation 'variation_1' of experiment '111150'."]) + + variation_result = decision_service.get_variation(config, '111150', user_context) + + expect(variation_result.variation_id).to eq('111151') + expect(variation_result.cmab_uuid).to be_nil + expect(variation_result.error).to eq(false) + expect(variation_result.reasons).to include( + "User 'whitelisted_user' is whitelisted into variation 'variation_1' of experiment '111150'." 
+ ) + # Verify CMAB service was not called since user is whitelisted + expect(spy_cmab_service).not_to have_received(:get_decision) + end + end + + describe 'user profile service behavior' do + it 'should not save user profile for CMAB experiments' do + # Create a CMAB experiment configuration + cmab_experiment = { + 'id' => '111150', + 'key' => 'cmab_experiment', + 'status' => 'Running', + 'layerId' => '111150', + 'audienceIds' => [], + 'forcedVariations' => {}, + 'variations' => [ + {'id' => '111151', 'key' => 'variation_1'}, + {'id' => '111152', 'key' => 'variation_2'} + ], + 'trafficAllocation' => [ + {'entityId' => '111151', 'endOfRange' => 5000}, + {'entityId' => '111152', 'endOfRange' => 10_000} + ], + 'cmab' => {'trafficAllocation' => 5000} + } + user_context = project_instance.create_user_context('test_user', {}) + + # Create a user profile tracker + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id, spy_user_profile_service, spy_logger) + + # Mock experiment lookup to return our CMAB experiment + allow(config).to receive(:get_experiment_from_id).with('111150').and_return(cmab_experiment) + allow(config).to receive(:experiment_running?).with(cmab_experiment).and_return(true) + + # Mock audience evaluation to pass + allow(Optimizely::Audience).to receive(:user_meets_audience_conditions?).and_return([true, []]) + + # Mock bucketer to return a valid entity ID (user is in traffic allocation) + allow(decision_service.bucketer).to receive(:bucket_to_entity_id) + .with(config, cmab_experiment, 'test_user', 'test_user') + .and_return(['$', []]) + + # Mock CMAB service to return a decision + allow(spy_cmab_service).to receive(:get_decision) + .with(config, user_context, '111150', []) + .and_return(Optimizely::CmabDecision.new(variation_id: '111151', cmab_uuid: 'test-cmab-uuid-123')) + + # Mock variation lookup + allow(config).to receive(:get_variation_from_id_by_experiment_id) + .with('111150', '111151') + .and_return({'id' => '111151', 
'key' => 'variation_1'}) + + # Spy on update_user_profile method + allow(user_profile_tracker).to receive(:update_user_profile).and_call_original + + # Call get_variation with the CMAB experiment and user profile tracker + variation_result = decision_service.get_variation(config, '111150', user_context, user_profile_tracker) + + # Verify the variation and cmab_uuid are returned + expect(variation_result.variation_id).to eq('111151') + expect(variation_result.cmab_uuid).to eq('test-cmab-uuid-123') + + # Verify user profile was NOT updated for CMAB experiment + expect(user_profile_tracker).not_to have_received(:update_user_profile) + + # Verify debug log was called to explain CMAB exclusion + expect(spy_logger).to have_received(:log).with( + Logger::DEBUG, + "Skipping user profile service for CMAB experiment 'cmab_experiment'. CMAB decisions are dynamic and not stored for sticky bucketing." + ) + end + + it 'should save user profile for standard (non-CMAB) experiments' do + # Use a standard (non-CMAB) experiment + config.get_experiment_from_key('test_experiment') + user_context = project_instance.create_user_context('test_user', {}) + + # Create a user profile tracker + user_profile_tracker = Optimizely::UserProfileTracker.new(user_context.user_id, spy_user_profile_service, spy_logger) + + # Mock audience evaluation to pass + allow(Optimizely::Audience).to receive(:user_meets_audience_conditions?).and_return([true, []]) + + # Mock bucketer to return a variation + allow(decision_service.bucketer).to receive(:bucket) + .and_return([{'id' => '111129', 'key' => 'variation'}, []]) + + # Spy on update_user_profile method + allow(user_profile_tracker).to receive(:update_user_profile).and_call_original + + # Call get_variation with standard experiment and user profile tracker + variation_result = decision_service.get_variation(config, '111127', user_context, user_profile_tracker) + + # Verify variation was returned + expect(variation_result.variation_id).to eq('111129') + + 
# Verify user profile WAS updated for standard experiment + expect(user_profile_tracker).to have_received(:update_user_profile).with('111127', '111129') + end + end + end end diff --git a/spec/event/batch_event_processor_spec.rb b/spec/event/batch_event_processor_spec.rb index 14e0d01c..604784d6 100644 --- a/spec/event/batch_event_processor_spec.rb +++ b/spec/event/batch_event_processor_spec.rb @@ -293,9 +293,11 @@ @event_processor.flush # Wait until other thread has processed the event. sleep 0.1 until @event_processor.current_batch.empty? + sleep 0.7 # Wait for retries to complete (200ms + 400ms + processing time) expect(@notification_center).not_to have_received(:send_notifications) - expect(spy_logger).to have_received(:log).once.with( + # With retries, error will be logged 3 times (once per attempt) + expect(spy_logger).to have_received(:log).exactly(3).times.with( Logger::ERROR, "Error dispatching event: #{log_event} Timeout::Error." ) @@ -377,4 +379,93 @@ expect(@event_processor.event_queue.length).to eq(0) expect(spy_logger).to have_received(:log).with(Logger::WARN, 'Executor shutdown, not accepting tasks.').once end + + context 'retry logic with exponential backoff' do + it 'should retry on dispatch errors with exponential backoff' do + @event_processor = Optimizely::BatchEventProcessor.new( + event_dispatcher: @event_dispatcher, + batch_size: 1, + flush_interval: 10_000, + logger: spy_logger + ) + + user_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, nil) + log_event = Optimizely::EventFactory.create_log_event(user_event, spy_logger) + + # Simulate dispatch failure twice, then success + call_count = 0 + allow(@event_dispatcher).to receive(:dispatch_event) do + call_count += 1 + raise StandardError, 'Network error' if call_count < 3 + end + + start_time = Time.now + @event_processor.process(user_event) + + # Wait for processing to complete + sleep 0.1 until @event_processor.event_queue.empty? 
+ sleep 0.7 # Wait for retries to complete (200ms + 400ms + processing time) + + elapsed_time = Time.now - start_time + + # Should make 3 attempts total (1 initial + 2 retries) + expect(@event_dispatcher).to have_received(:dispatch_event).with(log_event).exactly(3).times + + # Should have delays: 200ms + 400ms = 600ms minimum + expect(elapsed_time).to be >= 0.6 + + # Should log retry attempts + expect(spy_logger).to have_received(:log).with( + Logger::DEBUG, /Retrying event dispatch/ + ).at_least(:twice) + end + + it 'should give up after max retries' do + @event_processor = Optimizely::BatchEventProcessor.new( + event_dispatcher: @event_dispatcher, + batch_size: 1, + flush_interval: 10_000, + logger: spy_logger + ) + + user_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, nil) + log_event = Optimizely::EventFactory.create_log_event(user_event, spy_logger) + + # Simulate dispatch failure every time + allow(@event_dispatcher).to receive(:dispatch_event).and_raise(StandardError, 'Network error') + + @event_processor.process(user_event) + + # Wait for processing to complete + sleep 0.1 until @event_processor.event_queue.empty? 
+ sleep 0.7 # Wait for all retries to complete + + # Should make 3 attempts total (1 initial + 2 retries) + expect(@event_dispatcher).to have_received(:dispatch_event).with(log_event).exactly(3).times + + # Should log error for each attempt + expect(spy_logger).to have_received(:log).with( + Logger::ERROR, /Error dispatching event/ + ).exactly(3).times + end + + it 'should calculate correct exponential backoff intervals' do + processor = Optimizely::BatchEventProcessor.new + + # First retry: 200ms + expect(processor.send(:calculate_retry_interval, 0)).to eq(0.2) + + # Second retry: 400ms + expect(processor.send(:calculate_retry_interval, 1)).to eq(0.4) + + # Third retry: 800ms + expect(processor.send(:calculate_retry_interval, 2)).to eq(0.8) + + # Fourth retry: capped at 1s + expect(processor.send(:calculate_retry_interval, 3)).to eq(1.0) + + # Fifth retry: still capped at 1s + expect(processor.send(:calculate_retry_interval, 4)).to eq(1.0) + end + end end diff --git a/spec/event/event_factory_spec.rb b/spec/event/event_factory_spec.rb index b92661be..f9771e4f 100644 --- a/spec/event/event_factory_spec.rb +++ b/spec/event/event_factory_spec.rb @@ -34,7 +34,10 @@ allow(Time).to receive(:now).and_return(time_now) allow(SecureRandom).to receive(:uuid).and_return('a68cf1ad-0393-4e18-af87-efe8f01a7c9c') - @expected_endpoint = 'https://logx.optimizely.com/v1/events' + @expected_endpoints = { + US: 'https://logx.optimizely.com/v1/events', + EU: 'https://eu.logx.optimizely.com/v1/events' + } @expected_impression_params = { account_id: '12001', project_id: '111001', @@ -111,7 +114,33 @@ impression_event = Optimizely::UserEventFactory.create_impression_event(project_config, experiment, '111128', metadata, 'test_user', nil) log_event = Optimizely::EventFactory.create_log_event(impression_event, spy_logger) expect(log_event.params).to eq(@expected_impression_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) + 
expect(log_event.http_verb).to eq(:post) + end + + it 'should create valid Event when create_impression_event is called without attributes and with EU' do + experiment = project_config.get_experiment_from_key('test_experiment') + metadata = { + flag_key: '', + rule_key: 'test_experiment', + rule_type: 'experiment', + variation_key: '111128' + } + allow_any_instance_of(Optimizely::ImpressionEvent).to receive(:event_context).and_return( + { + account_id: '12001', + project_id: '111001', + client_version: Optimizely::VERSION, + revision: '42', + client_name: Optimizely::CLIENT_ENGINE, + anonymize_ip: false, + region: 'EU' + } + ) + impression_event = Optimizely::UserEventFactory.create_impression_event(project_config, experiment, '111128', metadata, 'test_user', nil) + log_event = Optimizely::EventFactory.create_log_event(impression_event, spy_logger) + expect(log_event.params).to eq(@expected_impression_params) + expect(log_event.url).to eq(@expected_endpoints[:EU]) expect(log_event.http_verb).to eq(:post) end @@ -134,7 +163,7 @@ 'browser_type' => 'firefox') log_event = Optimizely::EventFactory.create_log_event(impression_event, spy_logger) expect(log_event.params).to eq(@expected_impression_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -184,7 +213,7 @@ impression_event = Optimizely::UserEventFactory.create_impression_event(project_config, experiment, '111128', metadata, 'test_user', attributes) log_event = Optimizely::EventFactory.create_log_event(impression_event, spy_logger) expect(log_event.params).to eq(@expected_impression_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -225,7 +254,7 @@ impression_event = Optimizely::UserEventFactory.create_impression_event(project_config, experiment, '111128', metadata, 'test_user', attributes) 
log_event = Optimizely::EventFactory.create_log_event(impression_event, spy_logger) expect(log_event.params).to eq(@expected_impression_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -248,7 +277,7 @@ 'browser_type' => false) log_event = Optimizely::EventFactory.create_log_event(impression_event, spy_logger) expect(log_event.params).to eq(@expected_impression_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -270,7 +299,7 @@ impression_event = Optimizely::UserEventFactory.create_impression_event(project_config, experiment, '111128', metadata, 'test_user', 'browser_type' => 0) log_event = Optimizely::EventFactory.create_log_event(impression_event, spy_logger) expect(log_event.params).to eq(@expected_impression_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -286,7 +315,7 @@ invalid_attribute: 'sorry_not_sorry') log_event = Optimizely::EventFactory.create_log_event(impression_event, spy_logger) expect(log_event.params).to eq(@expected_impression_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -294,7 +323,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, nil) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -309,7 +338,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, 
event, 'test_user', {'browser_type' => 'firefox'}, nil) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -322,7 +351,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, event_tags) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -335,7 +364,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, event_tags) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -347,7 +376,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, event_tags) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -359,7 +388,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, event_tags) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to 
eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -375,7 +404,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, event_tags) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -390,7 +419,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, event_tags) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -404,7 +433,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, event_tags) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -418,7 +447,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, event_tags) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -432,7 +461,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 
'test_user', nil, event_tags) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -447,7 +476,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, event_tags) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -461,7 +490,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', nil, event_tags) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -487,7 +516,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', {'browser_type' => 'firefox'}, event_tags) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -520,7 +549,7 @@ impression_event = Optimizely::UserEventFactory.create_impression_event(project_config, experiment, '111128', metadata, 'test_user', user_attributes) log_event = Optimizely::EventFactory.create_log_event(impression_event, spy_logger) expect(log_event.params).to eq(@expected_impression_params) - 
expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -556,7 +585,7 @@ impression_event = Optimizely::UserEventFactory.create_impression_event(project_config, experiment, '111128', metadata, 'test_user', user_attributes) log_event = Optimizely::EventFactory.create_log_event(impression_event, spy_logger) expect(log_event.params).to eq(@expected_impression_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -594,7 +623,7 @@ ) log_event = Optimizely::EventFactory.create_log_event(impression_event, spy_logger) expect(log_event.params).to eq(@expected_impression_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -620,7 +649,45 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', user_attributes, nil) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) + expect(log_event.http_verb).to eq(:post) + end + + it 'should create valid Event when create_conversion_event is called with Bucketing ID attribute and with EU' do + @expected_conversion_params[:visitors][0][:attributes].unshift( + { + entity_id: '111094', + key: 'browser_type', + type: 'custom', + value: 'firefox' + }, + entity_id: Optimizely::Helpers::Constants::CONTROL_ATTRIBUTES['BUCKETING_ID'], + key: Optimizely::Helpers::Constants::CONTROL_ATTRIBUTES['BUCKETING_ID'], + type: 'custom', + value: 'variation' + ) + + user_attributes = { + 'browser_type' => 'firefox', + '$opt_bucketing_id' => 'variation' + } + + 
allow_any_instance_of(Optimizely::ConversionEvent).to receive(:event_context).and_return( + { + account_id: '12001', + project_id: '111001', + client_version: Optimizely::VERSION, + revision: '42', + client_name: Optimizely::CLIENT_ENGINE, + anonymize_ip: false, + region: 'EU' + } + ) + + conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', user_attributes, nil) + log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) + expect(log_event.params).to eq(@expected_conversion_params) + expect(log_event.url).to eq(@expected_endpoints[:EU]) expect(log_event.http_verb).to eq(:post) end @@ -642,7 +709,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', user_attributes, nil) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end @@ -671,7 +738,7 @@ conversion_event = Optimizely::UserEventFactory.create_conversion_event(project_config, event, 'test_user', user_attributes, nil) log_event = Optimizely::EventFactory.create_log_event(conversion_event, spy_logger) expect(log_event.params).to eq(@expected_conversion_params) - expect(log_event.url).to eq(@expected_endpoint) + expect(log_event.url).to eq(@expected_endpoints[:US]) expect(log_event.http_verb).to eq(:post) end end diff --git a/spec/event/user_event_factory_spec.rb b/spec/event/user_event_factory_spec.rb index f9876c23..f1ed533e 100644 --- a/spec/event/user_event_factory_spec.rb +++ b/spec/event/user_event_factory_spec.rb @@ -46,6 +46,7 @@ expect(impression_event.event_context[:project_id]).to eq(project_config.project_id) expect(impression_event.event_context[:revision]).to eq(project_config.revision) 
expect(impression_event.event_context[:anonymize_ip]).to eq(project_config.anonymize_ip) + expect(impression_event.event_context[:region]).to eq(project_config.region) expect(impression_event.bot_filtering).to eq(project_config.bot_filtering) expect(impression_event.experiment_id).to eq(experiment['id']) expect(impression_event.variation_id).to eq('111128') @@ -79,6 +80,7 @@ expect(impression_event.event_context[:project_id]).to eq(project_config.project_id) expect(impression_event.event_context[:revision]).to eq(project_config.revision) expect(impression_event.event_context[:anonymize_ip]).to eq(project_config.anonymize_ip) + expect(impression_event.event_context[:region]).to eq(project_config.region) expect(impression_event.bot_filtering).to eq(project_config.bot_filtering) expect(impression_event.experiment_id).to eq(experiment['id']) expect(impression_event.variation_id).to eq('111128') @@ -108,6 +110,7 @@ expect(conversion_event.event_context[:project_id]).to eq(project_config.project_id) expect(conversion_event.event_context[:revision]).to eq(project_config.revision) expect(conversion_event.event_context[:anonymize_ip]).to eq(project_config.anonymize_ip) + expect(conversion_event.event_context[:region]).to eq(project_config.region) expect(conversion_event.event['key']).to eq(event['key']) expect(conversion_event.bot_filtering).to eq(project_config.bot_filtering) expect(conversion_event.user_id).to eq('test_user') @@ -141,6 +144,7 @@ expect(conversion_event.event_context[:project_id]).to eq(project_config.project_id) expect(conversion_event.event_context[:revision]).to eq(project_config.revision) expect(conversion_event.event_context[:anonymize_ip]).to eq(project_config.anonymize_ip) + expect(conversion_event.event_context[:region]).to eq(project_config.region) expect(conversion_event.event['key']).to eq(event['key']) expect(conversion_event.bot_filtering).to eq(project_config.bot_filtering) expect(conversion_event.user_id).to eq('test_user') diff --git 
a/spec/event_builder_spec.rb b/spec/event_builder_spec.rb index 4201c579..62cf5ba1 100644 --- a/spec/event_builder_spec.rb +++ b/spec/event_builder_spec.rb @@ -39,7 +39,10 @@ allow(Time).to receive(:now).and_return(time_now) allow(SecureRandom).to receive(:uuid).and_return('a68cf1ad-0393-4e18-af87-efe8f01a7c9c') - @expected_endpoint = 'https://logx.optimizely.com/v1/events' + @expected_endpoints = { + US: 'https://logx.optimizely.com/v1/events', + EU: 'https://eu.logx.optimizely.com/v1/events' + } @expected_impression_params = { account_id: '12001', project_id: '111001', @@ -69,7 +72,8 @@ revision: '42', client_name: Optimizely::CLIENT_ENGINE, enrich_decisions: true, - client_version: Optimizely::VERSION + client_version: Optimizely::VERSION, + region: 'US' } @expected_conversion_params = { account_id: '12001', @@ -95,7 +99,8 @@ revision: '42', client_name: Optimizely::CLIENT_ENGINE, enrich_decisions: true, - client_version: Optimizely::VERSION + client_version: Optimizely::VERSION, + region: 'US' } end @@ -103,7 +108,7 @@ experiment = config.get_experiment_from_key('test_experiment') impression_event = @event_builder.create_impression_event(config, experiment, '111128', 'test_user', nil) expect(impression_event.params).to eq(@expected_impression_params) - expect(impression_event.url).to eq(@expected_endpoint) + expect(impression_event.url).to eq(@expected_endpoints[:US]) expect(impression_event.http_verb).to eq(:post) end @@ -119,7 +124,7 @@ impression_event = @event_builder.create_impression_event(config, experiment, '111128', 'test_user', 'browser_type' => 'firefox') expect(impression_event.params).to eq(@expected_impression_params) - expect(impression_event.url).to eq(@expected_endpoint) + expect(impression_event.url).to eq(@expected_endpoints[:US]) expect(impression_event.http_verb).to eq(:post) end @@ -161,7 +166,7 @@ } impression_event = @event_builder.create_impression_event(config, experiment, '111128', 'test_user', attributes) 
expect(impression_event.params).to eq(@expected_impression_params) - expect(impression_event.url).to eq(@expected_endpoint) + expect(impression_event.url).to eq(@expected_endpoints[:US]) expect(impression_event.http_verb).to eq(:post) end @@ -194,7 +199,7 @@ } impression_event = @event_builder.create_impression_event(config, experiment, '111128', 'test_user', attributes) expect(impression_event.params).to eq(@expected_impression_params) - expect(impression_event.url).to eq(@expected_endpoint) + expect(impression_event.url).to eq(@expected_endpoints[:US]) expect(impression_event.http_verb).to eq(:post) end @@ -210,7 +215,7 @@ impression_event = @event_builder.create_impression_event(config, experiment, '111128', 'test_user', 'browser_type' => false) expect(impression_event.params).to eq(@expected_impression_params) - expect(impression_event.url).to eq(@expected_endpoint) + expect(impression_event.url).to eq(@expected_endpoints[:US]) expect(impression_event.http_verb).to eq(:post) end @@ -225,7 +230,7 @@ experiment = config.get_experiment_from_key('test_experiment') impression_event = @event_builder.create_impression_event(config, experiment, '111128', 'test_user', 'browser_type' => 0) expect(impression_event.params).to eq(@expected_impression_params) - expect(impression_event.url).to eq(@expected_endpoint) + expect(impression_event.url).to eq(@expected_endpoints[:US]) expect(impression_event.http_verb).to eq(:post) end @@ -234,14 +239,14 @@ impression_event = @event_builder.create_impression_event(config, experiment, '111128', 'test_user', invalid_attribute: 'sorry_not_sorry') expect(impression_event.params).to eq(@expected_impression_params) - expect(impression_event.url).to eq(@expected_endpoint) + expect(impression_event.url).to eq(@expected_endpoints[:US]) expect(impression_event.http_verb).to eq(:post) end it 'should create a valid Event when create_conversion_event is called' do conversion_event = @event_builder.create_conversion_event(config, @event, 
'test_user', nil, nil) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -255,7 +260,7 @@ conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', {'browser_type' => 'firefox'}, nil) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -267,7 +272,7 @@ conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', nil, event_tags) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -279,7 +284,7 @@ conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', nil, event_tags) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -290,7 +295,7 @@ conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', nil, event_tags) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -301,7 +306,7 @@ conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', nil, event_tags) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + 
expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -316,7 +321,7 @@ conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', nil, event_tags) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -330,7 +335,7 @@ conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', nil, event_tags) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -343,7 +348,7 @@ conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', nil, event_tags) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -356,7 +361,7 @@ conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', nil, event_tags) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -369,7 +374,7 @@ conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', nil, event_tags) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -383,7 +388,7 @@ conversion_event = 
@event_builder.create_conversion_event(config, @event, 'test_user', nil, event_tags) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -396,7 +401,7 @@ conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', nil, event_tags) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -421,7 +426,7 @@ conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', {'browser_type' => 'firefox'}, event_tags) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -447,7 +452,7 @@ experiment = config.get_experiment_from_key('test_experiment') impression_event = @event_builder.create_impression_event(config, experiment, '111128', 'test_user', user_attributes) expect(impression_event.params).to eq(@expected_impression_params) - expect(impression_event.url).to eq(@expected_endpoint) + expect(impression_event.url).to eq(@expected_endpoints[:US]) expect(impression_event.http_verb).to eq(:post) end @@ -476,7 +481,7 @@ expect(config.send(:bot_filtering)).to eq(true) impression_event = @event_builder.create_impression_event(config, experiment, '111128', 'test_user', user_attributes) expect(impression_event.params).to eq(@expected_impression_params) - expect(impression_event.url).to eq(@expected_endpoint) + expect(impression_event.url).to eq(@expected_endpoints[:US]) expect(impression_event.http_verb).to eq(:post) end @@ -505,7 +510,7 @@ allow(config).to 
receive(:bot_filtering).and_return(false) impression_event = @event_builder.create_impression_event(config, experiment, '111128', 'test_user', user_attributes) expect(impression_event.params).to eq(@expected_impression_params) - expect(impression_event.url).to eq(@expected_endpoint) + expect(impression_event.url).to eq(@expected_endpoints[:US]) expect(impression_event.http_verb).to eq(:post) end @@ -530,7 +535,7 @@ } conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', user_attributes, nil) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -551,7 +556,7 @@ } conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', user_attributes, nil) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end @@ -579,7 +584,7 @@ allow(config).to receive(:bot_filtering).and_return(false) conversion_event = @event_builder.create_conversion_event(config, @event, 'test_user', user_attributes, nil) expect(conversion_event.params).to eq(@expected_conversion_params) - expect(conversion_event.url).to eq(@expected_endpoint) + expect(conversion_event.url).to eq(@expected_endpoints[:US]) expect(conversion_event.http_verb).to eq(:post) end end diff --git a/spec/event_dispatcher_spec.rb b/spec/event_dispatcher_spec.rb index 193f584d..b061bcc0 100644 --- a/spec/event_dispatcher_spec.rb +++ b/spec/event_dispatcher_spec.rb @@ -47,16 +47,27 @@ it 'should pass the proxy_config to the HttpUtils helper class' do event = Optimizely::Event.new(:post, @url, @params, @post_headers) - expect(Optimizely::Helpers::HttpUtils).to receive(:make_request).with( + # Allow the method to 
be called (potentially multiple times due to retries) + allow(Optimizely::Helpers::HttpUtils).to receive(:make_request).with( event.url, event.http_verb, event.params.to_json, event.headers, Optimizely::Helpers::Constants::EVENT_DISPATCH_CONFIG[:REQUEST_TIMEOUT], proxy_config - ) + ).and_return(double(code: '200')) @customized_event_dispatcher.dispatch_event(event) + + # Verify it was called at least once with the correct parameters + expect(Optimizely::Helpers::HttpUtils).to have_received(:make_request).with( + event.url, + event.http_verb, + event.params.to_json, + event.headers, + Optimizely::Helpers::Constants::EVENT_DISPATCH_CONFIG[:REQUEST_TIMEOUT], + proxy_config + ).at_least(:once) end end @@ -171,10 +182,9 @@ stub_request(:post, @url).to_return(status: 399) event = Optimizely::Event.new(:post, @url, @params, @post_headers) - response = @customized_event_dispatcher.dispatch_event(event) + @customized_event_dispatcher.dispatch_event(event) - expect(response).to have_received(:log) - expect(spy_logger).to have_received(:log) + expect(spy_logger).to have_received(:log).with(Logger::DEBUG, 'event successfully sent with response code 399') expect(error_handler).to_not have_received(:handle_error) end @@ -182,10 +192,9 @@ stub_request(:post, @url).to_return(status: 600) event = Optimizely::Event.new(:post, @url, @params, @post_headers) - response = @customized_event_dispatcher.dispatch_event(event) + @customized_event_dispatcher.dispatch_event(event) - expect(response).to have_received(:log) - expect(spy_logger).to have_received(:log) + expect(spy_logger).to have_received(:log).with(Logger::DEBUG, 'event successfully sent with response code 600') expect(error_handler).not_to have_received(:handle_error) end end diff --git a/spec/notification_center_registry_spec.rb b/spec/notification_center_registry_spec.rb index ab783ef5..2a4521c7 100644 --- a/spec/notification_center_registry_spec.rb +++ b/spec/notification_center_registry_spec.rb @@ -42,7 +42,7 @@ 
stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: config_body_JSON) - project = Optimizely::Project.new(nil, nil, spy_logger, nil, false, nil, sdk_key) + project = Optimizely::Project.new(logger: spy_logger, sdk_key: sdk_key) notification_center = Optimizely::NotificationCenterRegistry.get_notification_center(sdk_key, spy_logger) expect(notification_center).to be_a Optimizely::NotificationCenter @@ -60,7 +60,7 @@ .to_return(status: 200, body: config_body_JSON) notification_center = Optimizely::NotificationCenterRegistry.get_notification_center(sdk_key, spy_logger) - project = Optimizely::Project.new(nil, nil, spy_logger, nil, false, nil, sdk_key) + project = Optimizely::Project.new(logger: spy_logger, sdk_key: sdk_key) expect(notification_center).to eq(Optimizely::NotificationCenterRegistry.get_notification_center(sdk_key, spy_logger)) expect(spy_logger).not_to have_received(:log).with(Logger::ERROR, anything) @@ -78,7 +78,7 @@ notification_center = Optimizely::NotificationCenterRegistry.get_notification_center(sdk_key, spy_logger) expect(notification_center).to receive(:send_notifications).once - project = Optimizely::Project.new(nil, nil, spy_logger, nil, false, nil, sdk_key) + project = Optimizely::Project.new(logger: spy_logger, sdk_key: sdk_key) project.config_manager.config Optimizely::NotificationCenterRegistry.remove_notification_center(sdk_key) diff --git a/spec/odp/lru_cache_spec.rb b/spec/odp/lru_cache_spec.rb index 46363c8b..32db021f 100644 --- a/spec/odp/lru_cache_spec.rb +++ b/spec/odp/lru_cache_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2022, Optimizely and contributors +# Copyright 2022-2025, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -149,4 +149,84 @@ cache.save('cow', 'crate') expect(cache.lookup('cow')).to eq 'crate' end + + it 'should remove existing key' do + cache = Optimizely::LRUCache.new(3, 1000) + + cache.save('1', 100) + cache.save('2', 200) + cache.save('3', 300) + + expect(cache.lookup('1')).to eq 100 + expect(cache.lookup('2')).to eq 200 + expect(cache.lookup('3')).to eq 300 + + cache.remove('2') + + expect(cache.lookup('1')).to eq 100 + expect(cache.lookup('2')).to be_nil + expect(cache.lookup('3')).to eq 300 + end + + it 'should handle removing non-existent key' do + cache = Optimizely::LRUCache.new(3, 1000) + cache.save('1', 100) + cache.save('2', 200) + + cache.remove('3') # Doesn't exist + + expect(cache.lookup('1')).to eq 100 + expect(cache.lookup('2')).to eq 200 + end + + it 'should handle removing from zero sized cache' do + cache = Optimizely::LRUCache.new(0, 1000) + cache.save('1', 100) + cache.remove('1') + + expect(cache.lookup('1')).to be_nil + end + + it 'should handle removing and adding back a key' do + cache = Optimizely::LRUCache.new(3, 1000) + cache.save('1', 100) + cache.save('2', 200) + cache.save('3', 300) + + cache.remove('2') + cache.save('2', 201) + + expect(cache.lookup('1')).to eq 100 + expect(cache.lookup('2')).to eq 201 + expect(cache.lookup('3')).to eq 300 + end + + it 'should handle thread safety' do + max_size = 100 + cache = Optimizely::LRUCache.new(max_size, 1000) + + (1..max_size).each do |i| + cache.save(i.to_s, i * 100) + end + + threads = [] + (1..(max_size / 2)).each do |i| + thread = Thread.new do + cache.remove(i.to_s) + end + threads << thread + end + + threads.each(&:join) + + (1..max_size).each do |i| + if i <= max_size / 2 + expect(cache.lookup(i.to_s)).to be_nil + else + expect(cache.lookup(i.to_s)).to eq(i * 100) + end + end + + expect(cache.instance_variable_get('@map').size).to eq(max_size / 2) + end end diff --git a/spec/odp/odp_event_manager_spec.rb b/spec/odp/odp_event_manager_spec.rb index 57402887..6729bea5 100644 --- 
a/spec/odp/odp_event_manager_spec.rb +++ b/spec/odp/odp_event_manager_spec.rb @@ -260,16 +260,20 @@ allow(SecureRandom).to receive(:uuid).and_return(test_uuid) event_manager = Optimizely::OdpEventManager.new(logger: spy_logger) retry_count = event_manager.instance_variable_get('@retry_count') - allow(event_manager.api_manager).to receive(:send_odp_events).exactly(retry_count + 1).times.with(api_key, api_host, odp_events).and_return(true) + allow(event_manager.api_manager).to receive(:send_odp_events).exactly(retry_count).times.with(api_key, api_host, odp_events).and_return(true) event_manager.start!(odp_config) event_manager.send_event(**events[0]) event_manager.send_event(**events[1]) event_manager.flush - sleep(0.1) until event_manager.instance_variable_get('@event_queue').empty? + # Need to wait longer for retries with exponential backoff (200ms + 400ms = 600ms) + sleep(1) until event_manager.instance_variable_get('@event_queue').empty? expect(event_manager.instance_variable_get('@current_batch').length).to eq 0 - expect(spy_logger).to have_received(:log).exactly(retry_count).times.with(Logger::DEBUG, 'Error dispatching ODP events, scheduled to retry.') + # Updated log message includes retry attempt and delay info + expect(spy_logger).to have_received(:log).with( + Logger::DEBUG, /Error dispatching ODP events, retrying/ + ).exactly(retry_count - 1).times expect(spy_logger).to have_received(:log).once.with(Logger::ERROR, "ODP event send failed (Failed after 3 retries: #{processed_events.to_json}).") event_manager.stop! 
@@ -278,16 +282,20 @@ it 'should retry on network failure' do allow(SecureRandom).to receive(:uuid).and_return(test_uuid) event_manager = Optimizely::OdpEventManager.new(logger: spy_logger) - allow(event_manager.api_manager).to receive(:send_odp_events).once.with(api_key, api_host, odp_events).and_return(true, true, false) + allow(event_manager.api_manager).to receive(:send_odp_events).with(api_key, api_host, odp_events).and_return(true, true, false) event_manager.start!(odp_config) event_manager.send_event(**events[0]) event_manager.send_event(**events[1]) event_manager.flush - sleep(0.1) until event_manager.instance_variable_get('@event_queue').empty? + # Need to wait longer for retries with exponential backoff (200ms + 400ms = 600ms) + sleep(1) until event_manager.instance_variable_get('@event_queue').empty? expect(event_manager.instance_variable_get('@current_batch').length).to eq 0 - expect(spy_logger).to have_received(:log).twice.with(Logger::DEBUG, 'Error dispatching ODP events, scheduled to retry.') + # Updated log message includes retry attempt and delay info + expect(spy_logger).to have_received(:log).with( + Logger::DEBUG, /Error dispatching ODP events, retrying/ + ).twice expect(spy_logger).not_to have_received(:log).with(Logger::ERROR, anything) expect(event_manager.running?).to be true event_manager.stop! 
@@ -539,5 +547,52 @@ expect(spy_logger).to have_received(:log).once.with(Logger::DEBUG, 'ODP event queue: cannot send before config has been set.') + expect(spy_logger).not_to have_received(:log).with(Logger::ERROR, anything) + end + + it 'should use exponential backoff between retries' do + allow(SecureRandom).to receive(:uuid).and_return(test_uuid) + event_manager = Optimizely::OdpEventManager.new(logger: spy_logger) + + # send_odp_events returning true signals a retryable failure here, so every attempt fails and triggers a retry + allow(event_manager.api_manager).to receive(:send_odp_events).with(api_key, api_host, odp_events).and_return(true) + event_manager.start!(odp_config) + + start_time = Time.now + event_manager.send_event(**events[0]) + event_manager.send_event(**events[1]) + event_manager.flush + + # Wait for all retries to complete (need at least 600ms for 200ms + 400ms delays) + sleep(1) until event_manager.instance_variable_get('@event_queue').empty? + elapsed_time = Time.now - start_time + + # Should have delays: 200ms + 400ms = 600ms minimum for 3 total attempts + expect(elapsed_time).to be >= 0.5 # Allow some tolerance + + # Should log retry attempts with delay info + expect(spy_logger).to have_received(:log).with( + Logger::DEBUG, /retrying \(attempt \d+ of \d+\) after/ + ).at_least(:once) + + event_manager.stop! 
+ end + + it 'should calculate correct exponential backoff intervals' do + event_manager = Optimizely::OdpEventManager.new + + # First retry: 200ms + expect(event_manager.send(:calculate_retry_interval, 0)).to eq(0.2) + + # Second retry: 400ms + expect(event_manager.send(:calculate_retry_interval, 1)).to eq(0.4) + + # Third retry: 800ms + expect(event_manager.send(:calculate_retry_interval, 2)).to eq(0.8) + + # Fourth retry: capped at 1s + expect(event_manager.send(:calculate_retry_interval, 3)).to eq(1.0) + + # Fifth retry: still capped at 1s + expect(event_manager.send(:calculate_retry_interval, 4)).to eq(1.0) + end end end diff --git a/spec/optimizely_config_spec.rb b/spec/optimizely_config_spec.rb index 8d364e1d..4164d3ca 100644 --- a/spec/optimizely_config_spec.rb +++ b/spec/optimizely_config_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2019-2021, Optimizely and contributors +# Copyright 2019-2021, 2023, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -19,6 +19,7 @@ require 'spec_helper' describe Optimizely::OptimizelyConfig do + let(:config_body) { OptimizelySpec::VALID_CONFIG_BODY } let(:config_body_JSON) { OptimizelySpec::VALID_CONFIG_BODY_JSON } let(:similar_exp_keys_JSON) { OptimizelySpec::SIMILAR_EXP_KEYS_JSON } let(:typed_audiences_JSON) { OptimizelySpec::CONFIG_DICT_WITH_TYPED_AUDIENCES_JSON } @@ -26,16 +27,16 @@ let(:error_handler) { Optimizely::NoOpErrorHandler.new } let(:spy_logger) { spy('logger') } let(:project_config) { Optimizely::DatafileProjectConfig.new(config_body_JSON, spy_logger, error_handler) } - let(:project_instance) { Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler) } + let(:project_instance) { Optimizely::Project.new(datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler) } let(:optimizely_config) { project_instance.get_optimizely_config } let(:project_config_sim_keys) { Optimizely::DatafileProjectConfig.new(similar_exp_keys_JSON, spy_logger, error_handler) } - let(:project_instance_sim_keys) { Optimizely::Project.new(similar_exp_keys_JSON, nil, spy_logger, error_handler) } + let(:project_instance_sim_keys) { Optimizely::Project.new(datafile: similar_exp_keys_JSON, logger: spy_logger, error_handler: error_handler) } let(:optimizely_config_sim_keys) { project_instance_sim_keys.get_optimizely_config } let(:project_config_typed_audiences) { Optimizely::DatafileProjectConfig.new(typed_audiences_JSON, spy_logger, error_handler) } - let(:project_instance_typed_audiences) { Optimizely::Project.new(typed_audiences_JSON, nil, spy_logger, error_handler) } + let(:project_instance_typed_audiences) { Optimizely::Project.new(datafile: typed_audiences_JSON, logger: spy_logger, error_handler: error_handler) } let(:optimizely_config_typed_audiences) { project_instance_typed_audiences.get_optimizely_config } let(:project_config_similar_rule_keys) { Optimizely::DatafileProjectConfig.new(similar_rule_key_JSON, spy_logger, error_handler) } - 
let(:project_instance_similar_rule_keys) { Optimizely::Project.new(similar_rule_key_JSON, nil, spy_logger, error_handler) } + let(:project_instance_similar_rule_keys) { Optimizely::Project.new(datafile: similar_rule_key_JSON, logger: spy_logger, error_handler: error_handler) } let(:optimizely_config_similar_rule_keys) { project_instance_similar_rule_keys.get_optimizely_config } after(:example) do project_instance.close @@ -768,7 +769,7 @@ '', '"exactString" OR "999999999"' ] - optimizely_config = Optimizely::OptimizelyConfig.new(project_instance_typed_audiences.send(:project_config)) + optimizely_config = Optimizely::OptimizelyConfig.new(project_instance_typed_audiences.send(:project_config), spy_logger) audiences_map = optimizely_config.send(:audiences_map) audience_conditions.each_with_index do |audience_condition, index| result = optimizely_config.send(:replace_ids_with_names, audience_condition, audiences_map) @@ -796,4 +797,64 @@ expect(optimizely_config_similar_rule_keys['sdkKey']).to eq('') expect(optimizely_config_similar_rule_keys['environmentKey']).to eq('') end + + it 'should use the newest of duplicate experiment keys' do + duplicate_experiment_key = 'test_experiment' + new_experiment = { + 'key': duplicate_experiment_key, + 'status': 'Running', + 'layerId': '8', + "audienceConditions": %w[ + or + 11160 + ], + 'audienceIds': ['11160'], + 'id': '111137', + 'forcedVariations': {}, + 'trafficAllocation': [ + {'entityId': '222242', 'endOfRange': 8000}, + {'entityId': '', 'endOfRange': 10_000} + ], + 'variations': [ + { + 'id': '222242', + 'key': 'control', + 'variables': [] + } + ] + } + + new_feature = { + 'id': '91117', + 'key': 'new_feature', + 'experimentIds': ['111137'], + 'rolloutId': '', + 'variables': [ + {'id': '127', 'key': 'is_working', 'defaultValue': 'true', 'type': 'boolean'}, + {'id': '128', 'key': 'environment', 'defaultValue': 'devel', 'type': 'string'}, + {'id': '129', 'key': 'cost', 'defaultValue': '10.99', 'type': 'double'}, + {'id': 
'130', 'key': 'count', 'defaultValue': '999', 'type': 'integer'}, + {'id': '131', 'key': 'variable_without_usage', 'defaultValue': '45', 'type': 'integer'}, + {'id': '132', 'key': 'object', 'defaultValue': '{"test": 12}', 'type': 'string', 'subType': 'json'}, + {'id': '133', 'key': 'true_object', 'defaultValue': '{"true_test": 23.54}', 'type': 'json'} + ] + } + + config = OptimizelySpec.deep_clone(config_body) + + config['experiments'].push(new_experiment) + config['featureFlags'].push(new_feature) + project_config = Optimizely::DatafileProjectConfig.new(JSON.dump(config), spy_logger, error_handler) + + opti_config = Optimizely::OptimizelyConfig.new(project_config, spy_logger) + + key_map = opti_config.config['experimentsMap'] + id_map = opti_config.send(:experiments_id_map) + + expected_warning_message = "Duplicate experiment keys found in datafile: #{duplicate_experiment_key}" + expect(spy_logger).to have_received(:log).once.with(Logger::WARN, expected_warning_message) + + expect(key_map[duplicate_experiment_key]['id']).to eq(new_experiment[:id]) + expect(id_map.values.count { |exp| exp['key'] == duplicate_experiment_key }).to eq(2) + end end diff --git a/spec/optimizely_user_context_spec.rb b/spec/optimizely_user_context_spec.rb index 6a99c57b..42d71065 100644 --- a/spec/optimizely_user_context_spec.rb +++ b/spec/optimizely_user_context_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2020, 2022, Optimizely and contributors +# Copyright 2020, 2022-2023, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -27,9 +27,9 @@ let(:integration_JSON) { OptimizelySpec::CONFIG_DICT_WITH_INTEGRATIONS_JSON } let(:error_handler) { Optimizely::RaiseErrorHandler.new } let(:spy_logger) { spy('logger') } - let(:project_instance) { Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler) } - let(:forced_decision_project_instance) { Optimizely::Project.new(forced_decision_JSON, nil, spy_logger, error_handler, false, nil, nil, nil, nil, nil, [], {batch_size: 1}) } - let(:integration_project_instance) { Optimizely::Project.new(integration_JSON, nil, spy_logger, error_handler) } + let(:project_instance) { Optimizely::Project.new(datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler) } + let(:forced_decision_project_instance) { Optimizely::Project.new(datafile: forced_decision_JSON, logger: spy_logger, error_handler: error_handler, event_processor_options: {batch_size: 1}) } + let(:integration_project_instance) { Optimizely::Project.new(datafile: integration_JSON, logger: spy_logger, error_handler: error_handler) } let(:impression_log_url) { 'https://logx.optimizely.com/v1/events' } let(:good_response_data) do { @@ -251,7 +251,9 @@ variation_key: '3324490562', rule_key: nil, reasons: [], - decision_event_dispatched: true + decision_event_dispatched: true, + experiment_id: nil, + variation_id: '3324490562' ) user_context_obj = forced_decision_project_instance.create_user_context(user_id) context = Optimizely::OptimizelyUserContext::OptimizelyDecisionContext.new(feature_key, nil) @@ -347,7 +349,9 @@ variation_key: 'b', rule_key: 'exp_with_audience', reasons: ['Variation (b) is mapped to flag (feature_1), rule (exp_with_audience) and user (tester) in the forced decision map.'], - decision_event_dispatched: true + decision_event_dispatched: true, + experiment_id: '10390977673', + variation_id: '10416523121' ) user_context_obj = Optimizely::OptimizelyUserContext.new(forced_decision_project_instance, user_id, original_attributes) context = 
Optimizely::OptimizelyUserContext::OptimizelyDecisionContext.new(feature_key, 'exp_with_audience') @@ -464,7 +468,9 @@ variation_key: '3324490562', rule_key: nil, reasons: [], - decision_event_dispatched: true + decision_event_dispatched: true, + experiment_id: nil, + variation_id: '3324490562' ) user_context_obj = forced_decision_project_instance.create_user_context(user_id) context_with_flag = Optimizely::OptimizelyUserContext::OptimizelyDecisionContext.new(feature_key, nil) @@ -550,6 +556,7 @@ decision = user_context_obj.decide(feature_key, [Optimizely::Decide::OptimizelyDecideOption::INCLUDE_REASONS]) expect(decision.variation_key).to eq('18257766532') expect(decision.rule_key).to eq('18322080788') + # puts decision.reasons expect(decision.reasons).to include('Invalid variation is mapped to flag (feature_1), rule (exp_with_audience) and user (tester) in the forced decision map.') # delivery-rule-to-decision diff --git a/spec/project_spec.rb b/spec/project_spec.rb index d00f93c1..9abbc39f 100644 --- a/spec/project_spec.rb +++ b/spec/project_spec.rb @@ -48,7 +48,7 @@ let(:version) { Optimizely::VERSION } let(:impression_log_url) { 'https://logx.optimizely.com/v1/events' } let(:conversion_log_url) { 'https://logx.optimizely.com/v1/events' } - let(:project_instance) { Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler, false, nil, nil, nil, nil, nil, [], {batch_size: 1}) } + let(:project_instance) { Optimizely::Project.new(datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler, event_processor_options: {batch_size: 1}) } let(:project_config) { project_instance.config_manager.config } let(:time_now) { Time.now } let(:post_headers) { {'Content-Type' => 'application/json'} } @@ -71,7 +71,7 @@ def log(_level, log_message) end logger = CustomLogger.new - instance_with_logger = Optimizely::Project.new(config_body_JSON, nil, logger) + instance_with_logger = Optimizely::Project.new(datafile: config_body_JSON, logger: logger) 
expect(instance_with_logger.logger.log(Logger::INFO, 'test_message')).to eq('test_message') instance_with_logger.close end @@ -84,19 +84,19 @@ def handle_error(error) end error_handler = CustomErrorHandler.new - instance_with_error_handler = Optimizely::Project.new(config_body_JSON, nil, nil, error_handler) + instance_with_error_handler = Optimizely::Project.new(datafile: config_body_JSON, error_handler: error_handler) expect(instance_with_error_handler.error_handler.handle_error('test_message')).to eq('test_message') instance_with_error_handler.close end it 'should log an error when datafile is null' do expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') - Optimizely::Project.new(nil, nil, spy_logger).close + Optimizely::Project.new(logger: spy_logger).close end it 'should log an error when datafile is empty' do expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') - Optimizely::Project.new('', nil, spy_logger).close + Optimizely::Project.new(datafile: '', logger: spy_logger).close end it 'should log an error when given a datafile that does not conform to the schema' do @@ -104,7 +104,7 @@ def handle_error(error) allow(spy_logger).to receive(:log).with(Logger::DEBUG, anything) expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'SDK key not provided/cannot be found in the datafile. 
ODP may not work properly without it.') - Optimizely::Project.new('{"foo": "bar"}', nil, spy_logger).close + Optimizely::Project.new(datafile: '{"foo": "bar"}', logger: spy_logger).close end it 'should log an error when given an invalid logger' do @@ -114,7 +114,7 @@ def handle_error(error) expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'Provided logger is in an invalid format.') class InvalidLogger; end # rubocop:disable Lint/ConstantDefinitionInBlock - Optimizely::Project.new(config_body_JSON, nil, InvalidLogger.new).close + Optimizely::Project.new(datafile: config_body_JSON, logger: InvalidLogger.new).close end it 'should log an error when given an invalid event_dispatcher' do @@ -123,7 +123,7 @@ class InvalidLogger; end # rubocop:disable Lint/ConstantDefinitionInBlock expect_any_instance_of(Optimizely::SimpleLogger).to receive(:log).once.with(Logger::ERROR, 'Provided event_dispatcher is in an invalid format.') class InvalidEventDispatcher; end # rubocop:disable Lint/ConstantDefinitionInBlock - Optimizely::Project.new(config_body_JSON, InvalidEventDispatcher.new).close + Optimizely::Project.new(datafile: config_body_JSON, event_dispatcher: InvalidEventDispatcher.new).close end it 'should log an error when given an invalid error_handler' do @@ -132,14 +132,14 @@ class InvalidEventDispatcher; end # rubocop:disable Lint/ConstantDefinitionInBlo expect_any_instance_of(Optimizely::SimpleLogger).to receive(:log).once.with(Logger::ERROR, 'Provided error_handler is in an invalid format.') class InvalidErrorHandler; end # rubocop:disable Lint/ConstantDefinitionInBlock - Optimizely::Project.new(config_body_JSON, nil, nil, InvalidErrorHandler.new).close + Optimizely::Project.new(datafile: config_body_JSON, error_handler: InvalidErrorHandler.new).close end it 'should not validate the JSON schema of the datafile when skip_json_validation is true' do project_instance.close expect(Optimizely::Helpers::Validator).not_to receive(:datafile_valid?) 
- Optimizely::Project.new(config_body_JSON, nil, nil, nil, true).close + Optimizely::Project.new(datafile: config_body_JSON, skip_json_validation: true).close end it 'should be invalid when datafile contains integrations missing key' do @@ -152,7 +152,7 @@ class InvalidErrorHandler; end # rubocop:disable Lint/ConstantDefinitionInBlock config['integrations'][0].delete('key') integrations_json = JSON.dump(config) - Optimizely::Project.new(integrations_json, nil, spy_logger) + Optimizely::Project.new(datafile: integrations_json, logger: spy_logger) end it 'should be valid when datafile contains integrations with only key' do @@ -161,7 +161,7 @@ class InvalidErrorHandler; end # rubocop:disable Lint/ConstantDefinitionInBlock config['integrations'].push('key' => '123') integrations_json = JSON.dump(config) - project_instance = Optimizely::Project.new(integrations_json) + project_instance = Optimizely::Project.new(datafile: integrations_json) expect(project_instance.is_valid).to be true end @@ -171,7 +171,7 @@ class InvalidErrorHandler; end # rubocop:disable Lint/ConstantDefinitionInBlock config['integrations'].push('key' => 'future', 'any-key-1' => 1, 'any-key-2' => 'any-value-2') integrations_json = JSON.dump(config) - project_instance = Optimizely::Project.new(integrations_json) + project_instance = Optimizely::Project.new(datafile: integrations_json) expect(project_instance.is_valid).to be true end @@ -179,20 +179,20 @@ class InvalidErrorHandler; end # rubocop:disable Lint/ConstantDefinitionInBlock expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') expect_any_instance_of(Optimizely::RaiseErrorHandler).to receive(:handle_error).once.with(Optimizely::InvalidInputError) - Optimizely::Project.new('this is not JSON', nil, spy_logger, Optimizely::RaiseErrorHandler.new, true) + Optimizely::Project.new(datafile: 'this is not JSON', logger: spy_logger, error_handler: Optimizely::RaiseErrorHandler.new, 
skip_json_validation: true) end it 'should log an error when provided an invalid JSON datafile and skip_json_validation is true' do expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') - Optimizely::Project.new('{"version": "2", "foo": "bar"}', nil, spy_logger, nil, true) + Optimizely::Project.new(datafile: '{"version": "2", "foo": "bar"}', logger: spy_logger, skip_json_validation: true) end it 'should log and raise an error when provided a datafile of unsupported version' do config_body_invalid_json = JSON.parse(config_body_invalid_JSON) expect(spy_logger).to receive(:log).once.with(Logger::ERROR, "This version of the Ruby SDK does not support the given datafile version: #{config_body_invalid_json['version']}.") - expect { Optimizely::Project.new(config_body_invalid_JSON, nil, spy_logger, Optimizely::RaiseErrorHandler.new, true) }.to raise_error(Optimizely::InvalidDatafileVersionError, 'This version of the Ruby SDK does not support the given datafile version: 5.') + expect { Optimizely::Project.new(datafile: config_body_invalid_JSON, logger: spy_logger, error_handler: Optimizely::RaiseErrorHandler.new, skip_json_validation: true) }.to raise_error(Optimizely::InvalidDatafileVersionError, 'This version of the Ruby SDK does not support the given datafile version: 5.') end end @@ -225,7 +225,7 @@ class InvalidErrorHandler; end # rubocop:disable Lint/ConstantDefinitionInBlock end it 'should send identify event when called with odp enabled' do - project = Optimizely::Project.new(config_body_integrations_JSON, nil, spy_logger) + project = Optimizely::Project.new(datafile: config_body_integrations_JSON, logger: spy_logger) expect(project.odp_manager).to receive(:identify_user).with({user_id: 'tester'}) project.create_user_context('tester') @@ -359,7 +359,7 @@ class InvalidErrorHandler; end # rubocop:disable Lint/ConstantDefinitionInBlock describe '.typed audiences' do before(:example) do - 
@project_typed_audience_instance = Optimizely::Project.new(JSON.dump(OptimizelySpec::CONFIG_DICT_WITH_TYPED_AUDIENCES), nil, spy_logger, error_handler, false, nil, nil, nil, nil, nil, [], {batch_size: 1}) + @project_typed_audience_instance = Optimizely::Project.new(datafile: JSON.dump(OptimizelySpec::CONFIG_DICT_WITH_TYPED_AUDIENCES), logger: spy_logger, error_handler: error_handler, event_processor_options: {batch_size: 1}) @project_config = @project_typed_audience_instance.config_manager.config @expected_activate_params = { account_id: '4879520872', @@ -900,7 +900,7 @@ def callback(_args); end end it 'should log an error when called with an invalid Project object' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) invalid_project.activate('test_exp', 'test_user') expect(spy_logger).to have_received(:log).with(Logger::ERROR, 'Provided datafile is in an invalid format.') expect(spy_logger).to have_received(:log).with(Logger::ERROR, "Optimizely instance is not valid. Failing 'activate'.") @@ -976,8 +976,8 @@ def callback(_args); end ) custom_project_instance = Optimizely::Project.new( - nil, nil, spy_logger, error_handler, - false, nil, nil, http_project_config_manager, notification_center + logger: spy_logger, error_handler: error_handler, + config_manager: http_project_config_manager, notification_center: notification_center ) sleep 0.1 until http_project_config_manager.ready? @@ -1003,8 +1003,8 @@ def callback(_args); end ) custom_project_instance = Optimizely::Project.new( - nil, nil, spy_logger, error_handler, - false, nil, nil, http_project_config_manager, notification_center + logger: spy_logger, error_handler: error_handler, + config_manager: http_project_config_manager, notification_center: notification_center ) sleep 0.1 until http_project_config_manager.ready? 
@@ -1037,8 +1037,8 @@ def callback(_args); end expect(notification_center).to receive(:send_notifications).ordered custom_project_instance = Optimizely::Project.new( - nil, nil, spy_logger, error_handler, - false, nil, sdk_key, nil, notification_center + logger: spy_logger, error_handler: error_handler, + sdk_key: sdk_key, notification_center: notification_center ) sleep 0.1 until custom_project_instance.config_manager.ready? @@ -1132,7 +1132,7 @@ def callback(_args); end end it 'should properly track an event with tags even when the project does not have a custom logger' do - custom_project_instance = Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler, false, nil, nil, nil, nil, nil, [], {batch_size: 1}) + custom_project_instance = Optimizely::Project.new(datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler, event_processor_options: {batch_size: 1}) params = @expected_track_event_params params[:visitors][0][:snapshots][0][:events][0][:tags] = {revenue: 42} @@ -1217,7 +1217,7 @@ def callback(_args); end describe '.typed audiences' do before(:example) do - @project_typed_audience_instance = Optimizely::Project.new(JSON.dump(OptimizelySpec::CONFIG_DICT_WITH_TYPED_AUDIENCES), nil, spy_logger, error_handler, false, nil, nil, nil, nil, nil, [], {batch_size: 1}) + @project_typed_audience_instance = Optimizely::Project.new(datafile: JSON.dump(OptimizelySpec::CONFIG_DICT_WITH_TYPED_AUDIENCES), logger: spy_logger, error_handler: error_handler, event_processor_options: {batch_size: 1}) @expected_event_params = { account_id: '4879520872', project_id: '11624721371', @@ -1424,7 +1424,7 @@ def callback(_args); end end it 'should log an error when called with an invalid Project object' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) invalid_project.track('test_event', 'test_user') expect(spy_logger).to 
have_received(:log).with(Logger::ERROR, 'Provided datafile is in an invalid format.') expect(spy_logger).to have_received(:log).with(Logger::ERROR, "Optimizely instance is not valid. Failing 'track'.") @@ -1532,7 +1532,7 @@ def callback(_args); end end it 'should log an error when called with an invalid Project object' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) invalid_project.get_variation('test_exp', 'test_user') expect(spy_logger).to have_received(:log).with(Logger::ERROR, 'Provided datafile is in an invalid format.') expect(spy_logger).to have_received(:log).with(Logger::ERROR, "Optimizely instance is not valid. Failing 'get_variation'.") @@ -1620,7 +1620,7 @@ def callback(_args); end end it 'should return false when called with invalid project config' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) expect(invalid_project.is_feature_enabled('totally_invalid_feature_key', 'test_user')).to be false expect(spy_logger).to have_received(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') expect(spy_logger).to have_received(:log).once.with(Logger::ERROR, "Optimizely instance is not valid. 
Failing 'is_feature_enabled'.") @@ -1681,7 +1681,7 @@ def callback(_args); end it 'should return false and send an impression when the user is not bucketed into any variation' do allow(project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(nil) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(nil, false, [])) expect(project_instance.is_feature_enabled('multi_variate_feature', 'test_user')).to be(false) @@ -1703,7 +1703,7 @@ def callback(_args); end ) allow(project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, [])) expect(project_instance.is_feature_enabled('boolean_single_variable_feature', 'test_user')).to be true @@ -1723,7 +1723,7 @@ def callback(_args); end Optimizely::DecisionService::DECISION_SOURCES['ROLLOUT'] ) allow(project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, [])) expect(variation_to_return['featureEnabled']).to be false expect(project_instance.is_feature_enabled('boolean_single_variable_feature', 'test_user')).to be false @@ -1744,7 +1744,7 @@ def callback(_args); end Optimizely::DecisionService::DECISION_SOURCES['ROLLOUT'] ) allow(project_instance.event_dispatcher).to 
receive(:dispatch_event).with(instance_of(Optimizely::Event)) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, [])) expect(variation_to_return['featureEnabled']).to be true expect(project_instance.is_feature_enabled('boolean_single_variable_feature', 'test_user')).to be true @@ -1758,7 +1758,7 @@ def callback(_args); end describe '.typed audiences' do before(:example) do - @project_typed_audience_instance = Optimizely::Project.new(JSON.dump(OptimizelySpec::CONFIG_DICT_WITH_TYPED_AUDIENCES), nil, spy_logger, error_handler) + @project_typed_audience_instance = Optimizely::Project.new(datafile: JSON.dump(OptimizelySpec::CONFIG_DICT_WITH_TYPED_AUDIENCES), logger: spy_logger, error_handler: error_handler) stub_request(:post, impression_log_url) end after(:example) do @@ -1841,7 +1841,7 @@ def callback(_args); end Optimizely::NotificationCenter::NOTIFICATION_TYPES[:DECISION], any_args ).ordered - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, [])) expect(project_instance.is_feature_enabled('multi_variate_feature', 'test_user')).to be true @@ -1862,7 +1862,7 @@ def callback(_args); end Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] ) expect(variation_to_return['featureEnabled']).to be false - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, [])) 
expect(project_instance.is_feature_enabled('multi_variate_feature', 'test_user')).to be false @@ -1888,7 +1888,7 @@ def callback(_args); end Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] ) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, [])) # Activate listener expect(project_instance.notification_center).to receive(:send_notifications).once.with( @@ -1925,7 +1925,7 @@ def callback(_args); end Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] ) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, [])) expect(project_instance.notification_center).to receive(:send_notifications).once.with( Optimizely::NotificationCenter::NOTIFICATION_TYPES[:LOG_EVENT], any_args @@ -1959,7 +1959,7 @@ def callback(_args); end variation_to_return, Optimizely::DecisionService::DECISION_SOURCES['ROLLOUT'] ) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, [])) # DECISION listener called when the user is in rollout with variation feature true. 
expect(variation_to_return['featureEnabled']).to be true @@ -1983,8 +1983,21 @@ def callback(_args); end end it 'should call decision listener when user is bucketed into rollout with featureEnabled property is false' do - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::Decision) + experiment_to_return = config_body['rollouts'][0]['experiments'][1] + variation_to_return = experiment_to_return['variations'][0] + decision_to_return = Optimizely::DecisionService::Decision.new( + experiment_to_return, + variation_to_return, + Optimizely::DecisionService::DECISION_SOURCES['ROLLOUT'] + ) + # Ensure featureEnabled is false for this test + expect(variation_to_return['featureEnabled']).to be false + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, [])) + + expect(project_instance.notification_center).to receive(:send_notifications).once.with( + Optimizely::NotificationCenter::NOTIFICATION_TYPES[:LOG_EVENT], any_args + ).ordered # DECISION listener called when the user is in rollout with variation feature off. 
expect(project_instance.notification_center).to receive(:send_notifications).once.with( Optimizely::NotificationCenter::NOTIFICATION_TYPES[:DECISION], @@ -1999,7 +2012,7 @@ def callback(_args); end end it 'call decision listener when the user is not bucketed into any experiment or rollout' do - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(nil) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(nil, false, [])) expect(project_instance.notification_center).to receive(:send_notifications).once.with( Optimizely::NotificationCenter::NOTIFICATION_TYPES[:LOG_EVENT], any_args ).ordered @@ -2023,7 +2036,7 @@ def callback(_args); end describe '#get_enabled_features' do it 'should return empty when called with invalid project config' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) expect(invalid_project.get_enabled_features('test_user')).to be_empty expect(spy_logger).to have_received(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') expect(spy_logger).to have_received(:log).once.with(Logger::ERROR, "Optimizely instance is not valid. 
Failing 'get_enabled_features'.") @@ -2079,7 +2092,7 @@ def callback(_args); end it 'should return only enabled feature flags keys' do # Sets all feature-flags keys with randomly assigned status features_keys = project_config.feature_flags.map do |item| - {key: (item['key']).to_s, value: [true, false].sample} # '[true, false].sample' generates random boolean + {key: item['key'].to_s, value: [true, false].sample} # '[true, false].sample' generates random boolean end enabled_features = features_keys.map { |x| x[:key] if x[:value] == true }.compact @@ -2113,26 +2126,42 @@ def callback(_args); end rollout_to_return = config_body['rollouts'][0]['experiments'][0] allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return( - Optimizely::DecisionService::Decision.new( - experiment_to_return, - experiment_to_return['variations'][0], - Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] + Optimizely::DecisionService::DecisionResult.new( + Optimizely::DecisionService::Decision.new( + experiment_to_return, + experiment_to_return['variations'][0], + Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] + ), false, [] ), - nil, - Optimizely::DecisionService::Decision.new( - rollout_to_return, - rollout_to_return['variations'][0], - Optimizely::DecisionService::DECISION_SOURCES['ROLLOUT'] + Optimizely::DecisionService::DecisionResult.new( + nil, false, [] ), - Optimizely::DecisionService::Decision.new( - experiment_to_return, - experiment_to_return['variations'][1], - Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] + Optimizely::DecisionService::DecisionResult.new( + Optimizely::DecisionService::Decision.new( + rollout_to_return, + rollout_to_return['variations'][0], + Optimizely::DecisionService::DECISION_SOURCES['ROLLOUT'] + ), false, [] + ), + Optimizely::DecisionService::DecisionResult.new( + Optimizely::DecisionService::Decision.new( + experiment_to_return, + experiment_to_return['variations'][1], + 
Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] + ), false, [] + ), + Optimizely::DecisionService::DecisionResult.new( + nil, false, [] ), - nil, - nil, - nil, - nil + Optimizely::DecisionService::DecisionResult.new( + nil, false, [] + ), + Optimizely::DecisionService::DecisionResult.new( + nil, false, [] + ), + Optimizely::DecisionService::DecisionResult.new( + nil, false, [] + ) ) expect(project_instance.notification_center).to receive(:send_notifications).exactly(10).times.with( @@ -2249,7 +2278,7 @@ def callback(_args); end user_attributes = {} it 'should return nil when called with invalid project config' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) expect(invalid_project.get_feature_variable_string('string_single_variable_feature', 'string_variable', user_id, user_attributes)) .to eq(nil) expect(spy_logger).to have_received(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') @@ -2274,7 +2303,8 @@ def callback(_args); end 'experiment' => nil, 'variation' => variation_to_return } - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.get_feature_variable_string('string_single_variable_feature', 'string_variable', user_id, user_attributes)) .to eq('wingardium leviosa') @@ -2294,7 +2324,8 @@ def callback(_args); end 'experiment' => nil, 'variation' => variation_to_return } - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + 
allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.get_feature_variable_string('boolean_single_variable_feature', 'boolean_variable', user_id, user_attributes)) .to eq(nil) @@ -2315,7 +2346,8 @@ def callback(_args); end 'experiment' => experiment_to_return, 'variation' => variation_to_return } - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.get_feature_variable_string('integer_single_variable_feature', 'integer_variable', user_id, user_attributes)) .to eq(nil) @@ -2334,7 +2366,8 @@ def callback(_args); end 'experiment' => experiment_to_return, 'variation' => variation_to_return } - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.get_feature_variable_string('string_single_variable_feature', 'string_variable', user_id, user_attributes)) .to eq('cta_1') @@ -2351,7 +2384,7 @@ def callback(_args); end describe 'when the feature flag is not enabled for the user' do it 'should return the default variable value' do - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(nil) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(nil, false, [])) expect(project_instance.get_feature_variable_string('string_single_variable_feature', 
'string_variable', user_id, user_attributes)) .to eq('wingardium leviosa') @@ -2399,7 +2432,7 @@ def callback(_args); end user_attributes = {} it 'should return nil when called with invalid project config' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) expect(invalid_project.get_feature_variable_json('json_single_variable_feature', 'json_variable', user_id, user_attributes)) .to eq(nil) expect(spy_logger).to have_received(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') @@ -2424,7 +2457,8 @@ def callback(_args); end 'experiment' => nil, 'variation' => variation_to_return } - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.notification_center).to receive(:send_notifications).once.with( Optimizely::NotificationCenter::NOTIFICATION_TYPES[:DECISION], @@ -2496,7 +2530,8 @@ def callback(_args); end 'experiment' => experiment_to_return, 'variation' => variation_to_return } - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.notification_center).to receive(:send_notifications).once.with( Optimizely::NotificationCenter::NOTIFICATION_TYPES[:DECISION], @@ -2525,7 +2560,7 @@ def callback(_args); end describe 'when the feature flag is not enabled for the user' do it 'should return the default variable value' do - 
allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(nil) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(nil, false, [])) expect(project_instance.notification_center).to receive(:send_notifications).once.with( Optimizely::NotificationCenter::NOTIFICATION_TYPES[:DECISION], @@ -2582,7 +2617,7 @@ def callback(_args); end user_attributes = {} it 'should return nil when called with invalid project config' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) expect(invalid_project.get_feature_variable_boolean('boolean_single_variable_feature', 'boolean_variable', user_id, user_attributes)) .to eq(nil) expect(spy_logger).to have_received(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') @@ -2608,7 +2643,8 @@ def callback(_args); end 'experiment' => nil, 'variation' => variation_to_return } - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.get_feature_variable_boolean('boolean_single_variable_feature', 'boolean_variable', user_id, user_attributes)) .to eq(true) @@ -2626,7 +2662,7 @@ def callback(_args); end user_attributes = {} it 'should return nil when called with invalid project config' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) expect(invalid_project.get_feature_variable_double('double_single_variable_feature', 'double_variable', user_id, user_attributes)) .to eq(nil) 
expect(spy_logger).to have_received(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') @@ -2652,8 +2688,8 @@ def callback(_args); end 'experiment' => experiment_to_return, 'variation' => variation_to_return } - - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.get_feature_variable_double('double_single_variable_feature', 'double_variable', user_id, user_attributes)) .to eq(42.42) @@ -2672,7 +2708,7 @@ def callback(_args); end user_attributes = {} it 'should return nil when called with invalid project config' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) expect(invalid_project.get_feature_variable_integer('integer_single_variable_feature', 'integer_variable', user_id, user_attributes)) .to eq(nil) expect(spy_logger).to have_received(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') @@ -2698,8 +2734,8 @@ def callback(_args); end 'experiment' => experiment_to_return, 'variation' => variation_to_return } - - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.get_feature_variable_integer('integer_single_variable_feature', 'integer_variable', user_id, user_attributes)) .to eq(42) @@ -2718,7 +2754,7 @@ def callback(_args); end user_attributes = {} it 'should return nil when called with 
invalid project config' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) expect(invalid_project.get_all_feature_variables('all_variables_feature', user_id, user_attributes)) .to eq(nil) expect(spy_logger).to have_received(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') @@ -2741,7 +2777,8 @@ def callback(_args); end Decision = Struct.new(:experiment, :variation, :source) # rubocop:disable Lint/ConstantDefinitionInBlock variation_to_return = project_config.rollout_id_map['166661']['experiments'][0]['variations'][0] decision_to_return = Decision.new({'key' => 'test-exp'}, variation_to_return, 'feature-test') - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.notification_center).to receive(:send_notifications).once.with( Optimizely::NotificationCenter::NOTIFICATION_TYPES[:DECISION], @@ -2814,7 +2851,8 @@ def callback(_args); end 'experiment' => experiment_to_return, 'variation' => variation_to_return } - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) allow(project_config).to receive(:variation_id_to_variable_usage_map).and_return(variation_id_to_variable_usage_map) expect(project_instance.notification_center).to receive(:send_notifications).once.with( @@ -2873,7 +2911,7 @@ def callback(_args); end describe 'when the feature flag 
is not enabled for the user' do it 'should return the default variable value' do - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(nil) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(nil, false, [])) expect(project_instance.notification_center).to receive(:send_notifications).once.with( Optimizely::NotificationCenter::NOTIFICATION_TYPES[:DECISION], @@ -2951,7 +2989,7 @@ def callback(_args); end user_attributes = {} it 'should return nil when called with invalid project config' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) expect(invalid_project.get_feature_variable('string_single_variable_feature', 'string_variable', user_id, user_attributes)) .to eq(nil) expect(spy_logger).to have_received(:log).once.with(Logger::ERROR, 'Provided datafile is in an invalid format.') @@ -2976,7 +3014,8 @@ def callback(_args); end 'experiment' => nil, 'variation' => variation_to_return } - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.get_feature_variable('string_single_variable_feature', 'string_variable', user_id, user_attributes)) .to eq('wingardium leviosa') @@ -2996,7 +3035,8 @@ def callback(_args); end 'experiment' => experiment_to_return, 'variation' => variation_to_return } - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + 
allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.get_feature_variable('string_single_variable_feature', 'string_variable', user_id, user_attributes)) .to eq('cta_1') @@ -3017,7 +3057,8 @@ def callback(_args); end 'experiment' => nil, 'variation' => variation_to_return } - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.get_feature_variable('boolean_single_variable_feature', 'boolean_variable', user_id, user_attributes)) .to eq(true) @@ -3038,8 +3079,8 @@ def callback(_args); end 'experiment' => experiment_to_return, 'variation' => variation_to_return } - - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.get_feature_variable('double_single_variable_feature', 'double_variable', user_id, user_attributes)) .to eq(42.42) @@ -3060,8 +3101,8 @@ def callback(_args); end 'experiment' => experiment_to_return, 'variation' => variation_to_return } - - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) expect(project_instance.get_feature_variable('integer_single_variable_feature', 
'integer_variable', user_id, user_attributes)) .to eq(42) @@ -3078,7 +3119,7 @@ def callback(_args); end describe 'when the feature flag is not enabled for the user' do it 'should return the default variable value' do - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(nil) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(nil, false, [])) expect(project_instance.get_feature_variable('string_single_variable_feature', 'string_variable', user_id, user_attributes)) .to eq('wingardium leviosa') @@ -3161,7 +3202,7 @@ def callback(_args); end describe '.typed audiences' do before(:example) do - @project_typed_audience_instance = Optimizely::Project.new(JSON.dump(OptimizelySpec::CONFIG_DICT_WITH_TYPED_AUDIENCES), nil, spy_logger, error_handler) + @project_typed_audience_instance = Optimizely::Project.new(datafile: JSON.dump(OptimizelySpec::CONFIG_DICT_WITH_TYPED_AUDIENCES), logger: spy_logger, error_handler: error_handler) end after(:example) do @project_typed_audience_instance.close @@ -3243,8 +3284,8 @@ def callback(_args); end variation_to_return, Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] ) - - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) # DECISION listener called when the user is in experiment with variation feature off. 
expect(project_instance.notification_center).to receive(:send_notifications).once.with( @@ -3287,8 +3328,8 @@ def callback(_args); end variation_to_return, Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] ) - - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) # DECISION listener called when the user is in experiment with variation feature on. expect(project_instance.notification_center).to receive(:send_notifications).once.with( @@ -3325,8 +3366,8 @@ def callback(_args); end variation_to_return, Optimizely::DecisionService::DECISION_SOURCES['ROLLOUT'] ) - - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) # DECISION listener called when the user is in rollout with variation feature on. expect(variation_to_return['featureEnabled']).to be true @@ -3360,7 +3401,8 @@ def callback(_args); end variation_to_return, Optimizely::DecisionService::DECISION_SOURCES['ROLLOUT'] ) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_result_to_return) # DECISION listener called when the user is in rollout with variation feature on. 
expect(variation_to_return['featureEnabled']).to be false @@ -3392,7 +3434,7 @@ def callback(_args); end end it 'should call listener with default variable type and value, when user neither in experiment nor in rollout' do - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(nil) + allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(Optimizely::DecisionService::DecisionResult.new(nil, false, [])) expect(project_instance.notification_center).to receive(:send_notifications).once.with( Optimizely::NotificationCenter::NOTIFICATION_TYPES[:DECISION], @@ -3466,7 +3508,7 @@ def callback(_args); end valid_variation = {id: '111128', key: 'control'} it 'should log an error when called with an invalid Project object' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) invalid_project.set_forced_variation(valid_experiment[:key], user_id, valid_variation[:key]) expect(spy_logger).to have_received(:log).with(Logger::ERROR, 'Provided datafile is in an invalid format.') expect(spy_logger).to have_received(:log).with(Logger::ERROR, "Optimizely instance is not valid. Failing 'set_forced_variation'.") @@ -3523,7 +3565,7 @@ def callback(_args); end valid_experiment = {id: '111127', key: 'test_experiment'} it 'should log an error when called with an invalid Project object' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) invalid_project.get_forced_variation(valid_experiment[:key], user_id) expect(spy_logger).to have_received(:log).with(Logger::ERROR, 'Provided datafile is in an invalid format.') expect(spy_logger).to have_received(:log).with(Logger::ERROR, "Optimizely instance is not valid. 
Failing 'get_forced_variation'.") @@ -3569,7 +3611,7 @@ def callback(_args); end describe '#is_valid' do it 'should return false when called with an invalid datafile' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) expect(invalid_project.is_valid).to be false invalid_project.close end @@ -3595,9 +3637,9 @@ def callback(_args); end event_processor = Optimizely::BatchEventProcessor.new(event_dispatcher: Optimizely::EventDispatcher.new) - Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler).close + Optimizely::Project.new(datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler).close - project_instance = Optimizely::Project.new(nil, nil, nil, nil, true, nil, nil, config_manager, nil, event_processor) + project_instance = Optimizely::Project.new(skip_json_validation: true, config_manager: config_manager, event_processor: event_processor) expect(config_manager.stopped).to be false expect(event_processor.started).to be false @@ -3617,8 +3659,8 @@ def callback(_args); end ) project_instance = Optimizely::Project.new( - nil, nil, spy_logger, error_handler, - false, nil, nil, http_project_config_manager + logger: spy_logger, error_handler: error_handler, + config_manager: http_project_config_manager ) project_instance.close @@ -3631,8 +3673,8 @@ def callback(_args); end ) project_instance = Optimizely::Project.new( - config_body_JSON, nil, spy_logger, error_handler, - false, nil, nil, http_project_config_manager + datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler, + config_manager: http_project_config_manager ) sleep 0.1 until http_project_config_manager.ready? 
@@ -3652,8 +3694,8 @@ def callback(_args); end ) project_instance = Optimizely::Project.new( - nil, nil, spy_logger, error_handler, - false, nil, nil, static_project_config_manager + logger: spy_logger, error_handler: error_handler, + config_manager: static_project_config_manager ) project_instance.close @@ -3666,8 +3708,8 @@ def callback(_args); end ) project_instance = Optimizely::Project.new( - config_body_JSON, nil, spy_logger, error_handler, - false, nil, nil, static_project_config_manager + datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler, + config_manager: static_project_config_manager ) project_instance.close @@ -3696,7 +3738,7 @@ def callback(_args); end describe '#decide' do describe 'should return empty decision object with correct reason when sdk is not ready' do it 'when sdk is not ready' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) user_context = project_instance.create_user_context('user1') decision = invalid_project.decide(user_context, 'dummy_flag') expect(decision.as_json).to eq( @@ -3758,7 +3800,9 @@ def callback(_args); end variation_key: 'Fred', rule_key: 'test_experiment_multivariate', reasons: [], - decision_event_dispatched: true + decision_event_dispatched: true, + experiment_id: experiment_to_return['id'], + variation_id: variation_to_return['id'] ) allow(project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) decision_to_return = Optimizely::DecisionService::Decision.new( @@ -3766,7 +3810,10 @@ def callback(_args); end variation_to_return, Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] ) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + decision_list_to_be_returned = [] 
+ decision_list_to_be_returned << decision_result_to_return + allow(project_instance.decision_service).to receive(:get_variations_for_feature_list).and_return(decision_list_to_be_returned) user_context = project_instance.create_user_context('user1') decision = project_instance.decide(user_context, 'multi_variate_feature') expect(decision.as_json).to include( @@ -3799,7 +3846,9 @@ def callback(_args); end variation_key: 'Fred', rule_key: 'test_experiment_multivariate', reasons: [], - decision_event_dispatched: true + decision_event_dispatched: true, + experiment_id: experiment_to_return['id'], + variation_id: variation_to_return['id'] ) allow(project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) decision_to_return = Optimizely::DecisionService::Decision.new( @@ -3807,7 +3856,10 @@ def callback(_args); end variation_to_return, Optimizely::DecisionService::DECISION_SOURCES['ROLLOUT'] ) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + decision_list_to_be_returned = [] + decision_list_to_be_returned << decision_result_to_return + allow(project_instance.decision_service).to receive(:get_variations_for_feature_list).and_return(decision_list_to_be_returned) user_context = project_instance.create_user_context('user1') decision = project_instance.decide(user_context, 'multi_variate_feature') @@ -3879,7 +3931,9 @@ def callback(_args); end variation_key: 'Fred', rule_key: 'test_experiment_multivariate', reasons: [], - decision_event_dispatched: false + decision_event_dispatched: false, + experiment_id: experiment_to_return['id'], + variation_id: variation_to_return['id'] ) allow(project_config).to receive(:send_flag_decisions).and_return(false) allow(project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) @@ -3888,7 
+3942,9 @@ def callback(_args); end variation_to_return, Optimizely::DecisionService::DECISION_SOURCES['ROLLOUT'] ) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + decision_list_to_return = [decision_result_to_return] + allow(project_instance.decision_service).to receive(:get_variations_for_feature_list).and_return(decision_list_to_return) user_context = project_instance.create_user_context('user1') decision = project_instance.decide(user_context, 'multi_variate_feature') expect(decision.as_json).to include( @@ -3916,7 +3972,9 @@ def callback(_args); end variation_key: nil, rule_key: nil, reasons: [], - decision_event_dispatched: false + decision_event_dispatched: false, + experiment_id: nil, + variation_id: nil ) allow(project_config).to receive(:send_flag_decisions).and_return(false) allow(project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) @@ -3953,7 +4011,9 @@ def callback(_args); end variation_key: nil, rule_key: nil, reasons: [], - decision_event_dispatched: true + decision_event_dispatched: true, + experiment_id: nil, + variation_id: nil ) allow(project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) decision_to_return = nil @@ -4055,8 +4115,10 @@ def callback(_args); end variation_to_return, Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] ) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + decision_list_to_be_returned = [decision_result_to_return] allow(project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + allow(project_instance.decision_service).to 
receive(:get_variations_for_feature_list).and_return(decision_list_to_be_returned) user_context = project_instance.create_user_context('user1') decision = project_instance.decide(user_context, 'multi_variate_feature', [Optimizely::Decide::OptimizelyDecideOption::EXCLUDE_VARIABLES]) expect(decision.as_json).to include( @@ -4078,8 +4140,10 @@ def callback(_args); end variation_to_return, Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] ) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + decision_list_to_return = [decision_result_to_return] allow(project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + allow(project_instance.decision_service).to receive(:get_variations_for_feature_list).and_return(decision_list_to_return) user_context = project_instance.create_user_context('user1') decision = project_instance.decide(user_context, 'multi_variate_feature') expect(decision.as_json).to include( @@ -4096,8 +4160,6 @@ def callback(_args); end describe 'INCLUDE_REASONS' do it 'should include reasons when the option is set' do - expect(project_instance.notification_center).to receive(:send_notifications) - .once.with(Optimizely::NotificationCenter::NOTIFICATION_TYPES[:LOG_EVENT], any_args) expect(project_instance.notification_center).to receive(:send_notifications) .once.with( Optimizely::NotificationCenter::NOTIFICATION_TYPES[:DECISION], @@ -4117,8 +4179,12 @@ def callback(_args); end "The user 'user1' is not bucketed into any of the experiments on the feature 'multi_variate_feature'.", "Feature flag 'multi_variate_feature' is not used in a rollout." 
], - decision_event_dispatched: true + decision_event_dispatched: true, + experiment_id: nil, + variation_id: nil ) + expect(project_instance.notification_center).to receive(:send_notifications) + .once.with(Optimizely::NotificationCenter::NOTIFICATION_TYPES[:LOG_EVENT], any_args) allow(project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) user_context = project_instance.create_user_context('user1') decision = project_instance.decide(user_context, 'multi_variate_feature', [Optimizely::Decide::OptimizelyDecideOption::INCLUDE_REASONS]) @@ -4155,7 +4221,9 @@ def callback(_args); end variation_key: nil, rule_key: nil, reasons: [], - decision_event_dispatched: true + decision_event_dispatched: true, + experiment_id: nil, + variation_id: nil ) allow(project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) user_context = project_instance.create_user_context('user1') @@ -4180,23 +4248,24 @@ def callback(_args); end variation_to_return, Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] ) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + decision_list_to_return = [decision_result_to_return] allow(project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) - allow(project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + allow(project_instance.decision_service).to receive(:get_variations_for_feature_list).and_return(decision_list_to_return) user_context = project_instance.create_user_context('user1') - expect(project_instance.decision_service).to receive(:get_variation_for_feature) + expect(project_instance.decision_service).to receive(:get_variations_for_feature_list) .with(anything, anything, anything, []).once project_instance.decide(user_context, 'multi_variate_feature') - expect(project_instance.decision_service).to 
receive(:get_variation_for_feature) + expect(project_instance.decision_service).to receive(:get_variations_for_feature_list) .with(anything, anything, anything, [Optimizely::Decide::OptimizelyDecideOption::DISABLE_DECISION_EVENT]).once project_instance.decide(user_context, 'multi_variate_feature', [Optimizely::Decide::OptimizelyDecideOption::DISABLE_DECISION_EVENT]) - expect(project_instance.decision_service).to receive(:get_variation_for_feature) + expect(project_instance.decision_service).to receive(:get_variations_for_feature_list) .with(anything, anything, anything, [ Optimizely::Decide::OptimizelyDecideOption::DISABLE_DECISION_EVENT, Optimizely::Decide::OptimizelyDecideOption::EXCLUDE_VARIABLES, - Optimizely::Decide::OptimizelyDecideOption::ENABLED_FLAGS_ONLY, Optimizely::Decide::OptimizelyDecideOption::IGNORE_USER_PROFILE_SERVICE, Optimizely::Decide::OptimizelyDecideOption::INCLUDE_REASONS, Optimizely::Decide::OptimizelyDecideOption::EXCLUDE_VARIABLES @@ -4212,11 +4281,135 @@ def callback(_args); end ]) end end + describe 'when decision service fails with CMAB error' do + it 'should return error decision when CMAB decision service fails' do + # Add the HTTP stub to prevent real requests + stub_request(:post, 'https://logx.optimizely.com/v1/events') + .to_return(status: 200, body: '', headers: {}) + + feature_flag_key = 'boolean_single_variable_feature' + + # Mock the decision service to return an error result + error_decision_result = double('DecisionResult') + allow(error_decision_result).to receive(:decision).and_return(nil) + allow(error_decision_result).to receive(:error).and_return(true) + allow(error_decision_result).to receive(:reasons).and_return(['CMAB service failed to fetch decision']) + + # Mock get_variations_for_feature_list instead of get_variation_for_feature + allow(project_instance.decision_service).to receive(:get_variations_for_feature_list) + .and_return([error_decision_result]) + + user_context = 
project_instance.create_user_context('test_user') + decision = user_context.decide(feature_flag_key) + + expect(decision.enabled).to eq(false) + expect(decision.variation_key).to be_nil + expect(decision.flag_key).to eq(feature_flag_key) + expect(decision.reasons).to include('CMAB service failed to fetch decision') + end + end + describe 'CMAB experiments' do + it 'should include CMAB UUID in dispatched event when decision service returns CMAB result' do + # Use an existing feature flag from the test config + feature_flag_key = 'boolean_single_variable_feature' + + # Get an existing experiment that actually exists in the datafile + # Looking at the test config, let's use experiment ID '122230' which exists + existing_experiment = project_config.get_experiment_from_id('122230') + + # Modify the existing experiment to be a CMAB experiment + cmab_experiment = existing_experiment.dup + cmab_experiment['trafficAllocation'] = [] # Empty for CMAB + cmab_experiment['cmab'] = {'attributeIds' => %w[808797688 808797689], 'trafficAllocation' => 4000} + + # Mock the config to return our modified CMAB experiment + allow(project_instance.config_manager.config).to receive(:get_experiment_from_id) + .with('122230') + .and_return(cmab_experiment) + + allow(project_instance.config_manager.config).to receive(:experiment_running?) 
+ .with(cmab_experiment) + .and_return(true) + + # Get the feature flag and update it to reference our CMAB experiment + feature_flag = project_instance.config_manager.config.get_feature_flag_from_key(feature_flag_key) + feature_flag['experimentIds'] = ['122230'] + + # Use existing variations from the original experiment + variation_to_use = existing_experiment['variations'][0] + + # Create a decision with CMAB UUID + expected_cmab_uuid = 'uuid-cmab' + decision_with_cmab = Optimizely::DecisionService::Decision.new( + cmab_experiment, + variation_to_use, + Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'], + expected_cmab_uuid + ) + + decision_result_with_cmab = Optimizely::DecisionService::DecisionResult.new( + decision_with_cmab, + false, + [] + ) + + # Mock get_variations_for_feature_list to return CMAB result + allow(project_instance.decision_service).to receive(:get_variations_for_feature_list) + .and_return([decision_result_with_cmab]) + + # Set up time and UUID mocks for consistent event data + allow(Time).to receive(:now).and_return(time_now) + allow(SecureRandom).to receive(:uuid).and_return('a68cf1ad-0393-4e18-af87-efe8f01a7c9c') + + # Create array to capture dispatched events + dispatched_events = [] + allow(project_instance.event_dispatcher).to receive(:dispatch_event) do |event| + dispatched_events << event + end + + user_context = project_instance.create_user_context('test_user') + decision = user_context.decide(feature_flag_key) + + # Wait for batch processing thread to send event + sleep 0.1 until project_instance.event_processor.event_queue.empty? 
+ + # Verify the decision contains expected information + expect(decision.enabled).to eq(true) + expect(decision.variation_key).to eq(variation_to_use['key']) + expect(decision.rule_key).to eq(existing_experiment['key']) + expect(decision.flag_key).to eq(feature_flag_key) + + # Verify an event was dispatched + expect(dispatched_events.length).to eq(1) + + dispatched_event = dispatched_events[0] + + # Remove the puts statement and verify the event structure and CMAB UUID + expect(dispatched_event.params).to have_key(:visitors) + expect(dispatched_event.params[:visitors].length).to be > 0 + expect(dispatched_event.params[:visitors][0]).to have_key(:snapshots) + expect(dispatched_event.params[:visitors][0][:snapshots].length).to be > 0 + expect(dispatched_event.params[:visitors][0][:snapshots][0]).to have_key(:decisions) + expect(dispatched_event.params[:visitors][0][:snapshots][0][:decisions].length).to be > 0 + + # Get the metadata and assert CMAB UUID + metadata = dispatched_event.params[:visitors][0][:snapshots][0][:decisions][0][:metadata] + expect(metadata).to have_key(:cmab_uuid) + expect(metadata[:cmab_uuid]).to eq(expected_cmab_uuid) + + # Also verify other expected metadata fields + expect(metadata[:flag_key]).to eq(feature_flag_key) + expect(metadata[:rule_key]).to eq('test_experiment_multivariate') + expect(metadata[:rule_type]).to eq('feature-test') + expect(metadata[:variation_key]).to eq('Fred') + expect(metadata[:enabled]).to eq(true) + end + end end describe '#decide_all' do it 'should get empty object when sdk is not ready' do - invalid_project = Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) user_context = project_instance.create_user_context('user1') decisions = invalid_project.decide_all(user_context) expect(decisions).to eq({}) @@ -4283,7 +4476,7 @@ def callback(_args); end boolean_feature empty_feature ] - invalid_project = 
Optimizely::Project.new('invalid', nil, spy_logger) + invalid_project = Optimizely::Project.new(datafile: 'invalid', logger: spy_logger) user_context = project_instance.create_user_context('user1') decisions = invalid_project.decide_for_keys(user_context, keys) expect(decisions).to eq({}) @@ -4354,8 +4547,8 @@ def callback(_args); end it 'should get only enabled decisions for keys when ENABLED_FLAGS_ONLY is true in default_decide_options' do custom_project_instance = Optimizely::Project.new( - config_body_JSON, nil, spy_logger, error_handler, - false, nil, nil, nil, nil, nil, [Optimizely::Decide::OptimizelyDecideOption::ENABLED_FLAGS_ONLY] + datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler, + default_decide_options: [Optimizely::Decide::OptimizelyDecideOption::ENABLED_FLAGS_ONLY] ) keys = %w[ boolean_single_variable_feature @@ -4392,7 +4585,7 @@ def callback(_args); end describe 'default_decide_options' do describe 'EXCLUDE_VARIABLES' do it 'should include variables when the option is not set in default_decide_options' do - custom_project_instance = Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler) + custom_project_instance = Optimizely::Project.new(datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler) experiment_to_return = config_body['experiments'][3] variation_to_return = experiment_to_return['variations'][0] decision_to_return = Optimizely::DecisionService::Decision.new( @@ -4400,8 +4593,10 @@ def callback(_args); end variation_to_return, Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] ) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + decision_list_to_return = [decision_result_to_return] allow(custom_project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) - allow(custom_project_instance.decision_service).to 
receive(:get_variation_for_feature).and_return(decision_to_return) + allow(custom_project_instance.decision_service).to receive(:get_variations_for_feature_list).and_return(decision_list_to_return) user_context = custom_project_instance.create_user_context('user1') decision = custom_project_instance.decide(user_context, 'multi_variate_feature') expect(decision.as_json).to include( @@ -4418,8 +4613,8 @@ def callback(_args); end it 'should exclude variables when the option is set in default_decide_options' do custom_project_instance = Optimizely::Project.new( - config_body_JSON, nil, spy_logger, error_handler, - false, nil, nil, nil, nil, nil, [Optimizely::Decide::OptimizelyDecideOption::EXCLUDE_VARIABLES] + datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler, + default_decide_options: [Optimizely::Decide::OptimizelyDecideOption::EXCLUDE_VARIABLES] ) experiment_to_return = config_body['experiments'][3] variation_to_return = experiment_to_return['variations'][0] @@ -4428,8 +4623,10 @@ def callback(_args); end variation_to_return, Optimizely::DecisionService::DECISION_SOURCES['FEATURE_TEST'] ) + decision_result_to_return = Optimizely::DecisionService::DecisionResult.new(decision_to_return, false, []) + decision_list_to_return = [decision_result_to_return] allow(custom_project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) - allow(custom_project_instance.decision_service).to receive(:get_variation_for_feature).and_return(decision_to_return) + allow(custom_project_instance.decision_service).to receive(:get_variations_for_feature_list).and_return(decision_list_to_return) user_context = custom_project_instance.create_user_context('user1') decision = custom_project_instance.decide(user_context, 'multi_variate_feature') expect(decision.as_json).to include( @@ -4448,8 +4645,8 @@ def callback(_args); end describe 'INCLUDE_REASONS' do it 'should include reasons when the option is set in 
default_decide_options' do custom_project_instance = Optimizely::Project.new( - config_body_JSON, nil, spy_logger, error_handler, - false, nil, nil, nil, nil, nil, [Optimizely::Decide::OptimizelyDecideOption::INCLUDE_REASONS] + datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler, + default_decide_options: [Optimizely::Decide::OptimizelyDecideOption::INCLUDE_REASONS] ) expect(custom_project_instance.notification_center).to receive(:send_notifications) .once.with(Optimizely::NotificationCenter::NOTIFICATION_TYPES[:LOG_EVENT], any_args) @@ -4472,7 +4669,9 @@ def callback(_args); end "The user 'user1' is not bucketed into any of the experiments on the feature 'multi_variate_feature'.", "Feature flag 'multi_variate_feature' is not used in a rollout." ], - decision_event_dispatched: true + decision_event_dispatched: true, + experiment_id: nil, + variation_id: nil ) allow(custom_project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) user_context = custom_project_instance.create_user_context('user1') @@ -4497,7 +4696,7 @@ def callback(_args); end end it 'should not include reasons when the option is not set in default_decide_options' do - custom_project_instance = Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler) + custom_project_instance = Optimizely::Project.new(datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler) expect(custom_project_instance.notification_center).to receive(:send_notifications) .once.with(Optimizely::NotificationCenter::NOTIFICATION_TYPES[:LOG_EVENT], any_args) expect(custom_project_instance.notification_center).to receive(:send_notifications) @@ -4512,7 +4711,9 @@ def callback(_args); end variation_key: nil, rule_key: nil, reasons: [], - decision_event_dispatched: true + decision_event_dispatched: true, + experiment_id: nil, + variation_id: nil ) allow(custom_project_instance.event_dispatcher).to 
receive(:dispatch_event).with(instance_of(Optimizely::Event)) user_context = custom_project_instance.create_user_context('user1') @@ -4532,7 +4733,7 @@ def callback(_args); end describe 'DISABLE_DECISION_EVENT' do it 'should send event when option is not set in default_decide_options' do - custom_project_instance = Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler) + custom_project_instance = Optimizely::Project.new(datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler) experiment_to_return = config_body['experiments'][3] variation_to_return = experiment_to_return['variations'][0] expect(custom_project_instance.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) @@ -4549,8 +4750,8 @@ def callback(_args); end it 'should not send event when option is set in default_decide_options' do custom_project_instance = Optimizely::Project.new( - config_body_JSON, nil, spy_logger, error_handler, - false, nil, nil, nil, nil, nil, [Optimizely::Decide::OptimizelyDecideOption::DISABLE_DECISION_EVENT] + datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler, + default_decide_options: [Optimizely::Decide::OptimizelyDecideOption::DISABLE_DECISION_EVENT] ) experiment_to_return = config_body['experiments'][3] variation_to_return = experiment_to_return['variations'][0] @@ -4574,7 +4775,7 @@ def callback(_args); end stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(disable_odp: true) - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: sdk_key, settings: sdk_settings) expect(project.odp_manager.instance_variable_get('@event_manager')).to be_nil 
expect(project.odp_manager.instance_variable_get('@segment_manager')).to be_nil project.close @@ -4587,7 +4788,7 @@ def callback(_args); end stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(odp_event_flush_interval: 0) - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: sdk_key, settings: sdk_settings) event_manager = project.odp_manager.instance_variable_get('@event_manager') expect(event_manager.instance_variable_get('@flush_interval')).to eq 0 project.close @@ -4599,7 +4800,7 @@ def callback(_args); end stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(odp_event_flush_interval: nil) - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: sdk_key, settings: sdk_settings) event_manager = project.odp_manager.instance_variable_get('@event_manager') expect(event_manager.instance_variable_get('@flush_interval')).to eq 1 project.close @@ -4612,7 +4813,7 @@ def callback(_args); end .to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(segments_cache_size: 5) - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: sdk_key, settings: sdk_settings) segment_manager = 
project.odp_manager.instance_variable_get('@segment_manager') expect(segment_manager.instance_variable_get('@segments_cache').capacity).to eq 5 project.close @@ -4624,7 +4825,7 @@ def callback(_args); end stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(segments_cache_timeout_in_secs: 5) - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: sdk_key, settings: sdk_settings) segment_manager = project.odp_manager.instance_variable_get('@segment_manager') expect(segment_manager.instance_variable_get('@segments_cache').timeout).to eq 5 project.close @@ -4636,7 +4837,7 @@ def callback(_args); end stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(segments_cache_size: 10, segments_cache_timeout_in_secs: 5) - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: sdk_key, settings: sdk_settings) segment_manager = project.odp_manager.instance_variable_get('@segment_manager') segments_cache = segment_manager.instance_variable_get('@segments_cache') expect(segments_cache.capacity).to eq 10 @@ -4650,7 +4851,7 @@ def callback(_args); end stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, 
sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: sdk_key, settings: sdk_settings) segment_manager = project.odp_manager.instance_variable_get('@segment_manager') segments_cache = segment_manager.instance_variable_get('@segments_cache') expect(segments_cache.capacity).to eq 10_000 @@ -4664,7 +4865,7 @@ def callback(_args); end stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(segments_cache_size: 0, segments_cache_timeout_in_secs: 0) - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: sdk_key, settings: sdk_settings) segment_manager = project.odp_manager.instance_variable_get('@segment_manager') segments_cache = segment_manager.instance_variable_get('@segments_cache') expect(segments_cache.capacity).to eq 0 @@ -4684,7 +4885,7 @@ def save(key, value); end stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(odp_segments_cache: CustomCache.new) - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: sdk_key, settings: sdk_settings) segment_manager = project.odp_manager.instance_variable_get('@segment_manager') expect(segment_manager.instance_variable_get('@segments_cache')).to be_a CustomCache project.close @@ -4698,7 +4899,7 @@ class InvalidCustomCache; end # rubocop:disable Lint/ConstantDefinitionInBlock stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") 
.to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(odp_segments_cache: InvalidCustomCache.new) - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: sdk_key, settings: sdk_settings) segment_manager = project.odp_manager.instance_variable_get('@segment_manager') expect(segment_manager.instance_variable_get('@segments_cache')).to be_a Optimizely::LRUCache @@ -4722,7 +4923,7 @@ def fetch_qualified_segments(user_key, user_value, options); end stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(odp_segment_manager: CustomSegmentManager.new) - project = Optimizely::Project.new(config_body_integrations_JSON, nil, spy_logger, error_handler, false, nil, nil, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(datafile: config_body_integrations_JSON, logger: spy_logger, error_handler: error_handler, settings: sdk_settings) segment_manager = project.odp_manager.instance_variable_get('@segment_manager') expect(segment_manager).to be_a CustomSegmentManager project.fetch_qualified_segments(user_id: 'test') @@ -4738,7 +4939,7 @@ class InvalidSegmentManager; end # rubocop:disable Lint/ConstantDefinitionInBloc stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(odp_segment_manager: InvalidSegmentManager.new) - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: 
sdk_key, settings: sdk_settings) segment_manager = project.odp_manager.instance_variable_get('@segment_manager') expect(segment_manager).to be_a Optimizely::OdpSegmentManager @@ -4761,7 +4962,7 @@ def running?; end stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(odp_event_manager: CustomEventManager.new) - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: sdk_key, settings: sdk_settings) event_manager = project.odp_manager.instance_variable_get('@event_manager') expect(event_manager).to be_a CustomEventManager project.send_odp_event(action: 'test', identifiers: {wow: 'great'}) @@ -4776,7 +4977,7 @@ class InvalidEventManager; end # rubocop:disable Lint/ConstantDefinitionInBlock stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: config_body_integrations_JSON) sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(odp_event_manager: InvalidEventManager.new) - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: sdk_key, settings: sdk_settings) event_manager = project.odp_manager.instance_variable_get('@event_manager') expect(event_manager).to be_a Optimizely::OdpEventManager @@ -4791,7 +4992,7 @@ class InvalidEventManager; end # rubocop:disable Lint/ConstantDefinitionInBlock stub_request(:post, 'https://api.zaius.com/v3/events').to_return(status: 200) expect(spy_logger).to receive(:log).once.with(Logger::DEBUG, 'ODP event queue: flushing batch size 1.') expect(spy_logger).not_to receive(:log).with(Logger::ERROR, anything) 
- project = Optimizely::Project.new(config_body_integrations_JSON, nil, spy_logger) + project = Optimizely::Project.new(datafile: config_body_integrations_JSON, logger: spy_logger) project.send_odp_event(type: 'wow', action: 'great', identifiers: {amazing: 'fantastic'}, data: {}) project.close end @@ -4803,7 +5004,7 @@ class InvalidEventManager; end # rubocop:disable Lint/ConstantDefinitionInBlock stub_request(:post, 'https://api.zaius.com/v3/events').to_return(status: 200) expect(spy_logger).to receive(:log).once.with(Logger::DEBUG, 'ODP event queue: flushing batch size 1.') expect(spy_logger).not_to receive(:log).with(Logger::ERROR, anything) - project = Optimizely::Project.new(nil, nil, spy_logger, nil, false, nil, sdk_key) + project = Optimizely::Project.new(logger: spy_logger, sdk_key: sdk_key) sleep 0.1 until project.odp_manager.instance_variable_get('@event_manager').instance_variable_get('@event_queue').empty? @@ -4814,7 +5015,7 @@ class InvalidEventManager; end # rubocop:disable Lint/ConstantDefinitionInBlock it 'should log error when odp disabled' do expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'ODP is not enabled.') sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(disable_odp: true) - custom_project_instance = Optimizely::Project.new(config_body_integrations_JSON, nil, spy_logger, error_handler, false, nil, nil, nil, nil, nil, [], {}, sdk_settings) + custom_project_instance = Optimizely::Project.new(datafile: config_body_integrations_JSON, logger: spy_logger, error_handler: error_handler, settings: sdk_settings) custom_project_instance.send_odp_event(type: 'wow', action: 'great', identifiers: {amazing: 'fantastic'}, data: {}) custom_project_instance.close end @@ -4823,7 +5024,7 @@ class InvalidEventManager; end # rubocop:disable Lint/ConstantDefinitionInBlock stub_request(:get, "https://cdn.optimizely.com/datafiles/#{sdk_key}.json") .to_return(status: 200, body: nil) expect(spy_logger).to 
receive(:log).once.with(Logger::ERROR, "Optimizely instance is not valid. Failing 'send_odp_event'.") - project = Optimizely::Project.new(nil, nil, spy_logger, nil, false, nil, sdk_key) + project = Optimizely::Project.new(logger: spy_logger, sdk_key: sdk_key) project.send_odp_event(type: 'wow', action: 'great', identifiers: {amazing: 'fantastic'}, data: {}) project.close end @@ -4833,28 +5034,28 @@ class InvalidEventManager; end # rubocop:disable Lint/ConstantDefinitionInBlock .to_return(status: 200, body: config_body_integrations_JSON) expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'ODP is not enabled.') sdk_settings = Optimizely::Helpers::OptimizelySdkSettings.new(disable_odp: true) - project = Optimizely::Project.new(nil, nil, spy_logger, error_handler, false, nil, sdk_key, nil, nil, nil, [], {}, sdk_settings) + project = Optimizely::Project.new(logger: spy_logger, error_handler: error_handler, sdk_key: sdk_key, settings: sdk_settings) project.send_odp_event(type: 'wow', action: 'great', identifiers: {amazing: 'fantastic'}, data: {}) project.close end it 'should log error with invalid data' do expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'ODP data is not valid.') - project = Optimizely::Project.new(config_body_integrations_JSON, nil, spy_logger) + project = Optimizely::Project.new(datafile: config_body_integrations_JSON, logger: spy_logger) project.send_odp_event(type: 'wow', action: 'great', identifiers: {amazing: 'fantastic'}, data: {'wow': {}}) project.close end it 'should log error with empty identifiers' do expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'ODP events must have at least one key-value pair in identifiers.') - project = Optimizely::Project.new(config_body_integrations_JSON, nil, spy_logger) + project = Optimizely::Project.new(datafile: config_body_integrations_JSON, logger: spy_logger) project.send_odp_event(type: 'wow', action: 'great', identifiers: {}, data: {'wow': {}}) project.close end it 'should 
log error with nil identifiers' do expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'ODP events must have at least one key-value pair in identifiers.') - project = Optimizely::Project.new(config_body_integrations_JSON, nil, spy_logger) + project = Optimizely::Project.new(datafile: config_body_integrations_JSON, logger: spy_logger) project.send_odp_event(type: 'wow', action: 'great', identifiers: nil, data: {'wow': {}}) project.close end @@ -4864,7 +5065,7 @@ class InvalidEventManager; end # rubocop:disable Lint/ConstantDefinitionInBlock feature_key = 'flag-segment' user_id = 'test_user' - project = Optimizely::Project.new(config_body_integrations_JSON, nil, spy_logger) + project = Optimizely::Project.new(datafile: config_body_integrations_JSON, logger: spy_logger) allow(project.event_dispatcher).to receive(:dispatch_event).with(instance_of(Optimizely::Event)) expect(project.odp_manager).not_to receive(:send_event) @@ -4881,20 +5082,20 @@ class InvalidEventManager; end # rubocop:disable Lint/ConstantDefinitionInBlock it 'should log error with nil action' do expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'ODP action is not valid (cannot be empty).') - project = Optimizely::Project.new(config_body_integrations_JSON, nil, spy_logger) + project = Optimizely::Project.new(datafile: config_body_integrations_JSON, logger: spy_logger) project.send_odp_event(type: 'wow', action: nil, identifiers: {amazing: 'fantastic'}, data: {}) project.close end it 'should log error with empty string action' do expect(spy_logger).to receive(:log).once.with(Logger::ERROR, 'ODP action is not valid (cannot be empty).') - project = Optimizely::Project.new(config_body_integrations_JSON, nil, spy_logger) + project = Optimizely::Project.new(datafile: config_body_integrations_JSON, logger: spy_logger) project.send_odp_event(type: 'wow', action: '', identifiers: {amazing: 'fantastic'}, data: {}) project.close end it 'should use default with nil type' do - project = 
Optimizely::Project.new(config_body_integrations_JSON, nil, spy_logger) + project = Optimizely::Project.new(datafile: config_body_integrations_JSON, logger: spy_logger) expect(project.odp_manager).to receive('send_event').with(type: 'fullstack', action: 'great', identifiers: {amazing: 'fantastic'}, data: {}) project.send_odp_event(type: nil, action: 'great', identifiers: {amazing: 'fantastic'}, data: {}) @@ -4904,7 +5105,7 @@ class InvalidEventManager; end # rubocop:disable Lint/ConstantDefinitionInBlock end it 'should use default with empty string type' do - project = Optimizely::Project.new(config_body_integrations_JSON, nil, spy_logger) + project = Optimizely::Project.new(datafile: config_body_integrations_JSON, logger: spy_logger) expect(project.odp_manager).to receive('send_event').with(type: 'fullstack', action: 'great', identifiers: {amazing: 'fantastic'}, data: {}) project.send_odp_event(type: '', action: 'great', identifiers: {amazing: 'fantastic'}, data: {}) diff --git a/spec/spec_params.rb b/spec/spec_params.rb index e43ce3cc..824e2ac7 100644 --- a/spec/spec_params.rb +++ b/spec/spec_params.rb @@ -19,6 +19,7 @@ module OptimizelySpec VALID_CONFIG_BODY = { + 'region' => 'US', 'accountId' => '12001', 'projectId' => '111001', 'anonymizeIP' => false, @@ -1938,6 +1939,118 @@ module OptimizelySpec CONFIG_DICT_WITH_INTEGRATIONS_JSON = JSON.dump(CONFIG_DICT_WITH_INTEGRATIONS) + CONFIG_BODY_WITH_HOLDOUTS = VALID_CONFIG_BODY.merge( + { + 'holdouts' => [ + { + 'id' => 'holdout_1', + 'key' => 'global_holdout', + 'status' => 'Running', + 'audiences' => [], + 'includedFlags' => [], + 'excludedFlags' => ['155554'], + 'variations' => [ + { + 'id' => 'var_1', + 'key' => 'control', + 'featureEnabled' => true + }, + { + 'id' => 'var_2', + 'key' => 'treatment', + 'featureEnabled' => true + } + ], + 'trafficAllocation' => [ + { + 'entityId' => 'var_1', + 'endOfRange' => 5_000 + }, + { + 'entityId' => 'var_2', + 'endOfRange' => 10_000 + } + ] + }, + { + 'id' => 
'holdout_boolean_feature', + 'key' => 'boolean_feature_holdout', + 'status' => 'Running', + 'audiences' => [], + 'includedFlags' => ['155549'], + 'excludedFlags' => [], + 'variations' => [ + { + 'id' => 'var_boolean', + 'key' => 'control', + 'featureEnabled' => false + } + ], + 'trafficAllocation' => [ + { + 'entityId' => 'var_boolean', + 'endOfRange' => 10_000 + } + ] + }, + { + 'id' => 'holdout_empty_1', + 'key' => 'holdout_empty_1', + 'status' => 'Running', + 'audiences' => [], + 'includedFlags' => [], + 'excludedFlags' => [], + 'variations' => [], + 'trafficAllocation' => [] + }, + { + 'id' => 'holdout_2', + 'key' => 'specific_holdout', + 'status' => 'Running', + 'audiences' => [], + 'includedFlags' => ['155559'], + 'excludedFlags' => [], + 'variations' => [ + { + 'id' => 'var_3', + 'key' => 'control', + 'featureEnabled' => false + } + ], + 'trafficAllocation' => [ + { + 'entityId' => 'var_3', + 'endOfRange' => 10_000 + } + ] + }, + { + 'id' => 'holdout_3', + 'key' => 'inactive_holdout', + 'status' => 'Inactive', + 'audiences' => [], + 'includedFlags' => ['155554'], + 'excludedFlags' => [], + 'variations' => [ + { + 'id' => 'var_4', + 'key' => 'off', + 'featureEnabled' => false + } + ], + 'trafficAllocation' => [ + { + 'entityId' => 'var_4', + 'endOfRange' => 10_000 + } + ] + } + ] + } + ).freeze + + CONFIG_BODY_WITH_HOLDOUTS_JSON = JSON.dump(CONFIG_BODY_WITH_HOLDOUTS).freeze + def self.deep_clone(obj) obj.dup.tap do |new_obj| case new_obj diff --git a/spec/user_condition_evaluator_spec.rb b/spec/user_condition_evaluator_spec.rb index 7aef929e..0d74e514 100644 --- a/spec/user_condition_evaluator_spec.rb +++ b/spec/user_condition_evaluator_spec.rb @@ -1,7 +1,7 @@ # frozen_string_literal: true # -# Copyright 2019-2020, Optimizely and contributors +# Copyright 2019-2020, 2023, Optimizely and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -28,7 +28,7 @@ let(:error_handler) { Optimizely::NoOpErrorHandler.new } let(:spy_logger) { spy('logger') } let(:event_processor) { Optimizely::ForwardingEventProcessor.new(Optimizely::EventDispatcher.new) } - let(:project_instance) { Optimizely::Project.new(config_body_JSON, nil, spy_logger, error_handler, false, nil, nil, nil, nil, event_processor) } + let(:project_instance) { Optimizely::Project.new(datafile: config_body_JSON, logger: spy_logger, error_handler: error_handler, event_processor: event_processor) } let(:user_context) { project_instance.create_user_context('some-user', {}) } after(:example) { project_instance.close } diff --git a/spec/user_profile_tracker_spec.rb b/spec/user_profile_tracker_spec.rb new file mode 100644 index 00000000..85515bb1 --- /dev/null +++ b/spec/user_profile_tracker_spec.rb @@ -0,0 +1,101 @@ +# frozen_string_literal: true + +require 'spec_helper' +require 'rspec' + +RSpec.describe Optimizely::UserProfileTracker do + let(:user_id) { 'test_user' } + let(:mock_user_profile_service) { instance_double('UserProfileService') } + let(:mock_logger) { instance_double('Logger') } + let(:user_profile_tracker) { described_class.new(user_id, mock_user_profile_service, mock_logger) } + + describe '#initialize' do + it 'initializes with a user ID and default values' do + tracker = described_class.new(user_id) + expect(tracker.user_profile[:user_id]).to eq(user_id) + expect(tracker.user_profile[:experiment_bucket_map]).to eq({}) + end + + it 'accepts a user profile service and logger' do + expect(user_profile_tracker.instance_variable_get(:@user_profile_service)).to eq(mock_user_profile_service) + expect(user_profile_tracker.instance_variable_get(:@logger)).to eq(mock_logger) + end + end + + describe '#load_user_profile' do + it 'loads the user profile from the service if provided' do + expected_profile = { + user_id: user_id, + experiment_bucket_map: {'111127' => {variation_id: '111128'}} + } + allow(mock_user_profile_service).to 
receive(:lookup).with(user_id).and_return(expected_profile) + user_profile_tracker.load_user_profile + expect(user_profile_tracker.user_profile).to eq(expected_profile) + end + + it 'handles errors during lookup and logs them' do + allow(mock_user_profile_service).to receive(:lookup).with(user_id).and_raise(StandardError.new('lookup error')) + allow(mock_logger).to receive(:log) + + reasons = [] + user_profile_tracker.load_user_profile(reasons) + expect(reasons).to include("Error while looking up user profile for user ID 'test_user': lookup error.") + expect(mock_logger).to have_received(:log).with(Logger::ERROR, "Error while looking up user profile for user ID 'test_user': lookup error.") + end + + it 'does nothing if reasons array is nil' do + expect(mock_user_profile_service).not_to receive(:lookup) + user_profile_tracker.load_user_profile(nil) + end + end + + describe '#update_user_profile' do + let(:experiment_id) { '111127' } + let(:variation_id) { '111128' } + + before do + allow(mock_logger).to receive(:log) + end + + it 'updates the experiment bucket map with the given experiment and variation IDs' do + user_profile_tracker.update_user_profile(experiment_id, variation_id) + + # Verify the experiment and variation were added + expect(user_profile_tracker.user_profile[:experiment_bucket_map][experiment_id][:variation_id]).to eq(variation_id) + # Verify the profile_updated flag was set + expect(user_profile_tracker.instance_variable_get(:@profile_updated)).to eq(true) + # Verify a log message was recorded + expect(mock_logger).to have_received(:log).with(Logger::INFO, "Updated variation ID #{variation_id} of experiment ID #{experiment_id} for user 'test_user'.") + end + end + + describe '#save_user_profile' do + it 'saves the user profile if updates were made and service is available' do + allow(mock_user_profile_service).to receive(:save) + allow(mock_logger).to receive(:log) + + user_profile_tracker.update_user_profile('111127', '111128') + 
user_profile_tracker.save_user_profile + + expect(mock_user_profile_service).to have_received(:save).with(user_profile_tracker.user_profile) + expect(mock_logger).to have_received(:log).with(Logger::INFO, "Saved user profile for user 'test_user'.") + end + + it 'does not save the user profile if no updates were made' do + allow(mock_user_profile_service).to receive(:save) + + user_profile_tracker.save_user_profile + expect(mock_user_profile_service).not_to have_received(:save) + end + + it 'handles errors during save and logs them' do + allow(mock_user_profile_service).to receive(:save).and_raise(StandardError.new('save error')) + allow(mock_logger).to receive(:log) + + user_profile_tracker.update_user_profile('111127', '111128') + user_profile_tracker.save_user_profile + + expect(mock_logger).to have_received(:log).with(Logger::ERROR, "Failed to save user profile for user 'test_user': save error.") + end + end +end